streaming on PC

I am using a Jetson TX1 with a Leopard Imaging IMX185 camera. I have installed JetPack 2.3.1, modifying the .config.* files to install the drivers for that camera, and I am able to display the video on a monitor connected to the Jetson TX1. Now I want to stream the video to a PC connected to the same network as the Jetson TX1. How can I do that?

I’m actually doing that right now using the Ridgerun UDP Streaming example:
https://developer.ridgerun.com/wiki/index.php?title=Gstreamer_pipelines_for_Tegra_X1#H264_UDP_Streaming

Note that they have h266parse in their command, I had to replace it with h264parse.

I run this on my TX1:

# IP address of the receiving PC on the LAN (the laptop below is 10.100.0.70).
CLIENT_IP=10.100.0.70
# Sender pipeline (runs on the TX1): capture from the onboard camera
# (nvcamerasrc), convert/flip in NVMM memory (nvvidconv; flip-method=6 —
# presumably a rotation, confirm against the nvvidconv docs), hardware-encode
# to H.264 (omxh264enc), parse, RTP-payload (mtu=1400 keeps RTP packets under
# a typical Ethernet MTU), and send over UDP to the client on port 5000.
# sync=false async=false keeps the sink from waiting on clock/preroll.
gst-launch-1.0 nvcamerasrc fpsRange="30 30" intent=3 ! nvvidconv flip-method=6 \
! 'video/x-raw(memory:NVMM), width=(int)1920, height=(int)1080, format=(string)I420, framerate=(fraction)30/1' ! \
omxh264enc control-rate=2 bitrate=4000000 ! 'video/x-h264, stream-format=(string)byte-stream' ! \
h264parse ! rtph264pay mtu=1400 ! udpsink host=$CLIENT_IP port=5000 sync=false async=false

and this on my laptop (IP 10.100.0.70)

# Receiver pipeline (runs on the PC): receive RTP/H.264 on UDP port 5000,
# depayload, parse, decode with avdec_h264, and display via xvimagesink.
# NOTE: the trailing backslashes are required — without them the shell treats
# each newline as the end of a command and the pipeline never runs.
gst-launch-1.0 udpsrc port=5000 ! \
    application/x-rtp,encoding-name=H264,payload=96 ! \
    rtph264depay ! \
    h264parse ! \
    queue ! \
    avdec_h264 ! \
    xvimagesink sync=false async=false -e

If you prefer Python, here's the client example in Python. (Still trying to figure out the server.)

"""
gst-launch-1.0 udpsrc port=5000 !
application/x-rtp,encoding-name=H264,payload=96 !
rtph264depay !
h264parse !
queue !
avdec_h264 !
xvimagesink sync=false async=false -e
"""

import gi
gi.require_version("Gst", "1.0")
from gi.repository import Gst

Gst.init(None)

# Create elements
udpsrc = Gst.ElementFactory.make('udpsrc')
depayload = Gst.ElementFactory.make('rtph264depay')
parser = Gst.ElementFactory.make('h264parse')
queue = Gst.ElementFactory.make('queue')
decoder = Gst.ElementFactory.make('avdec_h264')
sink = Gst.ElementFactory.make('xvimagesink')

# Configure elements
caps = Gst.caps_from_string('application/x-rtp,encoding-name=H264,payload=96')
udpsrc.set_property('caps', caps)
udpsrc.set_property('port', 5000)

sink.set_property('sync', 'false')
sink.set_property('async', 'false')

# Create pipeline
pipeline = Gst.Pipeline.new('pipeline')
pipeline.add(udpsrc)
pipeline.add(depayload)
pipeline.add(parser)
pipeline.add(queue)
pipeline.add(decoder)
pipeline.add(sink)

Gst.Element.link(udpsrc, depayload)
Gst.Element.link(depayload, parser)
Gst.Element.link(parser, queue)
Gst.Element.link(queue, decoder)
Gst.Element.link(decoder, sink)

ret = pipeline.set_state(Gst.State.PLAYING)
if ret == Gst.StateChangeReturn.FAILURE:
    print "Unable to set pipeline to playing"
    exit(-1)

# Wait until error or EOS.
bus = pipeline.get_bus()
try:
    while True:
        message = bus.timed_pop_filtered(10000, Gst.MessageType.ANY)
        # print "image_arr: ", image_arr
        if message:
            if message.type == Gst.MessageType.ERROR:
                err, debug = message.parse_error()
                print("Error received from element %s: %s" % (
                    message.src.get_name(), err))
                print("Debugging information: %s" % debug)
                break
            elif message.type == Gst.MessageType.EOS:
                print("End-Of-Stream reached.")
                break
            elif message.type == Gst.MessageType.STATE_CHANGED:
                if isinstance(message.src, Gst.Pipeline):
                    old_state, new_state, pending_state = message.parse_state_changed()
                    print("Pipeline state changed from %s to %s." %
                           (old_state.value_nick, new_state.value_nick))
            else:
                print("Unexpected message received.")
finally:
    # Free resources.
    pipeline.set_state(Gst.State.NULL)

It worked! Thanks a lot, Atrer.

I want to make this work with a USB camera. How can I do that?