RTSP not working with DeepStream python bindings

Please provide complete information as applicable to your setup.

• Hardware Platform (Jetson / GPU) Jetson
• DeepStream Version 6.2
• JetPack Version (valid for Jetson only) JetPack 5.1.1
• TensorRT Version 8.5.2
• NVIDIA GPU Driver Version (valid for GPU only) -
• Issue Type( questions, new requirements, bugs) bug
• How to reproduce the issue ? (This is for bugs. Including which sample app is using, the configuration files content, the command line used and other details for reproducing)
• Requirement details( This is for new requirement. Including the module name-for which plugin or for which sample application, the function description)

Hello,

We are trying to make a DeepStream pipeline having an RTSP camera as an input. We are using Python bindings.
The gstreamer command is working normally:

gst-launch-1.0 rtspsrc location=rtsp://192.168.1.254:554/stream0 user-id=admin user-pw=xxxxx ! rtph264depay ! queue ! h264parse ! nvv4l2decoder ! nvvideoconvert ! nv3dsink 

This is the script snippet for defining the pipeline and its elements:


def create_source_bin(index):
    """Build a source bin: rtspsrc -> rtph264depay -> queue -> h264parse ->
    nvv4l2decoder -> nvvideoconvert, exposed through a single "src" ghost pad
    so the caller can link the bin like an ordinary element.

    Args:
        index: integer used to build a unique bin/element name.

    Returns:
        The populated Gst.Bin, or None if any element could not be created.
    """
    print(f"Creating source bin {index}  \n")

    # Create a source GstBin to abstract this bin's content from the rest of
    # the pipeline
    bin_name = "source-bin-%01d" % index
    nbin = Gst.Bin.new(bin_name)
    if not nbin:
        sys.stderr.write(" Unable to create source bin \n")
        return None

    # Source element for reading from the camera. Give it its own name
    # (the original reused bin_name, which makes dot graphs confusing).
    source = Gst.ElementFactory.make("rtspsrc", bin_name + "-rtspsrc")
    if not source:
        sys.stderr.write(" Unable to create rtsp source \n")
        return None
    source.set_property("location", "rtsp://192.168.1.254:554/stream0")
    source.set_property("user-id", "admin")
    source.set_property("user-pw", "xxxx")

    depay = Gst.ElementFactory.make("rtph264depay", "rtph264depay")
    if not depay:
        sys.stderr.write(" Unable to create rtph264depay \n")
        return None

    queue = Gst.ElementFactory.make("queue", "queue1")
    if not queue:
        sys.stderr.write(" Unable to create queue \n")
        return None

    parser = Gst.ElementFactory.make("h264parse", "h264parse")
    if not parser:
        sys.stderr.write(" Unable to create h264parse \n")
        return None

    decoder = Gst.ElementFactory.make("nvv4l2decoder", "decoder")
    if not decoder:
        sys.stderr.write(" Unable to create nvv4l2decoder \n")
        return None

    # nvvideoconvert on GPU (can also replace videoflip using GPU
    # acceleration)
    nvvideoconvert_src = Gst.ElementFactory.make("nvvideoconvert", bin_name + "-nvvidconv")
    if not nvvideoconvert_src:
        sys.stderr.write(" Unable to create nvvideoconvert \n")
        return None
    nvvideoconvert_src.set_property("compute-hw", 1)  # GPU

    # Add elements to the bin
    Gst.Bin.add(nbin, source)
    Gst.Bin.add(nbin, depay)
    Gst.Bin.add(nbin, queue)
    Gst.Bin.add(nbin, parser)
    Gst.Bin.add(nbin, decoder)
    Gst.Bin.add(nbin, nvvideoconvert_src)

    # Static links for everything downstream of the depayloader.
    if not depay.link(queue):
        print("Failed to link depay")
    if not queue.link(parser):
        print("Failed to link queue")
    if not parser.link(decoder):
        print("Failed to link parser")
    if not decoder.link(nvvideoconvert_src):
        print("Failed to link decoder")

    # rtspsrc exposes *sometimes* src pads: they are created only after the
    # RTSP session has been negotiated, so a static source.link(depay) here
    # always fails. Link dynamically from the "pad-added" signal instead.
    def _on_pad_added(rtspsrc, new_pad):
        caps = new_pad.get_current_caps() or new_pad.query_caps(None)
        media = caps.get_structure(0).get_name() if caps and caps.get_size() > 0 else ""
        # rtspsrc can also emit RTCP pads; only link the RTP media pad.
        if media != "application/x-rtp":
            return
        sink_pad = depay.get_static_pad("sink")
        if sink_pad and not sink_pad.is_linked():
            if new_pad.link(sink_pad) != Gst.PadLinkReturn.OK:
                sys.stderr.write(" Failed to link rtspsrc pad to depayloader \n")

    source.connect("pad-added", _on_pad_added)

    # Ghost pad proxying the last element's src pad to the outside of the bin
    # so the bin can be linked to downstream elements (e.g. nvstreammux).
    nvvideoconvert_src_pad = nvvideoconvert_src.get_static_pad("src")
    if not nvvideoconvert_src_pad:
        sys.stderr.write("Failed to get src pad of nvvideoconvert \n")
        return None

    bin_pad = nbin.add_pad(Gst.GhostPad.new("src", nvvideoconvert_src_pad))
    if not bin_pad:
        sys.stderr.write(" Failed to add ghost pad in source bin \n")
        return None

    return nbin
        

def main(args):
    """Build and run the pipeline:
    source_bin -> nvvideoconvert -> capsfilter(RGBA, NVMM) -> nv3dsink.

    NOTE(review): pgie, osd and tracker are created but — as in the original
    code — not linked into the data path. They are kept so the surrounding
    experiment still works, but see the note at the probe below.
    """

    # Standard GStreamer initialization
    Gst.init(None)

    # Create Pipeline element that will form a connection of other elements
    print("Creating Pipeline \n ")
    pipeline = Gst.Pipeline()
    if not pipeline:
        sys.stderr.write(" Unable to create Pipeline \n")
        return 1

    print("Creating nvinfer \n")
    pgie = Gst.ElementFactory.make("nvinfer", "primary-inference")
    if not pgie:
        sys.stderr.write(" Unable to create pgie \n")
    # pgie.set_property('config-file-path', MODEL_CONFIG_FILE)

    # nvvideoconvert placed after (future) nvinfer, running on GPU
    print("Creating nvvideoconvert post nvinfer \n")
    nvvideoconvert_post_nvinfer = Gst.ElementFactory.make("nvvideoconvert", "nvvideoconvert-post-nvinfer")
    if not nvvideoconvert_post_nvinfer:
        sys.stderr.write(" Unable to create nvvideoconvert \n")
    nvvideoconvert_post_nvinfer.set_property("compute-hw", 1)  # GPU

    # Force RGBA in NVMM (device) memory downstream of the converter.
    post_mux_caps = Gst.Caps.from_string("video/x-raw(memory:NVMM),format=(string)RGBA")
    nvvideoconvert_post_nvinfer_filter = Gst.ElementFactory.make('capsfilter', 'nvvideoconvert-post-nvinfer-filter')
    nvvideoconvert_post_nvinfer_filter.set_property('caps', post_mux_caps)

    print("Creating nvdosd \n")
    osd = Gst.ElementFactory.make("nvdsosd", "osd")
    if not osd:
        sys.stderr.write(" Unable to create osd sink \n")
    osd.set_property("gpu-id", 0)
    osd.set_property("process-mode", 1)  # GPU

    print("Creating nvtracker \n")
    tracker = Gst.ElementFactory.make("nvtracker", "tracker")
    if not tracker:
        sys.stderr.write(" Unable to create tracker \n")
    tracker.set_property("compute-hw", 1)
    tracker.set_property("gpu-id", 0)
    # TODO(review): this is a placeholder string, not a library path — point
    # it at a real low-level tracker lib, e.g.
    # /opt/nvidia/deepstream/deepstream/lib/libnvds_nvmultiobjecttracker.so
    tracker.set_property("ll-lib-file", "Add library from /opt")
    tracker.set_property("tracking-id-reset-mode", 0)

    # Finally render the output
    print("Creating nv3dsink \n")
    sink = Gst.ElementFactory.make("nv3dsink", "nv3dsink")
    if not sink:
        sys.stderr.write(" Unable to create nv3dsink \n")

    print("Adding elements to Pipeline \n")

    source_bin = create_source_bin(index=0)
    if not source_bin:
        # Bail out early: the original fell through to pipeline.add(None),
        # which raises TypeError.
        sys.stderr.write("Unable to create source bin \n")
        return 1

    pipeline.add(source_bin)
    #pipeline.add(pgie)
    pipeline.add(osd)
    pipeline.add(nvvideoconvert_post_nvinfer)
    pipeline.add(nvvideoconvert_post_nvinfer_filter)
    pipeline.add(sink)

    print("Linking elements")

    source_bin.link(nvvideoconvert_post_nvinfer)  # pgie
    #pgie.link(osd)
    #osd.link(nvvideoconvert_post_nvinfer)
    nvvideoconvert_post_nvinfer.link(nvvideoconvert_post_nvinfer_filter)
    nvvideoconvert_post_nvinfer_filter.link(sink)

    # Create an event loop and feed gstreamer bus messages to it.
    loop = GLib.MainLoop()
    bus = pipeline.get_bus()
    bus.add_signal_watch()

    # Quit the main loop on EOS or error so the process can exit cleanly;
    # without a handler the watch is useless and errors are silently dropped.
    def _on_bus_message(_bus, message):
        mtype = message.type
        if mtype == Gst.MessageType.EOS:
            loop.quit()
        elif mtype == Gst.MessageType.ERROR:
            err, debug = message.parse_error()
            sys.stderr.write(f"Error: {err}: {debug}\n")
            loop.quit()
        return True

    bus.connect("message", _on_bus_message)

    # NOTE(review): `tracker` is never added to `pipeline`, so this probe can
    # never fire — buffers only flow through elements that are in the
    # pipeline. To use it, add/link the tracker into the data path (and add
    # nvstreammux upstream: nvtracker/nvinfer need the batch metadata it
    # attaches).
    trackersinkpad = tracker.get_static_pad("sink")
    if not trackersinkpad:
        sys.stderr.write(" Unable to get sink pad of tracker \n")
    else:
        trackersinkpad.add_probe(Gst.PadProbeType.BUFFER, tracker_sink_pad_buffer_probe, 0)

    # Debugging: export the pipeline graph. Only writes a file when the
    # GST_DEBUG_DUMP_DOT_DIR environment variable is set.
    Gst.debug_bin_to_dot_file(pipeline, Gst.DebugGraphDetails.ALL, "pipeline")

    # Start playback and listen to events.
    print("Starting pipeline \n")
    pipeline.set_state(Gst.State.PLAYING)

    try:
        loop.run()
    except KeyboardInterrupt:
        # Only Ctrl-C is expected here; the original bare `except` hid bugs.
        pass
    finally:
        # Always release GStreamer resources, even if loop.run() raised.
        pipeline.set_state(Gst.State.NULL)


if __name__ == '__main__':
    sys.exit(main(sys.argv))

There is a problem with linking the rtspsrc element with rtph264depay.
Attached is the output log generated with `GST_DEBUG=4`.
log.txt (104.7 KB)

Looking forward to hearing from you!

Hi @ahmed.louati

You need to request the src pad of rtspsrc and link it to the sink pad of depay

Regards,
Allan Navarro

Embedded SW Engineer at RidgeRun

Contact us: support@ridgerun.com
Developers wiki: https://developer.ridgerun.com/
Website: www.ridgerun.com

Hello Allan,

Thank you for your reply. I have applied the logic you suggested, but it still fails:

    # NOTE(review): rtspsrc exposes its src pads as "sometimes" pads, created
    # only after the RTSP session is negotiated — and get_request_pad() is for
    # *request* pads anyway, so it returns None here. The "pad-added" signal
    # is the correct mechanism for linking rtspsrc.
    srcpad = source.get_request_pad('stream_0')
    if not srcpad:
        sys.stderr.write("Unable to create source pad \n")
    sinkpad = depay.get_static_pad("sink")
    if not sinkpad:
        sys.stderr.write("Unable to get sink pad \n")
    # srcpad is None at this point, so this call raises the reported
    # "NoneType object" error.
    srcpad.link(sinkpad)

The error I am getting is srcpad is NoneType Object.

I generated the pipeline graph and rtspsrc is not even added to the src bin!

Problem solved by introducing dynamic linking:

   # Callback fired by rtspsrc each time one of its "sometimes" src pads
   # appears (after RTSP negotiation); link the new pad to the depayloader.
   def on_pad_added(rtspsrc, pad):
       print("New pad added: ", pad.get_name())
       depay_pad = depay.get_static_pad("sink")
       pad.link(depay_pad)
    
   # Register the callback before the pipeline goes to PLAYING.
   source.connect("pad-added", on_pad_added)

This topic was automatically closed 14 days after the last reply. New replies are no longer allowed.