Deepstream python app fakesink does not work

Please provide complete information as applicable to your setup.

• Hardware Platform (Jetson / GPU)
Jetson AGX Orin
• DeepStream Version
7
• JetPack Version (valid for Jetson only)
6
• TensorRT Version
L4T 36.3.0
• NVIDIA GPU Driver Version (valid for GPU only)

• Issue Type( questions, new requirements, bugs)>

**PERF: {‘stream0’: 0.0}

0:00:07.847983587 6322 0xffff5427ff60 WARN nvinfer gstnvinfer.cpp:2420:gst_nvinfer_output_loop: error: Internal data stream error.
0:00:07.848031844 6322 0xffff5427ff60 WARN nvinfer gstnvinfer.cpp:2420:gst_nvinfer_output_loop: error: streaming stopped, reason error (-5)
Error: gst-stream-error-quark: Internal data stream error. (1): /dvs/git/dirty/git-master_linux/deepstream/sdk/src/gst-plugins/gst-nvinfer/gstnvinfer.cpp(2420): gst_nvinfer_output_loop (): /GstPipeline:pipeline0/GstNvInfer:primary-inference:
streaming stopped, reason error (-5)
nvstreammux: Successfully handled EOS for source_id=0

• How to reproduce the issue ? (This is for bugs. Including which sample app is using, the configuration files content, the command line used and other details for reproducing)
Run a deestream app, like deepstream-app1 with multiple camera sources for example 1280x720x1 gray8 60fps code that works with screen. Change to fakesink and it stops working the error above occurs.

• Requirement details( This is for new requirement. Including the module name-for which plugin or for which sample application, the function description)

global perf_data, pgie_classes_str, LOT_ID
        number_sources=self.number_sources
        pgie_classes_str = readClassesFile(self.CLASS_LABEL_FILE)
        perf_data = PERF_DATA(number_sources)
        logging.debug("Starting deepstream")
        # Standard GStreamer initialization
        Gst.init(None)

        logging.debug("Creating Pipeline \n ")
        pipeline = Gst.Pipeline()
        logging.debug("Creating sources pipeline")
        sources=[]
        caps_v4l2srcs=[]
        vidconvsrcs=[] 
        nvvidconvsrcs=[]
        caps_vidconvsrcs=[]
        for s in range(number_sources):
            source = Gst.ElementFactory.make("v4l2src", f"usb-cam-source-"+str(s*2))
            source.set_property('device', '/dev/video'+str(s*2))
            caps_v4l2src = Gst.ElementFactory.make("capsfilter", "v4l2src_caps"+str(s*2))
            caps_v4l2src.set_property('caps', Gst.Caps.from_string(f"video/x-raw, framerate={self.INPUT_FPS}/1"))
            vidconvsrc = Gst.ElementFactory.make("videoconvert", "convertor_src1_"+str(s*2))
            nvvidconvsrc = Gst.ElementFactory.make("nvvideoconvert", "convertor_src2_"+str(s*2))
            caps_vidconvsrc = Gst.ElementFactory.make("capsfilter", "nvmm_caps"+str(s*2))
            caps_vidconvsrc.set_property('caps', Gst.Caps.from_string("video/x-raw(memory:NVMM), format=RGBA"))
            
            pipeline.add(source)
            pipeline.add(caps_v4l2src)
            pipeline.add(vidconvsrc)
            pipeline.add(nvvidconvsrc)
            pipeline.add(caps_vidconvsrc)

            sources.append(source)
            caps_v4l2srcs.append(caps_v4l2src)
            vidconvsrcs.append(vidconvsrc)
            nvvidconvsrcs.append(nvvidconvsrc)
            caps_vidconvsrcs.append(caps_vidconvsrc)
                
        streammux = Gst.ElementFactory.make("nvstreammux", "Stream-muxer")    
        queue1=Gst.ElementFactory.make("queue","queue1")
        queue2=Gst.ElementFactory.make("queue","queue2")
        queue3=Gst.ElementFactory.make("queue","queue3")
        queue4=Gst.ElementFactory.make("queue","queue4")
        queue5=Gst.ElementFactory.make("queue","queue5")
            
        pgie = Gst.ElementFactory.make("nvinfer", "primary-inference")
        tiler=Gst.ElementFactory.make("nvmultistreamtiler", "nvtiler")
        nvvidconv = Gst.ElementFactory.make("nvvideoconvert", "convertor")    
        nvosd = Gst.ElementFactory.make("nvdsosd", "onscreendisplay")
        transform = Gst.ElementFactory.make("nvegltransform", "nvegl-transform")
        sink = None
        if self.NO_DISPLAY:
            logging.debug("Creating Fakesink \n")
            sink = Gst.ElementFactory.make("fakesink", "nvvideo-renderer")
            sink.set_property('enable-last-sample', 0)
            sink.set_property('sync', 0)
        else:        
            sink = Gst.ElementFactory.make("nveglglessink", "nvvideo-renderer")
            sink.set_property('sync', False)

        logging.debug("Setting common properties")
        streammux.set_property('width', self.STREAMMUX_WIDTH)
        streammux.set_property('height', self.STREAMMUX_HEIGHT)
        streammux.set_property('batch-size', number_sources)
        streammux.set_property('batched-push-timeout', 4000000)
        streammux.set_property('live-source',1)
        pgie.set_property('config-file-path', "config_infer_primary_yoloV8.txt")
        pgie.set_property("batch-size",number_sources)
        nvosd.set_property('process-mode',self.OSD_PROCESS_MODE)
        nvosd.set_property('display-text',self.OSD_DISPLAY_TEXT)
        tiler_rows=int(math.sqrt(number_sources))
        tiler_columns=int(math.ceil((1.0*number_sources)/tiler_rows))
        tiler.set_property("rows",tiler_rows)
        tiler.set_property("columns",tiler_columns)
        tiler.set_property("width", self.TILED_OUTPUT_WIDTH)
        tiler.set_property("height", self.TILED_OUTPUT_HEIGHT)
        sink.set_property("qos",0)

        logging.debug(f"Playing {number_sources} cameras")

        pipeline.add(streammux)

        pipeline.add(queue1)
        pipeline.add(queue2)
        pipeline.add(queue3)
        pipeline.add(queue4)
        pipeline.add(queue5)

        pipeline.add(pgie)
        pipeline.add(tiler)
        pipeline.add(nvvidconv)
        pipeline.add(nvosd)
        pipeline.add(sink)    
        pipeline.add(transform)    

        srcpads = []
        sinkpads = []
        for s in range(number_sources):
            srcpad = caps_vidconvsrcs[s].get_static_pad("src")    
            sinkpad = streammux.request_pad_simple("sink_"+str(s))
            logging.debug("sink_number:"+str(s))
            srcpads.append(srcpad)
            sinkpads.append(sinkpad)
        
        for i in range(number_sources):    
            sources[i].link(caps_v4l2srcs[i])
            caps_v4l2srcs[i].link(vidconvsrcs[i])
            vidconvsrcs[i].link(nvvidconvsrcs[i])
            nvvidconvsrcs[i].link(caps_vidconvsrcs[i])
            srcpads[i].link(sinkpads[i])
        
        streammux.link(queue1)
        queue1.link(pgie)
        pgie.link(queue2) #pgie.link(tiler)
        queue2.link(tiler)
        tiler.link(queue3) #tiler.link(nvvidconv)    
        queue3.link(nvvidconv)
        nvvidconv.link(queue4) #nvvidconv.link(nvosd)
        queue4.link(nvosd)       
        nvosd.link(queue5) #nvosd.link(sink)
        queue5.link(transform)
        transform.link(sink)    

        # create an event loop and feed gstreamer bus mesages to it
        self.loop = GLib.MainLoop()
        bus = pipeline.get_bus()
        bus.add_signal_watch()
        bus.connect ("message", bus_call, self.loop)

        # Lets add probe to get informed of the meta data generated, we add probe to
        # the sink pad of the osd element, since by that time, the buffer would have
        # had got all the metadata.
        pgie_src_pad = pgie.get_static_pad("src")
        if not pgie_src_pad:
            sys.stderr.write(" Unable to get sink pad of nvosd \n")

        pgie_src_pad.add_probe(Gst.PadProbeType.BUFFER, pgie_src_pad_buffer_probe, 0)
        GLib.timeout_add(5000, perf_data.perf_print_callback)

        # start play back and listen to events
        logging.debug("Starting pipeline \n")
        pipeline.set_state(Gst.State.PLAYING)
        try:
            self.loop.run()
        except:
            pass
        # cleanup
        pipeline.set_state(Gst.State.NULL)

I dont know if I did this right but I change the pipeline:

Summary of the changes:

Creating the sink

        if self.NO_DISPLAY:
            logging.debug("Creating Fakesink \n")
            sink = Gst.ElementFactory.make("fakesink", "nvvideo-renderer")
            sink.set_property('enable-last-sample', 0)
            sink.set_property('sync', 0)

Adding pipeline elements:

        pipeline.add(sink)    
        if not self.NO_DISPLAY:
            pipeline.add(transform)

linking the elements:

        if self.NO_DISPLAY:
            queue5.link(sink)
        else:
            queue5.link(transform)
            transform.link(sink)

Why do I have to remove transform?
Secondly. Will this have any impact on performance?

This topic was automatically closed 14 days after the last reply. New replies are no longer allowed.