• Jetson Xavier NX
• DeepStream 6.2
• JetPack SDK 5.0.2
• Issue Type: Bug/Question
I am trying to use the new nvstreammux with sync-inputs=1 and multiple RTSP sources. It doesn’t drop frames when nvinfer is not able to keep up with the RTSP sources’ rate of 25 fps.
Can you elaborate on which case you ran and how you checked the frame dropping?
source bin
def create_source_bin(uri):
    """Build one camera source bin: rtspsrc -> decodebin -> identity.

    Args:
        uri: RTSP URL of the camera stream.

    Returns:
        A Gst.Bin whose "src" ghost pad exposes the identity element's
        output, so the bin can be linked to the downstream muxer.

    Raises:
        AssertionError: if the ghost pad cannot be added to the bin.
    """
    description = (
        f"rtspsrc location={uri} latency=0 ! decodebin ! identity name=identity"
    )
    source_bin = Gst.parse_bin_from_description(description, False)
    # Ghost the identity element's static src pad out of the bin so callers
    # can link the bin like a single element.
    inner_src = source_bin.get_by_name("identity").get_static_pad("src")
    assert source_bin.add_pad(Gst.GhostPad.new("src", inner_src))
    return source_bin
processing bin
def create_propcessing_bin(num_sources):
    """Build the inference bin:
    nvstreammux -> nvinfer -> nvtracker -> nvdsanalytics -> nvdslogger.

    Args:
        num_sources: number of input streams; also used as the batch size
            for the muxer and the primary inference engine.

    Returns:
        A Gst.Bin. Its output is exposed as a "src" ghost pad, and its
        ``get_request_pad`` attribute is replaced by a helper that ghosts
        freshly requested muxer sink pads ("sink_%u") out of the bin.
    """
    bin_ = Gst.Bin.new("processing-bin")
    assert bin_

    def _make(factory_name, element_name):
        # Create an element or fail loudly if the plugin is missing.
        element = Gst.ElementFactory.make(factory_name, element_name)
        assert element
        return element

    streammux = _make("nvstreammux", "Stream-muxer")
    for prop, value in (
        ("width", 640),
        ("height", 640),
        ("batch-size", num_sources),
        ("batched-push-timeout", 4000000),
        # ("nvbuf-memory-type", 0),
        ("sync-inputs", 1),
    ):
        streammux.set_property(prop, value)

    pgie = _make("nvinfer", "primary-inference")
    pgie.set_property("config-file-path", "cfg/config_infer_primary_yoloV7.txt")
    pgie.set_property("batch-size", num_sources)

    tracker = _make("nvtracker", "tracker")
    for prop, value in (
        ("tracker-width", 320),
        ("tracker-height", 320),
        ("gpu_id", 0),
        ("ll-lib-file",
         "/opt/nvidia/deepstream/deepstream/lib/libnvds_nvmultiobjecttracker.so"),
        ("ll-config-file",
         "/opt/nvidia/deepstream/deepstream/samples/configs/deepstream-app/config_tracker_NvDCF_perf.yml"),
        ("enable_batch_process", 1),
        ("enable_past_frame", 1),
    ):
        tracker.set_property(prop, value)

    nvanalytics = _make("nvdsanalytics", "analytics")
    nvanalytics.set_property("config-file", "cfg/config_nvdsanalytics.txt")

    nvdslogger = _make("nvdslogger", "logger")

    # Add all elements to the bin, then link them in pipeline order.
    chain = (streammux, pgie, tracker, nvanalytics, nvdslogger)
    for element in chain:
        bin_.add(element)
    for upstream, downstream in zip(chain, chain[1:]):
        upstream.link(downstream)

    def get_request_pad(name):
        # Request a sink pad on the muxer and ghost it out of the bin.
        ghost = Gst.GhostPad.new(name, streammux.get_request_pad(name))
        bin_.add_pad(ghost)
        return ghost

    # NOTE(review): this shadows the GObject method of the same name on this
    # instance so callers can request muxer sink pads directly on the bin.
    bin_.get_request_pad = get_request_pad

    assert bin_.add_pad(Gst.GhostPad.new("src", nvdslogger.get_static_pad("src")))
    return bin_
display bin
def create_display_bin(num_sources, udpsink_port_num):
    """Build the render/streaming bin:
    nvvideoconvert -> nvmultistreamtiler -> nvdsosd -> nvvideoconvert ->
    capsfilter(I420) -> nvv4l2h264enc -> rtph264pay -> udpsink.

    Args:
        num_sources: number of streams to arrange in the tiler grid.
        udpsink_port_num: UDP port the encoded RTP stream is sent to
            (picked up by the RTSP server's udpsrc).

    Returns:
        A Gst.Bin whose input is exposed as a "sink" ghost pad.
    """
    bin_ = Gst.Bin.new("display-bin")
    assert bin_

    def _make(factory_name, element_name):
        # Create an element or fail loudly if the plugin is missing.
        element = Gst.ElementFactory.make(factory_name, element_name)
        assert element
        return element

    # Converts NV12 to RGBA as required by nvdsosd.
    nvvidconv = _make("nvvideoconvert", "convertor")

    tiler = _make("nvmultistreamtiler", "nvtiler")
    # Near-square grid: rows = floor(sqrt(n)), columns = ceil(n / rows).
    rows = int(math.sqrt(num_sources))
    columns = int(math.ceil((1.0 * num_sources) / rows))
    for prop, value in (
        ("rows", rows),
        ("columns", columns),
        ("width", 1280),
        ("height", 720),
    ):
        tiler.set_property(prop, value)

    # OSD draws bounding boxes / text on the converted RGBA buffer.
    nvosd = _make("nvdsosd", "onscreendisplay")
    nvvidconv_postosd = _make("nvvideoconvert", "convertor_postosd")

    caps = _make("capsfilter", "filter")
    caps.set_property(
        "caps", Gst.Caps.from_string("video/x-raw(memory:NVMM), format=I420")
    )

    encoder = _make("nvv4l2h264enc", "encoder")
    encoder.set_property("bitrate", 4000000)
    if is_aarch64():
        # Jetson-specific encoder tuning.
        encoder.set_property("preset-level", 1)
        encoder.set_property("insert-sps-pps", 1)
        # encoder.set_property("bufapi-version", 1)

    rtppay = _make("rtph264pay", "rtppay")

    sink = _make("udpsink", "udpsink")
    for prop, value in (
        ("host", "224.224.255.255"),
        ("port", udpsink_port_num),
        ("async", False),
        ("sync", True),
        ("qos", 0),
    ):
        sink.set_property(prop, value)

    # Add all elements to the bin, then link them in pipeline order.
    chain = (nvvidconv, tiler, nvosd, nvvidconv_postosd, caps, encoder, rtppay, sink)
    for element in chain:
        bin_.add(element)
    for upstream, downstream in zip(chain, chain[1:]):
        upstream.link(downstream)

    assert bin_.add_pad(Gst.GhostPad.new("sink", nvvidconv.get_static_pad("sink")))
    return bin_
main code
# ---- Main pipeline assembly ----
Gst.init(None)
pipeline = Gst.Pipeline()
assert pipeline
# One source bin per RTSP URI; `uri_names` is defined elsewhere in the script.
source_bins = [create_source_bin(uri_name) for uri_name in uri_names]
num_sources = len(uri_names)
processing_bin = create_propcessing_bin(num_sources)
udpsink_port_num = 5400
display_bin = create_display_bin(num_sources, udpsink_port_num)
for source_bin in source_bins:
    pipeline.add(source_bin)
pipeline.add(processing_bin)
pipeline.add(display_bin)
# Link each source's "src" ghost pad to a requested muxer sink pad
# (via the patched get_request_pad helper on the processing bin).
for i, source_bin in enumerate(source_bins):
    source_bin.get_static_pad("src").link(processing_bin.get_request_pad("sink_%u" % i))
processing_bin.link(display_bin)
loop = GLib.MainLoop()
bus = pipeline.get_bus()
bus.add_signal_watch()
# `bus_call` is defined elsewhere; it handles EOS/error messages on `loop`.
bus.connect("message", bus_call, loop)
pipeline.set_state(Gst.State.PLAYING)
# RTSP server re-streams the udpsink output at rtsp://<host>:8554/ds-test.
server = GstRtspServer.RTSPServer.new()
rtsp_port_num = 8554
server.props.service = "%d" % rtsp_port_num
server.attach(None)
factory = GstRtspServer.RTSPMediaFactory.new()
# The factory's udpsrc listens on the same port the display bin's udpsink
# sends to, so the encoded H264/RTP stream is served as-is.
factory.set_launch(
    "( udpsrc name=pay0 port=%d buffer-size=524288 caps=\"application/x-rtp, media=video, clock-rate=90000, encoding-name=(string)H264, payload=96 \" )"
    % (udpsink_port_num,)
)
factory.set_shared(True)
server.get_mount_points().add_factory("/ds-test", factory)
try:
    loop.run()
finally:
    # loop.quit()
    # Always release pipeline resources, even on Ctrl-C.
    pipeline.set_state(Gst.State.NULL)
I run this command to watch the output stream:
gst-launch-1.0 rtspsrc location=rtsp://localhost:8554/ds-test latency=0 ! decodebin ! nvvidconv ! autovideosink
Result
There is an increasing discrepancy between the time displayed in the output stream frames and the current time.
To prove that the issue is only with nvstreammux element:
Test 1
- I removed display bin
- I added probe callback to processing bin
def processing_src_pad_buffer_probe(pad, info, u_data):
    """Buffer probe on the processing bin's src pad.

    Walks the batch metadata attached by nvdsanalytics and prints a message
    whenever the ROI labelled "RF" contains at least one object.

    Args:
        pad: the probed Gst.Pad (unused).
        info: Gst.PadProbeInfo carrying the GstBuffer.
        u_data: opaque user data (unused).

    Returns:
        Gst.PadProbeReturn.OK so buffers always keep flowing.
    """
    gst_buffer = info.get_buffer()
    if not gst_buffer:
        print("Unable to get GstBuffer ")
        # Bug fix: a probe must return a Gst.PadProbeReturn value; the
        # original bare `return` handed None back to GStreamer.
        return Gst.PadProbeReturn.OK
    batch_meta = pyds.gst_buffer_get_nvds_batch_meta(hash(gst_buffer))
    l_frame = batch_meta.frame_meta_list
    while l_frame is not None:
        # The try/except StopIteration pattern follows the pyds samples:
        # list-node access can raise StopIteration at the end of the list.
        try:
            frame_meta = pyds.NvDsFrameMeta.cast(l_frame.data)
        except StopIteration:
            break
        l_user = frame_meta.frame_user_meta_list
        while l_user:
            try:
                user_meta = pyds.NvDsUserMeta.cast(l_user.data)
                if user_meta.base_meta.meta_type == pyds.nvds_get_user_meta_type(
                    "NVIDIA.DSANALYTICSFRAME.USER_META"
                ):
                    analytics_meta = pyds.NvDsAnalyticsFrameMeta.cast(
                        user_meta.user_meta_data
                    )
                    # objInROIcnt maps ROI label -> object count. Bug fix:
                    # guard the key lookup — the original indexed ["RF"]
                    # directly, which raises KeyError (not StopIteration)
                    # when that ROI is absent from this frame's counts.
                    roi_counts = analytics_meta.objInROIcnt
                    if roi_counts and "RF" in roi_counts and roi_counts["RF"] > 0:
                        print("You appeared now")
            except StopIteration:
                break
            try:
                l_user = l_user.next
            except StopIteration:
                break
        try:
            l_frame = l_frame.next
        except StopIteration:
            break
    return Gst.PadProbeReturn.OK
# Attach the analytics-logging probe to the processing bin's output pad.
processing_bin_src_pad = processing_bin.get_static_pad("src")
assert processing_bin_src_pad
processing_bin_src_pad.add_probe(Gst.PadProbeType.BUFFER, processing_src_pad_buffer_probe, 0)
Result
There is a delay between my appearance in front of one of the cameras and logging of the message “You appeared now”.
The issue is not with display bin.
Test 2
- I added probe callback to nvstreammux element:
def streammux_src_pad_buffer_probe(pad, info, u_data):
    """Diagnostic probe: stall every batch leaving nvstreammux by 0.5 s.

    Simulates a slow downstream consumer to reproduce the latency build-up.

    Returns:
        Gst.PadProbeReturn.OK. Bug fix: the original returned the bare bool
        True, which is not a Gst.PadProbeReturn value (it only happened to
        work because True coerces to the same integer as OK).
    """
    time.sleep(0.5)
    return Gst.PadProbeReturn.OK
- I removed pgie, tracker, nvanalytics elements from processing bin.
- I added the display bin back, watched the output stream, and found the same delay issue.
Result
The issue is not with pgie, tracker, nvanalytics elements
Test 3
def dummy_sleep(pad, info, u_data):
    """Diagnostic probe that blocks the streaming thread for one second
    per buffer, then lets the buffer pass unchanged."""
    import time as _time
    _time.sleep(1)
    return Gst.PadProbeReturn.OK
# Minimal reference pipeline without nvstreammux: rtspsrc -> decodebin ->
# identity -> nvvidconv -> autovideosink, with the 1 s sleep probe attached
# to identity's src pad. `location` is defined elsewhere in the script.
pipeline = Gst.parse_launch(
    f"rtspsrc location={location} latency=0 ! decodebin ! identity name=identity ! nvvidconv ! autovideosink"
)
assert pipeline
pipeline.get_by_name("identity").get_static_pad("src").add_probe(Gst.PadProbeType.BUFFER, dummy_sleep, 0)
Although I added a sleep time in the middle of the pipeline, there is no delay between the output stream and the current time.
Result
The delay issue is not with the source bin and it is only with the nvstreammux element which probably caches frames.
Since you are using RTSP sources, please add `streammux.set_property("live-source", 1)` in the source bin. And you can set a proper "batched-push-timeout" value according to your actual RTSP streaming FPS.
I am working with DeepStream 6.2 and streammux live-source is deprecated.
I added `streammux.set_property("live-source", 1)` and I set "batched-push-timeout" to 50000 and then to 50, and I got the same result.
Note also that I am working with Dahua Network video recorder (NVR) H265 Codec.
Do you mean the output delay will increase with time?
Yes
I found that I used the default nvstreammux. With the new nvstreammux, the delay disappears.
This topic was automatically closed 14 days after the last reply. New replies are no longer allowed.