Need help on adding RTSP support in nvdsanalytics example

Please provide complete information as applicable to your setup.

• Hardware Platform (Jetson / GPU) dGPU
• DeepStream Version 5.0
• JetPack Version (valid for Jetson only)
• TensorRT Version 7
• NVIDIA GPU Driver Version (valid for GPU only) 440.60

I am trying to create an RTSP output in the nvdsanalytics example. I used deepstream_sink_bin.c as a reference to implement it. I need help with the pipeline.
Currently I am trying to use

gst_bin_add_many (GST_BIN (pipeline), pgie, nvtracker, nvdsanalytics, cap_filter, transform1, encoder, codecparse, rtppay, sink, NULL);
gst_element_link_many (streammux, pgie, nvtracker, nvdsanalytics, cap_filter, transform1, encoder, codecparse, rtppay, sink, NULL)

Currently it fails with the following error:
0:00:07.526672937 10323 0x7f44c8004f70 INFO GST_EVENT gstevent.c:1546:gst_event_new_sink_message: creating sink-message event
0:00:07.527038091 10323 0x7f44c8004f70 WARN queue gstqueue.c:988:gst_queue_handle_sink_event:<sink_sub_bin_queue01> error: Internal data stream error.
0:00:07.527050453 10323 0x7f44c8004f70 WARN queue gstqueue.c:988:gst_queue_handle_sink_event:<sink_sub_bin_queue01> error: streaming stopped, reason not-linked (-1)
0:00:07.527065739 10323 0x7f44c8004f70 INFO GST_ERROR_SYSTEM gstelement.c:2145:gst_element_message_full_with_details:<sink_sub_bin_queue01> posting message: Internal data stream error.
0:00:07.527099125 10323 0x7f44c8004f70 INFO GST_ERROR_SYSTEM gstelement.c:2172:gst_element_message_full_with_details:<sink_sub_bin_queue01> posted error message: Internal data stream error.
ERROR from element sink_sub_bin_queue01: Internal data stream error.
Error details: gstqueue.c(988): gst_queue_handle_sink_event (): /GstPipeline:nvdsanalytics-test-pipeline/GstQueue:sink_sub_bin_queue01:
streaming stopped, reason not-linked (-1)
Returned, stopping playback

Code*****

int
main (int argc, char *argv)
{
GMainLoop *loop = NULL;
gboolean ret = FALSE;
GstCaps *caps = NULL;
GstElement *pipeline = NULL, *streammux = NULL, *sink = NULL, *pgie = NULL, *codecparse = NULL, *rtppay = NULL, *encoder = NULL,
*nvtracker = NULL, *nvdsanalytics = NULL, *queue = NULL, *transform1 = NULL, * cap_filter = NULL,
*nvvidconv = NULL, *nvosd = NULL, *tiler = NULL;
#ifdef PLATFORM_TEGRA
GstElement *transform = NULL;
#endif
GstBus *bus = NULL;
guint bus_watch_id;
GstPad *nvdsanalytics_src_pad = NULL;
guint i, num_sources;
guint tiler_rows, tiler_columns;
guint pgie_batch_size;
char inputPGIEfile[40], inputROIfile[40], inputTrackerfile[40];

/* Check input arguments */
if (argc < 6) {
g_printerr (“Usage: %s \n”, argv[0]);
return -1;
}
strcpy(inputPGIEfile, argv[3]);
strcpy(inputROIfile, argv[4]);
strcpy(inputTrackerfile, argv[5]);

num_sources = 1;

/* Standard GStreamer initialization */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);

/* Create gstreamer elements /
/
Create Pipeline element that will form a connection of other elements */
pipeline = gst_pipeline_new (“nvdsanalytics-test-pipeline”);

/* Create nvstreammux instance to form batches from one or more sources. */
streammux = gst_element_factory_make (“nvstreammux”, “stream-muxer”);

if (!pipeline || !streammux) {
g_printerr (“One element could not be created. Exiting.\n”);
return -1;
}
gst_bin_add (GST_BIN (pipeline), streammux);

for (i = 0; i < num_sources; i++) {
GstPad *sinkpad, *srcpad;
gchar pad_name[16] = { };
GstElement *source_bin = create_source_bin (i, argv[i + 1]);

if (!source_bin) {
  g_printerr ("Failed to create source bin. Exiting.\n");
  return -1;
}

gst_bin_add (GST_BIN (pipeline), source_bin);

g_snprintf (pad_name, 15, "sink_%u", i);
sinkpad = gst_element_get_request_pad (streammux, pad_name);
if (!sinkpad) {
  g_printerr ("Streammux request sink pad failed. Exiting.\n");
  return -1;
}

srcpad = gst_element_get_static_pad (source_bin, "src");
if (!srcpad) {
  g_printerr ("Failed to get src pad of source bin. Exiting.\n");
  return -1;
}

if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
  g_printerr ("Failed to link source bin to stream muxer. Exiting.\n");
  return -1;
}

gst_object_unref (srcpad);
gst_object_unref (sinkpad);

}

/* Use nvinfer to infer on batched frame. */
pgie = gst_element_factory_make (“nvinfer”, “primary-nvinference-engine”);

/* Use nvtracker to track detections on batched frame. */
nvtracker = gst_element_factory_make (“nvtracker”, “nvtracker”);

/* Use nvdsanalytics to perform analytics on object */
nvdsanalytics = gst_element_factory_make (“nvdsanalytics”, “nvdsanalytics”);

/* Use nvtiler to composite the batched frames into a 2D tiled array based

  • on the source of the frames. */
    tiler = gst_element_factory_make (“nvmultistreamtiler”, “nvtiler”);

/* Use convertor to convert from NV12 to RGBA as required by nvosd */
nvvidconv = gst_element_factory_make (“nvvideoconvert”, “nvvideo-converter”);

/* Create OSD to draw on the converted RGBA buffer */
nvosd = gst_element_factory_make (“nvdsosd”, “nv-onscreendisplay”);

/* Finally render the osd output */
#ifdef PLATFORM_TEGRA
transform = gst_element_factory_make (“nvegltransform”, “nvegl-transform”);
#endif
caps = gst_caps_from_string (“video/x-raw(memory:NVMM), format=I420”);
queue = gst_element_factory_make (“queue”, “sink_sub_bin_queue01”);
transform1 = gst_element_factory_make (“nvvideoconvert”, “sink_sub_bin_transform01”);
codecparse = gst_element_factory_make (“h264parse”, “h264-parser”);
rtppay = gst_element_factory_make (“rtph264pay”, “sink_sub_bin_rtppay01”);
encoder = gst_element_factory_make (“x264enc”, “sink_sub_bin_encoder01”);
sink = gst_element_factory_make (“udpsink”, “sink_sub_bin_udpsink01”);
g_object_set (G_OBJECT (sink), “host”, “224.224.255.255”, “port”, 8554, “async”, FALSE, “sync”, 0, NULL);
//sink = gst_element_factory_make (“nveglglessink”, “nvvideo-renderer”);

if (!pgie || !nvtracker || !nvdsanalytics || !tiler || !nvvidconv ||
!nvosd || !sink || !caps || !queue || !transform1 || !codecparse || !rtppay || !encoder ) {
g_printerr (“One element could not be created. Exiting.\n”);
return -1;
}

#ifdef PLATFORM_TEGRA
if(!transform) {
g_printerr (“One tegra element could not be created. Exiting.\n”);
return -1;
}
#endif

g_object_set (G_OBJECT (streammux), “width”, MUXER_OUTPUT_WIDTH, “height”,
MUXER_OUTPUT_HEIGHT, “batch-size”, num_sources,
“batched-push-timeout”, MUXER_BATCH_TIMEOUT_USEC, NULL);

/* Configure the nvinfer element using the nvinfer config file. */
g_object_set (G_OBJECT (pgie), “config-file-path”, inputPGIEfile, NULL);

/* Configure the nvtracker element for using the particular tracker algorithm. */
g_object_set (G_OBJECT (nvtracker),
“ll-lib-file”, “/opt/nvidia/deepstream/deepstream-5.0/lib/libnvds_nvdcf.so”,
“ll-config-file”, inputTrackerfile, “tracker-width”, 640, “tracker-height”, 480,
NULL);

/* Configure the nvdsanalytics element for using the particular analytics config file*/
/* g_object_set (G_OBJECT (nvdsanalytics),
“config-file”, “config_nvdsanalytics.txt”,
NULL); */
g_object_set (G_OBJECT (nvdsanalytics),
“config-file”, inputROIfile,
NULL);

/* Override the batch-size set in the config file with the number of sources. */
g_object_get (G_OBJECT (pgie), “batch-size”, &pgie_batch_size, NULL);
if (pgie_batch_size != num_sources) {
g_printerr
(“WARNING: Overriding infer-config batch-size (%d) with number of sources (%d)\n”,
pgie_batch_size, num_sources);
g_object_set (G_OBJECT (pgie), “batch-size”, num_sources, NULL);
}

tiler_rows = (guint) sqrt (num_sources);
tiler_columns = (guint) ceil (1.0 * num_sources / tiler_rows);
/* we set the tiler properties here */
g_object_set (G_OBJECT (tiler), “rows”, tiler_rows, “columns”, tiler_columns,
“width”, TILED_OUTPUT_WIDTH, “height”, TILED_OUTPUT_HEIGHT, NULL);

/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);

/* Set up the pipeline /
/
we add all elements into the pipeline */
#ifdef PLATFORM_TEGRA
gst_bin_add_many (GST_BIN (pipeline), pgie, nvtracker, nvdsanalytics , tiler,
nvvidconv, nvosd, transform, queue, cap_filter, transform1, encoder, codecparse, rtppay, sink,
NULL);

/* we link the elements together

  • nvstreammux -> nvinfer -> nvtracker -> nvdsanalytics -> nvtiler ->
  • nvvideoconvert -> nvosd -> transform -> sink
    /
    if (!gst_element_link_many (streammux, pgie, nvtracker, nvdsanalytics, tiler,
    nvvidconv, nvosd, transform, queue, cap_filter, transform1, encoder, codecparse, rtppay, sink, NULL)) {
    g_printerr (“Elements could not be linked. Exiting.\n”);
    return -1;
    }
    #else
    gst_bin_add_many (GST_BIN (pipeline), pgie, nvtracker, nvdsanalytics, cap_filter, transform1, encoder, codecparse, rtppay, sink, NULL);
    /
    we link the elements together
  • nvstreammux -> nvinfer -> nvtracker -> nvdsanalytics -> nvtiler ->
  • nvvideoconvert -> nvosd -> sink
    */
    if (!gst_element_link_many (streammux, pgie, nvtracker, nvdsanalytics, cap_filter, transform1, encoder, codecparse, rtppay, sink, NULL)) {
    g_printerr (“Elements could not be linked. Exiting.\n”);
    return -1;
    }
    #endif

/* NVGSTDS_LINK_ELEMENT (queue, transform1);
NVGSTDS_LINK_ELEMENT (transform1, cap_filter);
NVGSTDS_LINK_ELEMENT (cap_filter, encoder);
NVGSTDS_LINK_ELEMENT (encoder, codecparse);
NVGSTDS_LINK_ELEMENT (codecparse, rtppay);
NVGSTDS_LINK_ELEMENT (rtppay, sink);

NVGSTDS_BIN_ADD_GHOST_PAD (queue, “sink”); */

ret = TRUE;

ret = start_rtsp_streaming (8554, 8554, NV_DS_ENCODER_H264, 100000);
if (ret != TRUE) {
g_print ("%s: start_rtsp_straming function failed\n", func);
}

/* Lets add probe to get informed of the meta data generated, we add probe to

  • the sink pad of the nvdsanalytics element, since by that time, the buffer
  • would have had got all the metadata.
    */
    nvdsanalytics_src_pad = gst_element_get_static_pad (nvdsanalytics, “src”);
    if (!nvdsanalytics_src_pad)
    g_print (“Unable to get src pad\n”);
    else
    gst_pad_add_probe (nvdsanalytics_src_pad, GST_PAD_PROBE_TYPE_BUFFER,
    nvdsanalytics_src_pad_buffer_probe, NULL, NULL);

/* Set the pipeline to “playing” state */
g_print (“Now playing:”);
for (i = 0; i < num_sources; i++) {
g_print (" %s,", argv[i + 1]);
}
g_print ("\n");
gst_element_set_state (pipeline, GST_STATE_PLAYING);

/* Wait till pipeline encounters an error or EOS */
g_print (“Running…\n”);
g_main_loop_run (loop);

/* Out of the main loop, clean up nicely */
done:
if (caps) {
gst_caps_unref (caps);
}
if (!ret) {
g_print (“Failed”);
}
g_print (“Returned, stopping playback\n”);
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print (“Deleting pipeline\n”);
gst_object_unref (GST_OBJECT (pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref (loop);
return 0;
}

/**
 * Start a gst-rtsp-server instance that re-serves, at rtsp://host:@rtsp_port_num/ds-test,
 * the RTP stream the pipeline's udpsink pushes to @updsink_port_num.
 *
 * @param rtsp_port_num    TCP port the RTSP server listens on.
 * @param updsink_port_num UDP port the pipeline's udpsink sends to; the
 *                         factory's udpsrc reads from it.
 * @param enctype          NV_DS_ENCODER_H264 or NV_DS_ENCODER_H265.
 * @param udp_buffer_size  udpsrc buffer size in bytes; 0 selects 512 KiB.
 * @return TRUE on success, FALSE for an unknown encoder type.
 */
static gboolean
start_rtsp_streaming (guint rtsp_port_num, guint updsink_port_num,
    NvDsEncoderType enctype, guint64 udp_buffer_size)
{
  GstRTSPMountPoints *mounts;
  GstRTSPMediaFactory *factory;
  char udpsrc_pipeline[512];
  char port_num_Str[64] = { 0 };
  const char *encoder_name;       /* points at a string literal; keep const */

  if (enctype == NV_DS_ENCODER_H264) {
    encoder_name = "H264";
  } else if (enctype == NV_DS_ENCODER_H265) {
    encoder_name = "H265";
  } else {
    g_printerr ("Failed. Exiting.\n");
    return FALSE;
  }

  if (udp_buffer_size == 0)
    udp_buffer_size = 512 * 1024;

  /* snprintf (not sprintf) bounds the write; the caps value must be quoted
   * inside the launch description, and guint64 needs G_GUINT64_FORMAT for a
   * portable format specifier. */
  snprintf (udpsrc_pipeline, sizeof (udpsrc_pipeline),
      "( udpsrc name=pay0 port=%u buffer-size=%" G_GUINT64_FORMAT
      " caps=\"application/x-rtp, media=video, "
      "clock-rate=90000, encoding-name=%s, payload=96\" )",
      updsink_port_num, udp_buffer_size, encoder_name);

  snprintf (port_num_Str, sizeof (port_num_Str), "%u", rtsp_port_num);

  /* server[]/server_count are shared globals; serialize registration. */
  g_mutex_lock (&server_cnt_lock);

  server[server_count] = gst_rtsp_server_new ();
  g_object_set (server[server_count], "service", port_num_Str, NULL);

  mounts = gst_rtsp_server_get_mount_points (server[server_count]);

  factory = gst_rtsp_media_factory_new ();
  gst_rtsp_media_factory_set_launch (factory, udpsrc_pipeline);

  gst_rtsp_mount_points_add_factory (mounts, "/ds-test", factory);

  g_object_unref (mounts);

  gst_rtsp_server_attach (server[server_count], NULL);

  server_count++;

  g_mutex_unlock (&server_cnt_lock);

  g_print
      ("\n *** DeepStream: Launched RTSP Streaming at rtsp://localhost:%u/ds-test ***\n\n",
      rtsp_port_num);

  return TRUE;
}

Hi,
The implementation is create_udpsink_bin() in

deepstream-5.0\sources\apps\apps-common\src\deepstream_sink_bin.c

And you can refer to


It is a Python sample of deepstream_test1 + RTSP streaming. The same modification applies to the C code.

I tried using the hints you provided but it still does not work. The linking error has gone but the RTSP Stream does not play. The moment I try to connect using VLC I get the error as seen in the last few lines of the error log. I have also attached the modified code.
------------------------Error-----------------------------------------
(deepstream-nvdsanalytics-test:24367): GLib-GObject-WARNING **: 11:52:17.023: g_object_set_is_valid_property: object class ‘nvv4l2h264enc’ has no property named ‘preset-level’

(deepstream-nvdsanalytics-test:24367): GLib-GObject-WARNING **: 11:52:17.023: g_object_set_is_valid_property: object class ‘nvv4l2h264enc’ has no property named ‘insert-sps-pps’

(deepstream-nvdsanalytics-test:24367): GLib-GObject-WARNING **: 11:52:17.023: g_object_set_is_valid_property: object class ‘nvv4l2h264enc’ has no property named ‘bufapi-version’
RTSP Server Starting

*** DeepStream: Launched RTSP Streaming at rtsp://localhost:8554/ds-test ***

RTSP Server Started
Now playing: file:///home/deepak/work/dsdk5/final_model_data/PV/dd2.mp4,
gstnvtracker: Loading low-level lib at /opt/nvidia/deepstream/deepstream-5.0/lib/libnvds_nvdcf.so
gstnvtracker: Optional NvMOT_RemoveStreams not implemented
gstnvtracker: Batch processing is ON
!! [WARNING][NvDCF] Unknown param found: minMatchingScore4Motion
!! [WARNING][NvDCF] Unknown param found: matchingScoreWeight4Motion
[NvDCF] Initialized
WARNING: …/nvdsinfer/nvdsinfer_func_utils.cpp:34 [TRT]: Current optimization profile is: 0. Please ensure there are no enqueued operations pending in this context prior to switching profiles
0:00:01.745902470 24367 0x555f0e893890 INFO nvinfer gstnvinfer.cpp:602:gst_nvinfer_logger: NvDsInferContext[UID 1]: Info from NvDsInferContextImpl::deserializeEngineAndBackend() <nvdsinfer_context_impl.cpp:1577> [UID = 1]: deserialized trt engine from :/opt/nvidia/deepstream/deepstream-5.0/sources/apps/sample_apps/PV/INT8_m1.plan
INFO: …/nvdsinfer/nvdsinfer_model_builder.cpp:685 [Implicit Engine Info]: layers num: 3
0 INPUT kFLOAT input_1 3x1080x1920
1 OUTPUT kFLOAT output_bbox/BiasAdd 40x68x120
2 OUTPUT kFLOAT output_cov/Sigmoid 10x68x120

0:00:01.745953010 24367 0x555f0e893890 INFO nvinfer gstnvinfer.cpp:602:gst_nvinfer_logger: NvDsInferContext[UID 1]: Info from NvDsInferContextImpl::generateBackendContext() <nvdsinfer_context_impl.cpp:1681> [UID = 1]: Use deserialized engine model: /opt/nvidia/deepstream/deepstream-5.0/sources/apps/sample_apps/PV/INT8_m1.plan
0:00:01.747899846 24367 0x555f0e893890 INFO nvinfer gstnvinfer_impl.cpp:311:notifyLoadModelStatus: [UID 1]: Load new model:nvdsanalytics_pgie_config.txt sucessfully
0:00:01.748555247 24367 0x555f0e893890 WARN basesrc gstbasesrc.c:3583:gst_base_src_start_complete: pad not activated yet
Decodebin child added: source
Decodebin child added: decodebin0
0:00:01.748826025 24367 0x555f0e893890 WARN basesrc gstbasesrc.c:3583:gst_base_src_start_complete: pad not activated yet
Running…
Decodebin child added: qtdemux0
0:00:01.752547669 24367 0x7ff08007d400 WARN qtdemux qtdemux.c:7283:qtdemux_parse_container: length too long (1507328 > 27)
0:00:01.752569245 24367 0x7ff08007d400 WARN qtdemux qtdemux.c:3031:qtdemux_parse_trex: failed to find fragment defaults for stream 1
0:00:01.752608283 24367 0x7ff08007d400 WARN qtdemux qtdemux.c:9142:qtdemux_parse_segments: Segment 0 extends to 0:00:45.360000000 past the end of the file duration 0:00:45.240000000 it will be truncated
Decodebin child added: multiqueue0
Decodebin child added: h264parse0
Decodebin child added: capsfilter0
Decodebin child added: nvv4l2decoder0
0:00:01.754834856 24367 0x7ff018009800 WARN v4l2 gstv4l2object.c:3035:gst_v4l2_object_get_nearest_size:nvv4l2decoder0:sink Unable to try format: Unknown error -1
0:00:01.754845873 24367 0x7ff018009800 WARN v4l2 gstv4l2object.c:2921:gst_v4l2_object_probe_caps_for_format:nvv4l2decoder0:sink Could not probe minimum capture size for pixelformat MJPG
0:00:01.754850867 24367 0x7ff018009800 WARN v4l2 gstv4l2object.c:3035:gst_v4l2_object_get_nearest_size:nvv4l2decoder0:sink Unable to try format: Unknown error -1
0:00:01.754856330 24367 0x7ff018009800 WARN v4l2 gstv4l2object.c:2927:gst_v4l2_object_probe_caps_for_format:nvv4l2decoder0:sink Could not probe maximum capture size for pixelformat MJPG
0:00:01.754866405 24367 0x7ff018009800 WARN v4l2 gstv4l2object.c:3035:gst_v4l2_object_get_nearest_size:nvv4l2decoder0:sink Unable to try format: Unknown error -1
0:00:01.754870641 24367 0x7ff018009800 WARN v4l2 gstv4l2object.c:2921:gst_v4l2_object_probe_caps_for_format:nvv4l2decoder0:sink Could not probe minimum capture size for pixelformat MPG4
0:00:01.754874435 24367 0x7ff018009800 WARN v4l2 gstv4l2object.c:3035:gst_v4l2_object_get_nearest_size:nvv4l2decoder0:sink Unable to try format: Unknown error -1
0:00:01.754881306 24367 0x7ff018009800 WARN v4l2 gstv4l2object.c:2927:gst_v4l2_object_probe_caps_for_format:nvv4l2decoder0:sink Could not probe maximum capture size for pixelformat MPG4
0:00:01.754915052 24367 0x7ff018009800 WARN v4l2 gstv4l2object.c:3035:gst_v4l2_object_get_nearest_size:nvv4l2decoder0:sink Unable to try format: Unknown error -1
0:00:01.754924203 24367 0x7ff018009800 WARN v4l2 gstv4l2object.c:2921:gst_v4l2_object_probe_caps_for_format:nvv4l2decoder0:sink Could not probe minimum capture size for pixelformat H265
0:00:01.754928720 24367 0x7ff018009800 WARN v4l2 gstv4l2object.c:3035:gst_v4l2_object_get_nearest_size:nvv4l2decoder0:sink Unable to try format: Unknown error -1
0:00:01.754932773 24367 0x7ff018009800 WARN v4l2 gstv4l2object.c:2927:gst_v4l2_object_probe_caps_for_format:nvv4l2decoder0:sink Could not probe maximum capture size for pixelformat H265
0:00:01.754942428 24367 0x7ff018009800 WARN v4l2 gstv4l2object.c:3035:gst_v4l2_object_get_nearest_size:nvv4l2decoder0:sink Unable to try format: Unknown error -1
0:00:01.754946698 24367 0x7ff018009800 WARN v4l2 gstv4l2object.c:2921:gst_v4l2_object_probe_caps_for_format:nvv4l2decoder0:sink Could not probe minimum capture size for pixelformat H264
0:00:01.754950696 24367 0x7ff018009800 WARN v4l2 gstv4l2object.c:3035:gst_v4l2_object_get_nearest_size:nvv4l2decoder0:sink Unable to try format: Unknown error -1
0:00:01.754957995 24367 0x7ff018009800 WARN v4l2 gstv4l2object.c:2927:gst_v4l2_object_probe_caps_for_format:nvv4l2decoder0:sink Could not probe maximum capture size for pixelformat H264
0:00:01.755230999 24367 0x7ff018009800 WARN v4l2 gstv4l2object.c:3035:gst_v4l2_object_get_nearest_size:nvv4l2decoder0:src Unable to try format: Unknown error -1
0:00:01.755238513 24367 0x7ff018009800 WARN v4l2 gstv4l2object.c:2921:gst_v4l2_object_probe_caps_for_format:nvv4l2decoder0:src Could not probe minimum capture size for pixelformat NM12
0:00:01.755242790 24367 0x7ff018009800 WARN v4l2 gstv4l2object.c:3035:gst_v4l2_object_get_nearest_size:nvv4l2decoder0:src Unable to try format: Unknown error -1
0:00:01.755267342 24367 0x7ff018009800 WARN v4l2 gstv4l2object.c:2927:gst_v4l2_object_probe_caps_for_format:nvv4l2decoder0:src Could not probe maximum capture size for pixelformat NM12
0:00:01.755277459 24367 0x7ff018009800 WARN v4l2 gstv4l2object.c:2372:gst_v4l2_object_add_interlace_mode:0x7ff00c0476a0 Failed to determine interlace mode
0:00:01.859309050 24367 0x7ff018009800 ERROR v4l2 gstv4l2object.c:2074:gst_v4l2_object_get_interlace_mode: Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git

0:00:01.859377360 24367 0x7ff018009800 ERROR v4l2 gstv4l2object.c:2074:gst_v4l2_object_get_interlace_mode: Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git

0:00:01.859471859 24367 0x7ff018009800 ERROR v4l2 gstv4l2object.c:2074:gst_v4l2_object_get_interlace_mode: Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git

0:00:01.859496331 24367 0x7ff018009800 ERROR v4l2 gstv4l2object.c:2074:gst_v4l2_object_get_interlace_mode: Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git

0:00:01.859553425 24367 0x7ff018009800 ERROR v4l2 gstv4l2object.c:2074:gst_v4l2_object_get_interlace_mode: Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git

0:00:01.859572530 24367 0x7ff018009800 ERROR v4l2 gstv4l2object.c:2074:gst_v4l2_object_get_interlace_mode: Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git

0:00:01.859626616 24367 0x7ff018009800 ERROR v4l2 gstv4l2object.c:2074:gst_v4l2_object_get_interlace_mode: Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git

0:00:01.859644861 24367 0x7ff018009800 ERROR v4l2 gstv4l2object.c:2074:gst_v4l2_object_get_interlace_mode: Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git

0:00:01.859698842 24367 0x7ff018009800 ERROR v4l2 gstv4l2object.c:2074:gst_v4l2_object_get_interlace_mode: Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git

0:00:01.859717583 24367 0x7ff018009800 ERROR v4l2 gstv4l2object.c:2074:gst_v4l2_object_get_interlace_mode: Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git

0:00:01.859769188 24367 0x7ff018009800 ERROR v4l2 gstv4l2object.c:2074:gst_v4l2_object_get_interlace_mode: Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git

0:00:01.859787160 24367 0x7ff018009800 ERROR v4l2 gstv4l2object.c:2074:gst_v4l2_object_get_interlace_mode: Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git

In cb_newpad
0:00:01.872078394 24367 0x7ff018009800 WARN v4l2videodec gstv4l2videodec.c:1609:gst_v4l2_video_dec_decide_allocation: Duration invalid, not setting latency
0:00:01.872155341 24367 0x7ff018009800 WARN v4l2bufferpool gstv4l2bufferpool.c:1057:gst_v4l2_buffer_pool_start:nvv4l2decoder0:pool:src Uncertain or not enough buffers, enabling copy threshold
0:00:01.873814766 24367 0x7ff004004680 WARN v4l2bufferpool gstv4l2bufferpool.c:1535:gst_v4l2_buffer_pool_dqbuf:nvv4l2decoder0:pool:src Driver should never set v4l2_buffer.field to ANY

(deepstream-nvdsanalytics-test:24367): GLib-GObject-WARNING **: 11:52:25.347: g_object_get_is_valid_property: object class ‘GstUDPSrc’ has no property named ‘pt’
0:00:28.406401509 24367 0x7ff00c0038a0 WARN rtspmedia rtsp-media.c:2994:wait_preroll: failed to preroll pipeline
0:00:28.406416860 24367 0x7ff00c0038a0 WARN rtspmedia rtsp-media.c:3298:gst_rtsp_media_prepare: failed to preroll pipeline
0:00:28.406942033 24367 0x7ff00c0038a0 ERROR rtspclient rtsp-client.c:1054:find_media: client 0x555f5a56e460: can’t prepare media
0:00:28.407053772 24367 0x7ff00c0038a0 ERROR rtspclient rtsp-client.c:2910:handle_describe_request: client 0x555f5a56e460: no media

------------------------Code-----------------------------------------

/**
 * Start a gst-rtsp-server instance that re-serves, at rtsp://host:@rtsp_port_num/ds-test,
 * the RTP stream the pipeline's udpsink pushes to @updsink_port_num.
 *
 * @param rtsp_port_num    TCP port the RTSP server listens on.
 * @param updsink_port_num UDP port the pipeline's udpsink sends to; the
 *                         factory's udpsrc reads from it.
 * @param enctype          NV_DS_ENCODER_H264 or NV_DS_ENCODER_H265.
 * @param udp_buffer_size  udpsrc buffer size in bytes; 0 selects 512 KiB.
 * @return TRUE on success, FALSE for an unknown encoder type.
 */
static gboolean
start_rtsp_streaming (guint rtsp_port_num, guint updsink_port_num,
    NvDsEncoderType enctype, guint64 udp_buffer_size)
{
  GstRTSPMountPoints *mounts;
  GstRTSPMediaFactory *factory;
  char udpsrc_pipeline[512];
  char port_num_Str[64] = { 0 };
  const char *encoder_name;       /* points at a string literal; keep const */

  if (enctype == NV_DS_ENCODER_H264) {
    encoder_name = "H264";
  } else if (enctype == NV_DS_ENCODER_H265) {
    encoder_name = "H265";
  } else {
    g_printerr ("Failed. Exiting.\n");
    return FALSE;
  }

  if (udp_buffer_size == 0)
    udp_buffer_size = 512 * 1024;

  /* snprintf (not sprintf) bounds the write; the caps value must be quoted
   * inside the launch description, and guint64 needs G_GUINT64_FORMAT for a
   * portable format specifier. */
  snprintf (udpsrc_pipeline, sizeof (udpsrc_pipeline),
      "( udpsrc name=pay0 port=%u buffer-size=%" G_GUINT64_FORMAT
      " caps=\"application/x-rtp, media=video, "
      "clock-rate=90000, encoding-name=%s, payload=96\" )",
      updsink_port_num, udp_buffer_size, encoder_name);

  snprintf (port_num_Str, sizeof (port_num_Str), "%u", rtsp_port_num);

  /* server[]/server_count are shared globals; serialize registration. */
  g_mutex_lock (&server_cnt_lock);

  server[server_count] = gst_rtsp_server_new ();
  g_object_set (server[server_count], "service", port_num_Str, NULL);

  mounts = gst_rtsp_server_get_mount_points (server[server_count]);

  factory = gst_rtsp_media_factory_new ();
  gst_rtsp_media_factory_set_launch (factory, udpsrc_pipeline);

  gst_rtsp_mount_points_add_factory (mounts, "/ds-test", factory);

  g_object_unref (mounts);

  gst_rtsp_server_attach (server[server_count], NULL);

  server_count++;

  g_mutex_unlock (&server_cnt_lock);

  g_print
      ("\n *** DeepStream: Launched RTSP Streaming at rtsp://localhost:%u/ds-test ***\n\n",
      rtsp_port_num);

  return TRUE;
}

int
main (int argc, char *argv)
{
GMainLoop *loop = NULL;
gboolean ret = FALSE;
GstCaps *caps = NULL;
GstElement *pipeline = NULL, *streammux = NULL, *sink = NULL, *pgie = NULL, *codecparse = NULL, *rtppay = NULL, *encoder = NULL,
*nvtracker = NULL, *nvdsanalytics = NULL, * cap_filter = NULL, *nvvidconvpost = NULL, *nvvidconv = NULL, *nvosd = NULL, *tiler = NULL;
#ifdef PLATFORM_TEGRA
GstElement *transform = NULL;
#endif
GstBus *bus = NULL;
guint bus_watch_id;
GstPad *nvdsanalytics_src_pad = NULL;
guint i, num_sources;
guint tiler_rows, tiler_columns;
guint pgie_batch_size;
char inputPGIEfile[40], inputROIfile[40], inputTrackerfile[40];

/* Check input arguments */
if (argc < 6) {
g_printerr (“Usage: %s \n”, argv[0]);
return -1;
}
strcpy(inputPGIEfile, argv[3]);
strcpy(inputROIfile, argv[4]);
strcpy(inputTrackerfile, argv[5]);

num_sources = 1;

/* Standard GStreamer initialization */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);

/* Create gstreamer elements /
/
Create Pipeline element that will form a connection of other elements */
pipeline = gst_pipeline_new (“nvdsanalytics-test-pipeline”);

/* Create nvstreammux instance to form batches from one or more sources. */
streammux = gst_element_factory_make (“nvstreammux”, “stream-muxer”);

if (!pipeline || !streammux) {
g_printerr (“One element could not be created. Exiting.\n”);
return -1;
}
gst_bin_add (GST_BIN (pipeline), streammux);

for (i = 0; i < num_sources; i++) {
GstPad *sinkpad, *srcpad;
gchar pad_name[16] = { };
GstElement *source_bin = create_source_bin (i, argv[i + 1]);

if (!source_bin) {
  g_printerr ("Failed to create source bin. Exiting.\n");
  return -1;
}

gst_bin_add (GST_BIN (pipeline), source_bin);

g_snprintf (pad_name, 15, "sink_%u", i);
sinkpad = gst_element_get_request_pad (streammux, pad_name);
if (!sinkpad) {
  g_printerr ("Streammux request sink pad failed. Exiting.\n");
  return -1;
}

srcpad = gst_element_get_static_pad (source_bin, "src");
if (!srcpad) {
  g_printerr ("Failed to get src pad of source bin. Exiting.\n");
  return -1;
}

if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
  g_printerr ("Failed to link source bin to stream muxer. Exiting.\n");
  return -1;
}

gst_object_unref (srcpad);
gst_object_unref (sinkpad);

}

/* Use nvinfer to infer on batched frame. */
pgie = gst_element_factory_make (“nvinfer”, “primary-nvinference-engine”);

/* Use nvtracker to track detections on batched frame. */
nvtracker = gst_element_factory_make (“nvtracker”, “nvtracker”);

/* Use nvdsanalytics to perform analytics on object */
nvdsanalytics = gst_element_factory_make (“nvdsanalytics”, “nvdsanalytics”);

/* Use nvtiler to composite the batched frames into a 2D tiled array based

  • on the source of the frames. */
    tiler = gst_element_factory_make (“nvmultistreamtiler”, “nvtiler”);

/* Use convertor to convert from NV12 to RGBA as required by nvosd */
nvvidconv = gst_element_factory_make (“nvvideoconvert”, “nvvideo-converter”);

/* Create OSD to draw on the converted RGBA buffer */
nvosd = gst_element_factory_make (“nvdsosd”, “nv-onscreendisplay”);

/* Use convertor to convert from NV12 to RGBA as required by nvosd */
nvvidconvpost = gst_element_factory_make (“nvvideoconvert”, “nvvideo-converterpost”);

/* Finally render the osd output */
#ifdef PLATFORM_TEGRA
transform = gst_element_factory_make (“nvegltransform”, “nvegl-transform”);
#endif

caps = gst_caps_from_string (“video/x-raw(memory:NVMM), format=I420”);
rtppay = gst_element_factory_make (“rtph264pay”, “sink_sub_bin_rtppay01”);
encoder = gst_element_factory_make (“nvv4l2h264enc”, “sink_sub_bin_encoder01”);
g_object_set (G_OBJECT (encoder), “preset-level”, 1, NULL);
g_object_set (G_OBJECT (encoder), “insert-sps-pps”, 1, NULL);
g_object_set (G_OBJECT (encoder), “bufapi-version”, 1, NULL);
sink = gst_element_factory_make (“udpsink”, “sink_sub_bin_udpsink01”);
g_object_set (G_OBJECT (sink), “host”, “224.224.255.255”, “port”, 5400, “async”, FALSE, “sync”, 1, NULL);
//sink = gst_element_factory_make (“nveglglessink”, “nvvideo-renderer”);

if (!pgie || !nvtracker || !nvdsanalytics || !tiler || !nvvidconv || !nvosd || !nvvidconvpost ||
!sink || !caps || !rtppay || !encoder ) {
g_printerr (“One element could not be created. Exiting.\n”);
return -1;
}

#ifdef PLATFORM_TEGRA
if(!transform) {
g_printerr (“One tegra element could not be created. Exiting.\n”);
return -1;
}
#endif

g_object_set (G_OBJECT (streammux), “width”, MUXER_OUTPUT_WIDTH, “height”,
MUXER_OUTPUT_HEIGHT, “batch-size”, num_sources,
“batched-push-timeout”, MUXER_BATCH_TIMEOUT_USEC, NULL);

/* Configure the nvinfer element using the nvinfer config file. */
g_object_set (G_OBJECT (pgie), “config-file-path”, inputPGIEfile, NULL);

/* Configure the nvtracker element for using the particular tracker algorithm. */
g_object_set (G_OBJECT (nvtracker),
“ll-lib-file”, “/opt/nvidia/deepstream/deepstream-5.0/lib/libnvds_nvdcf.so”,
“ll-config-file”, inputTrackerfile, “tracker-width”, 640, “tracker-height”, 480,
NULL);

/* Configure the nvdsanalytics element for using the particular analytics config file*/
/* g_object_set (G_OBJECT (nvdsanalytics),
“config-file”, “config_nvdsanalytics.txt”,
NULL); */
g_object_set (G_OBJECT (nvdsanalytics),
“config-file”, inputROIfile,
NULL);

/* Override the batch-size set in the config file with the number of sources. */
g_object_get (G_OBJECT (pgie), “batch-size”, &pgie_batch_size, NULL);
if (pgie_batch_size != num_sources) {
g_printerr
(“WARNING: Overriding infer-config batch-size (%d) with number of sources (%d)\n”,
pgie_batch_size, num_sources);
g_object_set (G_OBJECT (pgie), “batch-size”, num_sources, NULL);
}

tiler_rows = (guint) sqrt (num_sources);
tiler_columns = (guint) ceil (1.0 * num_sources / tiler_rows);
/* we set the tiler properties here */
g_object_set (G_OBJECT (tiler), “rows”, tiler_rows, “columns”, tiler_columns,
“width”, TILED_OUTPUT_WIDTH, “height”, TILED_OUTPUT_HEIGHT, NULL);

/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);

/* Set up the pipeline /
/
we add all elements into the pipeline */
#ifdef PLATFORM_TEGRA
gst_bin_add_many (GST_BIN (pipeline), pgie, nvtracker, nvdsanalytics , tiler,
nvvidconv, nvosd, transform, nvvidconvpost, cap_filter, encoder, rtppay, sink,
NULL);

/* we link the elements together
 * nvstreammux -> nvinfer -> nvtracker -> nvdsanalytics -> nvtiler ->
 * nvvideoconvert -> nvosd -> transform -> sink
 */
if (!gst_element_link_many (streammux, pgie, nvtracker, nvdsanalytics, tiler,
    nvvidconv, nvosd, transform, nvvidconvpost, cap_filter, encoder, rtppay, sink, NULL)) {
  g_printerr ("Elements could not be linked. Exiting.\n");
  return -1;
}
#else
gst_bin_add_many (GST_BIN (pipeline), pgie, nvtracker, nvdsanalytics, tiler, nvvidconv, nvosd, nvvidconvpost, cap_filter, encoder, rtppay, sink, NULL);
/* we link the elements together
 * nvstreammux -> nvinfer -> nvtracker -> nvdsanalytics -> nvtiler ->
 * nvvideoconvert -> nvosd -> sink
 */
if (!gst_element_link_many (streammux, pgie, nvtracker, nvdsanalytics, tiler, nvvidconv, nvosd, nvvidconvpost, cap_filter, encoder, rtppay, sink, NULL)) {
  g_printerr ("Elements could not be linked. Exiting.\n");
  return -1;
}
#endif

ret = TRUE;
g_print ("RTSP Server Starting\n");
ret = start_rtsp_streaming (8554, 5400, NV_DS_ENCODER_H264, 100000);
if (ret != TRUE) {
g_print ("%s: start_rtsp_straming function failed\n", __func__);
}
g_print ("RTSP Server Started\n");
/* Lets add probe to get informed of the meta data generated, we add probe to
 * the sink pad of the nvdsanalytics element, since by that time, the buffer
 * would have had got all the metadata.
 */
nvdsanalytics_src_pad = gst_element_get_static_pad (nvdsanalytics, "src");
if (!nvdsanalytics_src_pad)
  g_print ("Unable to get src pad\n");
else
  gst_pad_add_probe (nvdsanalytics_src_pad, GST_PAD_PROBE_TYPE_BUFFER,
      nvdsanalytics_src_pad_buffer_probe, NULL, NULL);

/* Set the pipeline to "playing" state */
g_print ("Now playing:");
for (i = 0; i < num_sources; i++) {
g_print (" %s,", argv[i + 1]);
}
g_print ("\n");
gst_element_set_state (pipeline, GST_STATE_PLAYING);

/* Wait till pipeline encounters an error or EOS */
g_print ("Running...\n");
g_main_loop_run (loop);

/* Out of the main loop, clean up nicely */
done:
if (caps) {
gst_caps_unref (caps);
}
if (!ret) {
g_print ("Failed");
}
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref (loop);
return 0;
}

Hi,
Ideally it should work if you port the deviation between deepstream-test1 and deepstream-test1-rtsp-out.

Or you may try to integrate nvdsanalytics to deepstream-test1 or deepstream-app. The two samples can run with RTSP. You may implement your usecase based on the samples, too.

The problem with deepstream-test1 is that it does not have RTSP sink. The problem with deepstream-app is that it is a complex code.

Any help towards identifying any issue in the code would be highly appreciated.

Same feeling here. deepstream-app is so complex, the coding logic is quite different from deepstream-test1234; test5 is complex too.

I want to create this pipeline: nvdsanalytics -> result message sent to kafka -> rtsp output, but no luck yet.

Hello Nvidia Team, Any help towards identifying any issue in the code would be highly appreciated. Have exhausted all options.

Hi,
Please refer to the patch:

diff --git a/apps/deepstream/sample_apps/deepstream-nvdsanalytics-test/deepstream_nvdsanalytics_test.cpp b/apps/deepstream/sample_apps/deepstream-nvdsanalytics-test/deepstream_nvdsanalytics_test.cpp
index c7ae52d..2a38c1e 100644
--- a/apps/deepstream/sample_apps/deepstream-nvdsanalytics-test/deepstream_nvdsanalytics_test.cpp
+++ b/apps/deepstream/sample_apps/deepstream-nvdsanalytics-test/deepstream_nvdsanalytics_test.cpp
@@ -21,6 +21,7 @@
  */
 
 #include <gst/gst.h>
+#include <gst/rtsp-server/rtsp-server.h>
 #include <glib.h>
 #include <stdio.h>
 #include <math.h>
@@ -278,22 +279,86 @@ create_source_bin (guint index, gchar * uri)
   return bin;
 }
 
+static GstRTSPServer *server;
+static gboolean
+start_rtsp_streaming (guint rtsp_port_num, guint updsink_port_num,
+                      guint64 udp_buffer_size)
+{
+  GstRTSPMountPoints *mounts;
+  GstRTSPMediaFactory *factory;
+  char udpsrc_pipeline[512];
+
+  char port_num_Str[64] = { 0 };
+  char *encoder_name;
+
+  if (udp_buffer_size == 0)
+    udp_buffer_size = 512 * 1024;
+
+  sprintf (udpsrc_pipeline,
+      "( udpsrc name=pay0 port=%d buffer-size=%lu caps=\"application/x-rtp, media=video, "
+      "clock-rate=90000, encoding-name=H265, payload=96 \" )",
+      updsink_port_num, udp_buffer_size);
+
+  sprintf (port_num_Str, "%d", rtsp_port_num);
+
+  server = gst_rtsp_server_new ();
+  g_object_set (server, "service", port_num_Str, NULL);
+
+  mounts = gst_rtsp_server_get_mount_points (server);
+
+  factory = gst_rtsp_media_factory_new ();
+  gst_rtsp_media_factory_set_launch (factory, udpsrc_pipeline);
+
+  gst_rtsp_mount_points_add_factory (mounts, "/ds-test", factory);
+
+  g_object_unref (mounts);
+
+  gst_rtsp_server_attach (server, NULL);
+
+  g_print
+      ("\n *** DeepStream: Launched RTSP Streaming at rtsp://localhost:%d/ds-test ***\n\n",
+      rtsp_port_num);
+
+  return TRUE;
+}
+
+static GstRTSPFilterResult
+client_filter (GstRTSPServer * server, GstRTSPClient * client,
+    gpointer user_data)
+{
+  return GST_RTSP_FILTER_REMOVE;
+}
+
+static void
+destroy_sink_bin ()
+{
+  GstRTSPMountPoints *mounts;
+  GstRTSPSessionPool *pool;
+
+  mounts = gst_rtsp_server_get_mount_points (server);
+  gst_rtsp_mount_points_remove_factory (mounts, "/ds-test");
+  g_object_unref (mounts);
+  gst_rtsp_server_client_filter (server, client_filter, NULL);
+  pool = gst_rtsp_server_get_session_pool (server);
+  gst_rtsp_session_pool_cleanup (pool);
+  g_object_unref (pool);
+}
+
 int
 main (int argc, char *argv[])
 {
   GMainLoop *loop = NULL;
   GstElement *pipeline = NULL, *streammux = NULL, *sink = NULL, *pgie = NULL,
              *nvtracker = NULL, *nvdsanalytics = NULL,
-      *nvvidconv = NULL, *nvosd = NULL, *tiler = NULL;
-#ifdef PLATFORM_TEGRA
-  GstElement *transform = NULL;
-#endif
+      *nvvidconv = NULL, *nvosd = NULL, *tiler = NULL,
+      *transform = NULL, *encoder = NULL, *parse= NULL, *rtppay = NULL;
   GstBus *bus = NULL;
   guint bus_watch_id;
   GstPad *nvdsanalytics_src_pad = NULL;
   guint i, num_sources;
   guint tiler_rows, tiler_columns;
   guint pgie_batch_size;
+  guint udp_port = 5400;
 
   /* Check input arguments */
   if (argc < 2) {
@@ -372,11 +437,11 @@ main (int argc, char *argv[])
   /* Create OSD to draw on the converted RGBA buffer */
   nvosd = gst_element_factory_make ("nvdsosd", "nv-onscreendisplay");
 
-  /* Finally render the osd output */
-#ifdef PLATFORM_TEGRA
-  transform = gst_element_factory_make ("nvegltransform", "nvegl-transform");
-#endif
-  sink = gst_element_factory_make ("nveglglessink", "nvvideo-renderer");
+  transform = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter2");
+  encoder = gst_element_factory_make ("nvv4l2h265enc", "hardware-encoder");
+  parse = gst_element_factory_make ("h265parse", "h265-parser");
+  rtppay = gst_element_factory_make ("rtph265pay", "rtp-payer");
+  sink = gst_element_factory_make ("udpsink", "udp-sink");
 
   if (!pgie || !nvtracker || !nvdsanalytics || !tiler || !nvvidconv ||
       !nvosd || !sink) {
@@ -384,12 +449,10 @@ main (int argc, char *argv[])
     return -1;
   }
 
-#ifdef PLATFORM_TEGRA
   if(!transform) {
     g_printerr ("One tegra element could not be created. Exiting.\n");
     return -1;
   }
-#endif
 
   g_object_set (G_OBJECT (streammux), "width", MUXER_OUTPUT_WIDTH, "height",
       MUXER_OUTPUT_HEIGHT, "batch-size", num_sources,
@@ -430,35 +493,23 @@ main (int argc, char *argv[])
   bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
   gst_object_unref (bus);
 
-  /* Set up the pipeline */
-  /* we add all elements into the pipeline */
-#ifdef PLATFORM_TEGRA
-  gst_bin_add_many (GST_BIN (pipeline), pgie, nvtracker, nvdsanalytics , tiler,
-          nvvidconv, nvosd, transform, sink,
-      NULL);
 
-  /* we link the elements together
-   * nvstreammux -> nvinfer -> nvtracker -> nvdsanalytics -> nvtiler ->
-   * nvvideoconvert -> nvosd -> transform -> sink
-   */
-  if (!gst_element_link_many (streammux, pgie, nvtracker, nvdsanalytics, tiler,
-                              nvvidconv, nvosd, transform, sink, NULL)) {
-    g_printerr ("Elements could not be linked. Exiting.\n");
-    return -1;
-  }
-#else
-  gst_bin_add_many (GST_BIN (pipeline), pgie, nvtracker, nvdsanalytics, tiler,
-                    nvvidconv, nvosd, sink, NULL);
-  /* we link the elements together
-   * nvstreammux -> nvinfer -> nvtracker -> nvdsanalytics -> nvtiler ->
-   * nvvideoconvert -> nvosd -> sink
-   */
-  if (!gst_element_link_many (streammux, pgie, nvtracker, nvdsanalytics,
-      tiler, nvvidconv, nvosd, sink, NULL)) {
-    g_printerr ("Elements could not be linked. Exiting.\n");
-    return -1;
-  }
-#endif
+
+
+    gst_bin_add_many (GST_BIN (pipeline), pgie, nvtracker, nvdsanalytics , tiler,
+            nvvidconv, nvosd, transform, encoder, parse, rtppay, sink,
+        NULL);
+    if (!gst_element_link_many (streammux, pgie, nvtracker, nvdsanalytics, tiler,
+                                nvvidconv, nvosd, transform, encoder, parse, rtppay, sink, NULL)) {
+      g_printerr ("Elements could not be linked. Exiting.\n");
+      return -1;
+    }
+    g_object_set (G_OBJECT (encoder), "preset-level", 1, NULL);
+    g_object_set (G_OBJECT (encoder), "insert-sps-pps", 1, NULL);
+    g_object_set (G_OBJECT (encoder), "bufapi-version", 1, NULL);
+    g_object_set (G_OBJECT (sink), "host", "224.224.255.255", "port",
+        udp_port, "async", FALSE, "sync", 0, NULL);
+    start_rtsp_streaming (8554/*rtsp_port*/, udp_port, 0);
 
   /* Lets add probe to get informed of the meta data generated, we add probe to
    * the sink pad of the nvdsanalytics element, since by that time, the buffer
@@ -487,6 +538,7 @@ main (int argc, char *argv[])
   g_print ("Returned, stopping playback\n");
   gst_element_set_state (pipeline, GST_STATE_NULL);
   g_print ("Deleting pipeline\n");
+  destroy_sink_bin();
   gst_object_unref (GST_OBJECT (pipeline));
   g_source_remove (bus_watch_id);
   g_main_loop_unref (loop);

1 Like

Thanks a ton. Your help is highly appreciated. It worked.

@DaneLLL @Mike Hi, I copied your code in my application, where should I place the start_rtsp_streaming function? Before or after gst_element_set_state (pipeline, GST_STATE_PLAYING); ? My app can run detection smoothly, but the rtsp frame is black.

My gst elements pipeline:

/*
  nvstreammux -> nvinfer -> nvtracker -> nvdsanalytics-> 
  tiler -> nvvidconv -> nvosd -> tee -> transform -> encoder -> parse -> rtppay -> sink
                                  |
                                  |-> msgconv -> msgbroker  */

My video source is rtsp source, I want to do the line-crossing detection and send the result via kafka, and then streaming the output via rtsp. But the rtsp frame is black, nothing shows.

Did you patch the original cpp file? PM me in case you need help. I have it working.

What do you mean ‘patch’? @Mike

Hi,
start_rtsp_streaming is called before switching to GST_STATE_PLAYING.

Is there a possibility of having multiple sinks just like we have multiple sources?

Is there a possibility of having multiple sinks just like we have multiple sources?

Maybe you should check deepstream-test5? It can configure multiple sink types; for example, sink0 is for osd, sink1 is for kafka, sink2 is for rtsp streaming.