Please provide complete information as applicable to your setup.
• Hardware Platform (Jetson / GPU) dGPU
• DeepStream Version 5.0
• JetPack Version (valid for Jetson only)
• TensorRT Version 7
• NVIDIA GPU Driver Version (valid for GPU only) 440.60
I am trying to create an RTSP output in the NVdsanalytics example. I used deepstream_sink_bin.c as a reference to implement it. I need help with the pipeline.
Currently I am trying to use
gst_bin_add_many (GST_BIN (pipeline), pgie, nvtracker, nvdsanalytics, cap_filter, transform1, encoder, codecparse, rtppay, sink, NULL);
gst_element_link_many (streammux, pgie, nvtracker, nvdsanalytics, cap_filter, transform1, encoder, codecparse, rtppay, sink, NULL)
Currently it fails with the following error:
0:00:07.526672937 10323 0x7f44c8004f70 INFO GST_EVENT gstevent.c:1546:gst_event_new_sink_message: creating sink-message event
0:00:07.527038091 10323 0x7f44c8004f70 WARN queue gstqueue.c:988:gst_queue_handle_sink_event:<sink_sub_bin_queue01> error: Internal data stream error.
0:00:07.527050453 10323 0x7f44c8004f70 WARN queue gstqueue.c:988:gst_queue_handle_sink_event:<sink_sub_bin_queue01> error: streaming stopped, reason not-linked (-1)
0:00:07.527065739 10323 0x7f44c8004f70 INFO GST_ERROR_SYSTEM gstelement.c:2145:gst_element_message_full_with_details:<sink_sub_bin_queue01> posting message: Internal data stream error.
0:00:07.527099125 10323 0x7f44c8004f70 INFO GST_ERROR_SYSTEM gstelement.c:2172:gst_element_message_full_with_details:<sink_sub_bin_queue01> posted error message: Internal data stream error.
ERROR from element sink_sub_bin_queue01: Internal data stream error.
Error details: gstqueue.c(988): gst_queue_handle_sink_event (): /GstPipeline:nvdsanalytics-test-pipeline/GstQueue:sink_sub_bin_queue01:
streaming stopped, reason not-linked (-1)
Returned, stopping playback
Code:
int
main (int argc, char *argv)
{
GMainLoop *loop = NULL;
gboolean ret = FALSE;
GstCaps *caps = NULL;
GstElement *pipeline = NULL, *streammux = NULL, *sink = NULL, *pgie = NULL, *codecparse = NULL, *rtppay = NULL, *encoder = NULL,
*nvtracker = NULL, *nvdsanalytics = NULL, *queue = NULL, *transform1 = NULL, * cap_filter = NULL,
*nvvidconv = NULL, *nvosd = NULL, *tiler = NULL;
#ifdef PLATFORM_TEGRA
GstElement *transform = NULL;
#endif
GstBus *bus = NULL;
guint bus_watch_id;
GstPad *nvdsanalytics_src_pad = NULL;
guint i, num_sources;
guint tiler_rows, tiler_columns;
guint pgie_batch_size;
char inputPGIEfile[40], inputROIfile[40], inputTrackerfile[40];
/* Check input arguments */
if (argc < 6) {
g_printerr (“Usage: %s \n”, argv[0]);
return -1;
}
strcpy(inputPGIEfile, argv[3]);
strcpy(inputROIfile, argv[4]);
strcpy(inputTrackerfile, argv[5]);
num_sources = 1;
/* Standard GStreamer initialization */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Create gstreamer elements /
/ Create Pipeline element that will form a connection of other elements */
pipeline = gst_pipeline_new (“nvdsanalytics-test-pipeline”);
/* Create nvstreammux instance to form batches from one or more sources. */
streammux = gst_element_factory_make (“nvstreammux”, “stream-muxer”);
if (!pipeline || !streammux) {
g_printerr (“One element could not be created. Exiting.\n”);
return -1;
}
gst_bin_add (GST_BIN (pipeline), streammux);
for (i = 0; i < num_sources; i++) {
GstPad *sinkpad, *srcpad;
gchar pad_name[16] = { };
GstElement *source_bin = create_source_bin (i, argv[i + 1]);
if (!source_bin) {
g_printerr ("Failed to create source bin. Exiting.\n");
return -1;
}
gst_bin_add (GST_BIN (pipeline), source_bin);
g_snprintf (pad_name, 15, "sink_%u", i);
sinkpad = gst_element_get_request_pad (streammux, pad_name);
if (!sinkpad) {
g_printerr ("Streammux request sink pad failed. Exiting.\n");
return -1;
}
srcpad = gst_element_get_static_pad (source_bin, "src");
if (!srcpad) {
g_printerr ("Failed to get src pad of source bin. Exiting.\n");
return -1;
}
if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
g_printerr ("Failed to link source bin to stream muxer. Exiting.\n");
return -1;
}
gst_object_unref (srcpad);
gst_object_unref (sinkpad);
}
/* Use nvinfer to infer on batched frame. */
pgie = gst_element_factory_make (“nvinfer”, “primary-nvinference-engine”);
/* Use nvtracker to track detections on batched frame. */
nvtracker = gst_element_factory_make (“nvtracker”, “nvtracker”);
/* Use nvdsanalytics to perform analytics on object */
nvdsanalytics = gst_element_factory_make (“nvdsanalytics”, “nvdsanalytics”);
/* Use nvtiler to composite the batched frames into a 2D tiled array based
- on the source of the frames. */
tiler = gst_element_factory_make (“nvmultistreamtiler”, “nvtiler”);
/* Use convertor to convert from NV12 to RGBA as required by nvosd */
nvvidconv = gst_element_factory_make (“nvvideoconvert”, “nvvideo-converter”);
/* Create OSD to draw on the converted RGBA buffer */
nvosd = gst_element_factory_make (“nvdsosd”, “nv-onscreendisplay”);
/* Finally render the osd output */
#ifdef PLATFORM_TEGRA
transform = gst_element_factory_make (“nvegltransform”, “nvegl-transform”);
#endif
caps = gst_caps_from_string (“video/x-raw(memory:NVMM), format=I420”);
queue = gst_element_factory_make (“queue”, “sink_sub_bin_queue01”);
transform1 = gst_element_factory_make (“nvvideoconvert”, “sink_sub_bin_transform01”);
codecparse = gst_element_factory_make (“h264parse”, “h264-parser”);
rtppay = gst_element_factory_make (“rtph264pay”, “sink_sub_bin_rtppay01”);
encoder = gst_element_factory_make (“x264enc”, “sink_sub_bin_encoder01”);
sink = gst_element_factory_make (“udpsink”, “sink_sub_bin_udpsink01”);
g_object_set (G_OBJECT (sink), “host”, “224.224.255.255”, “port”, 8554, “async”, FALSE, “sync”, 0, NULL);
//sink = gst_element_factory_make (“nveglglessink”, “nvvideo-renderer”);
if (!pgie || !nvtracker || !nvdsanalytics || !tiler || !nvvidconv ||
!nvosd || !sink || !caps || !queue || !transform1 || !codecparse || !rtppay || !encoder ) {
g_printerr (“One element could not be created. Exiting.\n”);
return -1;
}
#ifdef PLATFORM_TEGRA
if(!transform) {
g_printerr (“One tegra element could not be created. Exiting.\n”);
return -1;
}
#endif
g_object_set (G_OBJECT (streammux), “width”, MUXER_OUTPUT_WIDTH, “height”,
MUXER_OUTPUT_HEIGHT, “batch-size”, num_sources,
“batched-push-timeout”, MUXER_BATCH_TIMEOUT_USEC, NULL);
/* Configure the nvinfer element using the nvinfer config file. */
g_object_set (G_OBJECT (pgie), “config-file-path”, inputPGIEfile, NULL);
/* Configure the nvtracker element for using the particular tracker algorithm. */
g_object_set (G_OBJECT (nvtracker),
“ll-lib-file”, “/opt/nvidia/deepstream/deepstream-5.0/lib/libnvds_nvdcf.so”,
“ll-config-file”, inputTrackerfile, “tracker-width”, 640, “tracker-height”, 480,
NULL);
/* Configure the nvdsanalytics element for using the particular analytics config file*/
/* g_object_set (G_OBJECT (nvdsanalytics),
“config-file”, “config_nvdsanalytics.txt”,
NULL); */
g_object_set (G_OBJECT (nvdsanalytics),
“config-file”, inputROIfile,
NULL);
/* Override the batch-size set in the config file with the number of sources. */
g_object_get (G_OBJECT (pgie), “batch-size”, &pgie_batch_size, NULL);
if (pgie_batch_size != num_sources) {
g_printerr
(“WARNING: Overriding infer-config batch-size (%d) with number of sources (%d)\n”,
pgie_batch_size, num_sources);
g_object_set (G_OBJECT (pgie), “batch-size”, num_sources, NULL);
}
tiler_rows = (guint) sqrt (num_sources);
tiler_columns = (guint) ceil (1.0 * num_sources / tiler_rows);
/* we set the tiler properties here */
g_object_set (G_OBJECT (tiler), “rows”, tiler_rows, “columns”, tiler_columns,
“width”, TILED_OUTPUT_WIDTH, “height”, TILED_OUTPUT_HEIGHT, NULL);
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/* Set up the pipeline /
/ we add all elements into the pipeline */
#ifdef PLATFORM_TEGRA
gst_bin_add_many (GST_BIN (pipeline), pgie, nvtracker, nvdsanalytics , tiler,
nvvidconv, nvosd, transform, queue, cap_filter, transform1, encoder, codecparse, rtppay, sink,
NULL);
/* we link the elements together
- nvstreammux → nvinfer → nvtracker → nvdsanalytics → nvtiler →
- nvvideoconvert → nvosd → transform → sink
/
if (!gst_element_link_many (streammux, pgie, nvtracker, nvdsanalytics, tiler,
nvvidconv, nvosd, transform, queue, cap_filter, transform1, encoder, codecparse, rtppay, sink, NULL)) {
g_printerr (“Elements could not be linked. Exiting.\n”);
return -1;
}
#else
gst_bin_add_many (GST_BIN (pipeline), pgie, nvtracker, nvdsanalytics, cap_filter, transform1, encoder, codecparse, rtppay, sink, NULL);
/ we link the elements together - nvstreammux → nvinfer → nvtracker → nvdsanalytics → nvtiler →
- nvvideoconvert → nvosd → sink
*/
if (!gst_element_link_many (streammux, pgie, nvtracker, nvdsanalytics, cap_filter, transform1, encoder, codecparse, rtppay, sink, NULL)) {
g_printerr (“Elements could not be linked. Exiting.\n”);
return -1;
}
#endif
/* NVGSTDS_LINK_ELEMENT (queue, transform1);
NVGSTDS_LINK_ELEMENT (transform1, cap_filter);
NVGSTDS_LINK_ELEMENT (cap_filter, encoder);
NVGSTDS_LINK_ELEMENT (encoder, codecparse);
NVGSTDS_LINK_ELEMENT (codecparse, rtppay);
NVGSTDS_LINK_ELEMENT (rtppay, sink);
NVGSTDS_BIN_ADD_GHOST_PAD (queue, “sink”); */
ret = TRUE;
ret = start_rtsp_streaming (8554, 8554, NV_DS_ENCODER_H264, 100000);
if (ret != TRUE) {
g_print (“%s: start_rtsp_straming function failed\n”, func);
}
/* Lets add probe to get informed of the meta data generated, we add probe to
- the sink pad of the nvdsanalytics element, since by that time, the buffer
- would have had got all the metadata.
*/
nvdsanalytics_src_pad = gst_element_get_static_pad (nvdsanalytics, “src”);
if (!nvdsanalytics_src_pad)
g_print (“Unable to get src pad\n”);
else
gst_pad_add_probe (nvdsanalytics_src_pad, GST_PAD_PROBE_TYPE_BUFFER,
nvdsanalytics_src_pad_buffer_probe, NULL, NULL);
/* Set the pipeline to “playing” state */
g_print (“Now playing:”);
for (i = 0; i < num_sources; i++) {
g_print (" %s,“, argv[i + 1]);
}
g_print (”\n");
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Wait till pipeline encounters an error or EOS */
g_print (“Running…\n”);
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
done:
if (caps) {
gst_caps_unref (caps);
}
if (!ret) {
g_print (“Failed”);
}
g_print (“Returned, stopping playback\n”);
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print (“Deleting pipeline\n”);
gst_object_unref (GST_OBJECT (pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref (loop);
return 0;
}
/* Create and attach a GstRTSPServer that re-streams the RTP packets the
 * pipeline's udpsink sends to 'updsink_port_num'. The mount point is
 * /ds-test on 'rtsp_port_num'.
 *
 * Returns TRUE on success, FALSE for an unsupported encoder type.
 * NOTE(review): assumes 'server', 'server_count' and 'server_cnt_lock' are
 * file-scope globals defined elsewhere in this file. */
static gboolean
start_rtsp_streaming (guint rtsp_port_num, guint updsink_port_num,
    NvDsEncoderType enctype, guint64 udp_buffer_size)
{
  GstRTSPMountPoints *mounts;
  GstRTSPMediaFactory *factory;
  char udpsrc_pipeline[512];
  char port_num_Str[64] = { 0 };
  const char *encoder_name;   /* points at a string literal; must be const */

  if (enctype == NV_DS_ENCODER_H264) {
    encoder_name = "H264";
  } else if (enctype == NV_DS_ENCODER_H265) {
    encoder_name = "H265";
  } else {
    g_printerr ("Failed. Exiting.\n");
    return FALSE;
  }

  if (udp_buffer_size == 0)
    udp_buffer_size = 512 * 1024;

  /* The inner quotes around the caps string must be escaped, and guint64
   * must be printed with G_GUINT64_FORMAT (plain %lu is wrong on 32-bit).
   * snprintf bounds the write to the 512-byte buffer. */
  snprintf (udpsrc_pipeline, sizeof (udpsrc_pipeline),
      "( udpsrc name=pay0 port=%u buffer-size=%" G_GUINT64_FORMAT
      " caps=\"application/x-rtp, media=video, "
      "clock-rate=90000, encoding-name=%s, payload=96\" )",
      updsink_port_num, udp_buffer_size, encoder_name);

  snprintf (port_num_Str, sizeof (port_num_Str), "%u", rtsp_port_num);

  /* Guard the global server table against concurrent registration. */
  g_mutex_lock (&server_cnt_lock);
  server[server_count] = gst_rtsp_server_new ();
  g_object_set (server[server_count], "service", port_num_Str, NULL);
  mounts = gst_rtsp_server_get_mount_points (server[server_count]);
  factory = gst_rtsp_media_factory_new ();
  /* The factory builds a fresh udpsrc pipeline for every RTSP client. */
  gst_rtsp_media_factory_set_launch (factory, udpsrc_pipeline);
  gst_rtsp_mount_points_add_factory (mounts, "/ds-test", factory);
  g_object_unref (mounts);
  gst_rtsp_server_attach (server[server_count], NULL);
  server_count++;
  g_mutex_unlock (&server_cnt_lock);

  g_print
      ("\n *** DeepStream: Launched RTSP Streaming at "
      "rtsp://localhost:%u/ds-test ***\n\n", rtsp_port_num);
  return TRUE;
}