Please provide complete information as applicable to your setup.
• Hardware Platform (Jetson / GPU): 4070 ti
• DeepStream Version: 7.1
• TensorRT Version: 10.3.0.26
• NVIDIA GPU Driver Version (valid for GPU only): 560.35.05
• Issue Type( questions, new requirements, bugs): Question
Hi @junshengy ,
I have code written to publish the tiled output as an RTSP stream. However, I need an individual RTSP output for each individual RTSP input. Any insights on this would be highly appreciated. I’m attaching the code below:
#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
#include <math.h>
#include <string.h>
#include <iostream>
#include <typeinfo>
#include <fstream>
#include <sys/time.h>
#include <cuda_runtime_api.h>
#include "gstnvdsmeta.h"
#include "nvbufsurface.h"
#include "nvds_yml_parser.h"
#include "gst-nvmessage.h"
#include <vector>
#include <list>
#include <iterator>
#include <unordered_map>
#include <limits>
#include <cmath>
#include <algorithm>
#include <gst/rtsp-server/rtsp-server.h>
/* Output resolution of nvstreammux (and of the tiler below). */
#define MUXER_OUTPUT_WIDTH 1920
#define MUXER_OUTPUT_HEIGHT 1080
/* Max time (us) the muxer waits to form a full batch before pushing. */
#define MUXER_BATCH_TIMEOUT_USEC 40000
/* Caps feature string identifying NVIDIA device (NVMM) memory buffers. */
#define GST_CAPS_FEATURES_NVMM "memory:NVMM"
/* Check for parsing error. */
#define RETURN_ON_PARSER_ERROR(parse_expr) \
if (NVDS_YAML_PARSER_SUCCESS != parse_expr) \
{ \
g_printerr("Error in parsing configuration file.\n"); \
return -1; \
}
using namespace std;
/* Output mode: 1 = encode + udpsink + RTSP server, 0 = local EGL render. */
int rtsp_stream = 1;
/* UDP port the pipeline's udpsink sends RTP to (the RTSP server's udpsrc
 * listens on the same port). NOTE(review): this equals the RTSP port below,
 * which works only because one is UDP and the other TCP — confirm intended. */
int udp_port = 8554;
/* TCP port the RTSP server listens on. */
int port = 8554;
/* Suffix appended to the "/ds-test" RTSP mount point. */
string cam_area = "1";
/* Selects H.264 (true) vs H.265 (false) hardware encoder. */
bool isH264 = true;
/* Bus watch callback: quits the main loop on EOS or on the first error,
 * printing the error source, message and any debug detail. Always returns
 * TRUE so the watch stays installed. */
static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
    GMainLoop *main_loop = (GMainLoop *)data;

    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_EOS:
        g_print("End of stream\n");
        g_main_loop_quit(main_loop);
        break;
    case GST_MESSAGE_ERROR: {
        GError *err = NULL;
        gchar *dbg_info = NULL;
        gst_message_parse_error(msg, &err, &dbg_info);
        g_printerr("ERROR from element %s: %s\n", GST_OBJECT_NAME(msg->src), err->message);
        if (dbg_info)
            g_printerr("Error details: %s\n", dbg_info);
        g_free(dbg_info);
        g_error_free(err);
        g_main_loop_quit(main_loop);
        break;
    }
    default:
        break;
    }
    return TRUE;
}
/* "pad-added" handler for uridecodebin: when a video pad backed by NVMM
 * (NVIDIA device) memory appears, retarget the source bin's ghost "src"
 * pad to it so the bin can be linked to nvstreammux. */
static void cb_newpad(GstElement *decodebin, GstPad *decoder_src_pad, gpointer data)
{
    GstCaps *caps = gst_pad_get_current_caps(decoder_src_pad);
    if (!caps) {
        caps = gst_pad_query_caps(decoder_src_pad, NULL);
    }
    /* Fix: guard against a NULL caps query result before dereferencing. */
    if (!caps) {
        g_printerr("Failed to get caps of decoder src pad\n");
        return;
    }
    const GstStructure *str = gst_caps_get_structure(caps, 0);
    const gchar *name = gst_structure_get_name(str);
    GstElement *source_bin = (GstElement *)data;
    GstCapsFeatures *features = gst_caps_get_features(caps, 0);
    if (!strncmp(name, "video", 5)) {
        if (gst_caps_features_contains(features, GST_CAPS_FEATURES_NVMM)) {
            GstPad *bin_ghost_pad = gst_element_get_static_pad(source_bin, "src");
            if (!gst_ghost_pad_set_target(GST_GHOST_PAD(bin_ghost_pad), decoder_src_pad)) {
                g_printerr("Failed to link decoder src pad to source bin ghost pad\n");
            }
            gst_object_unref(bin_ghost_pad);
        } else {
            g_printerr("Error: Decodebin did not pick NVIDIA decoder plugin.\n");
        }
    }
    /* Fix: both gst_pad_get_current_caps() and gst_pad_query_caps() return a
     * new reference; the original leaked it on every pad-added signal. */
    gst_caps_unref(caps);
}
/* "child-added" handler: recurses into nested decodebins and enables
 * drop-on-latency on the underlying source element. */
static void decodebin_child_added(GstChildProxy *child_proxy, GObject *object, gchar *name, gpointer user_data)
{
    g_print("Decodebin child added: %s\n", name);
    /* A name beginning with "decodebin" is a nested bin — watch it too. */
    if (g_strrstr(name, "decodebin") == name)
        g_signal_connect(G_OBJECT(object), "child-added", G_CALLBACK(decodebin_child_added), user_data);
    /* A name beginning with "source" is the actual input element. */
    if (g_strrstr(name, "source") == name)
        g_object_set(G_OBJECT(object), "drop-on-latency", TRUE, NULL);
}
/* Builds a "source-bin-XX" bin wrapping a uridecodebin for the given URI.
 * The bin exposes a target-less ghost "src" pad that cb_newpad() retargets
 * to the decoder's NVMM video pad once decoding starts.
 * Returns the bin, or NULL on failure. */
static GstElement *create_source_bin(guint index, gchar *uri)
{
    gchar name_buf[16] = {};
    g_snprintf(name_buf, 15, "source-bin-%02d", index);

    GstElement *source_bin = gst_bin_new(name_buf);
    GstElement *decoder = gst_element_factory_make("uridecodebin", "uri-decode-bin");
    if (!source_bin || !decoder) {
        g_printerr("One element in source bin could not be created.\n");
        return NULL;
    }

    g_object_set(G_OBJECT(decoder), "uri", uri, NULL);
    g_signal_connect(G_OBJECT(decoder), "pad-added", G_CALLBACK(cb_newpad), source_bin);
    g_signal_connect(G_OBJECT(decoder), "child-added", G_CALLBACK(decodebin_child_added), source_bin);
    gst_bin_add(GST_BIN(source_bin), decoder);

    /* Ghost pad has no target yet; cb_newpad() assigns the real pad later. */
    if (!gst_element_add_pad(source_bin, gst_ghost_pad_new_no_target("src", GST_PAD_SRC))) {
        g_printerr("Failed to add ghost pad in source bin\n");
        return NULL;
    }
    return source_bin;
}
/* osd_sink_pad_buffer_probe: runs on every batched buffer reaching the OSD
 * sink pad; attaches one hard-coded blue line (display meta) to each frame
 * in the batch. Always returns GST_PAD_PROBE_OK so buffers flow on. */
static GstPadProbeReturn
osd_sink_pad_buffer_probe(GstPad *pad, GstPadProbeInfo *info,
    gpointer u_data)
{
    GstBuffer *buf = (GstBuffer *)info->data;
    NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta(buf);

    /* Fix: batch_meta was dereferenced without a NULL check; a buffer
     * without DeepStream meta would crash the probe. */
    if (!batch_meta)
        return GST_PAD_PROBE_OK;

    /* (Unused locals num_rects/obj_meta/l_obj/is_first_object removed.) */
    for (NvDsMetaList *l_frame = batch_meta->frame_meta_list; l_frame != NULL;
         l_frame = l_frame->next)
    {
        NvDsFrameMeta *frame_meta = (NvDsFrameMeta *)(l_frame->data);
        NvDsDisplayMeta *display_meta = nvds_acquire_display_meta_from_pool(batch_meta);

        /* Draw a fixed blue diagonal line from (0,10) to (500,500). */
        NvOSD_LineParams *line_params = &display_meta->line_params[0];
        line_params->x1 = 0;
        line_params->y1 = 10;
        line_params->x2 = 500;
        line_params->y2 = 500;
        line_params->line_width = 6;
        line_params->line_color.red = 0.0;
        line_params->line_color.green = 0.0;
        line_params->line_color.blue = 1.0;
        line_params->line_color.alpha = 1.0;
        display_meta->num_lines = 1;
        nvds_add_display_meta_to_frame(frame_meta, display_meta);
    }
    return GST_PAD_PROBE_OK;
}
/* The single RTSP server instance, shared with destroy_sink_bin(). */
static GstRTSPServer *server;
/* Starts a GstRTSPServer on rtsp_port_num that re-serves the RTP stream
 * arriving on UDP port updsink_port_num (fed by the pipeline's udpsink)
 * under the mount point "/ds-test<name>".
 * udp_buffer_size of 0 selects a 512 KiB default. Always returns TRUE. */
static gboolean
start_rtsp_streaming (guint rtsp_port_num, guint updsink_port_num,
    guint64 udp_buffer_size, string name)
{
  GstRTSPMountPoints *mounts;
  GstRTSPMediaFactory *factory;
  char udpsrc_pipeline[512];
  char port_num_Str[64] = { 0 };

  if (udp_buffer_size == 0)
    udp_buffer_size = 512 * 1024;

  /* Fixes: bounded snprintf instead of sprintf; G_GUINT64_FORMAT for the
   * guint64 buffer size (the original "%lu" is wrong on 32-bit builds);
   * RTP encoding-name now follows the global isH264 flag instead of being
   * hard-coded to H264 even when the H.265 encoder is selected. */
  snprintf (udpsrc_pipeline, sizeof udpsrc_pipeline,
      "( udpsrc name=pay0 port=%u buffer-size=%" G_GUINT64_FORMAT
      " caps=\"application/x-rtp, media=video, "
      "clock-rate=90000, encoding-name=%s, payload=96 \" )",
      updsink_port_num, udp_buffer_size, isH264 ? "H264" : "H265");
  snprintf (port_num_Str, sizeof port_num_Str, "%u", rtsp_port_num);

  server = gst_rtsp_server_new ();
  g_object_set (server, "service", port_num_Str, NULL);
  mounts = gst_rtsp_server_get_mount_points (server);
  factory = gst_rtsp_media_factory_new ();
  gst_rtsp_media_factory_set_launch (factory, udpsrc_pipeline);
  string final_name = "/ds-test" + name;
  gst_rtsp_mount_points_add_factory (mounts, final_name.c_str (), factory);
  g_object_unref (mounts);
  gst_rtsp_server_attach (server, NULL);
  g_print
      ("\n *** DeepStream: Launched RTSP Streaming at rtsp://localhost:%d%s ***\n\n",
      rtsp_port_num, final_name.c_str ());
  return TRUE;
}
/* GstRTSPServer client-filter callback: unconditionally marks every
 * connected client for removal. Passed to gst_rtsp_server_client_filter()
 * in destroy_sink_bin() to disconnect all clients at teardown. */
static GstRTSPFilterResult
client_filter (GstRTSPServer * server, GstRTSPClient * client,
gpointer user_data)
{
return GST_RTSP_FILTER_REMOVE;
}
static void
destroy_sink_bin ()
{
GstRTSPMountPoints *mounts;
GstRTSPSessionPool *pool;
mounts = gst_rtsp_server_get_mount_points (server);
gst_rtsp_mount_points_remove_factory (mounts, "/ds-test");
g_object_unref (mounts);
gst_rtsp_server_client_filter (server, client_filter, NULL);
pool = gst_rtsp_server_get_session_pool (server);
gst_rtsp_session_pool_cleanup (pool);
g_object_unref (pool);
}
int main(int argc, char *argv[])
{
GMainLoop *loop = NULL;
GstElement *pipeline = NULL, *streammux = NULL, *nvvidconv = NULL, *nvosd = NULL,
*sink, *nvvidconv1, *capfilt= NULL, *encoder = NULL, *parse = NULL,
*rtppay = NULL, *tiler = NULL;
GstPad *osd_sink_pad = NULL;
GstCapsFeatures *feature = NULL;
GstCaps *caps = NULL;
GstBus *bus = NULL;
guint bus_watch_id, num_sources=0;
gboolean yaml_config = FALSE;
int current_device = -1;
cudaGetDevice(¤t_device);
struct cudaDeviceProp prop;
cudaGetDeviceProperties(&prop, current_device);
if (argc != 2) {
g_printerr("Usage: %s <RTSP URL>\n or YAML FILE", argv[0]);
return -1;
}
gst_init(&argc, &argv);
loop = g_main_loop_new(NULL, FALSE);
pipeline = gst_pipeline_new("deepstream-pipeline");
streammux = gst_element_factory_make("nvstreammux", "stream-muxer");
if (!pipeline || !streammux)
{
g_printerr("One element could not be created. Exiting.\n");
return -1;
}
gst_bin_add(GST_BIN(pipeline), streammux);
tiler = gst_element_factory_make("nvmultistreamtiler", "nvtiler");
nvvidconv = gst_element_factory_make("nvvideoconvert", "nvvideo-converter");
nvosd = gst_element_factory_make("nvdsosd", "nv-onscreendisplay");
// gst_bin_add(GST_BIN(pipeline), streammux);
// gst_bin_add(GST_BIN(pipeline), nvvidconv);
// gst_bin_add(GST_BIN(pipeline), nvosd);
// gst_bin_add(GST_BIN(pipeline), sink);
/* Parse inference plugin type */
yaml_config = (g_str_has_suffix(argv[1], ".yml") ||
g_str_has_suffix(argv[1], ".yaml"));
/*Define number of sources*/
GList *src_list = NULL;
if (yaml_config)
{
RETURN_ON_PARSER_ERROR(nvds_parse_source_list(&src_list, argv[1], "source-list"));
GList *temp = src_list;
while (temp)
{
num_sources++;
temp = temp->next;
}
g_list_free(temp);
}
else
{
num_sources = argc - 1;
}
for (guint i = 0; i < num_sources; i++)
{
GstPad *sinkpad, *srcpad;
gchar pad_name[16] = {};
GstElement *source_bin = NULL;
if (g_str_has_suffix(argv[1], ".yml") || g_str_has_suffix(argv[1], ".yaml"))
{
g_print("Now playing : %s\n", (char *)(src_list)->data);
source_bin = create_source_bin(i, (char *)(src_list)->data);
}
else
{
source_bin = create_source_bin(i, argv[i + 1]);
}
if (!source_bin)
{
g_printerr("Failed to create source bin. Exiting.\n");
return -1;
}
gst_bin_add(GST_BIN(pipeline), source_bin);
g_snprintf(pad_name, 15, "sink_%u", i);
sinkpad = gst_element_get_request_pad(streammux, pad_name);
if (!sinkpad)
{
g_printerr("Streammux request sink pad failed. Exiting.\n");
return -1;
}
srcpad = gst_element_get_static_pad(source_bin, "src");
if (!srcpad)
{
g_printerr("Failed to get src pad of source bin. Exiting.\n");
return -1;
}
if (gst_pad_link(srcpad, sinkpad) != GST_PAD_LINK_OK)
{
g_printerr("Failed to link source bin to stream muxer. Exiting.\n");
return -1;
}
gst_object_unref(srcpad);
gst_object_unref(sinkpad);
if (yaml_config)
{
src_list = src_list->next;
}
}
// Set properties for streammux
g_object_set(G_OBJECT(streammux), "batch-size", 1, NULL);
g_object_set(G_OBJECT(streammux), "width", MUXER_OUTPUT_WIDTH, "height", MUXER_OUTPUT_HEIGHT, "batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);
// Set tiler properties
g_object_set(G_OBJECT(tiler), "rows", 5, "columns", 5, "width", MUXER_OUTPUT_WIDTH, "height", MUXER_OUTPUT_HEIGHT, NULL);
// Link elements
if(rtsp_stream == 0)
{
sink = gst_element_factory_make("nveglglessink", "nvvideo-renderer");
if (!streammux || !nvvidconv || !nvosd || !sink)
{
g_printerr("One element could not be created. Exiting.\n");
return -1;
}
}
if (rtsp_stream == 1)
{
nvvidconv1 = gst_element_factory_make("nvvideoconvert", "nvvidconv1");
capfilt = gst_element_factory_make("capsfilter", "nvvideo-caps");
caps = gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "I420", NULL);
feature = gst_caps_features_new("memory:NVMM", NULL);
gst_caps_set_features(caps, 0, feature);
g_object_set(G_OBJECT(capfilt), "caps", caps, NULL);
if (isH264)
encoder = gst_element_factory_make("nvv4l2h264enc", "nvvideo-h264enc");
else
encoder = gst_element_factory_make("nvv4l2h265enc", "nvvideo-h265enc");
/*parser*/
parse = gst_element_factory_make ("h264parse", "parse");
rtppay = gst_element_factory_make ("rtph264pay", "rtppay");
/*udp sink*/
sink = gst_element_factory_make ("udpsink", "sink");
g_object_set (G_OBJECT (sink), "host", "127.0.0.1", "port",
udp_port, "async", FALSE, "sync", 1, NULL);
if (!streammux || !nvvidconv || !nvosd ||
!nvvidconv1 || !capfilt || !caps || !encoder || !parse || !rtppay || !sink)
{
g_printerr("One element could not be created. Exiting.\n");
return -1;
}
}
if ((rtsp_stream == 1)) //not using LLM
{
gst_bin_add_many(GST_BIN(pipeline), nvvidconv, tiler,
nvosd, nvvidconv1, capfilt, encoder, parse, rtppay, sink, NULL);
/* we link the elements together
* nvstreammux -> nvinfer -> nvdslogger -> nvtiler -> nvvidconv -> nvosd
* -> video-renderer */
if (!gst_element_link_many(streammux, nvvidconv, tiler,
nvosd, nvvidconv1, capfilt, encoder, parse, rtppay, sink, NULL))
{
g_printerr("Elements could not be linked. Exiting.\n");
return -1;
}
}
if ((rtsp_stream == 0))
{
gst_bin_add_many(GST_BIN(pipeline), streammux, nvvidconv, nvosd, sink, NULL);
/* we link the elements together
* nvstreammux -> nvinfer -> nvdslogger -> nvtiler -> nvvidconv -> nvosd
* -> video-renderer */
if (!gst_element_link_many(streammux, nvvidconv, nvosd, sink, NULL))
{
g_printerr("Elements could not be linked. Exiting.\n");
return -1;
}
}
/* Lets add probe to get informed of the meta data generated, we add probe to
* the sink pad of the osd element, since by that time, the buffer would have
* had got all the metadata. */
osd_sink_pad = gst_element_get_static_pad (nvosd, "sink");
if (!osd_sink_pad)
g_print ("Unable to get sink pad\n");
else
gst_pad_add_probe (osd_sink_pad, GST_PAD_PROBE_TYPE_BUFFER,
osd_sink_pad_buffer_probe, NULL, NULL);
gst_object_unref (osd_sink_pad);
// Add message handler
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
bus_watch_id = gst_bus_add_watch(bus, bus_call, loop);
gst_object_unref(bus);
g_print("Using file: %s\n", argv[1]);
gst_element_set_state(pipeline, GST_STATE_PLAYING);
if(rtsp_stream == 1)
{
start_rtsp_streaming (port/*rtsp_port*/, udp_port, 0, cam_area);
}
g_print("Running...\n");
g_main_loop_run(loop);
g_print("Returned, stopping playback\n");
gst_element_set_state(pipeline, GST_STATE_NULL);
g_print("Deleting pipeline\n");
gst_object_unref(GST_OBJECT(pipeline));
g_source_remove(bus_watch_id);
g_main_loop_unref(loop);
return 0;
}
Here’s the list of sources:
source-list:
list: rtmp://13.235.73.211/live/stream1;
Any help is highly appreciated. Regards!