Multiple output streams in RTSP

Please provide complete information as applicable to your setup.

• Hardware Platform (Jetson / GPU): 4070 ti
• DeepStream Version: 7.1
• TensorRT Version: 10.3.0.26
• NVIDIA GPU Driver Version (valid for GPU only): 560.35.05
• Issue Type( questions, new requirements, bugs): Question

Hi @junshengy,
I have code written to publish the tiled output as an RTSP stream. However, I need an individual RTSP output for each individual RTSP input. Any insights on this are highly appreciated. I'm attaching the code below:

#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
#include <math.h>
#include <string.h>
#include <iostream>
#include <typeinfo>
#include <fstream>
#include <sys/time.h>
#include <cuda_runtime_api.h>
#include "gstnvdsmeta.h"
#include "nvbufsurface.h"
#include "nvds_yml_parser.h"
#include "gst-nvmessage.h"
#include <vector>
#include <list>
#include <iterator>
#include <unordered_map>
#include <limits>
#include <cmath>
#include <algorithm>
#include <gst/rtsp-server/rtsp-server.h>

#define MUXER_OUTPUT_WIDTH 1920
#define MUXER_OUTPUT_HEIGHT 1080
#define MUXER_BATCH_TIMEOUT_USEC 40000
#define GST_CAPS_FEATURES_NVMM "memory:NVMM"

/* Check for parsing error. */
#define RETURN_ON_PARSER_ERROR(parse_expr)                    \
    if (NVDS_YAML_PARSER_SUCCESS != parse_expr)               \
    {                                                         \
        g_printerr("Error in parsing configuration file.\n"); \
        return -1;                                            \
    }

using namespace std;

int rtsp_stream = 1;
int udp_port = 8554;
int port = 8554;
string cam_area = "1";
bool isH264 = true;

static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
    GMainLoop *loop = (GMainLoop *)data;
    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_EOS:
        g_print("End of stream\n");
        g_main_loop_quit(loop);
        break;
    case GST_MESSAGE_ERROR: {
        gchar *debug;
        GError *error;
        gst_message_parse_error(msg, &error, &debug);
        g_printerr("ERROR from element %s: %s\n", GST_OBJECT_NAME(msg->src), error->message);
        if (debug)
            g_printerr("Error details: %s\n", debug);
        g_free(debug);
        g_error_free(error);
        g_main_loop_quit(loop);
        break;
    }
    default:
        break;
    }
    return TRUE;
}

static void cb_newpad(GstElement *decodebin, GstPad *decoder_src_pad, gpointer data)
{
    GstCaps *caps = gst_pad_get_current_caps(decoder_src_pad);
    if (!caps) {
        caps = gst_pad_query_caps(decoder_src_pad, NULL);
    }
    const GstStructure *str = gst_caps_get_structure(caps, 0);
    const gchar *name = gst_structure_get_name(str);
    GstElement *source_bin = (GstElement *)data;
    GstCapsFeatures *features = gst_caps_get_features(caps, 0);

    if (!strncmp(name, "video", 5)) {
        if (gst_caps_features_contains(features, GST_CAPS_FEATURES_NVMM)) {
            GstPad *bin_ghost_pad = gst_element_get_static_pad(source_bin, "src");
            if (!gst_ghost_pad_set_target(GST_GHOST_PAD(bin_ghost_pad), decoder_src_pad)) {
                g_printerr("Failed to link decoder src pad to source bin ghost pad\n");
            }
            gst_object_unref(bin_ghost_pad);
        } else {
            g_printerr("Error: Decodebin did not pick NVIDIA decoder plugin.\n");
        }
    }
}

static void decodebin_child_added(GstChildProxy *child_proxy, GObject *object, gchar *name, gpointer user_data)
{
    g_print("Decodebin child added: %s\n", name);
    if (g_strrstr(name, "decodebin") == name) {
        g_signal_connect(G_OBJECT(object), "child-added", G_CALLBACK(decodebin_child_added), user_data);
    }
    if (g_strrstr(name, "source") == name) {
        g_object_set(G_OBJECT(object), "drop-on-latency", TRUE, NULL);
    }
}

static GstElement *create_source_bin(guint index, gchar *uri)
{
    GstElement *bin = NULL, *uri_decode_bin = NULL;
    gchar bin_name[16] = {};

    g_snprintf(bin_name, 15, "source-bin-%02d", index);
    bin = gst_bin_new(bin_name);

    uri_decode_bin = gst_element_factory_make("uridecodebin", "uri-decode-bin");

    if (!bin || !uri_decode_bin) {
        g_printerr("One element in source bin could not be created.\n");
        return NULL;
    }

    g_object_set(G_OBJECT(uri_decode_bin), "uri", uri, NULL);

    g_signal_connect(G_OBJECT(uri_decode_bin), "pad-added", G_CALLBACK(cb_newpad), bin);
    g_signal_connect(G_OBJECT(uri_decode_bin), "child-added", G_CALLBACK(decodebin_child_added), bin);

    gst_bin_add(GST_BIN(bin), uri_decode_bin);

    if (!gst_element_add_pad(bin, gst_ghost_pad_new_no_target("src", GST_PAD_SRC))) {
        g_printerr("Failed to add ghost pad in source bin\n");
        return NULL;
    }

    return bin;
}

/* osd_sink_pad_buffer_probe is called for every buffer on the OSD sink pad.
 * By that point the buffer carries the batch metadata; here we acquire
 * display meta from the pool and draw a sample line on each frame. */
static GstPadProbeReturn
osd_sink_pad_buffer_probe(GstPad *pad, GstPadProbeInfo *info,
                          gpointer u_data)
{
  GstBuffer *buf = (GstBuffer *)info->data;
  NvDsMetaList *l_frame = NULL;
  NvDsDisplayMeta *display_meta = NULL;
  NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta(buf);

  for (l_frame = batch_meta->frame_meta_list; l_frame != NULL;
       l_frame = l_frame->next)
  {
    NvDsFrameMeta *frame_meta = (NvDsFrameMeta *)(l_frame->data);

    display_meta = nvds_acquire_display_meta_from_pool(batch_meta);

    NvOSD_LineParams *line_params = &display_meta->line_params[0];
    line_params->x1 = 0;
    line_params->y1 = 10;
    line_params->x2 = 500;
    line_params->y2 = 500;
    line_params->line_width = 6;
    line_params->line_color.red = 0.0;
    line_params->line_color.green = 0.0;
    line_params->line_color.blue = 1.0;
    line_params->line_color.alpha = 1.0;

    display_meta->num_lines = 1;
    nvds_add_display_meta_to_frame(frame_meta, display_meta);
  }
  
  return GST_PAD_PROBE_OK;
}

static GstRTSPServer *server;
static gboolean
start_rtsp_streaming (guint rtsp_port_num, guint updsink_port_num,
                      guint64 udp_buffer_size, string name)
{
    GstRTSPMountPoints *mounts;
    GstRTSPMediaFactory *factory;
    char udpsrc_pipeline[512];

    char port_num_Str[64] = { 0 };

    if (udp_buffer_size == 0)
        udp_buffer_size = 512 * 1024;

    sprintf (udpsrc_pipeline,
             "( udpsrc name=pay0 port=%d buffer-size=%lu caps=\"application/x-rtp, media=video, "
             "clock-rate=90000, encoding-name=H264, payload=96 \" )",
             updsink_port_num, udp_buffer_size);

    sprintf (port_num_Str, "%d", rtsp_port_num);

    server = gst_rtsp_server_new ();
    g_object_set (server, "service", port_num_Str, NULL);

    mounts = gst_rtsp_server_get_mount_points (server);

    factory = gst_rtsp_media_factory_new ();
    gst_rtsp_media_factory_set_launch (factory, udpsrc_pipeline);
    string final_name = "/ds-test"+ name;
    gst_rtsp_mount_points_add_factory (mounts, final_name.c_str() , factory);

    g_object_unref (mounts);

    gst_rtsp_server_attach (server, NULL);

    g_print
            ("\n *** DeepStream: Launched RTSP Streaming at rtsp://localhost:%d%s ***\n\n",
             rtsp_port_num, final_name.c_str());

    return TRUE;
}

static GstRTSPFilterResult
client_filter (GstRTSPServer * server, GstRTSPClient * client,
               gpointer user_data)
{
    return GST_RTSP_FILTER_REMOVE;
}

static void
destroy_sink_bin ()
{
    GstRTSPMountPoints *mounts;
    GstRTSPSessionPool *pool;

    mounts = gst_rtsp_server_get_mount_points (server);
    gst_rtsp_mount_points_remove_factory (mounts, "/ds-test");
    g_object_unref (mounts);
    gst_rtsp_server_client_filter (server, client_filter, NULL);
    pool = gst_rtsp_server_get_session_pool (server);
    gst_rtsp_session_pool_cleanup (pool);
    g_object_unref (pool);
}

int main(int argc, char *argv[])
{
    GMainLoop *loop = NULL;
    GstElement *pipeline = NULL, *streammux = NULL, *nvvidconv = NULL, *nvosd = NULL, 
               *sink, *nvvidconv1, *capfilt= NULL, *encoder = NULL, *parse = NULL,
               *rtppay = NULL, *tiler = NULL;
    GstPad *osd_sink_pad = NULL;
    GstCapsFeatures *feature = NULL;
    GstCaps *caps = NULL;
    GstBus *bus = NULL;
    guint bus_watch_id, num_sources=0;
    gboolean yaml_config = FALSE;

    int current_device = -1;
    cudaGetDevice(&current_device);
    struct cudaDeviceProp prop;
    cudaGetDeviceProperties(&prop, current_device);

    if (argc != 2) {
        g_printerr("Usage: %s <RTSP URL>\n or YAML FILE", argv[0]);
        return -1;
    }

    gst_init(&argc, &argv);
    loop = g_main_loop_new(NULL, FALSE);

    pipeline = gst_pipeline_new("deepstream-pipeline");

    streammux = gst_element_factory_make("nvstreammux", "stream-muxer");
    if (!pipeline || !streammux)
    {
        g_printerr("One element could not be created. Exiting.\n");
        return -1;
    }
    gst_bin_add(GST_BIN(pipeline), streammux);

    tiler = gst_element_factory_make("nvmultistreamtiler", "nvtiler");
    nvvidconv = gst_element_factory_make("nvvideoconvert", "nvvideo-converter");
    nvosd = gst_element_factory_make("nvdsosd", "nv-onscreendisplay");

    // gst_bin_add(GST_BIN(pipeline), streammux);
    // gst_bin_add(GST_BIN(pipeline), nvvidconv);
    // gst_bin_add(GST_BIN(pipeline), nvosd);
    // gst_bin_add(GST_BIN(pipeline), sink);

    /* Check whether the input is a YAML config file */
    yaml_config = (g_str_has_suffix(argv[1], ".yml") ||
                   g_str_has_suffix(argv[1], ".yaml"));

    /* Determine the number of sources */
    GList *src_list = NULL;
    if (yaml_config)
    {
        RETURN_ON_PARSER_ERROR(nvds_parse_source_list(&src_list, argv[1], "source-list"));
        num_sources = g_list_length(src_list);
    }
    else
    {
        num_sources = argc - 1;
    }

    for (guint i = 0; i < num_sources; i++)
    {
        GstPad *sinkpad, *srcpad;
        gchar pad_name[16] = {};

        GstElement *source_bin = NULL;
        if (g_str_has_suffix(argv[1], ".yml") || g_str_has_suffix(argv[1], ".yaml"))
        {
            g_print("Now playing : %s\n", (char *)(src_list)->data);
            source_bin = create_source_bin(i, (char *)(src_list)->data);
        }
        else
        {
            source_bin = create_source_bin(i, argv[i + 1]);
        }
        if (!source_bin)
        {
            g_printerr("Failed to create source bin. Exiting.\n");
            return -1;
        }

        gst_bin_add(GST_BIN(pipeline), source_bin);

        g_snprintf(pad_name, 15, "sink_%u", i);
        sinkpad = gst_element_request_pad_simple(streammux, pad_name);
        if (!sinkpad)
        {
            g_printerr("Streammux request sink pad failed. Exiting.\n");
            return -1;
        }

        srcpad = gst_element_get_static_pad(source_bin, "src");
        if (!srcpad)
        {
            g_printerr("Failed to get src pad of source bin. Exiting.\n");
            return -1;
        }

        if (gst_pad_link(srcpad, sinkpad) != GST_PAD_LINK_OK)
        {
            g_printerr("Failed to link source bin to stream muxer. Exiting.\n");
            return -1;
        }

        gst_object_unref(srcpad);
        gst_object_unref(sinkpad);

        if (yaml_config)
        {
            src_list = src_list->next;
        }
    }

    // Set properties for streammux
    g_object_set(G_OBJECT(streammux), "batch-size", 1, NULL);
    g_object_set(G_OBJECT(streammux), "width", MUXER_OUTPUT_WIDTH, "height", MUXER_OUTPUT_HEIGHT, "batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);

    // Set tiler properties
    g_object_set(G_OBJECT(tiler), "rows", 5, "columns", 5, "width", MUXER_OUTPUT_WIDTH, "height", MUXER_OUTPUT_HEIGHT, NULL);

    // Link elements
    if(rtsp_stream == 0)
    {
        sink = gst_element_factory_make("nveglglessink", "nvvideo-renderer");
        if (!streammux || !nvvidconv || !nvosd || !sink)
        {
            g_printerr("One element could not be created. Exiting.\n");
            return -1;
        }
    }
    
    if (rtsp_stream == 1)
    {
        nvvidconv1 = gst_element_factory_make("nvvideoconvert", "nvvidconv1");

        capfilt = gst_element_factory_make("capsfilter", "nvvideo-caps");
        caps = gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "I420", NULL);
        feature = gst_caps_features_new("memory:NVMM", NULL);
        gst_caps_set_features(caps, 0, feature);
        g_object_set(G_OBJECT(capfilt), "caps", caps, NULL);

        if (isH264)
            encoder = gst_element_factory_make("nvv4l2h264enc", "nvvideo-h264enc");
        else
            encoder = gst_element_factory_make("nvv4l2h265enc", "nvvideo-h265enc");

        /*parser*/
        parse = gst_element_factory_make ("h264parse", "parse");

        rtppay = gst_element_factory_make ("rtph264pay", "rtppay");
        /*udp sink*/
        sink = gst_element_factory_make ("udpsink", "sink");
        g_object_set (G_OBJECT (sink), "host", "127.0.0.1", "port",
                  udp_port, "async", FALSE, "sync", 1, NULL);

        if (!streammux || !nvvidconv || !nvosd ||
            !nvvidconv1 || !capfilt || !caps || !encoder || !parse || !rtppay || !sink)
        {
            g_printerr("One element could not be created. Exiting.\n");
            return -1;
        }
    }

    if (rtsp_stream == 1)
    {
        gst_bin_add_many(GST_BIN(pipeline), nvvidconv, tiler,
                         nvosd, nvvidconv1, capfilt, encoder, parse, rtppay, sink, NULL);
        /* we link the elements together:
         * nvstreammux -> nvvidconv -> nvtiler -> nvosd -> nvvidconv
         * -> capsfilter -> encoder -> parser -> rtppay -> udpsink */
        if (!gst_element_link_many(streammux, nvvidconv, tiler,
                                   nvosd, nvvidconv1, capfilt, encoder, parse, rtppay, sink, NULL))
        {
            g_printerr("Elements could not be linked. Exiting.\n");
            return -1;
        }
    }
    if (rtsp_stream == 0)
    {
        /* streammux is already in the pipeline, so add only the remaining
         * elements: nvstreammux -> nvvidconv -> nvosd -> video-renderer */
        gst_bin_add_many(GST_BIN(pipeline), nvvidconv, nvosd, sink, NULL);
        if (!gst_element_link_many(streammux, nvvidconv, nvosd, sink, NULL))
        {
            g_printerr("Elements could not be linked. Exiting.\n");
            return -1;
        }
    }

    /* Add a probe to get informed of the generated metadata. We add the probe
     * to the sink pad of the OSD element, since by that time the buffer will
     * have all the metadata. */
    osd_sink_pad = gst_element_get_static_pad (nvosd, "sink");
    if (!osd_sink_pad)
        g_print ("Unable to get sink pad\n");
    else
        gst_pad_add_probe (osd_sink_pad, GST_PAD_PROBE_TYPE_BUFFER,
            osd_sink_pad_buffer_probe, NULL, NULL);
    gst_object_unref (osd_sink_pad);

    // Add message handler
    bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
    bus_watch_id = gst_bus_add_watch(bus, bus_call, loop);
    gst_object_unref(bus);

    g_print("Using file: %s\n", argv[1]);
    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    if(rtsp_stream == 1)
    {
        start_rtsp_streaming (port/*rtsp_port*/, udp_port, 0, cam_area);
    }

    g_print("Running...\n");
    g_main_loop_run(loop);

    g_print("Returned, stopping playback\n");
    gst_element_set_state(pipeline, GST_STATE_NULL);
    g_print("Deleting pipeline\n");
    gst_object_unref(GST_OBJECT(pipeline));
    g_source_remove(bus_watch_id);
    g_main_loop_unref(loop);
    return 0;
}

Here’s the list of sources:

source-list:
  list: rtmp://13.235.73.211/live/stream1;

Any help is highly appreciated. Regards!

This does not seem to be possible. The tiler will flatten multiple streams into one stream, so your multiple inputs have been merged into a single output.

If you want multiple inputs to remain multiple outputs after inference, use nvstreamdemux. You can use nvstreamdemux and the tiler at the same time, such as:

nvinfer --> tee --+--> tiler --> sink
                  |
                  +--> nvstreamdemux --+--> rtsp sink 1
                                       +--> rtsp sink 2
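For reference, a minimal linking sketch of that topology (a sketch only: the pgie, tiler, and num_sources names are assumptions, and queues, error checks, and pad unrefs are omitted):

GstElement *tee = gst_element_factory_make("tee", "t");
GstElement *demux = gst_element_factory_make("nvstreamdemux", "demux");

/* nvinfer -> tee */
gst_element_link(pgie, tee);

/* branch 1: tee -> tiler -> ... -> sink */
GstPad *tee_src0 = gst_element_request_pad_simple(tee, "src_%u");
GstPad *tiler_sink = gst_element_get_static_pad(tiler, "sink");
gst_pad_link(tee_src0, tiler_sink);

/* branch 2: tee -> nvstreamdemux, then one src pad per input stream */
GstPad *tee_src1 = gst_element_request_pad_simple(tee, "src_%u");
GstPad *demux_sink = gst_element_get_static_pad(demux, "sink");
gst_pad_link(tee_src1, demux_sink);

for (guint i = 0; i < num_sources; i++) {
    gchar name[16];
    g_snprintf(name, sizeof(name), "src_%u", i);
    GstPad *demux_src = gst_element_request_pad_simple(demux, name);
    /* link demux_src to a per-stream encode + RTSP sink bin here */
}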

Hi @junshengy. Thanks again for your valuable time. I have decided not to include the tee and make a single flow. Here is the code:

int main(int argc, char *argv[])
{
    GMainLoop *loop = NULL;
    GstElement *pipeline = NULL, *streammux = NULL, 
               *nvvidconv1 = NULL, *nvvidconv2 = NULL, *nvvidconv3 = NULL,
               *nvosd_1 = NULL, *nvosd_2 = NULL, *nvosd_3 = NULL, 
               *sink_1 = NULL, *sink_2 = NULL, *sink_3 = NULL,
               *nvvidconv_post1 = NULL, *nvvidconv_post2 = NULL, *nvvidconv_post3 = NULL,
               *capfilt_1 = NULL, *capfilt_2 = NULL, *capfilt_3 = NULL,
               *encoder_1 = NULL, *encoder_2 = NULL, *encoder_3 = NULL,
               *parse_1 = NULL, *parse_2 = NULL, *parse_3 = NULL,
               *rtppay_1 = NULL, *rtppay_2 = NULL, *rtppay_3 = NULL;
    GstPad *osd_sink_pad = NULL;
    GstCapsFeatures *feature1 = NULL, *feature2 = NULL, *feature3 = NULL;
    GstCaps * caps = NULL, *caps1 = NULL, *caps2 = NULL, *caps3 = NULL;
    GstBus *bus = NULL;
    guint bus_watch_id, num_sources=0;
    gboolean yaml_config = FALSE;

    int current_device = -1;
    cudaGetDevice(&current_device);
    struct cudaDeviceProp prop;
    cudaGetDeviceProperties(&prop, current_device);

    if (argc != 2) {
        g_printerr("Usage: %s <RTSP URL>\n or YAML FILE", argv[0]);
        return -1;
    }

    gst_init(&argc, &argv);
    loop = g_main_loop_new(NULL, FALSE);

    pipeline = gst_pipeline_new("deepstream-pipeline");

    streammux = gst_element_factory_make("nvstreammux", "stream-muxer");
    if (!pipeline || !streammux)
    {
        g_printerr("One element could not be created. Exiting.\n");
        return -1;
    }
    gst_bin_add(GST_BIN(pipeline), streammux);

    // tiler = gst_element_factory_make("nvmultistreamtiler", "nvtiler");
    // nvvidconv = gst_element_factory_make("nvvideoconvert", "nvvideo-converter");
    // nvosd = gst_element_factory_make("nvdsosd", "nv-onscreendisplay");

    // gst_bin_add(GST_BIN(pipeline), streammux);
    // gst_bin_add(GST_BIN(pipeline), nvvidconv);
    // gst_bin_add(GST_BIN(pipeline), nvosd);
    // gst_bin_add(GST_BIN(pipeline), sink);

    /* Check whether the input is a YAML config file */
    yaml_config = (g_str_has_suffix(argv[1], ".yml") ||
                   g_str_has_suffix(argv[1], ".yaml"));

    /* Determine the number of sources */
    GList *src_list = NULL;
    if (yaml_config)
    {
        RETURN_ON_PARSER_ERROR(nvds_parse_source_list(&src_list, argv[1], "source-list"));
        num_sources = g_list_length(src_list);
    }
    else
    {
        num_sources = argc - 1;
    }

    for (guint i = 0; i < num_sources; i++)
    {
        GstPad *sinkpad, *srcpad;
        gchar pad_name[16] = {};

        GstElement *source_bin = NULL;
        if (g_str_has_suffix(argv[1], ".yml") || g_str_has_suffix(argv[1], ".yaml"))
        {
            g_print("Now playing : %s\n", (char *)(src_list)->data);
            source_bin = create_source_bin(i, (char *)(src_list)->data);
        }
        else
        {
            source_bin = create_source_bin(i, argv[i + 1]);
        }
        if (!source_bin)
        {
            g_printerr("Failed to create source bin. Exiting.\n");
            return -1;
        }

        gst_bin_add(GST_BIN(pipeline), source_bin);

        g_snprintf(pad_name, 15, "sink_%u", i);
        sinkpad = gst_element_request_pad_simple(streammux, pad_name);
        if (!sinkpad)
        {
            g_printerr("Streammux request sink pad failed. Exiting.\n");
            return -1;
        }

        srcpad = gst_element_get_static_pad(source_bin, "src");
        if (!srcpad)
        {
            g_printerr("Failed to get src pad of source bin. Exiting.\n");
            return -1;
        }

        if (gst_pad_link(srcpad, sinkpad) != GST_PAD_LINK_OK)
        {
            g_printerr("Failed to link source bin to stream muxer. Exiting.\n");
            return -1;
        }

        gst_object_unref(srcpad);
        gst_object_unref(sinkpad);

        if (yaml_config)
        {
            src_list = src_list->next;
        }
    }

    // Set properties for streammux
    g_object_set(G_OBJECT(streammux), "batch-size", 1, NULL);
    g_object_set(G_OBJECT(streammux), "width", MUXER_OUTPUT_WIDTH, "height", MUXER_OUTPUT_HEIGHT, "batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);

    // Set tiler properties
    // g_object_set(G_OBJECT(tiler), "rows", 5, "columns", 5, "width", MUXER_OUTPUT_WIDTH, "height", MUXER_OUTPUT_HEIGHT, NULL);

    // nvstreamdemux = gst_element_factory_make("nvstreamdemux", "nvstreamdemux")

    // Link elements
    if(rtsp_stream == 0)
    {
        nvvidconv1 = gst_element_factory_make("nvvideoconvert", "nvvidconv1");
        nvosd_1 = gst_element_factory_make("nvdsosd", "nv-onscreendisplay");
        sink_1 = gst_element_factory_make("nveglglessink", "nvvideo-renderer");
        if (!streammux || !nvvidconv1 || !nvosd_1 || !sink_1)
        {
            g_printerr("One element could not be created. Exiting.\n");
            return -1;
        }
    }
    
    if (rtsp_stream == 1)
    {
        nvosd_1 = gst_element_factory_make("nvdsosd", "nv-onscreendisplay1");
        nvosd_2 = gst_element_factory_make("nvdsosd", "nv-onscreendisplay2");
        nvosd_3 = gst_element_factory_make("nvdsosd", "nv-onscreendisplay3");

        nvvidconv_post1 = gst_element_factory_make("nvvideoconvert", "nvvidconvpost1");
        nvvidconv_post2 = gst_element_factory_make("nvvideoconvert", "nvvidconvpost2");
        nvvidconv_post3 = gst_element_factory_make("nvvideoconvert", "nvvidconvpost3");

        nvvidconv1 = gst_element_factory_make("nvvideoconvert", "nvvidconv1");
        nvvidconv2 = gst_element_factory_make("nvvideoconvert", "nvvidconv2");
        nvvidconv3 = gst_element_factory_make("nvvideoconvert", "nvvidconv3");

        // for (int i = 0; i < 3; ++i) {
        //     std::string name = "nvvidconv" + std::to_string(i + 1);
        //     nvvidconv[i] = gst_element_factory_make("nvvidconv", name.c_str());
        //     if (!nvvidconv[i]) {
        //         g_printerr("Failed to create nvvidconv element: %s\n", name.c_str());
        //     }
        // }        

        capfilt_1 = gst_element_factory_make("capsfilter", "nvvideo-caps1");
        caps1 = gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "I420", NULL);
        feature1 = gst_caps_features_new("memory:NVMM", NULL);
        gst_caps_set_features(caps1, 0, feature1);
        g_object_set(G_OBJECT(capfilt_1), "caps", caps1, NULL);

        capfilt_2 = gst_element_factory_make("capsfilter", "nvvideo-caps2");
        caps2 = gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "I420", NULL);
        feature2 = gst_caps_features_new("memory:NVMM", NULL);
        gst_caps_set_features(caps2, 0, feature2);
        g_object_set(G_OBJECT(capfilt_2), "caps", caps2, NULL);

        capfilt_3 = gst_element_factory_make("capsfilter", "nvvideo-caps3");
        caps3 = gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "I420", NULL);
        feature3 = gst_caps_features_new("memory:NVMM", NULL);
        gst_caps_set_features(caps3, 0, feature3);
        g_object_set(G_OBJECT(capfilt_3), "caps", caps3, NULL);

        if (isH264)
        {
            encoder_1 = gst_element_factory_make("nvv4l2h264enc", "nvvideo-h264enc1");
            encoder_2 = gst_element_factory_make("nvv4l2h264enc", "nvvideo-h264enc2");
            encoder_3 = gst_element_factory_make("nvv4l2h264enc", "nvvideo-h264enc3");
        }
        else
        {
            encoder_1 = gst_element_factory_make("nvv4l2h265enc", "nvvideo-h265enc1");
            encoder_2 = gst_element_factory_make("nvv4l2h265enc", "nvvideo-h265enc2");
            encoder_3 = gst_element_factory_make("nvv4l2h265enc", "nvvideo-h265enc3");
        }

        /*parser*/
        parse_1 = gst_element_factory_make ("h264parse", "parse1");
        parse_2 = gst_element_factory_make ("h264parse", "parse2");
        parse_3 = gst_element_factory_make ("h264parse", "parse3");

        rtppay_1 = gst_element_factory_make ("rtph264pay", "rtppay1");
        rtppay_2 = gst_element_factory_make ("rtph264pay", "rtppay2");
        rtppay_3 = gst_element_factory_make ("rtph264pay", "rtppay3");

        /*udp sink*/
        sink_1 = gst_element_factory_make("udpsink", "sink1");
        g_object_set(G_OBJECT(sink_1), "host", "127.0.0.1", "port",
                     udp_port1, "async", FALSE, "sync", 1, NULL);

        sink_2 = gst_element_factory_make("udpsink", "sink2");
        g_object_set(G_OBJECT(sink_2), "host", "127.0.0.1", "port",
                     udp_port2, "async", FALSE, "sync", 1, NULL);

        sink_3 = gst_element_factory_make("udpsink", "sink3");
        g_object_set(G_OBJECT(sink_3), "host", "127.0.0.1", "port",
                     udp_port3, "async", FALSE, "sync", 1, NULL);

        if (!streammux || !nvvidconv1 || !nvosd_1 ||
            !nvvidconv_post1 || !capfilt_1 || !caps1 || !encoder_1 || !parse_1 || !rtppay_1 || !sink_1)
        {
            g_printerr("One element could not be created. Exiting.\n");
            return -1;
        }

        gst_bin_add_many(GST_BIN(pipeline), nvvidconv1, nvvidconv2, nvvidconv3,
        nvosd_1, nvosd_2, nvosd_3, nvvidconv_post1, nvvidconv_post2, nvvidconv_post3,
        capfilt_1, capfilt_2, capfilt_3, encoder_1, encoder_2, encoder_3,
        parse_1, parse_2, parse_3, rtppay_1, rtppay_2, rtppay_3,
        sink_1, sink_2, sink_3, NULL);
    }

    GstElement* nvvidconv[3] = {nvvidconv1, nvvidconv2, nvvidconv3};
    GstElement* nvosd[3] = {nvosd_1, nvosd_2, nvosd_3};
    GstElement* nvvidconv_post[3] = {nvvidconv_post1, nvvidconv_post2, nvvidconv_post3};
    GstElement* capfilt[3] = {capfilt_1, capfilt_2, capfilt_3};
    GstElement* encoder[3] = {encoder_1, encoder_2, encoder_3};
    GstElement* parse[3] = {parse_1, parse_2, parse_3};
    GstElement* rtppay[3] = {rtppay_1, rtppay_2, rtppay_3};
    GstElement* sink[3] = {sink_1, sink_2, sink_3};

    if (rtsp_stream == 1)
    {
        // if(!gst_element_link())
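        /* NOTE: nvstreammux exposes a single static "src" pad, so only the
         * first iteration below can link it. The second call to
         * gst_element_link_many() fails, producing the "Elements could not
         * be linked" error shown after this code. Splitting the batched
         * stream back into per-source streams needs nvstreamdemux. */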
    
        for (int i = 0; i < 2; i++)
        {
            /* link each per-stream chain:
             * nvstreammux -> nvvidconv -> nvosd -> nvvidconv -> capsfilter
             * -> encoder -> parser -> rtppay -> udpsink */
            cout << i << endl;
            if (!gst_element_link_many(streammux, nvvidconv[i],
                                       nvosd[i], nvvidconv_post[i], capfilt[i], encoder[i], parse[i], rtppay[i], sink[i], NULL))
            {
                g_printerr("Elements could not be linked. Exiting.\n");
                return -1;
            }
        }
    }
    if (rtsp_stream == 0)
    {
        // gst_bin_add_many(GST_BIN(pipeline), streammux, nvvidconv, nvosd, sink, NULL);
        // /* we link the elements together
        //  * nvstreammux -> nvinfer -> nvdslogger -> nvtiler -> nvvidconv -> nvosd
        //  * -> video-renderer */
        // if (!gst_element_link_many(streammux, nvvidconv, nvosd, sink, NULL))
        // {
        //     g_printerr("Elements could not be linked. Exiting.\n");
        //     return -1;
        // } 
        ;
    }

    /* Add a probe to get informed of the generated metadata. We add the probe
     * to the sink pad of the OSD element, since by that time the buffer will
     * have all the metadata. */
    osd_sink_pad = gst_element_get_static_pad (nvosd[0], "sink");
    if (!osd_sink_pad)
        g_print ("Unable to get sink pad\n");
    else
        gst_pad_add_probe (osd_sink_pad, GST_PAD_PROBE_TYPE_BUFFER,
            osd_sink_pad_buffer_probe, NULL, NULL);
    gst_object_unref (osd_sink_pad);

    // Add message handler
    bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
    bus_watch_id = gst_bus_add_watch(bus, bus_call, loop);
    gst_object_unref(bus);

    g_print("Using file: %s\n", argv[1]);
    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    if(rtsp_stream == 1)
    {
        start_rtsp_streaming (udp_port1/*rtsp_port*/, udp_port1, 0, cam_area);
        start_rtsp_streaming (udp_port2/*rtsp_port*/, udp_port2, 0, "2");
        start_rtsp_streaming (udp_port2/*rtsp_port*/, udp_port2, 0, "3");
    }

    g_print("Running...\n");
    g_main_loop_run(loop);

    g_print("Returned, stopping playback\n");
    gst_element_set_state(pipeline, GST_STATE_NULL);
    g_print("Deleting pipeline\n");
    gst_object_unref(GST_OBJECT(pipeline));
    g_source_remove(bus_watch_id);
    g_main_loop_unref(loop);
    return 0;
}

However it gives me the following error:

Now playing : rtmp://65.2.153.168/live/stream1
Now playing : rtmp://65.2.153.168/live/stream1
Now playing : rtmp://65.2.153.168/live/stream1
0
1
Elements could not be linked. Exiting.

Any help is highly appreciated.

You can refer to the following code to build an RTSP server, then use the gst-launch-1.0 command line to push the UDP stream, and finally rewrite it as C code.

#include <gst/gst.h>
#include <gst/rtsp-server/rtsp-server.h>

#define NUM_STREAMS 20

int main(int argc, char *argv[]) {
  GMainLoop *loop;
  GstRTSPServer *server;
  GstRTSPMountPoints *mounts;
  GstRTSPMediaFactory *factory;
  gchar *launch_line;
  gchar *mount_point;

  gst_init(&argc, &argv);

  loop = g_main_loop_new(NULL, FALSE);

  server = gst_rtsp_server_new();
  g_object_set(server, "service", "8554", NULL);

  mounts = gst_rtsp_server_get_mount_points(server);

  for (int i = 0; i < NUM_STREAMS; i++) {
    factory = gst_rtsp_media_factory_new();

    launch_line = g_strdup_printf(
        "( videotestsrc pattern=%d ! video/x-raw,width=640,height=480 ! "
        "x264enc ! rtph264pay name=pay0 pt=96 )",
        i % 20);
    gst_rtsp_media_factory_set_launch(factory, launch_line);
    g_free(launch_line);

    gst_rtsp_media_factory_set_shared(factory, TRUE);

    mount_point = g_strdup_printf("/stream%d", i);
    gst_rtsp_mount_points_add_factory(mounts, mount_point, factory);
    g_free(mount_point);
  }

  g_object_unref(mounts);

  gst_rtsp_server_attach(server, NULL);

  g_print("RTSP server is running. Streams are available at:\n");
  for (int i = 0; i < NUM_STREAMS; i++) {
    g_print("rtsp://127.0.0.1:8554/stream%d\n", i);
  }

  g_main_loop_run(loop);

  return 0;
}
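A usage note (an assumption about the build environment, not part of the original post): the example links against gst-rtsp-server, so it can be built with pkg-config, e.g.

gcc rtsp_server.c -o rtsp_server $(pkg-config --cflags --libs gstreamer-1.0 gstreamer-rtsp-server-1.0)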

Thank you. Is it possible to have multiple endpoints on the same port, like:

RTSP endpoints:

    rtsp://localhost:8554/ds-test0

    rtsp://localhost:8554/ds-test1

    rtsp://localhost:8554/ds-test2

All use the same RTSP server/port (8554).

Each listens to a different UDP source port for incoming RTP.

I tried the below code in the next comment!

@junshengy , Hi, I tried this code:

/* Multiple Endpoints, Same Port */
static gboolean
add_rtsp_stream(guint rtsp_port_num, guint updsink_port_num,
                guint64 udp_buffer_size, const std::string& name, int payname)
{
    GstRTSPMountPoints *mounts;
    GstRTSPMediaFactory *factory;
    char udpsrc_pipeline[512];
    char port_num_Str[64] = { 0 };

    if (udp_buffer_size == 0)
        udp_buffer_size = 512 * 1024;

    // snprintf(udpsrc_pipeline, sizeof(udpsrc_pipeline),
    //          "( udpsrc name=pay%d port=%d buffer-size=%lu caps=\"application/x-rtp, media=video, "
    //          "clock-rate=90000, encoding-name=H264, payload=96\" )",
    //          payname,
    //          updsink_port_num, udp_buffer_size);

    snprintf(udpsrc_pipeline, sizeof(udpsrc_pipeline),
             "( udpsrc port=%d buffer-size=%lu caps=\"application/x-rtp, media=video, "
             "clock-rate=90000, encoding-name=H264, payload=96\" ! "
             "rtpjitterbuffer ! rtph264depay ! h264parse ! rtph264pay name=pay0 pt=96 )",
             updsink_port_num, udp_buffer_size);

    // Create RTSP server only once
    if (!server) {
        snprintf(port_num_Str, sizeof(port_num_Str), "%d", rtsp_port_num);
        server = gst_rtsp_server_new();
        g_object_set(server, "service", port_num_Str, NULL);
    }

    mounts = gst_rtsp_server_get_mount_points(server);
    factory = gst_rtsp_media_factory_new();
    gst_rtsp_media_factory_set_launch(factory, udpsrc_pipeline);
    gst_rtsp_media_factory_set_shared(factory, TRUE);

    string mount_path = "/ds-test" + name;
    gst_rtsp_mount_points_add_factory(mounts, mount_path.c_str(), factory);

    g_object_unref(mounts);

    // Attach only once
    static bool attached = false;
    if (!attached) {
        gst_rtsp_server_attach(server, NULL);
        attached = true;
    }

    g_print("*** DeepStream: Launched RTSP Streaming at rtsp://localhost:%d%s ***\n",
            rtsp_port_num, mount_path.c_str());

    return TRUE;
}

and in the main:

int base_rtsp_port = 8554;
int udp_port1 = 5000;

if (rtsp_stream == 1)
    {
        for (int i = 0; i < num_sources; i++)
        {
            string name = to_string(i);     // e.g., /ds-test0
            guint udp_port = udp_port1 + i; // each endpoint uses different UDP source port
            g_print("udp_port: %d\n", udp_port);
            add_rtsp_stream(base_rtsp_port, udp_port, 0, name,i);
        }
        // start_rtsp_streaming (udp_port1/*rtsp_port*/, udp_port1, 0, cam_area);
    }

However, only the 1st stream is visible; the rest throw the following error:

method DESCRIBE failed: 503 Service Unavailable
rtsp://192.168.1.31:8554/ds-test1: Server returned 5XX Server Error reply

Any help on this? I ran ss -tunlp and saw that only UDP port 5000 is open; the rest (5001 and 5002, the other udp_port values) are not opening. The RTSP port 8554 is open.
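One way to narrow this down (a sketch, with the test source, encoder settings, and port 5000 as assumptions) is to push RTP into the first mount's UDP port by hand and confirm the mount plays independently of the DeepStream pipeline:

# terminal 1: feed RTP/H264 into the udpsrc of the /ds-test0 factory
gst-launch-1.0 videotestsrc ! x264enc tune=zerolatency ! h264parse ! rtph264pay pt=96 ! udpsink host=127.0.0.1 port=5000 sync=true

# terminal 2: play the mount
ffplay rtsp://localhost:8554/ds-test0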

/*
 * SPDX-FileCopyrightText: Copyright (c) 2018-2024 NVIDIA CORPORATION &
 * AFFILIATES. All rights reserved. SPDX-License-Identifier:
 * LicenseRef-NvidiaProprietary
 *
 * NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
 * property and proprietary rights in and to this material, related
 * documentation and any modifications thereto. Any use, reproduction,
 * disclosure or distribution of this material and related documentation
 * without an express license agreement from NVIDIA CORPORATION or
 * its affiliates is strictly prohibited.
 */

#include <cuda_runtime_api.h>
#include <glib.h>
#include <gst/gst.h>
#include <gst/rtsp-server/rtsp-server.h>
#include <math.h>
#include <stdio.h>
#include <string.h>
#include <sys/time.h>

#include "gst-nvmessage.h"
#include "gstnvdsmeta.h"

#define MAX_DISPLAY_LEN 64

#define PGIE_CLASS_ID_VEHICLE 0
#define PGIE_CLASS_ID_PERSON 2

/* By default, OSD process-mode is set to GPU_MODE. To change mode, set as:
 * 0: CPU mode
 * 1: GPU mode
 */
#define OSD_PROCESS_MODE 1

/* By default, OSD will not display text. To display text, change this to 1 */
#define OSD_DISPLAY_TEXT 1

/* The muxer output resolution must be set if the input streams will be of
 * different resolution. The muxer will scale all the input frames to this
 * resolution. */
#define MUXER_OUTPUT_WIDTH 1920
#define MUXER_OUTPUT_HEIGHT 1080

/* Muxer batch formation timeout, for e.g. 40 millisec. Should ideally be set
 * based on the fastest source's framerate. */
#define MUXER_BATCH_TIMEOUT_USEC 40000

/* NVIDIA Decoder source pad memory feature. This feature signifies that source
 * pads having this capability will push GstBuffers containing cuda buffers. */
#define GST_CAPS_FEATURES_NVMM "memory:NVMM"

static gboolean PERF_MODE = FALSE;

gchar pgie_classes_str[4][32] = {"Vehicle", "TwoWheeler", "Person", "RoadSign"};

typedef enum NvDsEncoderType {
  NV_DS_ENCODER_H264 = 1,
  NV_DS_ENCODER_H265,
} NvDsEncoderType;

typedef enum NvDsEncMode {
  NV_DS_ENCODER_MODE_HW = 1,
  NV_DS_ENCODER_MODE_SW
} NvDsEncMode;

/* tiler_src_pad_buffer_probe will extract metadata received on the upstream
 * src pad (here attached to the nvinfer src pad) and log per-frame object
 * counts. */

static GstPadProbeReturn tiler_src_pad_buffer_probe(GstPad *pad,
                                                    GstPadProbeInfo *info,
                                                    gpointer u_data) {
  GstBuffer *buf = (GstBuffer *)info->data;
  guint num_rects = 0;
  NvDsObjectMeta *obj_meta = NULL;
  guint vehicle_count = 0;
  guint person_count = 0;
  NvDsMetaList *l_frame = NULL;
  NvDsMetaList *l_obj = NULL;
  // NvDsDisplayMeta *display_meta = NULL;

  NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta(buf);

  for (l_frame = batch_meta->frame_meta_list; l_frame != NULL;
       l_frame = l_frame->next) {
    NvDsFrameMeta *frame_meta = (NvDsFrameMeta *)(l_frame->data);
    // int offset = 0;
    for (l_obj = frame_meta->obj_meta_list; l_obj != NULL;
         l_obj = l_obj->next) {
      obj_meta = (NvDsObjectMeta *)(l_obj->data);
      if (obj_meta->class_id == PGIE_CLASS_ID_VEHICLE) {
        vehicle_count++;
        num_rects++;
      }
      if (obj_meta->class_id == PGIE_CLASS_ID_PERSON) {
        person_count++;
        num_rects++;
      }
    }
    g_print("Frame Number = %d Number of objects = %d "
            "Vehicle Count = %d Person Count = %d\n",
            frame_meta->frame_num, num_rects, vehicle_count, person_count);
  }
  return GST_PAD_PROBE_OK;
}

static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data) {
  GMainLoop *loop = (GMainLoop *)data;
  switch (GST_MESSAGE_TYPE(msg)) {
  case GST_MESSAGE_EOS:
    g_print("End of stream\n");
    g_main_loop_quit(loop);
    break;
  case GST_MESSAGE_WARNING: {
    gchar *debug = NULL;
    GError *error = NULL;
    gst_message_parse_warning(msg, &error, &debug);
    g_printerr("WARNING from element %s: %s\n", GST_OBJECT_NAME(msg->src),
               error->message);
    g_free(debug);
    g_printerr("Warning: %s\n", error->message);
    g_error_free(error);
    break;
  }
  case GST_MESSAGE_ERROR: {
    gchar *debug = NULL;
    GError *error = NULL;
    gst_message_parse_error(msg, &error, &debug);
    g_printerr("ERROR from element %s: %s\n", GST_OBJECT_NAME(msg->src),
               error->message);
    if (debug)
      g_printerr("Error details: %s\n", debug);
    g_free(debug);
    g_error_free(error);
    g_main_loop_quit(loop);
    break;
  }
  case GST_MESSAGE_ELEMENT: {
    if (gst_nvmessage_is_stream_eos(msg)) {
      guint stream_id = 0;
      if (gst_nvmessage_parse_stream_eos(msg, &stream_id)) {
        g_print("Got EOS from stream %d\n", stream_id);
      }
    }
    break;
  }
  default:
    break;
  }
  return TRUE;
}

static void cb_newpad(GstElement *decodebin, GstPad *decoder_src_pad,
                      gpointer data) {
  GstCaps *caps = gst_pad_get_current_caps(decoder_src_pad);
  if (!caps) {
    caps = gst_pad_query_caps(decoder_src_pad, NULL);
  }
  const GstStructure *str = gst_caps_get_structure(caps, 0);
  const gchar *name = gst_structure_get_name(str);
  GstElement *source_bin = (GstElement *)data;
  GstCapsFeatures *features = gst_caps_get_features(caps, 0);

  /* Need to check if the pad created by the decodebin is for video and not
   * audio. */
  if (!strncmp(name, "video", 5)) {
    /* Link the decodebin pad only if decodebin has picked nvidia
     * decoder plugin nvdec_*. We do this by checking if the pad caps contain
     * NVMM memory features. */
    if (gst_caps_features_contains(features, GST_CAPS_FEATURES_NVMM)) {
      /* Get the source bin ghost pad */
      GstPad *bin_ghost_pad = gst_element_get_static_pad(source_bin, "src");
      if (!gst_ghost_pad_set_target(GST_GHOST_PAD(bin_ghost_pad),
                                    decoder_src_pad)) {
        g_printerr("Failed to link decoder src pad to source bin ghost pad\n");
      }
      gst_object_unref(bin_ghost_pad);
    } else {
      g_printerr("Error: Decodebin did not pick nvidia decoder plugin.\n");
    }
  }
}

static void decodebin_child_added(GstChildProxy *child_proxy, GObject *object,
                                  gchar *name, gpointer user_data) {
  g_print("Decodebin child added: %s\n", name);
  if (g_strrstr(name, "decodebin") == name) {
    g_signal_connect(G_OBJECT(object), "child-added",
                     G_CALLBACK(decodebin_child_added), user_data);
  }
  if (g_strrstr(name, "source") == name) {
    g_object_set(G_OBJECT(object), "drop-on-latency", true, NULL);
  }
}

static GstElement *create_source_bin(guint index, gchar *uri) {
  GstElement *bin = NULL, *uri_decode_bin = NULL;
  gchar bin_name[16] = {};

  g_snprintf(bin_name, 15, "source-bin-%02d", index);
  /* Create a source GstBin to abstract this bin's content from the rest of the
   * pipeline */
  bin = gst_bin_new(bin_name);

  /* Source element for reading from the uri.
   * We will use decodebin and let it figure out the container format of the
   * stream and the codec and plug the appropriate demux and decode plugins. */
  if (PERF_MODE) {
    uri_decode_bin = gst_element_factory_make("nvurisrcbin", "uri-decode-bin");
    g_object_set(G_OBJECT(uri_decode_bin), "file-loop", TRUE, NULL);
    g_object_set(G_OBJECT(uri_decode_bin), "cudadec-memtype", 0, NULL);
  } else {
    uri_decode_bin = gst_element_factory_make("uridecodebin", "uri-decode-bin");
  }

  if (!bin || !uri_decode_bin) {
    g_printerr("One element in source bin could not be created.\n");
    return NULL;
  }

  /* We set the input uri to the source element */
  g_object_set(G_OBJECT(uri_decode_bin), "uri", uri, NULL);

  /* Connect to the "pad-added" signal of the decodebin which generates a
   * callback once a new pad for raw data has beed created by the decodebin */
  g_signal_connect(G_OBJECT(uri_decode_bin), "pad-added", G_CALLBACK(cb_newpad),
                   bin);
  g_signal_connect(G_OBJECT(uri_decode_bin), "child-added",
                   G_CALLBACK(decodebin_child_added), bin);

  gst_bin_add(GST_BIN(bin), uri_decode_bin);

  /* We need to create a ghost pad for the source bin which will act as a proxy
   * for the video decoder src pad. The ghost pad will not have a target right
   * now. Once the decode bin creates the video decoder and generates the
   * cb_newpad callback, we will set the ghost pad target to the video decoder
   * src pad. */
  if (!gst_element_add_pad(bin,
                           gst_ghost_pad_new_no_target("src", GST_PAD_SRC))) {
    g_printerr("Failed to add ghost pad in source bin\n");
    return NULL;
  }

  return bin;
}

static GstRTSPFilterResult client_filter(GstRTSPServer *server,
                                         GstRTSPClient *client,
                                         gpointer user_data) {
  return GST_RTSP_FILTER_REMOVE;
}

void destroy_rtsp_server(GstRTSPServer *server, char *mounts_str) {
  GstRTSPMountPoints *mounts;
  GstRTSPSessionPool *pool;
  mounts = gst_rtsp_server_get_mount_points(server);
  gst_rtsp_mount_points_remove_factory(mounts, mounts_str);
  g_object_unref(mounts);
  gst_rtsp_server_client_filter(server, client_filter, NULL);
  pool = gst_rtsp_server_get_session_pool(server);
  gst_rtsp_session_pool_cleanup(pool);
  g_object_unref(pool);
}

static void start_rtsp_streaming(GstRTSPServer *server, guint updsink_port_num,
                                 NvDsEncoderType enctype, char *mounts_point,
                                 guint64 udp_buffer_size) {
  GstRTSPMountPoints *mounts;
  GstRTSPMediaFactory *factory;
  char udpsrc_pipeline[512];
  char *encoder_name;

  if (enctype == NV_DS_ENCODER_H264) {
    encoder_name = "H264";
  } else if (enctype == NV_DS_ENCODER_H265) {
    encoder_name = "H265";
  } else {
    g_print("%s failed", __func__);
    return;
  }

  if (udp_buffer_size == 0)
    udp_buffer_size = 512 * 1024;

  sprintf(udpsrc_pipeline,
          "( udpsrc name=pay0 port=%d buffer-size=%lu "
          "caps=\"application/x-rtp, media=video, "
          "clock-rate=90000, encoding-name=%s, payload=96 \" )",
          updsink_port_num, udp_buffer_size, encoder_name);

  mounts = gst_rtsp_server_get_mount_points(server);

  factory = gst_rtsp_media_factory_new();
  gst_rtsp_media_factory_set_shared(factory, TRUE);
  gst_rtsp_media_factory_set_launch(factory, udpsrc_pipeline);
  gst_rtsp_mount_points_add_factory(mounts, mounts_point, factory);

  g_object_unref(mounts);

  g_print("\n *** DeepStream: Launched RTSP Streaming at "
          "rtsp://localhost:8554%s ***\n\n",
          mounts_point);
}

static GstElement *create_udpsink_bin(int index, guint udp_port,
                                      NvDsEncMode enc_mode,
                                      NvDsEncoderType enc_type) {
  GstCaps *caps = NULL;
  gchar elem_name[50];
  gchar encode_name[50];
  gchar rtppay_name[50];
  GstElement *bin = NULL;

  g_snprintf(elem_name, sizeof(elem_name), "sink_sub_bin_%d", index);
  bin = gst_bin_new(elem_name);
  if (!bin) {
    g_print("Failed to create '%s'", elem_name);
    goto done;
  }

  g_snprintf(elem_name, sizeof(elem_name), "sink_sub_bin_queue%d", index);
  GstElement *queue = gst_element_factory_make("queue", elem_name);
  if (!queue) {
    g_print("Failed to create '%s'", elem_name);
    goto done;
  }

  g_snprintf(elem_name, sizeof(elem_name), "sink_sub_bin_transform%d", index);
  GstElement *transform = gst_element_factory_make("nvvideoconvert", elem_name);
  if (!transform) {
    g_print("Failed to create '%s'", elem_name);
    goto done;
  }

  g_snprintf(elem_name, sizeof(elem_name), "sink_sub_bin_cap_filter%d", index);
  GstElement *cap_filter = gst_element_factory_make("capsfilter", elem_name);
  if (!cap_filter) {
    g_print("Failed to create '%s'", elem_name);
    goto done;
  }

  g_snprintf(encode_name, sizeof(encode_name), "sink_sub_bin_encoder%d", index);
  g_snprintf(rtppay_name, sizeof(rtppay_name), "sink_sub_bin_rtppay%d", index);

  GstElement *codecparse = NULL;
  GstElement *rtppay = NULL;
  GstElement *encoder = NULL;
  switch (enc_type) {
  case NV_DS_ENCODER_H264:
    codecparse = gst_element_factory_make("h264parse", "h264-parser");
    g_object_set(G_OBJECT(codecparse), "config-interval", -1, NULL);
    rtppay = gst_element_factory_make("rtph264pay", rtppay_name);
    if (enc_mode == NV_DS_ENCODER_MODE_SW) {
      encoder = gst_element_factory_make("x264enc", encode_name);
    } else {
      encoder = gst_element_factory_make("nvv4l2h264enc", encode_name);
      if (!encoder) {
        g_print("Could not create HW encoder. Falling back to SW encoder");
        encoder = gst_element_factory_make("x264enc", encode_name);
      }
    }
    break;
  case NV_DS_ENCODER_H265:
    codecparse = gst_element_factory_make("h265parse", "h265-parser");
    g_object_set(G_OBJECT(codecparse), "config-interval", -1, NULL);
    rtppay = gst_element_factory_make("rtph265pay", rtppay_name);
    if (enc_mode == NV_DS_ENCODER_MODE_SW) {
      encoder = gst_element_factory_make("x265enc", encode_name);
    } else {
      encoder = gst_element_factory_make("nvv4l2h265enc", encode_name);
      if (!encoder) {
        g_print("Could not create HW encoder. Falling back to SW encoder");
        encoder = gst_element_factory_make("x265enc", encode_name);
      }
    }
    break;
  default:
    goto done;
  }

  if (!encoder) {
    g_print("Failed to create '%s'", encode_name);
    goto done;
  }

  if (enc_mode == NV_DS_ENCODER_MODE_SW)
    caps = gst_caps_from_string("video/x-raw, format=I420");
  else
    caps = gst_caps_from_string("video/x-raw(memory:NVMM), format=NV12");

  g_object_set(G_OBJECT(cap_filter), "caps", caps, NULL);

  if (!rtppay) {
    g_print("Failed to create '%s'", rtppay_name);
    goto done;
  }

  if (enc_mode == NV_DS_ENCODER_MODE_SW) {
    // bitrate is in kbits/sec for software encoder x264enc and x265enc
    g_object_set(G_OBJECT(encoder), "bitrate", 4000000, NULL);
  } else {
    g_object_set(G_OBJECT(encoder), "bitrate", 5000000, NULL);
    g_object_set(G_OBJECT(encoder), "profile", 0, NULL);
    g_object_set(G_OBJECT(encoder), "iframeinterval", 25, NULL);
  }

  struct cudaDeviceProp prop;
  cudaGetDeviceProperties(&prop, 0);

  if (prop.integrated) {
    /* these properties exist on the Jetson HW encoders (nvv4l2h26xenc) */
    if (enc_mode == NV_DS_ENCODER_MODE_HW) {
      g_object_set(G_OBJECT(encoder), "preset-level", 1, NULL);
      g_object_set(G_OBJECT(encoder), "insert-sps-pps", 1, NULL);
      g_object_set(G_OBJECT(encoder), "gpu-id", 0, NULL);
    }
  } else {
    g_object_set(G_OBJECT(transform), "gpu-id", 0, NULL);
  }

  g_snprintf(elem_name, sizeof(elem_name), "sink_sub_bin_udpsink%d", index);
  GstElement *sink = gst_element_factory_make("udpsink", elem_name);
  if (!sink) {
    g_print("Failed to create '%s'", elem_name);
    goto done;
  }

  g_object_set(G_OBJECT(sink), "host", "127.0.0.1", "port", udp_port, "async",
               FALSE, "sync", TRUE, NULL);

  gst_bin_add_many(GST_BIN(bin), queue, cap_filter, transform, encoder,
                   codecparse, rtppay, sink, NULL);

  gst_element_link_many(queue, cap_filter, transform, encoder, codecparse,
                        rtppay, sink, NULL);

  GstPad *gstpad = gst_element_get_static_pad(queue, "sink");
  if (!gstpad) {
    g_print("Could not find sink in '%s'", GST_ELEMENT_NAME(queue));
    goto done;
  }
  gst_element_add_pad(bin, gst_ghost_pad_new("sink", gstpad));
  gst_object_unref(gstpad);
done:
  if (caps) {
    gst_caps_unref(caps);
  }
  return bin;
}

int main(int argc, char *argv[]) {
  GMainLoop *loop = NULL;
  GstElement *pipeline = NULL, *streammux = NULL, *sink = NULL, *pgie = NULL,
             *queue1, *queue2, *queue3, *queue4, *nvvidconv = NULL,
             *nvosd = NULL;
  GstBus *bus = NULL;
  guint bus_watch_id;
  guint i = 0, num_sources = 0;
  guint pgie_batch_size;

  int current_device = -1;
  cudaGetDevice(&current_device);
  struct cudaDeviceProp prop;
  cudaGetDeviceProperties(&prop, current_device);

  /* Check input arguments */
  if (argc < 2) {
    g_printerr("Usage: %s <uri1> [uri2] ... [uriN]\n", argv[0]);
    return -1;
  }

  GstRTSPServer *server = gst_rtsp_server_new();
  const char *rtsp_port = "8554";
  g_object_set(server, "service", rtsp_port, NULL);

  // g_setenv("GST_DEBUG_DUMP_DOT_DIR",
  //          "/opt/nvidia/deepstream/deepstream/sources/apps/sample_apps/"
  //          "deepstream-test3",
  //          TRUE);
  /* Standard GStreamer initialization */
  gst_init(&argc, &argv);
  loop = g_main_loop_new(NULL, FALSE);

  /* Create gstreamer elements */
  /* Create Pipeline element that will form a connection of other elements */
  pipeline = gst_pipeline_new("rtsp-pipeline");

  /* Create nvstreammux instance to form batches from one or more sources. */
  streammux = gst_element_factory_make("nvstreammux", "stream-muxer");

  if (!pipeline || !streammux) {
    g_printerr("One element could not be created. Exiting.\n");
    return -1;
  }
  gst_bin_add(GST_BIN(pipeline), streammux);

  GList *src_list = NULL;

  num_sources = argc - 1;

  gchar pad_name[16] = {};
  for (i = 0; i < num_sources; i++) {
    GstPad *sinkpad, *srcpad;

    GstElement *source_bin = NULL;
    source_bin = create_source_bin(i, argv[i + 1]);
    if (!source_bin) {
      g_printerr("Failed to create source bin. Exiting.\n");
      return -1;
    }

    gst_bin_add(GST_BIN(pipeline), source_bin);

    g_snprintf(pad_name, 15, "sink_%u", i);
    sinkpad = gst_element_request_pad_simple(streammux, pad_name);
    if (!sinkpad) {
      g_printerr("Streammux request sink pad failed. Exiting.\n");
      return -1;
    }

    srcpad = gst_element_get_static_pad(source_bin, "src");
    if (!srcpad) {
      g_printerr("Failed to get src pad of source bin. Exiting.\n");
      return -1;
    }

    if (gst_pad_link(srcpad, sinkpad) != GST_PAD_LINK_OK) {
      g_printerr("Failed to link source bin to stream muxer. Exiting.\n");
      return -1;
    }

    gst_object_unref(srcpad);
    gst_object_unref(sinkpad);
  }

  pgie = gst_element_factory_make("nvinfer", "primary-nvinference-engine");

  /* Add queue elements between every two elements */
  queue1 = gst_element_factory_make("queue", "queue1");
  queue2 = gst_element_factory_make("queue", "queue2");
  queue3 = gst_element_factory_make("queue", "queue3");
  queue4 = gst_element_factory_make("queue", "queue4");

  /* Use convertor to convert from NV12 to RGBA as required by nvosd */
  nvvidconv = gst_element_factory_make("nvvideoconvert", "nvvideo-converter");

  /* Create OSD to draw on the converted RGBA buffer */
  nvosd = gst_element_factory_make("nvdsosd", "nv-onscreendisplay");

  GstElement *demux = gst_element_factory_make("nvstreamdemux", "demux");
  if (!pgie || !nvvidconv || !nvosd || !demux) {
    g_printerr("One element could not be created. Exiting.\n");
    return -1;
  }

  g_object_set(G_OBJECT(streammux), "batch-size", num_sources, NULL);

  g_object_set(G_OBJECT(streammux), "width", MUXER_OUTPUT_WIDTH, "height",
               MUXER_OUTPUT_HEIGHT, "batched-push-timeout",
               MUXER_BATCH_TIMEOUT_USEC, NULL);

  /* Configure the nvinfer element using the nvinfer config file. */
  g_object_set(G_OBJECT(pgie), "config-file-path", "dstest3_pgie_config.txt",
               NULL);

  /* Override the batch-size set in the config file with the number of sources.
   */
  g_object_get(G_OBJECT(pgie), "batch-size", &pgie_batch_size, NULL);
  if (pgie_batch_size != num_sources) {
    g_printerr("WARNING: Overriding infer-config batch-size (%d) with number "
               "of sources (%d)\n",
               pgie_batch_size, num_sources);
    g_object_set(G_OBJECT(pgie), "batch-size", num_sources, NULL);
  }

  g_object_set(G_OBJECT(nvosd), "process-mode", OSD_PROCESS_MODE,
               "display-text", OSD_DISPLAY_TEXT, NULL);

  /* we add a message handler */
  bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
  bus_watch_id = gst_bus_add_watch(bus, bus_call, loop);
  gst_object_unref(bus);

  /* Set up the pipeline */
  /* we add all elements into the pipeline */
  gst_bin_add_many(GST_BIN(pipeline), queue1, pgie, queue2, nvvidconv, queue3,
                   nvosd, queue4, demux, NULL);
  /* we link the elements together */
  if (!gst_element_link_many(streammux, queue1, pgie, queue2, nvvidconv, queue3,
                             nvosd, queue4, demux, NULL)) {
    g_printerr("Elements could not be linked. Exiting.\n");
    return -1;
  }

  char mounts_str[16] = {0};
  for (i = 0; i < num_sources; i++) {
    guint udp_port = 5400 + i;
    g_snprintf(pad_name, 15, "src_%u", i);
    g_snprintf(mounts_str, 15, "/ds-test-%u", i);
    GstPad *srcpad = gst_element_request_pad_simple(demux, pad_name);
    if (!srcpad) {
      g_printerr("Failed to get src pad of demux. Exiting.\n");
      continue;
    }
    g_print("mounts_str = %s\n", mounts_str);
    GstElement *udpsink = create_udpsink_bin(i, udp_port, NV_DS_ENCODER_MODE_HW,
                                             NV_DS_ENCODER_H264);
    gst_bin_add_many(GST_BIN(pipeline), udpsink, NULL);
    GstPad *sinkpad = gst_element_get_static_pad(udpsink, "sink");
    if (!sinkpad) {
      g_printerr("Failed to get sink pad of udpsink. Exiting.\n");
      continue;
    }
    gst_pad_link(srcpad, sinkpad);
    gst_object_unref(srcpad);
    gst_object_unref(sinkpad);
    start_rtsp_streaming(server, udp_port, NV_DS_ENCODER_H264, mounts_str, 0);
  }

  gst_rtsp_server_attach(server, NULL);
  GstPad *tiler_src_pad = gst_element_get_static_pad(pgie, "src");
  if (!tiler_src_pad)
    g_print("Unable to get src pad\n");
  else
    gst_pad_add_probe(tiler_src_pad, GST_PAD_PROBE_TYPE_BUFFER,
                      tiler_src_pad_buffer_probe, NULL, NULL);
  gst_object_unref(tiler_src_pad);

  g_print("Now playing:");
  for (i = 0; i < num_sources; i++) {
    g_print(" %s,", argv[i + 1]);
  }
  g_print("\n");
  gst_element_set_state(pipeline, GST_STATE_PLAYING);
  GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL,
                                    "demo-app-pipeline");

  /* Wait till pipeline encounters an error or EOS */
  g_print("Running...\n");
  g_main_loop_run(loop);

  /* Out of the main loop, clean up nicely */
  g_print("Returned, stopping playback\n");
  gst_element_set_state(pipeline, GST_STATE_NULL);

  for (int i = num_sources - 1; i >= 0; i--) {
    g_snprintf(mounts_str, 15, "/ds-test-%u", i);
    g_print("Destroying RTSP server %d %s\n", i, mounts_str);
    destroy_rtsp_server(server, mounts_str);
  }
  g_print("Deleting pipeline\n");
  gst_object_unref(GST_OBJECT(pipeline));
  g_source_remove(bus_watch_id);
  g_main_loop_unref(loop);
  return 0;
}

Please refer to the above code: save it to /opt/nvidia/deepstream/deepstream/sources/apps/sample_apps/deepstream-test3 as deepstream_test3_app.c, then run make CUDA_VER=12.6 (for DS-7.1).

Run this command line.

./deepstream-test3-app file:///opt/nvidia/deepstream/deepstream/samples/streams/sample_720p.h264 file:///opt/nvidia/deepstream/deepstream/samples/streams/sample_720p.h264

# ffplay rtsp://localhost:8554/ds-test-0
# ffplay rtsp://localhost:8554/ds-test-1

This problem is not related to DeepStream; it is purely a GStreamer question, so please discuss it in the GStreamer community.

Thank You

The error message says that the pipeline cannot be built. We usually don't debug user code.

Here are some suggestions: enable the commented-out GST_DEBUG_DUMP_DOT_DIR code in the program above, then use GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS to inspect the elements in the pipeline at different stages (such as NULL → PLAYING) and check why negotiation is not possible.
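A minimal sketch of that approach (the /tmp dump directory and the file labels are assumptions):

/* GST_DEBUG_DUMP_DOT_DIR is read during gst_init(), so set it first */
g_setenv("GST_DEBUG_DUMP_DOT_DIR", "/tmp", TRUE);
gst_init(&argc, &argv);

/* build the pipeline, then dump a graph per state of interest */
GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(pipeline),
                                  GST_DEBUG_GRAPH_SHOW_ALL, "null-state");
gst_element_set_state(pipeline, GST_STATE_PLAYING);
GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(pipeline),
                                  GST_DEBUG_GRAPH_SHOW_ALL, "playing-state");

/* render the dumps with graphviz, e.g.: dot -Tpng <file>.dot -o out.png */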
