gst_element_set_state (pipeline, GST_STATE_NULL) does not return (cannot stop the pipeline)

1. My pipeline takes 2 RTSP input streams, runs inference on them, and then splits the result back out with "nvstreamdemux" into 2 output RTSP streams.
2. The problem: when one of the RTSP streams goes offline, an error occurs (log below), and the call "gst_element_set_state (pipeline, GST_STATE_NULL);" never returns.


3. The code is based on deepstream-test3 and is shown below:
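
For reference, the topology the code below builds for 2 sources is roughly (ports and element names taken from the code):

  source-bin-00 ──┐                                         ┌── src_0 ── rtsp-bin-00 ── udpsink :5400 ── RTSP rtsp://<host>:8554/ds-test
                  ├── nvstreammux ── nvinfer ── nvstreamdemux
  source-bin-01 ──┘                                         └── src_1 ── rtsp-bin-01 ── udpsink :5401 ── RTSP rtsp://<host>:8555/ds-test

where each rtsp-bin is: queue → nvvideoconvert → nvdsosd → nvvideoconvert → capsfilter → nvv4l2h264enc → rtph264pay → udpsink.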

/*
 * Copyright (c) 2018-2019, NVIDIA CORPORATION. All rights reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
#include <math.h>
#include <string.h>
#include <sys/time.h>
#include <gst/rtsp-server/rtsp-server.h>
#include "gstnvdsmeta.h"
#include <stdlib.h>
#include <ctype.h>

//#include "gstnvstreammeta.h"
#ifndef PLATFORM_TEGRA
#include "gst-nvmessage.h"
#endif

#define MAX_DISPLAY_LEN 64

/* The muxer output resolution must be set if the input streams will be of
 * different resolution. The muxer will scale all the input frames to this
 * resolution. */
#define MUXER_OUTPUT_WIDTH 1920
#define MUXER_OUTPUT_HEIGHT 1080

#define TILED_OUTPUT_WIDTH 1920
#define TILED_OUTPUT_HEIGHT 1080

/* Muxer batch formation timeout, for e.g. 40 millisec. Should ideally be set
 * based on the fastest source's framerate. */
#define MUXER_BATCH_TIMEOUT_USEC 40000

/* NVIDIA Decoder source pad memory feature. This feature signifies that source
 * pads having this capability will push GstBuffers containing cuda buffers. */
#define GST_CAPS_FEATURES_NVMM "memory:NVMM"

#define bufSize 1024
#define MaxKeyLength 80

#define CONFIG_PATH "/home/user/XM_COSO_RTG/Broad.conf"

#define FPS_PRINT_INTERVAL 300
//static struct timeval start_time = { };

//static guint probe_counter = 0;

/* tiler_sink_pad_buffer_probe  will extract metadata received on OSD sink pad
 * and update params for drawing rectangle, object information etc. */
 
/* *
 * @param buf
 * @desc Strip whitespace/control characters (and, optionally, comments) from buf
 * */
char *trimSp(char *buf)
{
    char tbuf[bufSize] = {'\0'};
    if (buf == NULL || strlen(buf) == 0)
    {
        return NULL;
    }
    int len = strlen(buf);
    int i = 0, j = 0;
    while (i < len)
    {
        //Comments in the config file use "//"
        //If '#' is used for comments instead, use the following code:
        //if (buf[i] == '#')
        //    break;
        //if (buf[i] == '/' && buf[i + 1] == '/')
            //break;
 
        if (iscntrl(buf[i]) == 0 && isspace(buf[i]) == 0)
        {
            tbuf[j] = buf[i];
            j++;
        }
 
        i++;
    }
 
    //Copy the cleaned result back into buf
    memset(buf, '\0', len);
    strncpy(buf, tbuf, strlen(tbuf));
    return buf;
}


/***
 * @param buf
 * @desc Return the index of '=' in buf, or the string length if '=' is not found
 **/
int getPos(char *buf)
{
    if (buf == NULL || strlen(buf) == 0)
    {
        return -1;
    }
    int len = strlen(buf);
    int i = 0;
    while (i < len)
    {
        if (buf[i] == '=')
        {
            break;
        }
        i++;
    }
    return i;
}



/* *
 * @param file
 * @param AppName
 * @param KeyName
 * @param KeyVal
 * @desc  Read the value of key KeyName from section [AppName] of file 'file' into KeyVal
 * */
int ReadConf(char *file, char *AppName, char *KeyName, char *KeyVal)
{
    char buf[bufSize] = {'\0'};
    int find_app_name = 0;
 
    if (file == NULL || strlen(file) == 0) //check the file name (path)
    {
        return -1;
    }
    FILE *fp = fopen(file, "r"); //open the file
 
    if (fp == NULL) //check the file handle
    {
        return -1;
    }
 
    while (fgets(buf, bufSize, fp) != NULL)
    {
        char *tbuf = trimSp(buf);
        int pos = getPos(tbuf);
        int len = strlen(tbuf);
 
        //printf("string:%s\n", tbuf);
 
        //Found a section header: [NAME]
        if (len > 0 && len == pos && tbuf[0] == '[' && tbuf[len - 1] == ']')
        {
            //Hold NAME
            char *t_appname = (char *)malloc(len - 1);
            memset(t_appname, '\0', len - 1);
            strncpy(t_appname, tbuf + 1, len - 2);
 
            //Check whether this section is AppName
            if (strncmp(t_appname, AppName, strlen(t_appname)) == 0)
                find_app_name = 1;
            else
                find_app_name = 0;
 
            //Free the temporary buffer to avoid a memory leak
            free(t_appname);
        }
        else
        {
            //Key name and value
            if (len > 0 && pos != len && find_app_name)
            {
                char *val = tbuf + pos + 1;//value
                tbuf[pos] = '\0';//replace '=' with '\0' to split the key name from the value
                char *key = tbuf;//key name
 
                //Found the key we are looking for
                if (strncmp(key, KeyName, strlen(key)) == 0)
                {
                    //Copy the value into KeyVal
                    strncpy(KeyVal, val, strlen(val));
                    break;
                }
            }
        }
    }
    //Close the file handle
    if (fp != NULL)
    {
        fclose(fp);
    }
 
    return 1;
}
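
/* Example of the Broad.conf layout that ReadConf() above expects, based on the
 * calls made from main() (the RTSP URLs are placeholders, not from the
 * original post):
 *
 *   [source]
 *   num=2
 *
 *   [camera]
 *   url_0=rtsp://<camera-0>/stream
 *   url_1=rtsp://<camera-1>/stream
 */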

static GstPadProbeReturn
tiler_src_pad_buffer_probe (GstPad * pad, GstPadProbeInfo * info,
    gpointer u_data)
{
    GstBuffer *buf = (GstBuffer *) info->data;
    NvDsObjectMeta *obj_meta = NULL;
    NvDsMetaList * l_frame = NULL;
    NvDsMetaList * l_obj = NULL;
    NvDsDisplayMeta *display_meta = NULL;

    NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta (buf);

    for (l_frame = batch_meta->frame_meta_list; l_frame != NULL;
      l_frame = l_frame->next) {
        NvDsFrameMeta *frame_meta = (NvDsFrameMeta *) (l_frame->data);
        int offset = 0;
		
		if(frame_meta->obj_meta_list != NULL)
		{
			display_meta = nvds_acquire_display_meta_from_pool(batch_meta);
			NvOSD_TextParams *txt_params  = &display_meta->text_params[0];
			display_meta->num_labels = 1;
			txt_params->display_text = g_malloc0 (MAX_DISPLAY_LEN);
			offset = snprintf(txt_params->display_text, MAX_DISPLAY_LEN, "WARNING");

			/* Now set the offsets where the string should appear */
			txt_params->x_offset = 10;
			txt_params->y_offset = 12;

			/* Font , font-color and font-size */
			txt_params->font_params.font_name = "Courier";
			txt_params->font_params.font_size = 25;
			txt_params->font_params.font_color.red = 1.0;
			txt_params->font_params.font_color.green = 0.0;
			txt_params->font_params.font_color.blue = 0.0;
			txt_params->font_params.font_color.alpha = 1.0;

			/* Text background color */
			txt_params->set_bg_clr = 1;
			txt_params->text_bg_clr.red = 0.0;
			txt_params->text_bg_clr.green = 0.0;
			txt_params->text_bg_clr.blue = 0.0;
			txt_params->text_bg_clr.alpha = 0.0;

			nvds_add_display_meta_to_frame(frame_meta, display_meta);
		}
    }
	
    return GST_PAD_PROBE_OK;
}

static gboolean
bus_call (GstBus * bus, GstMessage * msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_WARNING:
    {
      gchar *debug;
      GError *error;
      gst_message_parse_warning (msg, &error, &debug);
      g_printerr ("WARNING from element %s: %s\n",
          GST_OBJECT_NAME (msg->src), error->message);
      g_free (debug);
      g_printerr ("Warning: %s\n", error->message);
      g_error_free (error);
      break;
    }
    case GST_MESSAGE_ERROR:
    {
      gchar *debug;
      GError *error;
      gst_message_parse_error (msg, &error, &debug);
      g_printerr ("ERROR from element %s: %s\n",
          GST_OBJECT_NAME (msg->src), error->message);
      if (debug)
        g_printerr ("Error details: %s\n", debug);
      g_free (debug);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
#ifndef PLATFORM_TEGRA
    case GST_MESSAGE_ELEMENT:
    {
      if (gst_nvmessage_is_stream_eos (msg)) {
        guint stream_id;
        if (gst_nvmessage_parse_stream_eos (msg, &stream_id)) {
          g_print ("Got EOS from stream %d\n", stream_id);
        }
      }
      break;
    }
#endif
    default:
      break;
  }
  return TRUE;
}

static void
cb_newpad (GstElement * decodebin, GstPad * decoder_src_pad, gpointer data)
{
  g_print ("In cb_newpad\n");
  GstCaps *caps = gst_pad_get_current_caps (decoder_src_pad);
  const GstStructure *str = gst_caps_get_structure (caps, 0);
  const gchar *name = gst_structure_get_name (str);
  GstElement *source_bin = (GstElement *) data;
  GstCapsFeatures *features = gst_caps_get_features (caps, 0);

  /* Need to check if the pad created by the decodebin is for video and not
   * audio. */
  if (!strncmp (name, "video", 5)) {
    /* Link the decodebin pad only if decodebin has picked nvidia
     * decoder plugin nvdec_*. We do this by checking if the pad caps contain
     * NVMM memory features. */
    if (gst_caps_features_contains (features, GST_CAPS_FEATURES_NVMM)) {
      /* Get the source bin ghost pad */
      GstPad *bin_ghost_pad = gst_element_get_static_pad (source_bin, "src");
      if (!gst_ghost_pad_set_target (GST_GHOST_PAD (bin_ghost_pad),
              decoder_src_pad)) {
        g_printerr ("Failed to link decoder src pad to source bin ghost pad\n");
      }
      gst_object_unref (bin_ghost_pad);
    } else {
      g_printerr ("Error: Decodebin did not pick nvidia decoder plugin.\n");
    }
  }
}

static void
decodebin_child_added (GstChildProxy * child_proxy, GObject * object,
    gchar * name, gpointer user_data)
{
  g_print ("Decodebin child added: %s\n", name);
  if (g_strrstr (name, "decodebin") == name) {
    g_signal_connect (G_OBJECT (object), "child-added",
        G_CALLBACK (decodebin_child_added), user_data);
  }
  if (g_strstr_len (name, -1, "nvv4l2decoder") == name) {
    g_print ("Seting bufapi_version\n");
    g_object_set (object, "bufapi-version", TRUE, NULL);
  }
}

static GstElement *
create_source_bin (guint index, gchar  *uri)
{
  GstElement *bin = NULL, *uri_decode_bin = NULL;
  gchar bin_name[16] = { };

  g_snprintf (bin_name, 15, "source-bin-%02d", index);
  /* Create a source GstBin to abstract this bin's content from the rest of the
   * pipeline */
  bin = gst_bin_new (bin_name);

  /* Source element for reading from the uri.
   * We will use decodebin and let it figure out the container format of the
   * stream and the codec and plug the appropriate demux and decode plugins. */
  uri_decode_bin = gst_element_factory_make ("uridecodebin", "uri-decode-bin");

  if (!bin || !uri_decode_bin) {
    g_printerr ("One element in source bin could not be created.\n");
    return NULL;
  }

  /* We set the input uri to the source element */
  g_object_set (G_OBJECT (uri_decode_bin), "uri", uri, "async-handling", 1, NULL);
  /* Connect to the "pad-added" signal of the decodebin which generates a
   * callback once a new pad for raw data has been created by the decodebin */
  g_signal_connect (G_OBJECT (uri_decode_bin), "pad-added",
      G_CALLBACK (cb_newpad), bin);
  g_signal_connect (G_OBJECT (uri_decode_bin), "child-added",
      G_CALLBACK (decodebin_child_added), bin);

  gst_bin_add (GST_BIN (bin), uri_decode_bin);

  /* We need to create a ghost pad for the source bin which will act as a proxy
   * for the video decoder src pad. The ghost pad will not have a target right
   * now. Once the decode bin creates the video decoder and generates the
   * cb_newpad callback, we will set the ghost pad target to the video decoder
   * src pad. */
  if (!gst_element_add_pad (bin, gst_ghost_pad_new_no_target ("src",
              GST_PAD_SRC))) {
    g_printerr ("Failed to add ghost pad in source bin\n");
    return NULL;
  }

  return bin;
}

static gboolean
start_rtsp_streaming (guint rtsp_port_num, guint updsink_port_num)
{
  GstRTSPServer *server;
  GstRTSPMountPoints *mounts;
  GstRTSPMediaFactory *factory;
  char udpsrc_pipeline[512];

  char port_num_Str[64] = { 0 };
  char *encoder_name;

  encoder_name = "H264";
  
  sprintf (udpsrc_pipeline,
      "( udpsrc name=pay0 port=%d caps=\"application/x-rtp, media=video, "
      "clock-rate=90000, encoding-name=%s, payload=96 \" )",
      updsink_port_num, encoder_name);

  sprintf (port_num_Str, "%d", rtsp_port_num);

  server = gst_rtsp_server_new ();
  g_object_set (server, "service", port_num_Str, NULL);

  mounts = gst_rtsp_server_get_mount_points (server);

  factory = gst_rtsp_media_factory_new ();
  gst_rtsp_media_factory_set_launch (factory, udpsrc_pipeline);
  gst_rtsp_media_factory_set_shared (factory, TRUE);
  gst_rtsp_mount_points_add_factory (mounts, "/ds-test", factory);

  g_object_unref (mounts);

  gst_rtsp_server_attach (server, NULL);

  g_print
      ("\n *** DeepStream: Launched RTSP Streaming at rtsp://localhost:%d/ds-test ***\n\n",
      rtsp_port_num);

  return TRUE;
}

static GstElement *
create_rtsp_bin (guint index){
  GstElement *bin = NULL, *queue_out = NULL, *nvvidconv = NULL, *nvosd = NULL, *transform = NULL, *cap_filter = NULL, 
  *encoder = NULL, *rtppay = NULL, *sink = NULL;

  GstCaps *caps = NULL;
  gchar bin_name[16] = {};

  g_snprintf (bin_name, 15, "rtsp-bin-%02d", index);
  g_print ("create rtsp_bin: %s\n", bin_name);
  bin = gst_bin_new (bin_name);
  
  g_snprintf (bin_name, 15, "queue_out-%02d", index);
  queue_out = gst_element_factory_make ("queue", bin_name);
  g_snprintf (bin_name, 15, "nvvidconv-%02d", index);
  nvvidconv = gst_element_factory_make ("nvvideoconvert", bin_name);
  g_snprintf (bin_name, 15, "nvosd-%02d", index);
  nvosd = gst_element_factory_make ("nvdsosd", bin_name);
   
  g_snprintf (bin_name, 15, "transform-%02d", index);
  transform = gst_element_factory_make ("nvvideoconvert", bin_name);
  g_snprintf (bin_name, 15, "filter-%02d", index);
  cap_filter = gst_element_factory_make ("capsfilter", bin_name);
  g_snprintf (bin_name, 15, "h264-encoder-%02d", index);
  encoder = gst_element_factory_make ("nvv4l2h264enc", bin_name);
  g_snprintf (bin_name, 15, "rtppay-h264-%02d", index);
  rtppay = gst_element_factory_make ("rtph264pay", bin_name);
  g_snprintf (bin_name, 15, "udpsink-%02d", index);
  sink = gst_element_factory_make ("udpsink", bin_name);

  gst_bin_add_many (GST_BIN (bin), queue_out, nvvidconv, nvosd, transform, cap_filter, encoder, rtppay, sink, NULL);

  caps = gst_caps_from_string ("video/x-raw(memory:NVMM), format=I420");
  g_object_set (G_OBJECT (cap_filter), "caps", caps, NULL);
  g_object_set (G_OBJECT (encoder), "bitrate", 4000000, NULL);
  g_object_set (G_OBJECT (encoder), "preset-level", 1, NULL);
  g_object_set (G_OBJECT (encoder), "insert-sps-pps", 1, NULL);
  g_object_set (G_OBJECT (encoder), "bufapi-version", 1, NULL);
  gst_caps_unref(caps);

  g_object_set (G_OBJECT (sink), "port", 5400+index, "async", 1, "sync", 0, NULL);
  if (!gst_element_link_many (queue_out, nvvidconv, nvosd, transform, cap_filter, encoder, rtppay, sink, NULL)) {
    g_printerr ("Elements could not be linked: 3. Exiting.\n");
    return NULL;
  }

  gboolean ret = TRUE;

  ret = start_rtsp_streaming (8554+index, 5400+index);
  if (ret != TRUE) {
    g_print ("%s: start_rtsp_straming function failed\n", __func__);
  }

  GstPad *queue_out_sink = gst_element_get_static_pad (queue_out, "sink");
  gst_element_add_pad(bin, gst_ghost_pad_new("sink",queue_out_sink));
  gst_object_unref(queue_out_sink);

  return bin;
}

int
main (int argc, char *argv[])
{
  GMainLoop *loop = NULL;
  GstElement *pipeline = NULL, *streammux = NULL, *streamdemux = NULL, *pgie = NULL;

  GstCaps *caps = NULL;
  GstBus *bus = NULL;
  guint bus_watch_id;
  GstPad *tiler_src_pad = NULL;
  guint i,num_sources;
  guint pgie_batch_size;
  guint tiler_rows, tiler_columns;
  gchar KeyValue[MaxKeyLength];
  gchar rtsp_source [MaxKeyLength];
  GMutex struct_lock;
  
  memset(KeyValue,'\0',MaxKeyLength);
  ReadConf(CONFIG_PATH, "source", "num", KeyValue);
  num_sources = atoi(KeyValue);
  g_printerr("sourceNum is %d\n", num_sources);

  /* Standard GStreamer initialization */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* Create gstreamer elements */
  /* Create Pipeline element that will form a connection of other elements */
  pipeline = gst_pipeline_new ("dstest3-pipeline");

  /* Create nvstreammux instance to form batches from one or more sources. */
  streammux = gst_element_factory_make ("nvstreammux", "stream-muxer");
  streamdemux = gst_element_factory_make ("nvstreamdemux", "stream-demuxer");

  if (!pipeline || !streammux || !streamdemux) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }
  gst_bin_add (GST_BIN (pipeline), streammux);
  gst_bin_add (GST_BIN (pipeline), streamdemux);

  for (i = 0; i < num_sources; i++) {
    GstPad *sinkpad, *srcpad;
    gchar pad_name[16] = {};
    gchar source_name[8] = {};

    g_snprintf (source_name, 7, "url_%1d", i);
    memset (rtsp_source, '\0', MaxKeyLength);
    ReadConf (CONFIG_PATH, "camera", source_name, rtsp_source);
    g_printerr ("rtsp_source is %s\n", rtsp_source);
    GstElement *source_bin = create_source_bin (i, rtsp_source);
    GstElement *rtsp_bin = create_rtsp_bin (i);

    if (!source_bin) {
      g_printerr ("Failed to create source bin. Exiting.\n");
      return -1;
    }

    gst_bin_add (GST_BIN (pipeline), source_bin);
    g_snprintf (pad_name, 15, "sink_%u", i);
    sinkpad = gst_element_get_request_pad (streammux, pad_name);
    if (!sinkpad) {
      g_printerr ("Streammux request sink pad failed. Exiting.\n");
      return -1;
    }

    srcpad = gst_element_get_static_pad (source_bin, "src");
    if (!srcpad) {
      g_printerr ("Failed to get src pad of source bin. Exiting.\n");
      return -1;
    }

    if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
      g_printerr ("Failed to link source bin to stream muxer. Exiting.\n");
      return -1;
    }
	
    //demux
    if (!rtsp_bin) {
      g_printerr ("Failed to create rtsp bin. Exiting.\n");
      return -1;
    }
    gst_bin_add (GST_BIN (pipeline), rtsp_bin);
    g_snprintf (pad_name, 15, "src_%u", i);
    srcpad = gst_element_get_request_pad (streamdemux, pad_name);
    if (!srcpad) {
      g_printerr ("Streamdemux request src pad failed. Exiting.\n");
      return -1;
    }

    sinkpad = gst_element_get_static_pad (rtsp_bin, "sink");
    if (!sinkpad) {
      g_printerr ("Failed to get sink pad of rtsp_bin. Exiting.\n");
      return -1;
    }

    if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
      g_printerr ("Failed to link streamdemuxbin to rtsp_bin. Exiting.\n");
      return -1;
    }

    gst_object_unref (srcpad);
    gst_object_unref (sinkpad);
	
  }

  /* Use nvinfer to infer on batched frame. */
  pgie = gst_element_factory_make ("nvinfer", "primary-nvinference-engine");

  if (!pgie ) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  g_object_set (G_OBJECT (streammux), "width", MUXER_OUTPUT_WIDTH, "height",
      MUXER_OUTPUT_HEIGHT, "batch-size", num_sources, "live-source", 1,
      "batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);
  g_object_set (G_OBJECT (pgie),
      "config-file-path", "/home/user/XM_COSO_RTG/dstest3_pgie_config.txt","batch-size", num_sources, NULL);

  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);


  gst_bin_add_many (GST_BIN (pipeline), pgie, NULL);
  if (!gst_element_link_many (streammux, pgie, streamdemux, NULL)) {
    g_printerr ("Elements could not be linked. Exiting.\n");
    return -1;
  }

  /* Set the pipeline to "playing" state */
  g_print ("Now playing:");
  for (i = 0; i < num_sources; i++) {
    g_print (" %s,", argv[i + 1]);
  }
  g_print ("\n");
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait till pipeline encounters an error or EOS */
  g_print ("Running...\n");
  GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipeline),
                GST_DEBUG_GRAPH_SHOW_ALL, "ds-app-playing");
  g_main_loop_run (loop);
  
  //g_mutex_lock (&struct_lock);
  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);
  //g_mutex_unlock (&struct_lock);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);
  return 0;
}

Hi,
For the use case of dynamically adding/deleting sources, please refer to this sample:
deepstream_reference_apps/runtime_source_add_delete at master · NVIDIA-AI-IOT/deepstream_reference_apps · GitHub
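
In that sample, a lost or removed camera is torn down per source instead of setting the whole pipeline to NULL. Below is a minimal sketch of that pattern, adapted to the element names used in the code above; the g_source_bin[] array, the *_ref globals, and remove_source() itself are hypothetical bookkeeping, not part of the original code:

/* Hypothetical bookkeeping (not in the original code): */
#define MAX_NUM_SOURCES 16
static GstElement *g_source_bin[MAX_NUM_SOURCES];
static GstElement *g_pipeline_ref;   /* set to `pipeline` in main() */
static GstElement *g_streammux_ref;  /* set to `streammux` in main() */

/* Sketch only: stop and remove one source instead of setting the whole
 * pipeline to GST_STATE_NULL. */
static void
remove_source (guint source_id)
{
  gchar pad_name[16] = { };
  GstPad *sinkpad = NULL;

  /* Stop just the failing source bin; the rest of the pipeline keeps running. */
  if (gst_element_set_state (g_source_bin[source_id], GST_STATE_NULL) ==
      GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Failed to set source %u to NULL\n", source_id);
    return;
  }

  /* Flush and release the matching nvstreammux request pad. */
  g_snprintf (pad_name, 15, "sink_%u", source_id);
  sinkpad = gst_element_get_static_pad (g_streammux_ref, pad_name);
  if (sinkpad) {
    gst_pad_send_event (sinkpad, gst_event_new_flush_stop (FALSE));
    gst_element_release_request_pad (g_streammux_ref, sinkpad);
    gst_object_unref (sinkpad);
  }

  /* Remove the source bin from the pipeline (gst_bin_remove drops its ref). */
  gst_bin_remove (GST_BIN (g_pipeline_ref), g_source_bin[source_id]);
  g_source_bin[source_id] = NULL;
}

Calling remove_source() from the GLib main loop (e.g. via g_idle_add() when bus_call() sees the error from that source bin), rather than quitting the loop and setting the whole pipeline to NULL, keeps the state change off the streaming threads, and the other stream can keep running.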