Could not link pipeline

Please provide complete information as applicable to your setup.

• Hardware Platform (Jetson / GPU)
• DeepStream Version 6.0.1
• JetPack Version (valid for Jetson only) 4.6.3
• TensorRT Version 8.4
• NVIDIA GPU Driver Version (valid for GPU only) 10.2
• Issue Type( questions, new requirements, bugs) - the issue is that the pipeline is not linked correctly when performing a smart record event.
• How to reproduce the issue ? (This is for bugs. Including which sample app is using, the configuration files content, the command line used and other details for reproducing) actually this is for pipeline bugs
• Requirement details( This is for new requirement. Including the module name-for which plugin or for which sample application, the function description)

Error occurring:
jetson@ubuntu:~/Documents/deepstream-testsr-app$ ./deepstream-testsr-app “rtsp://admin:Sieora123@192.168.1.108:554/cam/realmonitor?channel=3&subtype=0” --enc-type=1 --sink-type=1 --bbox-enable=1 --sr-mode=1
Hello
Now playing: rtsp://admin:Sieora123@192.168.1.108:554/cam/realmonitor?channel=3&subtype=0
Opening in BLOCKING MODE
0:00:10.325195570 11567 0x5581f0c260 INFO nvinfer gstnvinfer.cpp:638:gst_nvinfer_logger: NvDsInferContext[UID 1]: Info from NvDsInferContextImpl::deserializeEngineAndBackend() <nvdsinfer_context_impl.cpp:1900> [UID = 1]: deserialized trt engine from :/home/jetson/Documents/deepstream-testsr-app/Primary_Detector/resnet10.caffemodel_b1_gpu0_fp16.engine
INFO: [Implicit Engine Info]: layers num: 3
0 INPUT kFLOAT input_1 3x368x640
1 OUTPUT kFLOAT conv2d_bbox 16x23x40
2 OUTPUT kFLOAT conv2d_cov/Sigmoid 4x23x40

0:00:10.326747306 11567 0x5581f0c260 INFO nvinfer gstnvinfer.cpp:638:gst_nvinfer_logger: NvDsInferContext[UID 1]: Info from NvDsInferContextImpl::generateBackendContext() <nvdsinfer_context_impl.cpp:2004> [UID = 1]: Use deserialized engine model: /home/jetson/Documents/deepstream-testsr-app/Primary_Detector/resnet10.caffemodel_b1_gpu0_fp16.engine
0:00:10.360413816 11567 0x5581f0c260 INFO nvinfer gstnvinfer_impl.cpp:313:notifyLoadModelStatus: [UID 1]: Load new model:dstestsr_pgie_config.txt sucessfully
Running…
Recording started…
In cb_newpad
Failed to link depay loader to rtsp srcERROR from element udpsrc0: Internal data stream error.
Error details: gstbasesrc.c(3055): gst_base_src_loop (): /GstPipeline:dstest-sr-pipeline/GstRTSPSrc:rtsp-source/GstUDPSrc:udpsrc0:
streaming stopped, reason not-linked (-1)
** ERROR: RunUserCallback:207: No video stream found
Returned, stopping playback
Deleting pipeline

this is my compilation file
# Makefile for deepstream-testsr-app (DeepStream 6.0, Jetson / dGPU).
# Fixes vs. pasted version: straight quotes in $(error), real "../../.."
# include path (was mangled to "…"), and pkg-config wrapped in $(shell ...)
# so its output is actually expanded into CFLAGS/LIBS.
CUDA_VER?=10.2

ifeq ($(CUDA_VER),)
$(error "CUDA_VER is not set")
endif

APP:= deepstream-testsr-app

# First field of the gcc target triple, e.g. "aarch64" on Jetson.
TARGET_DEVICE = $(shell gcc -dumpmachine | cut -f1 -d -)

NVDS_VERSION:=6.0

LIB_INSTALL_DIR?=/opt/nvidia/deepstream/deepstream-$(NVDS_VERSION)/lib/
APP_INSTALL_DIR?=/opt/nvidia/deepstream/deepstream-$(NVDS_VERSION)/bin/

ifeq ($(TARGET_DEVICE),aarch64)
CFLAGS:= -DPLATFORM_TEGRA
endif

SRCS:= $(wildcard *.c)
INCS:= $(wildcard *.h)
PKGS:= gstreamer-1.0

OBJS:= $(SRCS:.c=.o)

CFLAGS+= -I../../../includes \
	-I /usr/local/cuda-$(CUDA_VER)/include \
	-I /home/jetson/Documents/deepstream-testsr-app/include \
	-I /home/jetson/Documents/deepstream-testsr-app/includes

# pkg-config output must be captured via $(shell ...); a bare "pkg-config"
# word would be passed to the compiler literally.
CFLAGS+= $(shell pkg-config --cflags $(PKGS))

LIBS:= $(shell pkg-config --libs $(PKGS))

LIBS+= -L$(LIB_INSTALL_DIR) -lnvdsgst_meta -lnvds_meta -lnvdsgst_helper -lnvdsgst_smartrecord \
	-L/usr/local/cuda-$(CUDA_VER)/lib64/ -lcudart \
	-lcuda -Wl,-rpath,$(LIB_INSTALL_DIR)

all: $(APP)

%.o: %.c $(INCS) Makefile
	$(CC) -c -o $@ $(CFLAGS) $<

$(APP): $(OBJS) Makefile
	$(CC) -o $(APP) $(OBJS) $(LIBS)

install: $(APP)
	cp -rv $(APP) $(APP_INSTALL_DIR)

clean:
	rm -rf $(OBJS) $(APP)

this is a configuration file :

# nvinfer primary-GIE configuration (resnet10 4-class detector).
# Fix: the section header was missing its opening bracket ("property]"),
# which makes the config parser fail to find the [property] group.
[property]
gpu-id=0
net-scale-factor=0.0039215697906911373
model-file=Primary_Detector/resnet10.caffemodel
proto-file=Primary_Detector/resnet10.prototxt
model-engine-file=Primary_Detector/resnet10.caffemodel_b1_gpu0_fp16.engine
labelfile-path=Primary_Detector/labels.txt
int8-calib-file=Primary_Detector/cal_trt.bin
force-implicit-batch-dim=1
batch-size=1
#width=1920
#height=1080
# network-mode: 0=FP32, 1=INT8, 2=FP16
network-mode=1
num-detected-classes=4
interval=0
gie-unique-id=1
output-blob-names=conv2d_bbox;conv2d_cov/Sigmoid
#scaling-filter=0
#scaling-compute-hw=0
cluster-mode=2

[class-attrs-all]
pre-cluster-threshold=0.2
topk=20
nms-iou-threshold=0.5

#[class-attrs-0]
#pre-cluster-threshold=0.05
#eps=0.7
#dbscan-min-score=0.95

#[class-attrs-1]
#pre-cluster-threshold=0.05
#eps=0.7
#dbscan-min-score=0.5

#[class-attrs-2]
#pre-cluster-threshold=0.1
#eps=0.6
#dbscan-min-score=0.95

#[class-attrs-3]
#pre-cluster-threshold=0.05
#eps=0.7
#dbscan-min-score=0.5

this is a code for smart record:

#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
#include <string.h>
#include <cuda_runtime_api.h>
#include "gst-nvdssr.h"

GST_DEBUG_CATEGORY (NVDS_APP);

#define MAX_DISPLAY_LEN 64

#define PGIE_CLASS_ID_VEHICLE 0
#define PGIE_CLASS_ID_PERSON 2

/* The muxer output resolution must be set if the input streams will be of
 * different resolution. The muxer will scale all the input frames to this
 * resolution. */
#define MUXER_OUTPUT_WIDTH 1920
#define MUXER_OUTPUT_HEIGHT 1080

/* Muxer batch formation timeout, e.g. 40 millisec. Should ideally be set
 * based on the fastest source's framerate. */
#define MUXER_BATCH_TIMEOUT_USEC 40000

/* By default, OSD process-mode is set to CPU_MODE. To change mode, set as:
 * 1: GPU mode (for Tesla only)
 * 2: HW mode (for Jetson only)
 */
#define OSD_PROCESS_MODE 0

/* By default, OSD will not display text. To display text, change this to 1 */
#define OSD_DISPLAY_TEXT 0

gint frame_number = 0;
gchar pgie_classes_str[4][32] = { "Vehicle", "TwoWheeler", "Person",
  "Roadsign"
};

/* Config parameters used for recording.
 * User needs to change these parameters to reflect the change in recordings,
 * e.g. duration, start-time etc. */

/* Container format of recorded file: 0 for mp4 and 1 for mkv */
#define SMART_REC_CONTAINER 0

/* Size (seconds) of the smart-record cache */
#define CACHE_SIZE_SEC 15

/* Timeout if duration of recording is not set by user */
#define SMART_REC_DEFAULT_DURATION 10

/* How many seconds before "now" each recording starts */
#define START_TIME 2

/* Duration of each recording, in seconds */
#define SMART_REC_DURATION 7

/* Interval in seconds for SR start/stop event generation */
#define SMART_REC_INTERVAL 7

static gboolean bbox_enabled = TRUE;
static gint enc_type = 0;       /* Default: Hardware encoder */
static gint sink_type = 2;      /* Default: Eglsink */
static guint sr_mode = 0;       /* Default: Audio + Video */

/* Command-line options. NOTE: this must be an array ("entries[]"), not a
 * single struct, for g_option_group_add_entries() to accept it. */
GOptionEntry entries[] = {
  {"bbox-enable", 'e', 0, G_OPTION_ARG_INT, &bbox_enabled,
      "0: Disable bboxes, 1: Enable bboxes, Default: bboxes enabled", NULL},
  {"enc-type", 'c', 0, G_OPTION_ARG_INT, &enc_type,
      "0: Hardware encoder, 1: Software encoder, Default: Hardware encoder",
      NULL},
  {"sink-type", 's', 0, G_OPTION_ARG_INT, &sink_type,
      "1: Fakesink, 2: Eglsink, 3: RTSP sink, Default: Eglsink", NULL},
  {"sr-mode", 'm', 0, G_OPTION_ARG_INT, &sr_mode,
      "SR mode: 0 = Audio + Video, 1 = Video only, 2 = Audio only", NULL},
  {NULL},
};

static GstElement *pipeline = NULL, *tee_pre_decode = NULL;
static NvDsSRContext *nvdssrCtx = NULL;
static GMainLoop *loop = NULL;

/* GStreamer bus watch: quits the main loop on EOS or on the first ERROR
 * message, printing the error and its debug detail. Returns TRUE to keep
 * the watch installed. */
static gboolean
bus_call (GstBus * bus, GstMessage * msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR:{
      gchar *debug;
      GError *error;
      gst_message_parse_error (msg, &error, &debug);
      g_printerr ("ERROR from element %s: %s\n",
          GST_OBJECT_NAME (msg->src), error->message);
      if (debug)
        g_printerr ("Error details: %s\n", debug);
      g_free (debug);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

/* Callback invoked by the smart-record bin when a recording completes.
 * Appends one line describing the recording to smart_record.log; the
 * static mutex serializes concurrent invocations' file access. */
static gpointer
smart_record_callback (NvDsSRRecordingInfo * info, gpointer userData)
{
  static GMutex mutex;
  FILE *logfile = NULL;
  g_return_val_if_fail (info, NULL);

  g_mutex_lock (&mutex);
  logfile = fopen ("smart_record.log", "a");
  if (logfile) {
    fprintf (logfile, "%d:%s:%d:%d:%s:%d channel(s):%d Hz:%ldms:%s:%s\n",
        info->sessionId, info->containsVideo ? "video" : "no-video",
        info->width, info->height, info->containsAudio ? "audio" : "no-audio",
        info->channels, info->samplingRate, info->duration,
        info->dirpath, info->filename);
    fclose (logfile);
  } else {
    /* fix: message typo "opeing" -> "opening" */
    g_print ("Error in opening smart record log file\n");
  }
  g_mutex_unlock (&mutex);

  return NULL;
}

/* Periodic GSource callback (installed with g_timeout_add) that toggles
 * recording: stops the session if one is running, otherwise starts a new
 * one beginning START_TIME seconds in the past for SMART_REC_DURATION
 * seconds. Returns TRUE so the timeout keeps firing. */
static gboolean
smart_record_event_generator (gpointer data)
{
  NvDsSRSessionId sessId = 0;
  NvDsSRContext *ctx = (NvDsSRContext *) data;
  guint startTime = START_TIME;
  guint duration = SMART_REC_DURATION;

  if (ctx->recordOn) {
    g_print ("Recording done.\n");
    if (NvDsSRStop (ctx, 0) != NVDSSR_STATUS_OK)
      g_printerr ("Unable to stop recording\n");
  } else {
    g_print ("Recording started...\n");
    if (NvDsSRStart (ctx, &sessId, startTime, duration,
            NULL) != NVDSSR_STATUS_OK)
      g_printerr ("Unable to start recording\n");
  }
  return TRUE;
}

/* "pad-added" handler for the audio parsebin: links its new src pad to the
 * record bin's "asink" pad. Quits the main loop if linking fails. */
static void
cb_newpad_audio_parsebin (GstElement * element, GstPad * element_src_pad,
    gpointer data)
{
  GstPad *sinkpad = gst_element_get_static_pad (nvdssrCtx->recordbin, "asink");
  if (!sinkpad || gst_pad_link (element_src_pad, sinkpad) != GST_PAD_LINK_OK) {
    g_print ("Elements not linked. Exiting. \n");
    g_main_loop_quit (loop);
  }
  /* fix: the pad reference was previously leaked */
  if (sinkpad)
    gst_object_unref (sinkpad);
}

/* "pad-added" handler for rtspsrc. For a video RTP pad, links it to the
 * depayloader passed via `data`; for an audio RTP pad (bbox disabled,
 * audio SR modes), inserts a parsebin feeding the record bin.
 * NOTE(review): linking fails (reason not-linked on udpsrc) when the RTP
 * caps do not match the depayloader, e.g. an H265 stream feeding
 * rtph264depay — confirm the camera's encoding. */
static void
cb_newpad (GstElement * element, GstPad * element_src_pad, gpointer data)
{

  g_print ("In cb_newpad\n");
  GstCaps *caps = gst_pad_get_current_caps (element_src_pad);
  const GstStructure *str = gst_caps_get_structure (caps, 0);
  const gchar *name = gst_structure_get_name (str);

  GstElement *depay_elem = (GstElement *) data;

  const gchar *media = gst_structure_get_string (str, "media");
  gboolean is_video = (!g_strcmp0 (media, "video"));
  gboolean is_audio = (!g_strcmp0 (media, "audio"));

  if (g_strrstr (name, "x-rtp") && is_video) {
    GstPad *sinkpad = gst_element_get_static_pad (depay_elem, "sink");
    if (gst_pad_link (element_src_pad, sinkpad) != GST_PAD_LINK_OK) {
      /* fix: message previously lacked a trailing newline and ran into the
       * next log line */
      g_printerr ("Failed to link depay loader to rtsp src\n");
    }
    gst_object_unref (sinkpad);

    /* Without bbox overlay, record the pre-decode elementary stream. */
    if (!bbox_enabled && (sr_mode == 0 || sr_mode == 1)) {
      GstElement *parser_pre_recordbin =
          gst_element_factory_make ("h264parse", "parser-pre-recordbin");

      gst_bin_add_many (GST_BIN (pipeline), parser_pre_recordbin, NULL);

      if (!gst_element_link_many (tee_pre_decode, parser_pre_recordbin,
              nvdssrCtx->recordbin, NULL)) {
        g_print ("Elements not linked. Exiting. \n");
        g_main_loop_quit (loop);
      }
      gst_element_sync_state_with_parent (parser_pre_recordbin);
    }

  }

  if (g_strrstr (name, "x-rtp") && is_audio) {
    if (!bbox_enabled && (sr_mode == 0 || sr_mode == 2)) {
      GstElement *parser_pre_recordbin =
          gst_element_factory_make ("parsebin", "audio-parser-pre-recordbin");

      gst_bin_add_many (GST_BIN (pipeline), parser_pre_recordbin, NULL);

      GstPad *sinkpad =
          gst_element_get_static_pad (parser_pre_recordbin, "sink");
      if (gst_pad_link (element_src_pad, sinkpad) != GST_PAD_LINK_OK) {
        g_print ("Elements not linked. Exiting. \n");
        g_main_loop_quit (loop);
      }
      /* fix: the pad reference was previously leaked */
      gst_object_unref (sinkpad);

      g_signal_connect (G_OBJECT (parser_pre_recordbin), "pad-added",
          G_CALLBACK (cb_newpad_audio_parsebin), NULL);

      gst_element_sync_state_with_parent (parser_pre_recordbin);
    }

  }

  gst_caps_unref (caps);
}

int
main (int argc, char *argv)
{
GstElement *streammux = NULL, *sink = NULL, *pgie = NULL, *source = NULL,
*nvvidconv = NULL, *nvvidconv2 = NULL, *encoder_post_osd = NULL,
*queue_pre_sink = NULL, *queue_post_osd = NULL, *parser_post_osd = NULL,
*nvosd = NULL, *tee_post_osd = NULL, *queue_pre_decode = NULL,
*depay_pre_decode = NULL, *decoder = NULL, *nvvidconv3 = NULL,
*swenc_caps = NULL;
g_print(“Hello \n”);
GstCaps *caps = NULL;
GstElement *cap_filter = NULL;

GstBus *bus = NULL;
guint bus_watch_id = 0;
guint i = 0, num_sources = 1;

guint pgie_batch_size = 0;

int current_device = -1;
cudaGetDevice(&current_device);
struct cudaDeviceProp prop;
cudaGetDeviceProperties(&prop, current_device);

GstElement *transform = NULL;
GOptionContext *gctx = NULL;
GOptionGroup *group = NULL;
GError *error = NULL;

NvDsSRInitParams params = { 0 };

gctx = g_option_context_new (“Nvidia DeepStream Test-SR app”);
group = g_option_group_new (“SR_test”, NULL, NULL, NULL, NULL);
g_option_group_add_entries (group, entries);

g_option_context_set_main_group (gctx, group);
g_option_context_add_group (gctx, gst_init_get_option_group ());

GST_DEBUG_CATEGORY_INIT (NVDS_APP, “NVDS_APP”, 0, NULL);

if (!g_option_context_parse (gctx, &argc, &argv, &error)) {
g_printerr (“%s”, error->message);
g_print (“%s”, g_option_context_get_help (gctx, TRUE, NULL));
return -1;
}

/* Check input arguments */
if (argc < 2) {
g_printerr (“Usage: %s <rtsp_h264 uri>\n”, argv[0]);
return -1;
}

if (argc > 2) {
g_printerr (“One rtsp_h264 uri supported Usage: %s <rtsp_h264 uri> \n”,
argv[0]);
return -1;
}

/* Standard GStreamer initialization */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);

/* Create gstreamer elements /
/
Create Pipeline element that will form a connection of other elements */
pipeline = gst_pipeline_new (“dstest-sr-pipeline”);

source = gst_element_factory_make (“rtspsrc”, “rtsp-source”);
g_object_set (G_OBJECT (source), “location”, argv[1], NULL);

depay_pre_decode = gst_element_factory_make (“rtph264depay”, “h264-depay”);

queue_pre_decode = gst_element_factory_make (“queue”, “queue-pre-decode”);

if (!source || !depay_pre_decode || !queue_pre_decode) {
g_printerr (“One element in source end could not be created.\n”);
return -1;
}

g_signal_connect (G_OBJECT (source), “pad-added”,
G_CALLBACK (cb_newpad), depay_pre_decode);
/* Create tee which connects decoded source data and Smart record bin without bbox */
tee_pre_decode = gst_element_factory_make (“tee”, “tee-pre-decode”);

decoder = gst_element_factory_make (“nvv4l2decoder”, “nvv4l2-decoder”);

streammux = gst_element_factory_make (“nvstreammux”, “stream-muxer”);

/* Use nvinfer to infer on batched frame. */
pgie = gst_element_factory_make (“nvinfer”, “primary-nvinference-engine”);

/* Use queue to connect to the sink after tee_post_osd element */
queue_pre_sink = gst_element_factory_make (“queue”, “queue-pre-sink”);

/* Use convertor to convert from NV12 to RGBA as required by nvosd */
nvvidconv = gst_element_factory_make (“nvvideoconvert”, “nvvideo-converter”);

/* Use convertor to convert from RGBA to CAPS filter data format */
nvvidconv2 =
gst_element_factory_make (“nvvideoconvert”, “nvvideo-converter2”);

/* Create OSD to draw on the converted RGBA buffer */
nvosd = gst_element_factory_make (“nvdsosd”, “nv-onscreendisplay”);

/* Create tee which connects to sink and Smart record bin with bbox */
tee_post_osd = gst_element_factory_make (“tee”, “tee-post-osd”);

/* Finally render the osd output */
if(prop.integrated) {
transform = gst_element_factory_make (“nvegltransform”, “nvegl-transform”);
if (!transform) {
g_printerr (“One tegra element could not be created. Exiting.\n”);
return -1;
}
}

if (sink_type == 1) {
sink = gst_element_factory_make (“fakesink”, “nvvideo-renderer”);
}
else if (sink_type == 2) {
sink = gst_element_factory_make (“nveglglessink”, “nvvideo-renderer”);
g_object_set (G_OBJECT (sink), “async”, FALSE, NULL);
}
else if (sink_type == 3) {
sink = gst_element_factory_make (“nvrtspoutsinkbin”, “nvvideo-renderer”);
g_object_set (G_OBJECT (sink), “sync”, TRUE, NULL);
g_object_set (G_OBJECT (sink), “bitrate”, 768000, NULL);
g_object_set (G_OBJECT (sink), “enc-type”, 1, NULL);
}

g_object_set (G_OBJECT (streammux), “live-source”, 1, NULL);

caps = gst_caps_from_string (“video/x-raw(memory:NVMM), format=(string)I420”);
cap_filter =
gst_element_factory_make (“capsfilter”, “src_cap_filter_nvvidconv”);
g_object_set (G_OBJECT (cap_filter), “caps”, caps, NULL);
gst_caps_unref (caps);

if (!pgie || !nvvidconv || !nvosd || !nvvidconv2 || !cap_filter
|| !tee_post_osd || !tee_pre_decode || !sink) {
g_printerr (“One element could not be created. Exiting.\n”);
return -1;
}

g_object_set (G_OBJECT (streammux), “batch-size”, num_sources, NULL);

g_object_set (G_OBJECT (streammux), “width”, MUXER_OUTPUT_WIDTH, “height”,
MUXER_OUTPUT_HEIGHT,
“batched-push-timeout”, MUXER_BATCH_TIMEOUT_USEC, NULL);

/* Configure the nvinfer element using the nvinfer config file. */
g_object_set (G_OBJECT (pgie),
“config-file-path”, “dstestsr_pgie_config.txt”, NULL);

/* Override the batch-size set in the config file with the number of sources. */
g_object_get (G_OBJECT (pgie), “batch-size”, &pgie_batch_size, NULL);
if (pgie_batch_size != num_sources) {
g_printerr
(“WARNING: Overriding infer-config batch-size (%d) with number of sources (%d)\n”,
pgie_batch_size, num_sources);
g_object_set (G_OBJECT (pgie), “batch-size”, num_sources, NULL);
}

g_object_set (G_OBJECT (nvosd), “process-mode”, OSD_PROCESS_MODE,
“display-text”, OSD_DISPLAY_TEXT, NULL);

g_object_set (G_OBJECT (sink), “qos”, 0, NULL);

/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);

/* Set up the pipeline

  • rtsp-source-> h264-depay → tee-> queue → decoder ->nvstreammux → nvinfer → nvvidconv → nvosd → nvvidconv → caps_filter → tee → queue → video-renderer
  •                                                                                                                                 |-> queue -> encoder -> parser -> recordbin
    

*/
gst_bin_add_many (GST_BIN (pipeline), source, depay_pre_decode,
tee_pre_decode, queue_pre_decode, decoder, streammux, pgie, nvvidconv,
nvosd, nvvidconv2, cap_filter, tee_post_osd, queue_pre_sink, sink, NULL);

if(prop.integrated) {
gst_bin_add (GST_BIN (pipeline), transform);
}

/* Link the elements together till decoder */
if (!gst_element_link_many (depay_pre_decode, tee_pre_decode,
queue_pre_decode, decoder, NULL)) {
g_printerr (“Elements could not be linked: 1. Exiting.\n”);
return -1;
}

/* Link decoder with streammux */
GstPad *sinkpad, *srcpad;
gchar pad_name_sink[16] = “sink_0”;
gchar pad_name_src[16] = “src”;

sinkpad = gst_element_get_request_pad (streammux, pad_name_sink);
if (!sinkpad) {
g_printerr (“Streammux request sink pad failed. Exiting.\n”);
return -1;
}

srcpad = gst_element_get_static_pad (decoder, pad_name_src);
if (!srcpad) {
g_printerr (“Decoder request src pad failed. Exiting.\n”);
return -1;
}

if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
g_printerr (“Failed to link decoder to stream muxer. Exiting.\n”);
return -1;
}

gst_object_unref (sinkpad);
gst_object_unref (srcpad);

/* Link the remaining elements of the pipeline to streammux */
if(prop.integrated) {
if (!gst_element_link_many (streammux, pgie,
nvvidconv, nvosd, nvvidconv2, cap_filter, tee_post_osd,
queue_pre_sink, transform, sink, NULL)) {
g_printerr (“Elements could not be linked. Exiting.\n”);
return -1;
}
} else {
if (!gst_element_link_many (streammux, pgie,
nvvidconv, nvosd, nvvidconv2, cap_filter, tee_post_osd,
queue_pre_sink, sink, NULL)) {
g_printerr (“Elements could not be linked. Exiting.\n”);
return -1;
}
}

/* Parameters are set before creating record bin

  • User can set additional parameters e.g recorded file path etc.
  • Refer NvDsSRInitParams structure for additional parameters
    */
    params.containerType = SMART_REC_CONTAINER;
    params.cacheSize = CACHE_SIZE_SEC;
    params.defaultDuration = SMART_REC_DEFAULT_DURATION;
    params.callback = smart_record_callback;
    params.fileNamePrefix = bbox_enabled ? “With_BBox” : “Without_BBox”;

if (NvDsSRCreate (&nvdssrCtx, &params) != NVDSSR_STATUS_OK) {
g_printerr (“Failed to create smart record bin”);
return -1;
}

gst_bin_add_many (GST_BIN (pipeline), nvdssrCtx->recordbin, NULL);

if (bbox_enabled) {
/* Encode the data from tee before recording with bbox /
if (enc_type == 0) {
/
Hardware encoder used*/
encoder_post_osd =
gst_element_factory_make (“nvv4l2h264enc”, “encoder-post-osd”);

  } else if (enc_type == 1) {
    /* Software encoder used*/

    swenc_caps =  gst_element_factory_make ("capsfilter", NULL);

    GstCaps *enc_caps = NULL;

    enc_caps = gst_caps_from_string ("video/x-h264, profile=(string)baseline");

    g_object_set (G_OBJECT (swenc_caps), "caps", enc_caps, NULL);
    gst_caps_unref (enc_caps);

    encoder_post_osd =
        gst_element_factory_make ("x264enc", "encoder-post-osd");

    nvvidconv3 = gst_element_factory_make ("nvvideoconvert", "nvvidconv3");
    gst_bin_add_many (GST_BIN (pipeline), swenc_caps, nvvidconv3, NULL);
  }

/* Parse the encoded data after osd component */
parser_post_osd = gst_element_factory_make ("h264parse", "parser-post-osd");

/* Use queue to connect the tee_post_osd to nvencoder */
queue_post_osd = gst_element_factory_make ("queue", "queue-post-osd");

gst_bin_add_many (GST_BIN (pipeline), queue_post_osd, encoder_post_osd,
    parser_post_osd, NULL);

if (enc_type == 0) {
  if (!gst_element_link_many (tee_post_osd, queue_post_osd, encoder_post_osd,
          parser_post_osd, nvdssrCtx->recordbin, NULL)) {
    g_print ("Elements not linked. Exiting. \n");
    return -1;
  }
}
else if (enc_type == 1) {
  /* Link swenc_caps and nvvidconv3 in case of software encoder*/
  if (!gst_element_link_many (tee_post_osd, nvvidconv3, queue_post_osd,
          encoder_post_osd, swenc_caps, parser_post_osd,
          nvdssrCtx->recordbin, NULL)) {
    g_print ("Elements not linked. Exiting. \n");
    return -1;
  }
}

}

if (nvdssrCtx) {
g_timeout_add (SMART_REC_INTERVAL * 1000, smart_record_event_generator,
nvdssrCtx);
}

/* Set the pipeline to “playing” state */
g_print (“Now playing:”);
g_print (" %s", argv[i + 1]);

g_print (“\n”);
gst_element_set_state (pipeline, GST_STATE_PLAYING);

/* Wait till pipeline encounters an error or EOS /
g_print (“Running…\n”);
g_main_loop_run (loop);
if (pipeline && nvdssrCtx) {
if(NvDsSRDestroy (nvdssrCtx) != NVDSSR_STATUS_OK)
g_printerr (“Unable to destroy recording instance\n”);
}
/
Out of the main loop, clean up nicely */
g_print (“Returned, stopping playback\n”);
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print (“Deleting pipeline\n”);
gst_object_unref (GST_OBJECT (pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref (loop);
return 0;
}

Like mentioned in \opt\nvidia\deepstream\deepstream-6.2\sources\apps\sample_apps\deepstream-testsr\README:
4. Smart record needs I-frames to record videos. So if a "No video stream found" error is encountered, it is quite possible that, for the given recording interval, no I-frames from the given rtsp source are received by the application. Try changing the rtsp source or update the above-mentioned parameters accordingly.
I think you can check the I-frames interval of the rtsp source.

when i give rtph265depay instead of rtph264depay it is working fine and is there any other alternative solution is available for this.

depay_pre_decode = gst_element_factory_make (“rtph265depay”, “h264-depay”);

Please confirm your encoding type. If the input stream is H265 but you use rtph264depay, I don't think the pipeline can work well.

and also changed another rtsp source but it showing no video stream found

I have two suggestions.

1.You can dump the stream to a file to confirm the encoding type.
recommend use ffprobe file
It works fine for h264. If your stream is h265, try h265parse.

gst-launch-1.0 urisourcebin uri=your_rstpsource  ! queue ! parsebin ! queue  \
! h264parse ! mpegtsmux ! filesink location=rtsp.ts

2.export GST_DEBUG=3 in shell.then post the complete log to here.

Thanks.

jetson@ubuntu:~/Documents/deepstream-testsr-app$ ./deepstream-testsr-app “rtsp://admin:Sieora123@192.168.1.108:554/cam/realmonitor?channel=3&subtype=0”
Hello
Now playing: rtsp://admin:Sieora123@192.168.1.108:554/cam/realmonitor?channel=3&subtype=0

Using winsys: x11
Opening in BLOCKING MODE
0:00:00.508135424 23243 0x55929b0230 WARN v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x5592993020 Failed to determine interlace mode
0:00:00.508258394 23243 0x55929b0230 WARN v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x5592993020 Failed to determine interlace mode
0:00:00.508336417 23243 0x55929b0230 WARN v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x5592993020 Failed to determine interlace mode
0:00:00.508411158 23243 0x55929b0230 WARN v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x5592993020 Failed to determine interlace mode
0:00:00.508581004 23243 0x55929b0230 WARN v4l2 gstv4l2object.c:4476:gst_v4l2_object_probe_caps:encoder-post-osd:src Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: Unknown error -1
Opening in BLOCKING MODE
0:00:00.685998942 23243 0x55929b0230 WARN v4l2 gstv4l2object.c:4476:gst_v4l2_object_probe_caps:nvv4l2-decoder:src Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: Unknown error -1
0:00:00.686082381 23243 0x55929b0230 WARN v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x5592560720 Failed to determine interlace mode
0:00:00.686152330 23243 0x55929b0230 WARN v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x5592560720 Failed to determine interlace mode
0:00:00.686228789 23243 0x55929b0230 WARN v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x5592560720 Failed to determine interlace mode
WARNING: [TRT]: Using an engine plan file across different models of devices is not recommended and is likely to affect performance or even cause errors.
0:00:09.913286968 23243 0x55929b0230 INFO nvinfer gstnvinfer.cpp:638:gst_nvinfer_logger: NvDsInferContext[UID 1]: Info from NvDsInferContextImpl::deserializeEngineAndBackend() <nvdsinfer_context_impl.cpp:1900> [UID = 1]: deserialized trt engine from :/home/jetson/Documents/deepstream-testsr-app/Primary_Detector/resnet10.caffemodel_b1_gpu0_fp16.engine
INFO: [Implicit Engine Info]: layers num: 3
0 INPUT kFLOAT input_1 3x368x640
1 OUTPUT kFLOAT conv2d_bbox 16x23x40
2 OUTPUT kFLOAT conv2d_cov/Sigmoid 4x23x40

0:00:09.914791682 23243 0x55929b0230 INFO nvinfer gstnvinfer.cpp:638:gst_nvinfer_logger: NvDsInferContext[UID 1]: Info from NvDsInferContextImpl::generateBackendContext() <nvdsinfer_context_impl.cpp:2004> [UID = 1]: Use deserialized engine model: /home/jetson/Documents/deepstream-testsr-app/Primary_Detector/resnet10.caffemodel_b1_gpu0_fp16.engine
0:00:09.948337996 23243 0x55929b0230 INFO nvinfer gstnvinfer_impl.cpp:313:notifyLoadModelStatus: [UID 1]: Load new model:dstestsr_pgie_config.txt sucessfully
Running…
0:00:10.141939343 23243 0x7f10050230 FIXME default gstutils.c:3981:gst_pad_create_stream_id_internal:fakesrc0:src Creating random stream-id, consider implementing a deterministic way of creating a stream-id
Recording started…
In cb_newpad
Failed to link depay loader to rtsp src0:00:12.737668851 23243 0x7f10050000 WARN basesrc gstbasesrc.c:3055:gst_base_src_loop: error: Internal data stream error.
0:00:12.737936408 23243 0x7f10050000 WARN basesrc gstbasesrc.c:3055:gst_base_src_loop: error: streaming stopped, reason not-linked (-1)
ERROR from element udpsrc0: Internal data stream error.
Error details: gstbasesrc.c(3055): gst_base_src_loop (): /GstPipeline:dstest-sr-pipeline/GstRTSPSrc:rtsp-source/GstUDPSrc:udpsrc0:
streaming stopped, reason not-linked (-1)
0:00:12.741211413 23243 0x559297c400 FIXME basesink gstbasesink.c:3145:gst_base_sink_default_event:<src_filesink0> stream-start event without group-id. Consider implementing group-id handling in the upstream elements
0:00:12.749184937 23243 0x55a1549ad0 WARN basesrc gstbasesrc.c:3583:gst_base_src_start_complete: pad not activated yet
0:00:12.750105839 23243 0x55a1549ad0 WARN basesrc gstbasesrc.c:3583:gst_base_src_start_complete: pad not activated yet
0:00:12.774145216 23243 0x7f18074d90 WARN qtdemux qtdemux.c:3031:qtdemux_parse_trex: failed to find fragment defaults for stream 1
0:00:12.778211850 23243 0x7f18074d90 WARN qtdemux qtdemux.c:11716:qtdemux_parse_trak: error: This file is corrupt and cannot be played.
0:00:12.778439771 23243 0x55a1549ad0 WARN discoverer gstdiscoverer.c:1473:handle_message: Got an error [debug:qtdemux.c(11716): qtdemux_parse_trak (): /GstPipeline:Discoverer/GstURIDecodeBin:discoverer-uri/GstDecodeBin:decodebin1/GstQTDemux:qtdemux0], [message:This file is corrupt and cannot be played.]
** ERROR: RunUserCallback:207: No video stream found
Returned, stopping playback
0:00:12.806796255 23243 0x5592e2e0a0 WARN rtspsrc gstrtspsrc.c:5919:gst_rtsp_src_receive_response: receive interrupted
0:00:12.807108396 23243 0x5592e2e0a0 WARN rtspsrc gstrtspsrc.c:7673:gst_rtspsrc_close: TEARDOWN interrupted
Deleting pipeline

One more doubt I have: how do I make the smart record event stop? It is running continuously and storing video — can you give me a solution? Also, how can I verify whether the stored video came from a smart record event or not?

Can you give some idea of what the I-frame interval is and how to check it?

Hello
Now playing: rtsp://admin:Sieora123@192.168.1.108:554/cam/realmonitor?channel=3&subtype=0

Using winsys: x11
Opening in BLOCKING MODE
0:00:00.395131628 30543 0x55878bc030 WARN v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x5587774980 Failed to determine interlace mode
0:00:00.395253557 30543 0x55878bc030 WARN v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x5587774980 Failed to determine interlace mode
0:00:00.395329392 30543 0x55878bc030 WARN v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x5587774980 Failed to determine interlace mode
0:00:00.395399445 30543 0x55878bc030 WARN v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x5587774980 Failed to determine interlace mode
0:00:00.395565750 30543 0x55878bc030 WARN v4l2 gstv4l2object.c:4476:gst_v4l2_object_probe_caps:encoder-post-osd:src Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: Unknown error -1
Opening in BLOCKING MODE
0:00:00.560506008 30543 0x55878bc030 WARN v4l2 gstv4l2object.c:4476:gst_v4l2_object_probe_caps:nvv4l2-decoder:src Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: Unknown error -1
0:00:00.560592311 30543 0x55878bc030 WARN v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x5587467470 Failed to determine interlace mode
0:00:00.560671323 30543 0x55878bc030 WARN v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x5587467470 Failed to determine interlace mode
0:00:00.560744554 30543 0x55878bc030 WARN v4l2 gstv4l2object.c:2388:gst_v4l2_object_add_interlace_mode:0x5587467470 Failed to determine interlace mode
WARNING: [TRT]: Using an engine plan file across different models of devices is not recommended and is likely to affect performance or even cause errors.
0:00:10.187984933 30543 0x55878bc030 INFO nvinfer gstnvinfer.cpp:638:gst_nvinfer_logger: NvDsInferContext[UID 1]: Info from NvDsInferContextImpl::deserializeEngineAndBackend() <nvdsinfer_context_impl.cpp:1900> [UID = 1]: deserialized trt engine from :/home/jetson/Documents/deepstream-testsr-app/Primary_Detector/resnet10.caffemodel_b1_gpu0_fp16.engine
INFO: [Implicit Engine Info]: layers num: 3
0 INPUT kFLOAT input_1 3x368x640
1 OUTPUT kFLOAT conv2d_bbox 16x23x40
2 OUTPUT kFLOAT conv2d_cov/Sigmoid 4x23x40

0:00:10.189608190 30543 0x55878bc030 INFO nvinfer gstnvinfer.cpp:638:gst_nvinfer_logger: NvDsInferContext[UID 1]: Info from NvDsInferContextImpl::generateBackendContext() <nvdsinfer_context_impl.cpp:2004> [UID = 1]: Use deserialized engine model: /home/jetson/Documents/deepstream-testsr-app/Primary_Detector/resnet10.caffemodel_b1_gpu0_fp16.engine
0:00:10.225832089 30543 0x55878bc030 INFO nvinfer gstnvinfer_impl.cpp:313:notifyLoadModelStatus: [UID 1]: Load new model:dstestsr_pgie_config.txt sucessfully
Running…
0:00:10.447095963 30543 0x7f2004a8f0 FIXME default gstutils.c:3981:gst_pad_create_stream_id_internal:fakesrc0:src Creating random stream-id, consider implementing a deterministic way of creating a stream-id
In cb_newpad
Failed to link depay loader to rtsp src0:00:13.161564346 30543 0x7f2004a370 WARN basesrc gstbasesrc.c:3055:gst_base_src_loop: error: Internal data stream error.
0:00:13.162471496 30543 0x7f2004a370 WARN basesrc gstbasesrc.c:3055:gst_base_src_loop: error: streaming stopped, reason not-linked (-1)
ERROR from element udpsrc1: Internal data stream error.
Error details: gstbasesrc.c(3055): gst_base_src_loop (): /GstPipeline:dstest-sr-pipeline/GstRTSPSrc:rtsp-source/GstUDPSrc:udpsrc1:
streaming stopped, reason not-linked (-1)
Returned, stopping playback
0:00:13.187586404 30543 0x5587d35cf0 WARN rtspsrc gstrtspsrc.c:5919:gst_rtsp_src_receive_response: receive interrupted
0:00:13.187658748 30543 0x5587d35cf0 WARN rtspsrc gstrtspsrc.c:7673:gst_rtspsrc_close: TEARDOWN interrupted
Deleting pipeline

Which DeepStream app stores video via smart record events — deepstream-test5 or deepstream-testsr?

dump your rtsp to file. like the way I mentioned above.Then use ffprobe

ffprobe -select_streams v -show_packets -of xml -i sample_720p.mp4 |grep K_

It will be displayed on terminal about the key frames information

Both deepstream-testsr and test5 used smart record event.
/opt/nvidia/deepstream/deepstream/sources/apps/sample_apps/deepstream-testsr/README help you use smart-record parameters

In deepstream-test5 the smart record event runs continuously; how do I limit the total duration of the recorded video?

[source0]
enable=1
#Type - 1=CameraV4L2 2=URI 3=MultiURI 4=RTSP
type=4
uri=rtsp://admin:Sieora123@192.168.1.108:554/cam/realmonitor?channel=3&subtype=0
num-sources=1
gpu-id=0

Enable the recording:

nvbuf-memory-type=0

To store locally or in the cloud:

smart-record = 2
smart-rec-container = 0
smart-rec-duration = 10

Store only the first five seconds:

smart-rec-interval = 5
smart-rec-file-prefix=record.mp4
smart-rec-dir-path=record
smart-rec-cache=300

jetson@ubuntu:~/Documents$ ffmpeg -rtsp_transport tcp -i “rtsp://admin:Sieora123@192.168.1.108:554/cam/realmonitor?channel=3&subtype=0” -c copy output.mp4
ffmpeg version 3.4.11-0ubuntu0.1 Copyright (c) 2000-2022 the FFmpeg developers
built with gcc 7 (Ubuntu/Linaro 7.5.0-3ubuntu1~18.04)
configuration: --prefix=/usr --extra-version=0ubuntu0.1 --toolchain=hardened --libdir=/usr/lib/aarch64-linux-gnu --incdir=/usr/include/aarch64-linux-gnu --enable-gpl --disable-stripping --enable-avresample --enable-avisynth --enable-gnutls --enable-ladspa --enable-libass --enable-libbluray --enable-libbs2b --enable-libcaca --enable-libcdio --enable-libflite --enable-libfontconfig --enable-libfreetype --enable-libfribidi --enable-libgme --enable-libgsm --enable-libmp3lame --enable-libmysofa --enable-libopenjpeg --enable-libopenmpt --enable-libopus --enable-libpulse --enable-librubberband --enable-librsvg --enable-libshine --enable-libsnappy --enable-libsoxr --enable-libspeex --enable-libssh --enable-libtheora --enable-libtwolame --enable-libvorbis --enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx265 --enable-libxml2 --enable-libxvid --enable-libzmq --enable-libzvbi --enable-omx --enable-openal --enable-opengl --enable-sdl2 --enable-libdc1394 --enable-libdrm --enable-libiec61883 --enable-chromaprint --enable-frei0r --enable-libopencv --enable-libx264 --enable-shared
libavutil 55. 78.100 / 55. 78.100
libavcodec 57.107.100 / 57.107.100
libavformat 57. 83.100 / 57. 83.100
libavdevice 57. 10.100 / 57. 10.100
libavfilter 6.107.100 / 6.107.100
libavresample 3. 7. 0 / 3. 7. 0
libswscale 4. 8.100 / 4. 8.100
libswresample 2. 9.100 / 2. 9.100
libpostproc 54. 7.100 / 54. 7.100
Input #0, rtsp, from ‘rtsp://admin:Sieora123@192.168.1.108:554/cam/realmonitor?channel=3&subtype=0’:
Metadata:
title : Media Server
Duration: N/A, start: 0.040000, bitrate: N/A
Stream #0:0: Video: hevc (Main), yuv420p(tv), 960x1080, 25 fps, 25 tbr, 90k tbn, 25 tbc
Output #0, mp4, to ‘output.mp4’:
Metadata:
title : Media Server
encoder : Lavf57.83.100
Stream #0:0: Video: hevc (Main) (hev1 / 0x31766568), yuv420p(tv), 960x1080, q=2-31, 25 fps, 25 tbr, 90k tbn, 90k tbc
Stream mapping:
Stream #0:0#0:0 (copy)
Press [q] to stop, [?] for help
[mp4 @ 0x55a5faa020] Timestamps are unset in a packet for stream 0. This is deprecated and will stop working in the future. Fix your code to set the timestamps properly
[mp4 @ 0x55a5faa020] Non-monotonous DTS in output stream 0:0; previous: 0, current: 0; changing to 1. This may result in incorrect timestamps in the output file.
frame= 36 fps=0.0 q=-1.0 size= 0kB time=00:00:01.36 bitrate= 0.3kbits/frame= 49 fps= 48 q=-1.0 size= 0kB time=00:00:01.88 bitrate= 0.2kbits/frame= 62 fps= 40 q=-1.0 size= 0kB time=00:00:02.40 bitrate= 0.1kbits/frame= 75 fps= 36 q=-1.0 size= 0kB time=00:00:02.92 bitrate= 0.1kbits/frame= 88 fps= 34 q=-1.0 size= 0kB time=00:00:03.44 bitrate= 0.1kbits/frame= 101 fps= 32 q=-1.0 size= 256kB time=00:00:03.96 bitrate= 529.7kbits/frame= 114 fps= 31 q=-1.0 size= 256kB time=00:00:04.48 bitrate= 468.2kbits/frame= 127 fps= 31 q=-1.0 size= 256kB time=00:00:05.00 bitrate= 419.5kbits/frame= 140 fps= 30 q=-1.0 size= 256kB time=00:00:05.52 bitrate= 380.0kbits/frame= 153 fps= 29 q=-1.0 size= 256kB time=00:00:06.04 bitrate= 347.3kbits/frame= 166 fps= 29 q=-1.0 size= 256kB time=00:00:06.56 bitrate= 319.7kbits/frame= 179 fps= 29 q=-1.0 size= 256kB time=00:00:07.08 bitrate= 296.3kbits/frame= 192 fps= 28 q=-1.0 size= 256kB time=00:00:07.60 bitrate= 276.0kbits/frame= 205 fps= 28 q=-1.0 size= 512kB time=00:00:08.12 bitrate= 516.6kbits/frame= 218 fps= 28 q=-1.0 size= 512kB time=00:00:08.64 bitrate= 485.5kbits/frame= 231 fps= 28 q=-1.0 size= 512kB time=00:00:09.16 bitrate= 457.9kbits/frame= 244 fps= 28 q=-1.0 size= 512kB time=00:00:09.68 bitrate= 433.3kbits/frame= 257 fps= 27 q=-1.0 size= 512kB time=00:00:10.20 bitrate= 411.2kbits/frame= 270 fps= 27 q=-1.0 size= 512kB time=00:00:10.72 bitrate= 391.3kbits/frame= 283 fps= 27 q=-1.0 size= 512kB time=00:00:11.24 bitrate= 373.2kbits/frame= 296 fps= 27 q=-1.0 size= 512kB time=00:00:11.76 bitrate= 356.7kbits/frame= 309 fps= 27 q=-1.0 size= 768kB time=00:00:12.28 bitrate= 512.4kbits/frame= 322 fps= 27 q=-1.0 size= 768kB time=00:00:12.80 bitrate= 491.5kbits/frame= 335 fps= 27 q=-1.0 size= 768kB time=00:00:13.32 bitrate= 472.4kbits/frame= 348 fps= 27 q=-1.0 size= 768kB time=00:00:13.84 bitrate= 454.6kbits/frame= 361 fps= 27 q=-1.0 size= 768kB time=00:00:14.36 bitrate= 438.1kbits/frame= 374 fps= 27 q=-1.0 size= 768kB time=00:00:14.88 bitrate= 
422.8kbits/frame= 387 fps= 27 q=-1.0 size= 768kB time=00:00:15.40 bitrate= 408.6kbits/frame= 400 fps= 27 q=-1.0 size= 768kB time=00:00:15.92 bitrate= 395.2kbits/frame= 413 fps= 26 q=-1.0 size= 1024kB time=00:00:16.44 bitrate= 510.3kbits/frame= 426 fps= 26 q=-1.0 size= 1024kB time=00:00:16.96 bitrate= 494.6kbits/frame= 439 fps= 26 q=-1.0 size= 1024kB time=00:00:17.48 bitrate= 479.9kbits/frame= 452 fps= 26 q=-1.0 size= 1024kB time=00:00:18.00 bitrate= 466.1kbits/frame= 465 fps= 26 q=-1.0 size= 1024kB time=00:00:18.52 bitrate= 453.0kbits/frame= 478 fps= 26 q=-1.0 size= 1024kB time=00:00:19.04 bitrate= 440.6kbits/frame= 491 fps= 26 q=-1.0 size= 1024kB time=00:00:19.56 bitrate= 428.9kbits/frame= 504 fps= 26 q=-1.0 size= 1280kB time=00:00:20.08 bitrate= 522.2kbits/frame= 517 fps= 26 q=-1.0 size= 1280kB time=00:00:20.60 bitrate= 509.0kbits/frame= 530 fps= 26 q=-1.0 size= 1280kB time=00:00:21.12 bitrate= 496.5kbits/frame= 543 fps= 26 q=-1.0 size= 1280kB time=00:00:21.64 bitrate= 484.6kbits/frame= 556 fps= 26 q=-1.0 size= 1280kB time=00:00:22.16 bitrate= 473.2kbits/frame= 569 fps= 26 q=-1.0 size= 1280kB time=00:00:22.68 bitrate= 462.4kbits/frame= 582 fps= 26 q=-1.0 size= 1280kB time=00:00:23.20 bitrate= 452.0kbits/frame= 595 fps= 26 q=-1.0 size= 1280kB time=00:00:23.72 bitrate= 442.1kbits/frame= 608 fps= 26 q=-1.0 size= 1536kB time=00:00:24.24 bitrate= 519.1kbits/frame= 621 fps= 26 q=-1.0 size= 1536kB time=00:00:24.76 bitrate= 508.2kbits/frame= 634 fps= 26 q=-1.0 size= 1536kB time=00:00:25.28 bitrate= 497.8kbits/frame= 647 fps= 26 q=-1.0 size= 1536kB time=00:00:25.80 bitrate= 487.7kbits/frame= 653 fps= 26 q=-1.0 Lsize= 1699kB time=00:00:26.04 bitrate= 534.6kbits/s speed=1.03x

1. deepstream-testsr is only used to show the usage of the smart-record API; a start or stop event is triggered every 10s.

It only records the stream from RTSP; there is no codec operation.

2. In deepstream-test5, data is encoded by nvv4l2encoder.

Smart-record will process the encoded data into mp4/mkv.

Actually, I could not link the RTSP source to rtph264depay.

I am getting this error:

Running…
0:00:10.447095963 30543 0x7f2004a8f0 FIXME default gstutils.c:3981:gst_pad_create_stream_id_internal:fakesrc0:src Creating random stream-id, consider implementing a deterministic way of creating a stream-id
In cb_newpad
Failed to link depay loader to rtsp src0:00:13.161564346 30543 0x7f2004a370 WARN basesrc gstbasesrc.c:3055:gst_base_src_loop: error: Internal data stream error.
0:00:13.162471496 30543 0x7f2004a370 WARN basesrc gstbasesrc.c:3055:gst_base_src_loop: error: streaming stopped, reason not-linked (-1)
ERROR from element udpsrc1: Internal data stream error.
Error details: gstbasesrc.c(3055): gst_base_src_loop (): /GstPipeline:dstest-sr-pipeline/GstRTSPSrc:rtsp-source/GstUDPSrc:udpsrc1:
streaming stopped, reason not-linked (-1)
Returned, stopping playback
0:00:13.187586404 30543 0x5587d35cf0 WARN rtspsrc gstrtspsrc.c:5919:gst_rtsp_src_receive_response: receive interrupted
0:00:13.187658748 30543 0x5587d35cf0 WARN rtspsrc gstrtspsrc.c:7673:gst_rtspsrc_close: TEARDOWN interrupted
Deleting pipeline

There is no update from you for a period,
assuming this is not an issue anymore.
Hence we are closing this topic.
If need further support, please open a new one. Thanks

This topic was automatically closed 14 days after the last reply. New replies are no longer allowed.