Hello Nvs,
I am using Docker with the deepstream:6.2-devel container on a dGPU (GeForce RTX 2080 Ti).
I have a pipeline that streams out over RTSP; this pipeline works well:
// main function
int main(int argc, char *argv[])
{
GMainLoop *loop = NULL;
// Create various elements
GstElement *pipeline = NULL, *source = NULL, *streammux = NULL, *pgie = NULL, *nvtracker = NULL, *nvvidconv = NULL,
*nvosd = NULL, *nvvidconv_postosd = NULL, *caps = NULL, *encoder = NULL, *rtppay = NULL, *sink = NULL;
g_print("With tracker\n");
GstBus *bus = NULL;
guint bus_watch_id = 0;
GstPad *osd_sink_pad = NULL;
GstCaps *caps_filter = NULL;
guint bitrate = 5000000;
gchar *codec = "H264";
guint udpsink_port_num = 5400;
guint rtsp_port_num = 8554;
gchar *rtsp_path = "/ds-test";
int current_device = -1;
cudaGetDevice(&current_device);
struct cudaDeviceProp prop;
cudaGetDeviceProperties(&prop, current_device);
/* Check input arguments */
if (argc != 2)
{
g_printerr("Usage: %s <H264 filename>\n", argv[0]);
return -1;
}
/* Standard GStreamer initialization */
gst_init(&argc, &argv);
loop = g_main_loop_new(NULL, FALSE);
// Create a GStreamer pipeline
pipeline = gst_pipeline_new("ds-tracker-pipeline");
// Create source element
source = create_source_bin(0, argv[1]);
// Create stream multiplexer element
streammux = gst_element_factory_make("nvstreammux", "stream-muxer");
// Create primary inference engine element
pgie = gst_element_factory_make("nvinfer", "primary-nvinference-engine");
// Create object tracker element
nvtracker = gst_element_factory_make("nvtracker", "tracker");
// Create video converter element
nvvidconv = gst_element_factory_make("nvvideoconvert", "nvvideo-converter");
// Create on-screen display element
nvosd = gst_element_factory_make("nvdsosd", "nv-onscreendisplay");
// Create post OSD video converter element
nvvidconv_postosd = gst_element_factory_make("nvvideoconvert", "convertor_postosd");
// Create caps filter element
caps = gst_element_factory_make("capsfilter", "filter");
// Create video encoder element based on codec
if (g_strcmp0(codec, "H264") == 0)
{
encoder = gst_element_factory_make("nvv4l2h264enc", "encoder");
printf("Creating H264 Encoder\n");
}
else if (g_strcmp0(codec, "H265") == 0)
{
encoder = gst_element_factory_make("nvv4l2h265enc", "encoder");
printf("Creating H265 Encoder\n");
}
// Create RTP payloader element based on codec
if (g_strcmp0(codec, "H264") == 0)
{
rtppay = gst_element_factory_make("rtph264pay", "rtppay");
printf("Creating H264 rtppay\n");
}
else if (g_strcmp0(codec, "H265") == 0)
{
rtppay = gst_element_factory_make("rtph265pay", "rtppay");
printf("Creating H265 rtppay\n");
}
// Create UDP sink element
sink = gst_element_factory_make("udpsink", "udpsink");
if (!source || !streammux || !pgie || !nvtracker || !nvvidconv || !nvosd || !nvvidconv_postosd ||
!caps || !encoder || !rtppay || !sink)
{
g_printerr("One element could not be created. Exiting.\n");
return -1;
}
// Set properties of the stream multiplexer
g_object_set(G_OBJECT(streammux), "batch-size", 1, NULL);
g_object_set(G_OBJECT(streammux), "width", MUXER_OUTPUT_WIDTH, "height",
MUXER_OUTPUT_HEIGHT,
"batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);
// Set configuration file path for the primary inference engine
g_object_set(G_OBJECT(pgie), "config-file-path", PGIE_CONFIG_FILE, NULL);
// Set properties for the object tracker
set_tracker_properties(nvtracker);
// Set caps for the caps filter
caps_filter = gst_caps_from_string("video/x-raw(memory:NVMM), format=I420, width=1920, height=1080");
g_object_set(G_OBJECT(caps), "caps", caps_filter, NULL);
gst_caps_unref(caps_filter);
// Set bitrate for the video encoder
g_object_set(G_OBJECT(encoder), "bitrate", bitrate, NULL);
// Additional encoder settings for Jetson (aarch64)
if (is_aarch64())
{
g_object_set(G_OBJECT(encoder), "preset-level", 1, NULL);
g_object_set(G_OBJECT(encoder), "insert-sps-pps", 1, NULL);
}
// Set properties for the UDP sink
g_object_set(G_OBJECT(sink), "host", "224.224.255.255", NULL);
g_object_set(G_OBJECT(sink), "port", updsink_port_num, NULL);
g_object_set(G_OBJECT(sink), "async", FALSE, NULL);
g_object_set(G_OBJECT(sink), "sync", 1, NULL);
// Create a bus for the pipeline
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
bus_watch_id = gst_bus_add_watch(bus, bus_call, loop);
gst_object_unref(bus);
// Add all elements to the pipeline
gst_bin_add_many(GST_BIN(pipeline),
source, streammux, pgie, nvtracker,
nvvidconv, nvosd, nvvidconv_postosd, caps, encoder, rtppay, sink, NULL);
GstPad *sinkpad, *srcpad;
gchar pad_name_sink[16] = "sink_0";
gchar pad_name_src[16] = "src";
// Link source pad to sink pad
sinkpad = gst_element_get_request_pad(streammux, pad_name_sink);
srcpad = gst_element_get_static_pad(source, pad_name_src);
if (gst_pad_link(srcpad, sinkpad) != GST_PAD_LINK_OK)
{
g_printerr("Failed to link decoder to stream muxer. Exiting.\n");
return -1;
}
gst_object_unref(sinkpad);
gst_object_unref(srcpad);
// Link all remaining elements in the pipeline
if (!gst_element_link_many(streammux, pgie, nvtracker,
nvvidconv, nvosd, nvvidconv_postosd, caps, encoder, rtppay, sink, NULL))
{
g_printerr("Elements could not be linked. Exiting.\n");
return -1;
}
osd_sink_pad = gst_element_get_static_pad(nvosd, "sink");
if (!osd_sink_pad)
g_print("Unable to get sink pad\n");
else
{
// Add a probe to the OSD sink pad
gst_pad_add_probe(osd_sink_pad, GST_PAD_PROBE_TYPE_BUFFER, osd_sink_pad_buffer_probe, NULL, NULL);
gst_object_unref(osd_sink_pad);
}
g_timeout_add(5000, perf_print_callback, &g_perf_data);
// Create an RTSP server
GstRTSPServer *server;
GstRTSPMountPoints *mounts;
GstRTSPMediaFactory *factory;
server = gst_rtsp_server_new();
g_object_set(G_OBJECT(server), "service", g_strdup_printf("%d", rtsp_port_num), NULL);
gst_rtsp_server_attach(server, NULL);
mounts = gst_rtsp_server_get_mount_points(server);
factory = gst_rtsp_media_factory_new();
// Set the launch string for the RTSP media factory
gst_rtsp_media_factory_set_launch(factory, g_strdup_printf("( udpsrc name=pay0 port=%d buffer-size=524288 caps=\"application/x-rtp, media=video, clock-rate=90000, encoding-name=(string)%s, payload=96 \" )", udpsink_port_num, codec));
gst_rtsp_media_factory_set_shared(factory, TRUE);
gst_rtsp_mount_points_add_factory(mounts, rtsp_path, factory);
g_object_unref(mounts);
printf("\n *** DeepStream: Launched RTSP Streaming at rtsp://localhost:%d%s ***\n\n", rtsp_port_num, rtsp_path);
/* Set the pipeline to "playing" state */
g_print("Using file: %s\n", argv[1]);
gst_element_set_state(pipeline, GST_STATE_PLAYING);
/* Iterate */
g_print("Running...\n");
g_main_loop_run(loop);
/* Out of the main loop, clean up nicely */
g_print("Returned, stopping playback\n");
gst_element_set_state(pipeline, GST_STATE_NULL);
g_print("Deleting pipeline\n");
gst_object_unref(GST_OBJECT(pipeline));
g_source_remove(bus_watch_id);
g_main_loop_unref(loop);
return 0;
}
Then I tried to change the sink to a filesink. I changed only the sink element, like below; the rest of the pipeline, including rtppay, is unchanged:
// Create UDP sink element
// sink = gst_element_factory_make("udpsink", "udpsink");
// Change sink to filesink to save the output to a file
sink = gst_element_factory_make("filesink", "filesink");
if (!sink) {
g_printerr("Not all elements could be created.\n");
return -1;
}
g_object_set(G_OBJECT(sink), "location", "output_video.mp4", NULL);
My pipeline still runs, and the log looks healthy:
0:00:03.519207665 2899760 0x55a975f67440 INFO nvinfer gstnvinfer.cpp:680:gst_nvinfer_logger:<primary-nvinference-engine> NvDsInferContext[UID 1]: Info from NvDsInferContextImpl::generateBackendContext() <nvdsinfer_context_impl.cpp:2012> [UID = 1]: Use deserialized engine model: /app/Deepstream-YOLOv8-Pose-With-Tracker/configs/yolov8s-pose.onnx_b1_gpu0_fp16.engine
0:00:03.608014406 2899760 0x55a975f67440 INFO nvinfer gstnvinfer_impl.cpp:328:notifyLoadModelStatus:<primary-nvinference-engine> [UID 1]: Load new model:configs/dstest1_pgie_YOLOv8-Pose_config.txt sucessfully
Decodebin child added: source
Decodebin child added: decodebin0
Running...
Decodebin child added: qtdemux0
Decodebin child added: multiqueue0
Decodebin child added: h264parse0
Decodebin child added: capsfilter0
Decodebin child added: aacparse0
Decodebin child added: avdec_aac0
Decodebin child added: nvv4l2decoder0
In cb_newpad
gstname=video/x-raw
features=memory:NVMM
In cb_newpad
gstname=audio/x-raw
FPS: 0.00
FPS: 29.99
FPS: 29.98
FPS: 29.99
FPS: 29.97
FPS: 29.97
FPS: 29.97
FPS: 30.18
FPS: 29.97
FPS: 29.97
nvstreammux: Successfully handled EOS for source_id=0
End of stream
Returned, stopping playback
[NvMultiObjectTracker] De-initialized
Deleting pipeline
But I cannot open the video file it creates. Could you help me look into this problem?
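One thing I suspect (but have not verified) is that the pipeline still links encoder -> rtppay -> sink, so the filesink is writing raw RTP payloads into output_video.mp4 rather than a real MP4 container. Below is a minimal sketch of the change I am considering: dropping rtppay and putting a parser plus an MP4 muxer in front of the filesink. The parser and muxer elements here are my guesses from generic GStreamer examples, not elements from my code above, and the sketch assumes the earlier gst_element_link_many() call is shortened to end at encoder.

// Hypothetical filesink branch (assumption, not verified against my setup):
// mux the encoded H264 stream into an MP4 container instead of RTP-payloading it.
GstElement *parser = gst_element_factory_make("h264parse", "parser");
GstElement *muxer = gst_element_factory_make("qtmux", "muxer");
sink = gst_element_factory_make("filesink", "filesink");
if (!parser || !muxer || !sink)
{
    g_printerr("Filesink branch elements could not be created. Exiting.\n");
    return -1;
}
g_object_set(G_OBJECT(sink), "location", "output_video.mp4", NULL);
// Add the new tail elements to the bin (rtppay is no longer added) ...
gst_bin_add_many(GST_BIN(pipeline), parser, muxer, sink, NULL);
// ... and link encoder -> parser -> muxer -> sink
// in place of encoder -> rtppay -> sink:
if (!gst_element_link_many(encoder, parser, muxer, sink, NULL))
{
    g_printerr("Filesink branch could not be linked. Exiting.\n");
    return -1;
}

If this is the right direction, I understand qtmux needs a clean EOS to finalize the MP4 header; since my log already shows "Successfully handled EOS" and "End of stream", I think that part should be fine. Is this the correct way to save the pipeline output as a playable .mp4 in DeepStream 6.2?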