• Hardware Platform (Jetson / GPU)
Jetson Orin Nano 8GB
• DeepStream Version
7.1
• JetPack Version (valid for Jetson only)
6.1
• TensorRT Version
10.3
• Camera: Intel RealSense D435i
Running v4l2-ctl --list-formats-ext --device=/dev/video4
returns, among other things, this:
ioctl: VIDIOC_ENUM_FMT
    Type: Video Capture
    [0]: 'YUYV' (YUYV 4:2:2)
    [...]
        Size: Discrete 640x480
            Interval: Discrete 0.017s (60.000 fps)
            Interval: Discrete 0.033s (30.000 fps)
            Interval: Discrete 0.067s (15.000 fps)
            Interval: Discrete 0.167s (6.000 fps)
    [...]
The camera itself works fine; I have used it successfully on multiple occasions.
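Since the D435i exposes several /dev/video nodes (depth, infrared, color, metadata), I double-checked which node carries the color stream before picking /dev/video4; this is plain v4l2-ctl, nothing DeepStream-specific:
v4l2-ctl --list-devices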
• Question
Hello, I'm trying to use a camera feed instead of a file source in my pipeline, based on the example deepstream-test-4.c.
The original pipeline looks like this:
file-source -> h264-parser -> nvh264-decoder -> nvstreammux -> [...]
and the one I want to achieve is this:
v4l2src -> caps_v4l2src -> vidconvsrc -> nvvidconvsrc -> caps_vidconvsrc -> [...]
Please correct me if there's anything wrong with this reasoning:
- There's an nvstreammux because in the near future I plan to add multiple cameras.
- I added converters because nvstreammux only accepts NV12 or RGBA buffers, and my camera outputs YUY2 (see the gst-launch sketch below).
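To make the intended layout concrete, this is roughly the standalone gst-launch-1.0 equivalent of the camera branch I'm trying to build (the device path and camera caps come from the v4l2-ctl output above; the streammux settings are placeholders and I haven't validated this exact line):
gst-launch-1.0 nvstreammux name=m batch-size=1 width=640 height=480 batched-push-timeout=40000 ! fakesink \
    v4l2src device=/dev/video4 ! 'video/x-raw, format=YUY2, width=640, height=480, framerate=30/1' ! \
    videoconvert ! nvvideoconvert ! 'video/x-raw(memory:NVMM), format=NV12' ! m.sink_0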
I'll attach the main function of my code; assume the flag USE_URI is not defined, so the #else branches below are the ones that get compiled.
int
main (int argc, char *argv[])
{
GMainLoop *loop = NULL;
GstElement *pipeline = NULL, *source = NULL, *sink = NULL, *pgie = NULL,
*nvvidconv = NULL, *nvosd = NULL, *nvstreammux = NULL, *nvtracker = NULL;
#ifdef USE_URI
GstElement *h264parser = NULL, *decoder = NULL;
#else
GstElement *caps_v4l2src = NULL, *vidconvsrc = NULL, *nvvidconvsrc = NULL, *caps_vidconvsrc = NULL;
#endif
GstElement *msgconv = NULL, *msgbroker = NULL, *tee = NULL;
GstElement *queue1 = NULL, *queue2 = NULL;
GstBus *bus = NULL;
guint bus_watch_id;
GstPad *osd_sink_pad = NULL;
GstPad *tee_render_pad = NULL;
GstPad *tee_msg_pad = NULL;
GstPad *sink_pad = NULL;
GstPad *src_pad = NULL;
GstPad *pgie_src_pad = NULL;
GOptionContext *ctx = NULL;
GOptionGroup *group = NULL;
GError *error = NULL;
NvDsGieType pgie_type = NVDS_GIE_PLUGIN_INFER;
int current_device = -1;
cudaGetDevice (&current_device);
struct cudaDeviceProp prop;
cudaGetDeviceProperties (&prop, current_device);
ctx = g_option_context_new ("Nvidia DeepStream Test4");
group = g_option_group_new ("test4", NULL, NULL, NULL, NULL);
g_option_group_add_entries (group, entries);
g_option_context_set_main_group (ctx, group);
g_option_context_add_group (ctx, gst_init_get_option_group ());
if (!g_option_context_parse (ctx, &argc, &argv, &error)) {
g_option_context_free (ctx);
g_printerr ("%s", error->message);
return -1;
}
g_option_context_free (ctx);
if (!proto_lib || !input_file) {
if (argc > 1 && !IS_YAML (argv[1])) {
g_printerr ("missing arguments\n");
g_printerr ("Usage: %s <yml file>\n", argv[0]);
g_printerr
("Usage: %s -i <H264 filename> -p <Proto adaptor library> --conn-str=<Connection string>\n",
argv[0]);
return -1;
} else if (!argv[1]) {
g_printerr ("missing arguments\n");
g_printerr ("Usage: %s <yml file>\n", argv[0]);
g_printerr
("Usage: %s -i <H264 filename> -p <Proto adaptor library> --conn-str=<Connection string>\n",
argv[0]);
return -1;
}
}
loop = g_main_loop_new (NULL, FALSE);
/* Parse inference plugin type */
if (argc > 1 && IS_YAML(argv[1])) {
g_print("%s", argv[1]);
RETURN_ON_PARSER_ERROR(nvds_parse_gie_type(&pgie_type, argv[1],
"primary-gie"));
}
/* Create gstreamer elements */
/* Create Pipeline element that will form a connection of other elements */
pipeline = gst_pipeline_new ("dstest4-pipeline");
#ifdef USE_URI
/* Source element for reading from the file */
source = gst_element_factory_make ("filesrc", "file-source");
/* Since the data format in the input file is elementary h264 stream,
* we need a h264parser */
h264parser = gst_element_factory_make ("h264parse", "h264-parser");
/* Use nvdec_h264 for hardware accelerated decode on GPU */
decoder = gst_element_factory_make ("nvv4l2decoder", "nvv4l2-decoder");
#else
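/* Elements for the USB camera capture branch (cf. the intended pipeline above) */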
caps_v4l2src = gst_element_factory_make("capsfilter", "v4l2src_caps");
vidconvsrc = gst_element_factory_make("v4l2src", "usb-cam-source");
nvvidconvsrc = gst_element_factory_make("nvvideoconvert", "convertor_src2");
caps_vidconvsrc = gst_element_factory_make("capsfilter", "nvmm_caps");
if (!caps_v4l2src || !vidconvsrc || !nvvidconvsrc || !caps_vidconvsrc) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
#endif
nvstreammux = gst_element_factory_make ("nvstreammux", "nvstreammux");
/* Use nvinfer or nvinferserver to run inferencing on decoder's output,
* behaviour of inferencing is set through config file */
if (pgie_type == NVDS_GIE_PLUGIN_INFER_SERVER) {
pgie = gst_element_factory_make ("nvinferserver", "primary-nvinference-engine");
} else {
pgie = gst_element_factory_make ("nvinfer", "primary-nvinference-engine");
}
/* We need to have a tracker to track the identified objects */
nvtracker = gst_element_factory_make ("nvtracker", "tracker");
/* Use convertor to convert from NV12 to RGBA as required by nvosd */
nvvidconv = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter");
/* Create OSD to draw on the converted RGBA buffer */
nvosd = gst_element_factory_make ("nvdsosd", "nv-onscreendisplay");
/* Create msg converter to generate payload from buffer metadata */
msgconv = gst_element_factory_make ("nvmsgconv", "nvmsg-converter");
/* Create msg broker to send payload to server */
msgbroker = gst_element_factory_make ("nvmsgbroker", "nvmsg-broker");
/* Create tee to render buffer and send message simultaneously */
tee = gst_element_factory_make ("tee", "nvsink-tee");
/* Create queues */
queue1 = gst_element_factory_make ("queue", "nvtee-que1");
queue2 = gst_element_factory_make ("queue", "nvtee-que2");
/* Finally render the osd output */
if (display_off) {
sink = gst_element_factory_make ("fakesink", "nvvideo-renderer");
} else if (prop.integrated) {
sink = gst_element_factory_make ("nv3dsink", "nv3d-sink");
} else {
#ifdef __aarch64__
sink = gst_element_factory_make ("nv3dsink", "nvvideo-renderer");
#else
sink = gst_element_factory_make ("nveglglessink", "nvvideo-renderer");
#endif
}
#ifdef USE_URI
if (!pipeline || !source || !h264parser || !decoder || !nvstreammux || !pgie
|| !nvtracker || !nvvidconv || !nvosd || !msgconv || !msgbroker || !tee
|| !queue1 || !queue2 || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
#endif
if (!set_tracker_properties(nvtracker)) {
g_printerr ("Failed to set tracker properties. Exiting.\n");
return -1;
}
if (argc > 1 && IS_YAML (argv[1])) {
// nvds_parse_*: GstElement * element, gchar * cfg_file_path, const char * group
#ifdef USE_URI
RETURN_ON_PARSER_ERROR(nvds_parse_file_source(source, argv[1], "sourceuri"));
#else
// caps_v4l2src.set_property('caps', Gst.Caps.from_string("video/x-raw, framerate=30/1"))
GstCaps *caps = gst_caps_new_simple("video/x-raw",
"format", G_TYPE_STRING, "YUY2",
"width", G_TYPE_INT, 640,
"height", G_TYPE_INT, 480,
"framerate", GST_TYPE_FRACTION, 30, 1,
NULL);
g_object_set(G_OBJECT(caps_v4l2src), "caps", caps, NULL);
gst_caps_unref(caps);
// caps_vidconvsrc.set_property('caps', Gst.Caps.from_string("video/x-raw(memory:NVMM)"))
GstCaps *caps1 = gst_caps_new_simple("video/x-raw",
"memory", G_TYPE_STRING, "NVMM",
NULL);
g_object_set(G_OBJECT(caps_vidconvsrc), "caps", caps1, NULL);
gst_caps_unref(caps1);
#endif
RETURN_ON_PARSER_ERROR(nvds_parse_streammux(nvstreammux, argv[1], "streammux"));
RETURN_ON_PARSER_ERROR(nvds_parse_gie(pgie, argv[1], "primary-gie"));
g_object_set (G_OBJECT (msgconv), "config", "dstest4_msgconv_config.yml",
NULL);
RETURN_ON_PARSER_ERROR(nvds_parse_msgconv (msgconv, argv[1], "msgconv"));
RETURN_ON_PARSER_ERROR(nvds_parse_msgbroker(msgbroker, argv[1], "msgbroker"));
RETURN_ON_PARSER_ERROR(nvds_parse_tracker(nvtracker, argv[1], "tracker"));
if (display_off) {
RETURN_ON_PARSER_ERROR(nvds_parse_fake_sink (sink, argv[1], "sink"));
}
else if (prop.integrated) {
RETURN_ON_PARSER_ERROR(nvds_parse_3d_sink (sink, argv[1], "sink"));
}
else {
#ifdef __aarch64__
RETURN_ON_PARSER_ERROR(nvds_parse_3d_sink (sink, argv[1], "sink"));
#else
RETURN_ON_PARSER_ERROR(nvds_parse_egl_sink (sink, argv[1], "sink"));
#endif
}
msg2p_meta = ds_test4_parse_meta_type(argv[1], "msgconv");
g_print("msg2p_meta = %d\n", msg2p_meta);
} else {
/* we set the input filename to the source element */
#ifdef USE_URI
g_object_set (G_OBJECT (source), "location", input_file, NULL);
#else
#endif
g_object_set (G_OBJECT (nvstreammux), "batch-size", 1, NULL);
g_object_set (G_OBJECT (nvstreammux), "width", MUXER_OUTPUT_WIDTH, "height",
MUXER_OUTPUT_HEIGHT,
"batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);
/* Set all the necessary properties of the nvinfer element,
* the necessary ones are : */
g_object_set (G_OBJECT (pgie), "config-file-path", PGIE_CONFIG_FILE, NULL);
g_object_set (G_OBJECT (msgconv), "config", MSCONV_CONFIG_FILE, NULL);
g_object_set (G_OBJECT (msgconv), "payload-type", schema_type, NULL);
g_object_set (G_OBJECT (msgconv), "msg2p-newapi", msg2p_meta, NULL);
g_object_set (G_OBJECT (msgconv), "frame-interval", frame_interval, NULL);
g_object_set (G_OBJECT (msgbroker), "proto-lib", proto_lib,
"conn-str", conn_str, "sync", FALSE, NULL);
if (topic) {
g_object_set (G_OBJECT (msgbroker), "topic", topic, NULL);
}
if (cfg_file) {
g_object_set (G_OBJECT (msgbroker), "config", cfg_file, NULL);
}
g_object_set (G_OBJECT (sink), "sync", TRUE, NULL);
}
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/* Set up the pipeline */
/* we add all elements into the pipeline */
gst_bin_add_many (GST_BIN (pipeline),
source, nvstreammux, pgie, nvtracker,
nvvidconv, nvosd, tee, queue1, queue2, msgconv, msgbroker, sink, NULL);
#ifdef USE_URI
gst_bin_add_many(GST_BIN (pipeline), h264parser, decoder, NULL);
/* we link the elements together */
/* file-source -> h264-parser -> nvh264-decoder -> nvstreammux ->
* pgie -> nvvidconv -> nvosd -> tee -> video-renderer
* |
* |-> msgconv -> msgbroker */
sink_pad = gst_element_request_pad_simple (nvstreammux, "sink_0");
if (!sink_pad) {
g_printerr ("Streammux request sink pad failed. Exiting.\n");
return -1;
}
src_pad = gst_element_get_static_pad (decoder, "src");
if (!src_pad) {
g_printerr ("Decoder request src pad failed. Exiting.\n");
return -1;
}
if (gst_pad_link (src_pad, sink_pad) != GST_PAD_LINK_OK) {
g_printerr ("Failed to link decoder to stream muxer. Exiting.\n");
return -1;
}
gst_object_unref (sink_pad);
gst_object_unref (src_pad);
if (!gst_element_link_many (source, h264parser, decoder, NULL)) {
g_printerr ("Elements could not be linked. Exiting.\n");
return -1;
}
#else
gst_bin_add_many(GST_BIN (pipeline), caps_v4l2src, vidconvsrc, nvvidconvsrc, caps_vidconvsrc, NULL);
sink_pad = gst_element_request_pad_simple (nvstreammux, "sink_0");
if (!sink_pad) {
g_printerr ("Streammux request sink pad failed. Exiting.\n");
return -1;
}
src_pad = gst_element_get_static_pad (caps_vidconvsrc, "src");
if (gst_pad_link (src_pad, sink_pad) != GST_PAD_LINK_OK) {
g_printerr ("Failed to link caps_vidconvsrc to stream muxer. Exiting.\n");
return -1;
}
gst_object_unref (sink_pad);
gst_object_unref (src_pad);
if (!gst_element_link_many (source, caps_v4l2src, vidconvsrc, nvvidconvsrc, NULL)) {
g_printerr ("Elements (source, caps_v4l2src, vidconvsrc, nvvidconvsrc) could not be linked. Exiting.\n");
return -1;
}
#endif
if (!gst_element_link_many (nvstreammux, pgie, nvtracker, nvvidconv, nvosd, tee, NULL)) {
g_printerr ("Elements could not be linked. Exiting.\n");
return -1;
}
if (!gst_element_link_many (queue1, msgconv, msgbroker, NULL)) {
g_printerr ("Elements could not be linked. Exiting.\n");
return -1;
}
if (!gst_element_link (queue2, sink)) {
g_printerr ("Elements could not be linked. Exiting.\n");
return -1;
}
sink_pad = gst_element_get_static_pad (queue1, "sink");
tee_msg_pad = gst_element_request_pad_simple (tee, "src_%u");
tee_render_pad = gst_element_request_pad_simple (tee, "src_%u");
if (!tee_msg_pad || !tee_render_pad) {
g_printerr ("Unable to get request pads\n");
return -1;
}
if (gst_pad_link (tee_msg_pad, sink_pad) != GST_PAD_LINK_OK) {
g_printerr ("Unable to link tee and message converter\n");
gst_object_unref (sink_pad);
return -1;
}
gst_object_unref (sink_pad);
sink_pad = gst_element_get_static_pad (queue2, "sink");
if (gst_pad_link (tee_render_pad, sink_pad) != GST_PAD_LINK_OK) {
g_printerr ("Unable to link tee and render\n");
gst_object_unref (sink_pad);
return -1;
}
gst_object_unref (sink_pad);
pgie_src_pad = gst_element_get_static_pad (pgie, "src");
/* Create Context for Object Encoding.
* Takes GPU ID as a parameter. Passed by user through commandline.
* Initialized as 0. */
NvDsObjEncCtxHandle obj_ctx_handle = nvds_obj_enc_create_context (0);
if (!obj_ctx_handle) {
g_print ("Unable to create context\n");
return -1;
}
if (!pgie_src_pad)
g_print ("Unable to get src pad\n");
else
gst_pad_add_probe (pgie_src_pad, GST_PAD_PROBE_TYPE_BUFFER,
pgie_src_pad_buffer_probe, (gpointer) obj_ctx_handle, NULL);
gst_object_unref (pgie_src_pad);
/* Lets add probe to get informed of the meta data generated, we add probe to
* the sink pad of the osd element, since by that time, the buffer would have
* had got all the metadata. */
osd_sink_pad = gst_element_get_static_pad (nvosd, "sink");
if (!osd_sink_pad)
g_print ("Unable to get sink pad\n");
else {
if (msg2p_meta == 0) { //generate payload using eventMsgMeta
// g_print ("msg2p_meta == 0\n");
gst_pad_add_probe (osd_sink_pad, GST_PAD_PROBE_TYPE_BUFFER,
osd_sink_pad_buffer_metadata_probe, NULL, NULL);
} else { //generate payload using NVDS_CUSTOM_MSG_BLOB
// g_print ("\nmsg2p_meta == 1, in theory it should send\n");
gst_pad_add_probe (osd_sink_pad, GST_PAD_PROBE_TYPE_BUFFER,
osd_sink_pad_buffer_image_probe, (gpointer) obj_ctx_handle, NULL);
}
}
gst_object_unref (osd_sink_pad);
/* Set the pipeline to "playing" state */
if (argc > 1 && IS_YAML (argv[1])) {
g_print ("Using file: %s\n", argv[1]);
} else {
g_print ("Now playing: %s\n", input_file);
}
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Wait till pipeline encounters an error or EOS */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
nvds_obj_enc_destroy_context (obj_ctx_handle);
g_free (cfg_file);
g_free (input_file);
g_free (topic);
g_free (conn_str);
g_free (proto_lib);
/* Release the request pads from the tee, and unref them */
gst_element_release_request_pad (tee, tee_msg_pad);
gst_element_release_request_pad (tee, tee_render_pad);
gst_object_unref (tee_msg_pad);
gst_object_unref (tee_render_pad);
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref (loop);
return 0;
}
Running the app with the environment variable GST_DEBUG=4 set prints this:
(deepstream-test4-app:129494): GStreamer-CRITICAL **: 16:12:37.054: gst_bin_add_many: assertion 'GST_IS_ELEMENT (element_1)' failed
0:00:00.347153760 129494 0xaaaae90b2400 INFO GST_ELEMENT_PADS gstelement.c:1013:gst_element_get_static_pad: no such pad 'sink_0' in element "nvstreammux"
0:00:00.347223712 129494 0xaaaae90b2400 INFO GST_ELEMENT_PADS gstelement.c:759:gst_element_add_pad:<nvstreammux> adding pad 'sink_0'
0:00:00.347254816 129494 0xaaaae90b2400 INFO GST_ELEMENT_PADS gstelement.c:1016:gst_element_get_static_pad: found pad nvmm_caps:src
0:00:00.347282880 129494 0xaaaae90b2400 INFO GST_PADS gstpad.c:2382:gst_pad_link_prepare: trying to link nvmm_caps:src and nvstreammux:sink_0
0:00:00.347295296 129494 0xaaaae90b2400 INFO GST_PADS gstpad.c:2432:gst_pad_link_prepare: pads have wrong hierarchy
0:00:00.347308000 129494 0xaaaae90b2400 INFO GST_PADS gstpad.c:2532:gst_pad_link_full: link between nvmm_caps:src and nvstreammux:sink_0 failed: wrong hierarchy
Failed to link caps_vidconvsrc to stream muxer. Exiting.
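While re-reading my code, one thing I'm unsure about is the NVMM capsfilter: the Python sample uses Gst.Caps.from_string("video/x-raw(memory:NVMM)"), which sets a caps feature, while my gst_caps_new_simple() call only adds a plain "memory" string field. If that matters, this is the variant I would try instead (just a sketch; it is not what produced the log above):
/* Build the NVMM caps from a string so the memory:NVMM caps feature is set,
 * instead of adding a plain "memory" field to the structure. */
GstCaps *caps1 = gst_caps_from_string ("video/x-raw(memory:NVMM)");
g_object_set (G_OBJECT (caps_vidconvsrc), "caps", caps1, NULL);
gst_caps_unref (caps1);
I also notice I never set the "device" property on v4l2src anywhere, so it presumably defaults to /dev/video0 instead of /dev/video4.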
Beyond that, from the GStreamer documentation I understand that a "wrong hierarchy" link failure means the two pads' elements do not share the same parent bin, but I can't see where that happens in my code. Any advice on how to proceed?