How to add a v4l2 source?

• Hardware Platform (Jetson / GPU)
Jetson Orin Nano 8GB
• DeepStream Version
7.1
• JetPack Version (valid for Jetson only)
6.1
• TensorRT Version
10.3
• Camera: Intel RealSense d435i
Running v4l2-ctl --list-formats-ext --device=/dev/video4 returns, among the other things, this:

ioctl: VIDIOC_ENUM_FMT
	Type: Video Capture

	[0]: 'YUYV' (YUYV 4:2:2)
        [...]
		Size: Discrete 640x480
			Interval: Discrete 0.017s (60.000 fps)
			Interval: Discrete 0.033s (30.000 fps)
			Interval: Discrete 0.067s (15.000 fps)
			Interval: Discrete 0.167s (6.000 fps)
        [...]

The camera works as it should, I used it on multiple occasions

• Question
Hello, I’m trying to get a camera feed instead of a file source for my pipeline, based on the example deepstream-test-4.c.

The original pipeline looks like this

file-source -> h264-parser -> nvh264-decoder -> nvstreammux -> [...]

and the one I wanted to achieve is this

v4l2src -> caps_v4l2src -> vidconvsrc -> nvvidconvsrc -> caps_vidconvsrc -> [...]

Please correct me if there’s anything wrong

  • There’s a nvstreammux is because in the near future I plan to add multiple cameras
  • I added a converter because that plugin takes as input NV12 or RGBA sources, and my camera has a YUV2 color space

I’ll attach the main function of my code, assume the flag USE_URI is set to false.

int
main (int argc, char *argv[])
{
  GMainLoop *loop = NULL;
  GstElement *pipeline = NULL, *source = NULL, *sink = NULL, *pgie = NULL,
  *nvvidconv = NULL, *nvosd = NULL, *nvstreammux = NULL, *nvtracker = NULL;
  
  #ifdef USE_URI
  GstElement *h264parser = NULL, *decoder = NULL; 
  #else
  GstElement *caps_v4l2src = NULL, *vidconvsrc = NULL, *nvvidconvsrc = NULL, *caps_vidconvsrc = NULL;
  #endif

  GstElement *msgconv = NULL, *msgbroker = NULL, *tee = NULL;
  GstElement *queue1 = NULL, *queue2 = NULL;
  GstBus *bus = NULL;
  guint bus_watch_id;
  GstPad *osd_sink_pad = NULL;
  GstPad *tee_render_pad = NULL;
  GstPad *tee_msg_pad = NULL;
  GstPad *sink_pad = NULL;
  GstPad *src_pad = NULL;
  GstPad *pgie_src_pad = NULL;
  GOptionContext *ctx = NULL;
  GOptionGroup *group = NULL;
  GError *error = NULL;
  NvDsGieType pgie_type = NVDS_GIE_PLUGIN_INFER;

  int current_device = -1;
  cudaGetDevice (&current_device);
  struct cudaDeviceProp prop;
  cudaGetDeviceProperties (&prop, current_device);

  ctx = g_option_context_new ("Nvidia DeepStream Test4");
  group = g_option_group_new ("test4", NULL, NULL, NULL, NULL);
  g_option_group_add_entries (group, entries);

  g_option_context_set_main_group (ctx, group);
  g_option_context_add_group (ctx, gst_init_get_option_group ());


  if (!g_option_context_parse (ctx, &argc, &argv, &error)) {
    g_option_context_free (ctx);
    g_printerr ("%s", error->message);
    return -1;
  }
  g_option_context_free (ctx);

  if (!proto_lib || !input_file) {
    if (argc > 1 && !IS_YAML (argv[1])) {
      g_printerr ("missing arguments\n");
      g_printerr ("Usage: %s <yml file>\n", argv[0]);
      g_printerr
          ("Usage: %s -i <H264 filename> -p <Proto adaptor library> --conn-str=<Connection string>\n",
          argv[0]);
      return -1;
    } else if (!argv[1]) {
      g_printerr ("missing arguments\n");
      g_printerr ("Usage: %s <yml file>\n", argv[0]);
      g_printerr
          ("Usage: %s -i <H264 filename> -p <Proto adaptor library> --conn-str=<Connection string>\n",
          argv[0]);
      return -1;
    }
  }

  loop = g_main_loop_new (NULL, FALSE);

  /* Parse inference plugin type */
  if (argc > 1 && IS_YAML(argv[1])) {
    g_print("%s", argv[1]);
    RETURN_ON_PARSER_ERROR(nvds_parse_gie_type(&pgie_type, argv[1],
                "primary-gie"));
  }

  /* Create gstreamer elements */
  /* Create Pipeline element that will form a connection of other elements */
  pipeline = gst_pipeline_new ("dstest4-pipeline");
  
  #ifdef USE_URI
  /* Source element for reading from the file */
  source = gst_element_factory_make ("filesrc", "file-source");

  /* Since the data format in the input file is elementary h264 stream,
  * we need a h264parser */
  h264parser = gst_element_factory_make ("h264parse", "h264-parser");

  /* Use nvdec_h264 for hardware accelerated decode on GPU */
  decoder = gst_element_factory_make ("nvv4l2decoder", "nvv4l2-decoder");
  #else

  caps_v4l2src = gst_element_factory_make("capsfilter", "v4l2src_caps");
  vidconvsrc = gst_element_factory_make("v4l2src", "usb-cam-source");
  nvvidconvsrc = gst_element_factory_make("nvvideoconvert", "convertor_src2");
  caps_vidconvsrc = gst_element_factory_make("capsfilter", "nvmm_caps");

  if (!caps_v4l2src || !vidconvsrc || !nvvidconvsrc || !caps_vidconvsrc) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }
  
  #endif

  nvstreammux = gst_element_factory_make ("nvstreammux", "nvstreammux");

  /* Use nvinfer or nvinferserver to run inferencing on decoder's output,
   * behaviour of inferencing is set through config file */
  if (pgie_type == NVDS_GIE_PLUGIN_INFER_SERVER) {
    pgie = gst_element_factory_make ("nvinferserver", "primary-nvinference-engine");
  } else {
    pgie = gst_element_factory_make ("nvinfer", "primary-nvinference-engine");
  }

  /* We need to have a tracker to track the identified objects */
  nvtracker = gst_element_factory_make ("nvtracker", "tracker");

  /* Use convertor to convert from NV12 to RGBA as required by nvosd */
  nvvidconv = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter");

  /* Create OSD to draw on the converted RGBA buffer */
  nvosd = gst_element_factory_make ("nvdsosd", "nv-onscreendisplay");

  /* Create msg converter to generate payload from buffer metadata */
  msgconv = gst_element_factory_make ("nvmsgconv", "nvmsg-converter");

  /* Create msg broker to send payload to server */
  msgbroker = gst_element_factory_make ("nvmsgbroker", "nvmsg-broker");

  /* Create tee to render buffer and send message simultaneously */
  tee = gst_element_factory_make ("tee", "nvsink-tee");

  /* Create queues */
  queue1 = gst_element_factory_make ("queue", "nvtee-que1");
  queue2 = gst_element_factory_make ("queue", "nvtee-que2");

  /* Finally render the osd output */
  if (display_off) {
    sink = gst_element_factory_make ("fakesink", "nvvideo-renderer");
  } else if (prop.integrated) {
    sink = gst_element_factory_make ("nv3dsink", "nv3d-sink");
  } else {
#ifdef __aarch64__
    sink = gst_element_factory_make ("nv3dsink", "nvvideo-renderer");
#else
    sink = gst_element_factory_make ("nveglglessink", "nvvideo-renderer");
#endif
  }

#ifdef USE_URI
  if (!pipeline || !source || !h264parser || !decoder || !nvstreammux || !pgie
      || !nvtracker || !nvvidconv || !nvosd || !msgconv || !msgbroker || !tee
      || !queue1 || !queue2 || !sink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }
#endif


  if (!set_tracker_properties(nvtracker)) {
    g_printerr ("Failed to set tracker properties. Exiting.\n");
    return -1;
  }

  if (argc > 1 && IS_YAML (argv[1])) {
    // nvds_parse_*: GstElement * element, gchar * cfg_file_path, const char * group
#ifdef USE_URI
    RETURN_ON_PARSER_ERROR(nvds_parse_file_source(source, argv[1], "sourceuri"));
#else

    // caps_v4l2src.set_property('caps', Gst.Caps.from_string("video/x-raw, framerate=30/1"))
    GstCaps *caps = gst_caps_new_simple("video/x-raw",
                            "format", G_TYPE_STRING, "YUY2",
                            "width", G_TYPE_INT, 640,
                            "height", G_TYPE_INT, 480,
                            "framerate", GST_TYPE_FRACTION, 30, 1,
                            NULL);

    g_object_set(G_OBJECT(caps_v4l2src), "caps", caps, NULL);


    // caps_vidconvsrc.set_property('caps', Gst.Caps.from_string("video/x-raw(memory:NVMM)"))
    GstCaps *caps1 = gst_caps_new_simple("video/x-raw",
                                    "memory", G_TYPE_STRING, "NVMM",
                                    NULL);

    g_object_set(G_OBJECT(caps_vidconvsrc), "caps", caps1, NULL);

#endif

    RETURN_ON_PARSER_ERROR(nvds_parse_streammux(nvstreammux, argv[1], "streammux"));

    RETURN_ON_PARSER_ERROR(nvds_parse_gie(pgie, argv[1], "primary-gie"));

    g_object_set (G_OBJECT (msgconv), "config", "dstest4_msgconv_config.yml",
        NULL);

    RETURN_ON_PARSER_ERROR(nvds_parse_msgconv (msgconv, argv[1], "msgconv"));

    RETURN_ON_PARSER_ERROR(nvds_parse_msgbroker(msgbroker, argv[1], "msgbroker"));

    RETURN_ON_PARSER_ERROR(nvds_parse_tracker(nvtracker, argv[1], "tracker"));

    if (display_off) {
      RETURN_ON_PARSER_ERROR(nvds_parse_fake_sink (sink, argv[1], "sink"));
    }
    else if (prop.integrated) {
      RETURN_ON_PARSER_ERROR(nvds_parse_3d_sink (sink, argv[1], "sink"));
    }
    else {
#ifdef __aarch64__
      RETURN_ON_PARSER_ERROR(nvds_parse_3d_sink (sink, argv[1], "sink"));
#else
      RETURN_ON_PARSER_ERROR(nvds_parse_egl_sink (sink, argv[1], "sink"));
#endif
    }

    msg2p_meta = ds_test4_parse_meta_type(argv[1], "msgconv");
    g_print("msg2p_meta = %d\n", msg2p_meta);
  } else {
    /* we set the input filename to the source element */

#ifdef USE_URI
    g_object_set (G_OBJECT (source), "location", input_file, NULL);
#else

#endif

    g_object_set (G_OBJECT (nvstreammux), "batch-size", 1, NULL);

    g_object_set (G_OBJECT (nvstreammux), "width", MUXER_OUTPUT_WIDTH, "height",
        MUXER_OUTPUT_HEIGHT,
        "batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);

    /* Set all the necessary properties of the nvinfer element,
     * the necessary ones are : */
    g_object_set (G_OBJECT (pgie), "config-file-path", PGIE_CONFIG_FILE, NULL);

    g_object_set (G_OBJECT (msgconv), "config", MSCONV_CONFIG_FILE, NULL);
    g_object_set (G_OBJECT (msgconv), "payload-type", schema_type, NULL);
    g_object_set (G_OBJECT (msgconv), "msg2p-newapi", msg2p_meta, NULL);
    g_object_set (G_OBJECT (msgconv), "frame-interval", frame_interval, NULL);

    g_object_set (G_OBJECT (msgbroker), "proto-lib", proto_lib,
        "conn-str", conn_str, "sync", FALSE, NULL);

    if (topic) {
      g_object_set (G_OBJECT (msgbroker), "topic", topic, NULL);
    }

    if (cfg_file) {
      g_object_set (G_OBJECT (msgbroker), "config", cfg_file, NULL);
    }

    g_object_set (G_OBJECT (sink), "sync", TRUE, NULL);
  }
  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* Set up the pipeline */
  /* we add all elements into the pipeline */
  gst_bin_add_many (GST_BIN (pipeline),
      source, nvstreammux, pgie, nvtracker,
      nvvidconv, nvosd, tee, queue1, queue2, msgconv, msgbroker, sink, NULL);

#ifdef USE_URI
  gst_bin_add_many(GST_BIN (pipeline), h264parser, decoder, NULL);
  /* we link the elements together */
  /* file-source -> h264-parser -> nvh264-decoder -> nvstreammux ->
   * pgie -> nvvidconv -> nvosd -> tee -> video-renderer
   *                                      |
   *                                      |-> msgconv -> msgbroker  */

  sink_pad = gst_element_request_pad_simple (nvstreammux, "sink_0");
  if (!sink_pad) {
    g_printerr ("Streammux request sink pad failed. Exiting.\n");
    return -1;
  }

  src_pad = gst_element_get_static_pad (decoder, "src");
  if (!src_pad) {
    g_printerr ("Decoder request src pad failed. Exiting.\n");
    return -1;
  }

  if (gst_pad_link (src_pad, sink_pad) != GST_PAD_LINK_OK) {
    g_printerr ("Failed to link decoder to stream muxer. Exiting.\n");
    return -1;
  }

  gst_object_unref (sink_pad);
  gst_object_unref (src_pad);

  if (!gst_element_link_many (source, h264parser, decoder, NULL)) {
    g_printerr ("Elements could not be linked. Exiting.\n");
    return -1;
  }
#else

  gst_bin_add_many(GST_BIN (pipeline), caps_v4l2src, vidconvsrc, nvvidconvsrc, caps_vidconvsrc, NULL);
  
  sink_pad = gst_element_request_pad_simple (nvstreammux, "sink_0");

  if (!sink_pad) {
    g_printerr ("Streammux request sink pad failed. Exiting.\n");
    return -1;
  }

  src_pad = gst_element_get_static_pad (caps_vidconvsrc, "src");
  if (gst_pad_link (src_pad, sink_pad) != GST_PAD_LINK_OK) {
    g_printerr ("Failed to link caps_vidconvsrc to stream muxer. Exiting.\n");
    return -1;
  }

  gst_object_unref (sink_pad);
  gst_object_unref (src_pad);

  if (!gst_element_link_many (source, caps_v4l2src, vidconvsrc, nvvidconvsrc, NULL)) {
    g_printerr ("Elements (source, caps_v4l2src, vidconvsrc, nvvidconvsrc) could not be linked. Exiting.\n");
    return -1;
  }

#endif

  if (!gst_element_link_many (nvstreammux, pgie, nvtracker, nvvidconv, nvosd, tee, NULL)) {
    g_printerr ("Elements could not be linked. Exiting.\n");
    return -1;
  }

  if (!gst_element_link_many (queue1, msgconv, msgbroker, NULL)) {
    g_printerr ("Elements could not be linked. Exiting.\n");
    return -1;
  }

  if (!gst_element_link (queue2, sink)) {
    g_printerr ("Elements could not be linked. Exiting.\n");
    return -1;
  }

  sink_pad = gst_element_get_static_pad (queue1, "sink");
  tee_msg_pad = gst_element_request_pad_simple (tee, "src_%u");
  tee_render_pad = gst_element_request_pad_simple (tee, "src_%u");
  if (!tee_msg_pad || !tee_render_pad) {
    g_printerr ("Unable to get request pads\n");
    return -1;
  }

  if (gst_pad_link (tee_msg_pad, sink_pad) != GST_PAD_LINK_OK) {
    g_printerr ("Unable to link tee and message converter\n");
    gst_object_unref (sink_pad);
    return -1;
  }

  gst_object_unref (sink_pad);

  sink_pad = gst_element_get_static_pad (queue2, "sink");
  if (gst_pad_link (tee_render_pad, sink_pad) != GST_PAD_LINK_OK) {
    g_printerr ("Unable to link tee and render\n");
    gst_object_unref (sink_pad);
    return -1;
  }

  gst_object_unref (sink_pad);

  pgie_src_pad = gst_element_get_static_pad (pgie, "src");
  /* Create Context for Object Encoding.
   * Takes GPU ID as a parameter. Passed by user through commandline.
   * Initialized as 0. */
  NvDsObjEncCtxHandle obj_ctx_handle = nvds_obj_enc_create_context (0);
  if (!obj_ctx_handle) {
    g_print ("Unable to create context\n");
    return -1;
  }
  if (!pgie_src_pad)
    g_print ("Unable to get src pad\n");
  else
    gst_pad_add_probe (pgie_src_pad, GST_PAD_PROBE_TYPE_BUFFER,
        pgie_src_pad_buffer_probe, (gpointer) obj_ctx_handle, NULL);
  gst_object_unref (pgie_src_pad);

  /* Lets add probe to get informed of the meta data generated, we add probe to
   * the sink pad of the osd element, since by that time, the buffer would have
   * had got all the metadata. */
  osd_sink_pad = gst_element_get_static_pad (nvosd, "sink");
  if (!osd_sink_pad)
    g_print ("Unable to get sink pad\n");
  else {
    if (msg2p_meta == 0) {        //generate payload using eventMsgMeta
      // g_print ("msg2p_meta == 0\n");
      gst_pad_add_probe (osd_sink_pad, GST_PAD_PROBE_TYPE_BUFFER,
          osd_sink_pad_buffer_metadata_probe, NULL, NULL);
    } else {                //generate payload using NVDS_CUSTOM_MSG_BLOB
      // g_print ("\nmsg2p_meta == 1, in teoria dovrebbe inviare\n");
      gst_pad_add_probe (osd_sink_pad, GST_PAD_PROBE_TYPE_BUFFER,
          osd_sink_pad_buffer_image_probe, (gpointer) obj_ctx_handle, NULL);
    }
  }
  gst_object_unref (osd_sink_pad);

  /* Set the pipeline to "playing" state */
  if (argc > 1 && IS_YAML (argv[1])) {
    g_print ("Using file: %s\n", argv[1]);
  } else {
    g_print ("Now playing: %s\n", input_file);
  }
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait till pipeline encounters an error or EOS */
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");

  nvds_obj_enc_destroy_context (obj_ctx_handle);
  g_free (cfg_file);
  g_free (input_file);
  g_free (topic);
  g_free (conn_str);
  g_free (proto_lib);

  /* Release the request pads from the tee, and unref them */
  gst_element_release_request_pad (tee, tee_msg_pad);
  gst_element_release_request_pad (tee, tee_render_pad);
  gst_object_unref (tee_msg_pad);
  gst_object_unref (tee_render_pad);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);
  return 0;
}

Compiling with the environment variable GST_DEBUG=4 returns this

(deepstream-test4-app:129494): GStreamer-CRITICAL **: 16:12:37.054: gst_bin_add_many: assertion 'GST_IS_ELEMENT (element_1)' failed
0:00:00.347153760 129494 0xaaaae90b2400 INFO        GST_ELEMENT_PADS gstelement.c:1013:gst_element_get_static_pad: no such pad 'sink_0' in element "nvstreammux"
0:00:00.347223712 129494 0xaaaae90b2400 INFO        GST_ELEMENT_PADS gstelement.c:759:gst_element_add_pad:<nvstreammux> adding pad 'sink_0'
0:00:00.347254816 129494 0xaaaae90b2400 INFO        GST_ELEMENT_PADS gstelement.c:1016:gst_element_get_static_pad: found pad nvmm_caps:src
0:00:00.347282880 129494 0xaaaae90b2400 INFO                GST_PADS gstpad.c:2382:gst_pad_link_prepare: trying to link nvmm_caps:src and nvstreammux:sink_0
0:00:00.347295296 129494 0xaaaae90b2400 INFO                GST_PADS gstpad.c:2432:gst_pad_link_prepare: pads have wrong hierarchy
0:00:00.347308000 129494 0xaaaae90b2400 INFO                GST_PADS gstpad.c:2532:gst_pad_link_full: link between nvmm_caps:src and nvstreammux:sink_0 failed: wrong hierarchy
Failed to link caps_vidconvsrc to stream muxer. Exiting.

Any advice on how to proceed?

You can debug with gst-launch-1.0 first, and then rewrite the pipeline as source code

Please refer to this FAQ

1 Like

Hello and thanks for replying

Starting from the link you provided, I wrote this command to get a working working pipeline

gst-launch-1.0 v4l2src device=/dev/video4 io-mode=2  ! 'video/x-raw,format=YUY2,width=1920,height=1080,framerate=30/1' ! videoconvert  ! 'video/x-raw,format=I420' ! nvvideoconvert  ! 'video/x-raw(memory:NVMM),format=NV12' ! mux.sink_0 nvstreammux name=mux width=1920 height=1080 batch-size=1 batched-push-timeout=25000 ! nvvideoconvert ! fpsdisplaysink video-sink=autovideosink sync=false text-overlay=true

I rewrote the pipeline as follows

int
main (int argc, char *argv[])
{
  GMainLoop *loop = NULL;
  GstElement *pipeline = NULL, *source = NULL, *sink = NULL, *pgie = NULL,
  *nvvidconv = NULL, *nvosd = NULL, *nvstreammux = NULL, *nvtracker = NULL;
  
  GstElement *caps_v4l2src = NULL, *vidconvsrc = NULL, *nvvidconvsrc = NULL, *caps_vidconvsrc = NULL;
  
  GstElement *msgconv = NULL, *msgbroker = NULL, *tee = NULL;
  GstElement *queue1 = NULL, *queue2 = NULL;
  GstBus *bus = NULL;
  guint bus_watch_id;
  GstPad *osd_sink_pad = NULL;
  GstPad *tee_render_pad = NULL;
  GstPad *tee_msg_pad = NULL;
  GstPad *sink_pad = NULL;
  GstPad *src_pad = NULL;
  GstPad *pgie_src_pad = NULL;
  GOptionContext *ctx = NULL;
  GOptionGroup *group = NULL;
  GError *error = NULL;
  NvDsGieType pgie_type = NVDS_GIE_PLUGIN_INFER;

  int current_device = -1;
  cudaGetDevice (&current_device);
  struct cudaDeviceProp prop;
  cudaGetDeviceProperties (&prop, current_device);

  ctx = g_option_context_new ("Nvidia DeepStream Test4");
  group = g_option_group_new ("test4", NULL, NULL, NULL, NULL);
  g_option_group_add_entries (group, entries);

  g_option_context_set_main_group (ctx, group);
  g_option_context_add_group (ctx, gst_init_get_option_group ());


  if (!g_option_context_parse (ctx, &argc, &argv, &error)) {
    g_option_context_free (ctx);
    g_printerr ("%s", error->message);
    return -1;
  }
  g_option_context_free (ctx);

  if (!proto_lib || !input_file) {
    if (argc > 1 && !IS_YAML (argv[1])) {
      g_printerr ("missing arguments\n");
      g_printerr ("Usage: %s <yml file>\n", argv[0]);
      g_printerr
          ("Usage: %s -i <H264 filename> -p <Proto adaptor library> --conn-str=<Connection string>\n",
          argv[0]);
      return -1;
    } else if (!argv[1]) {
      g_printerr ("missing arguments\n");
      g_printerr ("Usage: %s <yml file>\n", argv[0]);
      g_printerr
          ("Usage: %s -i <H264 filename> -p <Proto adaptor library> --conn-str=<Connection string>\n",
          argv[0]);
      return -1;
    }
  }

  loop = g_main_loop_new (NULL, FALSE);

  /* Parse inference plugin type */
  if (argc > 1 && IS_YAML(argv[1])) {
    g_print("%s", argv[1]);
    RETURN_ON_PARSER_ERROR(nvds_parse_gie_type(&pgie_type, argv[1],
                "primary-gie"));
  }

  /* Create gstreamer elements */
  /* Create Pipeline element that will form a connection of other elements */
  pipeline = gst_pipeline_new ("dstest4-pipeline");
  

  source = gst_element_factory_make("v4l2src", "usb-cam-source");
  caps_v4l2src = gst_element_factory_make("capsfilter", "v4l2src_caps");
  vidconvsrc = gst_element_factory_make("videoconvert", "vidconvsrc");
  nvvidconvsrc = gst_element_factory_make("nvvideoconvert", "nvvideoconvert_0");
  caps_vidconvsrc = gst_element_factory_make("capsfilter", "nvmm_caps");

  if (!caps_v4l2src || !vidconvsrc || !nvvidconvsrc || !caps_vidconvsrc) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  nvstreammux = gst_element_factory_make ("nvstreammux", "nvstreammux");

  /* Use nvinfer or nvinferserver to run inferencing on decoder's output,
   * behaviour of inferencing is set through config file */
  if (pgie_type == NVDS_GIE_PLUGIN_INFER_SERVER) {
    pgie = gst_element_factory_make ("nvinferserver", "primary-nvinference-engine");
  } else {
    pgie = gst_element_factory_make ("nvinfer", "primary-nvinference-engine");
  }

  /* We need to have a tracker to track the identified objects */
  nvtracker = gst_element_factory_make ("nvtracker", "tracker");

  /* Use convertor to convert from NV12 to RGBA as required by nvosd */
  nvvidconv = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter");

  /* Create OSD to draw on the converted RGBA buffer */
  nvosd = gst_element_factory_make ("nvdsosd", "nv-onscreendisplay");

  /* Create msg converter to generate payload from buffer metadata */
  msgconv = gst_element_factory_make ("nvmsgconv", "nvmsg-converter");

  /* Create msg broker to send payload to server */
  msgbroker = gst_element_factory_make ("nvmsgbroker", "nvmsg-broker");

  /* Create tee to render buffer and send message simultaneously */
  tee = gst_element_factory_make ("tee", "nvsink-tee");

  /* Create queues */
  queue1 = gst_element_factory_make ("queue", "nvtee-que1");
  queue2 = gst_element_factory_make ("queue", "nvtee-que2");

  /* Finally render the osd output */
  if (display_off) {
    sink = gst_element_factory_make ("fakesink", "nvvideo-renderer");
  } else if (prop.integrated) {
    sink = gst_element_factory_make ("nv3dsink", "nv3d-sink");
  } else {
#ifdef __aarch64__
    sink = gst_element_factory_make ("nv3dsink", "nvvideo-renderer");
#else
    sink = gst_element_factory_make ("nveglglessink", "nvvideo-renderer");
#endif
  }

  if (!pipeline || !source || !caps_v4l2src || !vidconvsrc || !nvvidconvsrc || !caps_vidconvsrc
       || !nvstreammux || !pgie || !nvtracker || !nvvidconv || !nvosd || !msgconv || !msgbroker || !tee
      || !queue1 || !queue2 || !sink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  if (!set_tracker_properties(nvtracker)) {
    g_printerr ("Failed to set tracker properties. Exiting.\n");
    return -1;
  }

  if (argc > 1 && IS_YAML (argv[1])) {
    // nvds_parse_*: GstElement * element, gchar * cfg_file_path, const char * group

    GstCaps *caps = gst_caps_from_string("video/x-raw,format=YUY2,width=1920,height=1080,framerate=30/1");
    g_object_set(G_OBJECT(caps_v4l2src), "caps", caps, NULL);

    GstCaps *caps1 = gst_caps_from_string("video/x-raw,format=I420");
    g_object_set(G_OBJECT(caps_vidconvsrc), "caps", caps1, NULL);

    gst_caps_unref(caps);
    gst_caps_unref(caps1);

    RETURN_ON_PARSER_ERROR(nvds_parse_streammux(nvstreammux, argv[1], "streammux"));

    RETURN_ON_PARSER_ERROR(nvds_parse_gie(pgie, argv[1], "primary-gie"));

    g_object_set (G_OBJECT (msgconv), "config", "dstest4_msgconv_config.yml",
        NULL);

    RETURN_ON_PARSER_ERROR(nvds_parse_msgconv (msgconv, argv[1], "msgconv"));

    RETURN_ON_PARSER_ERROR(nvds_parse_msgbroker(msgbroker, argv[1], "msgbroker"));

    RETURN_ON_PARSER_ERROR(nvds_parse_tracker(nvtracker, argv[1], "tracker"));

    if (display_off) {
      RETURN_ON_PARSER_ERROR(nvds_parse_fake_sink (sink, argv[1], "sink"));
    }
    else if (prop.integrated) {
      RETURN_ON_PARSER_ERROR(nvds_parse_3d_sink (sink, argv[1], "sink"));
    }
    else {
#ifdef __aarch64__
      RETURN_ON_PARSER_ERROR(nvds_parse_3d_sink (sink, argv[1], "sink"));
#else
      RETURN_ON_PARSER_ERROR(nvds_parse_egl_sink (sink, argv[1], "sink"));
#endif
    }

    msg2p_meta = ds_test4_parse_meta_type(argv[1], "msgconv");
    g_print("msg2p_meta = %d\n", msg2p_meta);
  } else {
    /* we set the input filename to the source element */


    g_object_set (G_OBJECT (source), "device", "/dev/video4", NULL);
    g_object_set (G_OBJECT (source), "width", 1920, NULL);
    g_object_set (G_OBJECT (source), "heigth", 480, NULL);

    g_object_set (G_OBJECT (nvstreammux), "batch-size", 1, NULL);

    g_object_set (G_OBJECT (nvstreammux), "width", MUXER_OUTPUT_WIDTH, "height",
        MUXER_OUTPUT_HEIGHT, "batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);

    /* Set all the necessary properties of the nvinfer element,
     * the necessary ones are : */
    g_object_set (G_OBJECT (pgie), "config-file-path", PGIE_CONFIG_FILE, NULL);

    g_object_set (G_OBJECT (msgconv), "config", MSCONV_CONFIG_FILE, NULL);
    g_object_set (G_OBJECT (msgconv), "payload-type", schema_type, NULL);
    g_object_set (G_OBJECT (msgconv), "msg2p-newapi", msg2p_meta, NULL);
    g_object_set (G_OBJECT (msgconv), "frame-interval", frame_interval, NULL);

    g_object_set (G_OBJECT (msgbroker), "proto-lib", proto_lib,
        "conn-str", conn_str, "sync", FALSE, NULL);

    if (topic) {
      g_object_set (G_OBJECT (msgbroker), "topic", topic, NULL);
    }

    if (cfg_file) {
      g_object_set (G_OBJECT (msgbroker), "config", cfg_file, NULL);
    }

    g_object_set (G_OBJECT (sink), "sync", TRUE, NULL);
  }
  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* Set up the pipeline */
  /* we add all elements into the pipeline */

  gst_bin_add_many (GST_BIN (pipeline), source, caps_v4l2src,
    vidconvsrc, nvvidconvsrc, caps_vidconvsrc, nvstreammux, pgie, nvtracker,
    nvvidconv, nvosd, tee, queue1, queue2, msgconv, msgbroker, sink, NULL);

  sink_pad = gst_element_request_pad_simple (nvstreammux, "sink_0");
  
  if (!sink_pad) {
    g_printerr ("Streammux request sink pad failed. Exiting.\n");
    return -1;
  }

  src_pad = gst_element_get_static_pad (caps_vidconvsrc, "src");
  
  if (gst_pad_link (src_pad, sink_pad) != GST_PAD_LINK_OK) {
    g_printerr ("Failed to link caps_vidconvsrc to stream muxer. Exiting.\n");
    return -1;
  }

  if (!gst_element_link_many (source, caps_v4l2src, NULL)) {
    g_printerr ("Elements (source, caps_v4l2src) could not be linked. Exiting.\n");
    return -1;
  }

  if (!gst_element_link_many (vidconvsrc, nvvidconvsrc, NULL)) {
    g_printerr ("Elements (vidconvsrc, nvvidconvsrc) could not be linked. Exiting.\n");
    return -1;
  }

  if (!gst_element_link_many (nvstreammux, pgie, nvtracker, nvvidconv, nvosd, tee, NULL)) {
    g_printerr ("Elements could not be linked. Exiting.\n");
    return -1;
  }

  if (!gst_element_link_many (queue1, msgconv, msgbroker, NULL)) {
    g_printerr ("Elements could not be linked. Exiting.\n");
    return -1;
  }

  if (!gst_element_link (queue2, sink)) {
    g_printerr ("Elements could not be linked. Exiting.\n");
    return -1;
  }

  sink_pad = gst_element_get_static_pad (queue1, "sink");
  tee_msg_pad = gst_element_request_pad_simple (tee, "src_%u");
  tee_render_pad = gst_element_request_pad_simple (tee, "src_%u");

  if (!tee_msg_pad || !tee_render_pad) {
    g_printerr ("Unable to get request pads\n");
    return -1;
  }

  if (gst_pad_link (tee_msg_pad, sink_pad) != GST_PAD_LINK_OK) {
    g_printerr ("Unable to link tee and message converter\n");
    gst_object_unref (sink_pad);
    return -1;
  }

  gst_object_unref (sink_pad);

  sink_pad = gst_element_get_static_pad (queue2, "sink");
  if (gst_pad_link (tee_render_pad, sink_pad) != GST_PAD_LINK_OK) {
    g_printerr ("Unable to link tee and render\n");
    gst_object_unref (sink_pad);
    return -1;
  }

  gst_object_unref (sink_pad);

  pgie_src_pad = gst_element_get_static_pad (pgie, "src");
  /* Create Context for Object Encoding.
   * Takes GPU ID as a parameter. Passed by user through commandline.
   * Initialized as 0. */
  NvDsObjEncCtxHandle obj_ctx_handle = nvds_obj_enc_create_context (0);
  if (!obj_ctx_handle) {
    g_print ("Unable to create context\n");
    return -1;
  }
  if (!pgie_src_pad)
    g_print ("Unable to get src pad\n");
  else
    gst_pad_add_probe (pgie_src_pad, GST_PAD_PROBE_TYPE_BUFFER,
        pgie_src_pad_buffer_probe, (gpointer) obj_ctx_handle, NULL);
  gst_object_unref (pgie_src_pad);

  /* Lets add probe to get informed of the meta data generated, we add probe to
   * the sink pad of the osd element, since by that time, the buffer would have
   * had got all the metadata. */
  osd_sink_pad = gst_element_get_static_pad (nvosd, "sink");
  if (!osd_sink_pad)
    g_print ("Unable to get sink pad\n");
  else {
    if (msg2p_meta == 0) {        //generate payload using eventMsgMeta
      // g_print ("msg2p_meta == 0\n");
      gst_pad_add_probe (osd_sink_pad, GST_PAD_PROBE_TYPE_BUFFER,
          osd_sink_pad_buffer_metadata_probe, NULL, NULL);
    } else {                //generate payload using NVDS_CUSTOM_MSG_BLOB
      // g_print ("\nmsg2p_meta == 1, in teoria dovrebbe inviare\n");
      gst_pad_add_probe (osd_sink_pad, GST_PAD_PROBE_TYPE_BUFFER,
          osd_sink_pad_buffer_image_probe, (gpointer) obj_ctx_handle, NULL);
    }
  }
  gst_object_unref (osd_sink_pad);

  /* Set the pipeline to "playing" state */
  if (argc > 1 && IS_YAML (argv[1])) {
    g_print ("Using file: %s\n", argv[1]);
  } else {
    g_print ("Now playing: %s\n", input_file);
  }
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait till pipeline encounters an error or EOS */
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");

  nvds_obj_enc_destroy_context (obj_ctx_handle);
  g_free (cfg_file);
  g_free (input_file);
  g_free (topic);
  g_free (conn_str);
  g_free (proto_lib);

  /* Release the request pads from the tee, and unref them */
  gst_element_release_request_pad (tee, tee_msg_pad);
  gst_element_release_request_pad (tee, tee_render_pad);
  gst_object_unref (tee_msg_pad);
  gst_object_unref (tee_render_pad);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);
  return 0;
}
int
main (int argc, char *argv[])
{
  GMainLoop *loop = NULL;
  GstElement *pipeline = NULL, *source = NULL, *sink = NULL, *pgie = NULL,
  *nvvidconv = NULL, *nvosd = NULL, *nvstreammux = NULL, *nvtracker = NULL;
  
  GstElement *caps_v4l2src = NULL, *vidconvsrc = NULL, *nvvidconvsrc = NULL, *caps_vidconvsrc = NULL;
  
  GstElement *msgconv = NULL, *msgbroker = NULL, *tee = NULL;
  GstElement *queue1 = NULL, *queue2 = NULL;
  GstBus *bus = NULL;
  guint bus_watch_id;
  GstPad *osd_sink_pad = NULL;
  GstPad *tee_render_pad = NULL;
  GstPad *tee_msg_pad = NULL;
  GstPad *sink_pad = NULL;
  GstPad *src_pad = NULL;
  GstPad *pgie_src_pad = NULL;
  GOptionContext *ctx = NULL;
  GOptionGroup *group = NULL;
  GError *error = NULL;
  NvDsGieType pgie_type = NVDS_GIE_PLUGIN_INFER;

  int current_device = -1;
  cudaGetDevice (&current_device);
  struct cudaDeviceProp prop;
  cudaGetDeviceProperties (&prop, current_device);

  ctx = g_option_context_new ("Nvidia DeepStream Test4");
  group = g_option_group_new ("test4", NULL, NULL, NULL, NULL);
  g_option_group_add_entries (group, entries);

  g_option_context_set_main_group (ctx, group);
  g_option_context_add_group (ctx, gst_init_get_option_group ());


  if (!g_option_context_parse (ctx, &argc, &argv, &error)) {
    g_option_context_free (ctx);
    g_printerr ("%s", error->message);
    return -1;
  }
  g_option_context_free (ctx);

  if (!proto_lib || !input_file) {
    if (argc > 1 && !IS_YAML (argv[1])) {
      g_printerr ("missing arguments\n");
      g_printerr ("Usage: %s <yml file>\n", argv[0]);
      g_printerr
          ("Usage: %s -i <H264 filename> -p <Proto adaptor library> --conn-str=<Connection string>\n",
          argv[0]);
      return -1;
    } else if (!argv[1]) {
      g_printerr ("missing arguments\n");
      g_printerr ("Usage: %s <yml file>\n", argv[0]);
      g_printerr
          ("Usage: %s -i <H264 filename> -p <Proto adaptor library> --conn-str=<Connection string>\n",
          argv[0]);
      return -1;
    }
  }

  loop = g_main_loop_new (NULL, FALSE);

  /* Parse inference plugin type */
  if (argc > 1 && IS_YAML(argv[1])) {
    g_print("%s", argv[1]);
    RETURN_ON_PARSER_ERROR(nvds_parse_gie_type(&pgie_type, argv[1],
                "primary-gie"));
  }

  /* Create gstreamer elements */
  /* Create Pipeline element that will form a connection of other elements */
  pipeline = gst_pipeline_new ("dstest4-pipeline");
  

  source = gst_element_factory_make("v4l2src", "usb-cam-source");
  caps_v4l2src = gst_element_factory_make("capsfilter", "v4l2src_caps");
  vidconvsrc = gst_element_factory_make("videoconvert", "vidconvsrc");
  nvvidconvsrc = gst_element_factory_make("nvvideoconvert", "nvvideoconvert_0");
  caps_vidconvsrc = gst_element_factory_make("capsfilter", "nvmm_caps");

  if (!caps_v4l2src || !vidconvsrc || !nvvidconvsrc || !caps_vidconvsrc) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  nvstreammux = gst_element_factory_make ("nvstreammux", "nvstreammux");

  /* Use nvinfer or nvinferserver to run inferencing on decoder's output,
   * behaviour of inferencing is set through config file */
  if (pgie_type == NVDS_GIE_PLUGIN_INFER_SERVER) {
    pgie = gst_element_factory_make ("nvinferserver", "primary-nvinference-engine");
  } else {
    pgie = gst_element_factory_make ("nvinfer", "primary-nvinference-engine");
  }

  /* We need to have a tracker to track the identified objects */
  nvtracker = gst_element_factory_make ("nvtracker", "tracker");

  /* Use convertor to convert from NV12 to RGBA as required by nvosd */
  nvvidconv = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter");

  /* Create OSD to draw on the converted RGBA buffer */
  nvosd = gst_element_factory_make ("nvdsosd", "nv-onscreendisplay");

  /* Create msg converter to generate payload from buffer metadata */
  msgconv = gst_element_factory_make ("nvmsgconv", "nvmsg-converter");

  /* Create msg broker to send payload to server */
  msgbroker = gst_element_factory_make ("nvmsgbroker", "nvmsg-broker");

  /* Create tee to render buffer and send message simultaneously */
  tee = gst_element_factory_make ("tee", "nvsink-tee");

  /* Create queues */
  queue1 = gst_element_factory_make ("queue", "nvtee-que1");
  queue2 = gst_element_factory_make ("queue", "nvtee-que2");

  /* Finally render the osd output */
  if (display_off) {
    sink = gst_element_factory_make ("fakesink", "nvvideo-renderer");
  } else if (prop.integrated) {
    sink = gst_element_factory_make ("nv3dsink", "nv3d-sink");
  } else {
#ifdef __aarch64__
    sink = gst_element_factory_make ("nv3dsink", "nvvideo-renderer");
#else
    sink = gst_element_factory_make ("nveglglessink", "nvvideo-renderer");
#endif
  }

  if (!pipeline || !source || !caps_v4l2src || !vidconvsrc || !nvvidconvsrc || !caps_vidconvsrc
       || !nvstreammux || !pgie || !nvtracker || !nvvidconv || !nvosd || !msgconv || !msgbroker || !tee
      || !queue1 || !queue2 || !sink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  if (!set_tracker_properties(nvtracker)) {
    g_printerr ("Failed to set tracker properties. Exiting.\n");
    return -1;
  }

  if (argc > 1 && IS_YAML (argv[1])) {
    // nvds_parse_*: GstElement * element, gchar * cfg_file_path, const char * group

    GstCaps *caps = gst_caps_from_string("video/x-raw,format=YUY2,width=1920,height=1080,framerate=30/1");
    g_object_set(G_OBJECT(caps_v4l2src), "caps", caps, NULL);

    GstCaps *caps1 = gst_caps_from_string("video/x-raw,format=I420");
    g_object_set(G_OBJECT(caps_vidconvsrc), "caps", caps1, NULL);

    gst_caps_unref(caps);
    gst_caps_unref(caps1);

    RETURN_ON_PARSER_ERROR(nvds_parse_streammux(nvstreammux, argv[1], "streammux"));

    RETURN_ON_PARSER_ERROR(nvds_parse_gie(pgie, argv[1], "primary-gie"));

    g_object_set (G_OBJECT (msgconv), "config", "dstest4_msgconv_config.yml",
        NULL);

    RETURN_ON_PARSER_ERROR(nvds_parse_msgconv (msgconv, argv[1], "msgconv"));

    RETURN_ON_PARSER_ERROR(nvds_parse_msgbroker(msgbroker, argv[1], "msgbroker"));

    RETURN_ON_PARSER_ERROR(nvds_parse_tracker(nvtracker, argv[1], "tracker"));

    if (display_off) {
      RETURN_ON_PARSER_ERROR(nvds_parse_fake_sink (sink, argv[1], "sink"));
    }
    else if (prop.integrated) {
      RETURN_ON_PARSER_ERROR(nvds_parse_3d_sink (sink, argv[1], "sink"));
    }
    else {
#ifdef __aarch64__
      RETURN_ON_PARSER_ERROR(nvds_parse_3d_sink (sink, argv[1], "sink"));
#else
      RETURN_ON_PARSER_ERROR(nvds_parse_egl_sink (sink, argv[1], "sink"));
#endif
    }

    msg2p_meta = ds_test4_parse_meta_type(argv[1], "msgconv");
    g_print("msg2p_meta = %d\n", msg2p_meta);
  } else {
    /* we set the input filename to the source element */


    g_object_set (G_OBJECT (source), "device", "/dev/video4", NULL);
    g_object_set (G_OBJECT (source), "width", 1920, NULL);
    g_object_set (G_OBJECT (source), "heigth", 480, NULL);

    g_object_set (G_OBJECT (nvstreammux), "batch-size", 1, NULL);

    g_object_set (G_OBJECT (nvstreammux), "width", MUXER_OUTPUT_WIDTH, "height",
        MUXER_OUTPUT_HEIGHT, "batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);

    /* Set all the necessary properties of the nvinfer element,
     * the necessary ones are : */
    g_object_set (G_OBJECT (pgie), "config-file-path", PGIE_CONFIG_FILE, NULL);

    g_object_set (G_OBJECT (msgconv), "config", MSCONV_CONFIG_FILE, NULL);
    g_object_set (G_OBJECT (msgconv), "payload-type", schema_type, NULL);
    g_object_set (G_OBJECT (msgconv), "msg2p-newapi", msg2p_meta, NULL);
    g_object_set (G_OBJECT (msgconv), "frame-interval", frame_interval, NULL);

    g_object_set (G_OBJECT (msgbroker), "proto-lib", proto_lib,
        "conn-str", conn_str, "sync", FALSE, NULL);

    if (topic) {
      g_object_set (G_OBJECT (msgbroker), "topic", topic, NULL);
    }

    if (cfg_file) {
      g_object_set (G_OBJECT (msgbroker), "config", cfg_file, NULL);
    }

    g_object_set (G_OBJECT (sink), "sync", TRUE, NULL);
  }
  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* Set up the pipeline */
  /* we add all elements into the pipeline */

  gst_bin_add_many (GST_BIN (pipeline), source, caps_v4l2src,
    vidconvsrc, nvvidconvsrc, caps_vidconvsrc, nvstreammux, pgie, nvtracker,
    nvvidconv, nvosd, tee, queue1, queue2, msgconv, msgbroker, sink, NULL);

  sink_pad = gst_element_request_pad_simple (nvstreammux, "sink_0");
  
  if (!sink_pad) {
    g_printerr ("Streammux request sink pad failed. Exiting.\n");
    return -1;
  }

  src_pad = gst_element_get_static_pad (caps_vidconvsrc, "src");
  
  if (gst_pad_link (src_pad, sink_pad) != GST_PAD_LINK_OK) {
    g_printerr ("Failed to link caps_vidconvsrc to stream muxer. Exiting.\n");
    return -1;
  }

  if (!gst_element_link_many (source, caps_v4l2src, NULL)) {
    g_printerr ("Elements (source, caps_v4l2src) could not be linked. Exiting.\n");
    return -1;
  }

  if (!gst_element_link_many (vidconvsrc, nvvidconvsrc, NULL)) {
    g_printerr ("Elements (vidconvsrc, nvvidconvsrc) could not be linked. Exiting.\n");
    return -1;
  }

  if (!gst_element_link_many (nvstreammux, pgie, nvtracker, nvvidconv, nvosd, tee, NULL)) {
    g_printerr ("Elements could not be linked. Exiting.\n");
    return -1;
  }

  if (!gst_element_link_many (queue1, msgconv, msgbroker, NULL)) {
    g_printerr ("Elements could not be linked. Exiting.\n");
    return -1;
  }

  if (!gst_element_link (queue2, sink)) {
    g_printerr ("Elements could not be linked. Exiting.\n");
    return -1;
  }

  sink_pad = gst_element_get_static_pad (queue1, "sink");
  tee_msg_pad = gst_element_request_pad_simple (tee, "src_%u");
  tee_render_pad = gst_element_request_pad_simple (tee, "src_%u");

  if (!tee_msg_pad || !tee_render_pad) {
    g_printerr ("Unable to get request pads\n");
    return -1;
  }

  if (gst_pad_link (tee_msg_pad, sink_pad) != GST_PAD_LINK_OK) {
    g_printerr ("Unable to link tee and message converter\n");
    gst_object_unref (sink_pad);
    return -1;
  }

  gst_object_unref (sink_pad);

  sink_pad = gst_element_get_static_pad (queue2, "sink");
  if (gst_pad_link (tee_render_pad, sink_pad) != GST_PAD_LINK_OK) {
    g_printerr ("Unable to link tee and render\n");
    gst_object_unref (sink_pad);
    return -1;
  }

  gst_object_unref (sink_pad);

  pgie_src_pad = gst_element_get_static_pad (pgie, "src");
  /* Create Context for Object Encoding.
   * Takes GPU ID as a parameter. Passed by user through commandline.
   * Initialized as 0. */
  NvDsObjEncCtxHandle obj_ctx_handle = nvds_obj_enc_create_context (0);
  if (!obj_ctx_handle) {
    g_print ("Unable to create context\n");
    return -1;
  }
  if (!pgie_src_pad)
    g_print ("Unable to get src pad\n");
  else
    gst_pad_add_probe (pgie_src_pad, GST_PAD_PROBE_TYPE_BUFFER,
        pgie_src_pad_buffer_probe, (gpointer) obj_ctx_handle, NULL);
  gst_object_unref (pgie_src_pad);

  /* Lets add probe to get informed of the meta data generated, we add probe to
   * the sink pad of the osd element, since by that time, the buffer would have
   * had got all the metadata. */
  osd_sink_pad = gst_element_get_static_pad (nvosd, "sink");
  if (!osd_sink_pad)
    g_print ("Unable to get sink pad\n");
  else {
    if (msg2p_meta == 0) {        //generate payload using eventMsgMeta
      // g_print ("msg2p_meta == 0\n");
      gst_pad_add_probe (osd_sink_pad, GST_PAD_PROBE_TYPE_BUFFER,
          osd_sink_pad_buffer_metadata_probe, NULL, NULL);
    } else {                //generate payload using NVDS_CUSTOM_MSG_BLOB
      // g_print ("\nmsg2p_meta == 1, in teoria dovrebbe inviare\n");
      gst_pad_add_probe (osd_sink_pad, GST_PAD_PROBE_TYPE_BUFFER,
          osd_sink_pad_buffer_image_probe, (gpointer) obj_ctx_handle, NULL);
    }
  }
  gst_object_unref (osd_sink_pad);

  /* Set the pipeline to "playing" state */
  if (argc > 1 && IS_YAML (argv[1])) {
    g_print ("Using file: %s\n", argv[1]);
  } else {
    g_print ("Now playing: %s\n", input_file);
  }
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait till pipeline encounters an error or EOS */
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");

  nvds_obj_enc_destroy_context (obj_ctx_handle);
  g_free (cfg_file);
  g_free (input_file);
  g_free (topic);
  g_free (conn_str);
  g_free (proto_lib);

  /* Release the request pads from the tee, and unref them */
  gst_element_release_request_pad (tee, tee_msg_pad);
  gst_element_release_request_pad (tee, tee_render_pad);
  gst_object_unref (tee_msg_pad);
  gst_object_unref (tee_render_pad);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);
  return 0;
}

but I keep receiving this error

ERROR from element usb-cam-source: Internal data stream error.
Error details: ../libs/gst/base/gstbasesrc.c(3127): gst_base_src_loop (): /GstPipeline:dstest4-pipeline/GstV4l2Src:usb-cam-source:
streaming stopped, reason not-negotiated (-4)
Returned, stopping playback
Deleting pipeline

Debugging with GST_DEBUG=4 returns these lines (i believe these are the most important, specifically the part where it says Z16):

0:00:00.717339200 35351 0xaaaae919a400 INFO              GST_STATES gstelement.c:2806:gst_element_continue_state:<usb-cam-source> completed state change to PAUSED
0:00:00.717361920 35351 0xaaaae919a400 INFO              GST_STATES gstelement.c:2706:_priv_gst_element_state_changed:<usb-cam-source> notifying about state-changed READY to PAUSED (VOID_PENDING pending)
0:00:00.717378336 35351 0xaaaae9430a40 INFO        GST_ELEMENT_PADS gstelement.c:1013:gst_element_get_static_pad: no such pad 'sink' in element "usb-cam-source"
0:00:00.717428576 35351 0xaaaae919a400 INFO              GST_STATES gstbin.c:2971:gst_bin_change_state_func:<dstest4-pipeline> child 'usb-cam-source' changed state to 3(PAUSED) successfully without preroll
0:00:00.717452512 35351 0xaaaae919a400 INFO                pipeline gstpipeline.c:533:gst_pipeline_change_state:<dstest4-pipeline> pipeline is live
0:00:00.717467520 35351 0xaaaae919a400 INFO              GST_STATES gstelement.c:2778:gst_element_continue_state:<dstest4-pipeline> committing state from READY to PAUSED, pending PLAYING, next PLAYING
0:00:00.717478016 35351 0xaaaae919a400 INFO              GST_STATES gstelement.c:2706:_priv_gst_element_state_changed:<dstest4-pipeline> notifying about state-changed READY to PAUSED (PLAYING pending)
0:00:00.717491616 35351 0xaaaae919a400 INFO              GST_STATES gstelement.c:2786:gst_element_continue_state:<dstest4-pipeline> continue state change PAUSED to PLAYING, final PLAYING
0:00:00.717529344 35351 0xaaaae9430a40 INFO                 v4l2src gstv4l2src.c:550:gst_v4l2src_query_preferred_size:<usb-cam-source> Detect input 0 as `Input 1`
0:00:00.717566688 35351 0xaaaae9430a40 INFO                    v4l2 gstv4l2object.c:1214:gst_v4l2_object_fill_format_list:<usb-cam-source:src> got 1 format(s):
0:00:00.717581792 35351 0xaaaae9430a40 INFO                    v4l2 gstv4l2object.c:1218:gst_v4l2_object_fill_format_list:<usb-cam-source:src>   Z16 
0:00:00.717610720 35351 0xaaaae9430a40 INFO                    v4l2 gstv4l2object.c:4587:gst_v4l2_object_probe_caps:<usb-cam-source:src> probed caps: EMPTY
0:00:00.717636544 35351 0xaaaae9430a40 INFO                GST_PADS gstpad.c:4357:gst_pad_peer_query:<v4l2src_caps:src> pad has no peer
0:00:00.717668096 35351 0xaaaae9430a40 WARN                 basesrc gstbasesrc.c:3127:gst_base_src_loop:<usb-cam-source> error: Internal data stream error.
0:00:00.717653824 35351 0xaaaae919a400 INFO               GST_EVENT gstevent.c:1530:gst_event_new_latency: creating latency event 0:00:00.000000000
0:00:00.717680192 35351 0xaaaae9430a40 WARN                 basesrc gstbasesrc.c:3127:gst_base_src_loop:<usb-cam-source> error: streaming stopped, reason not-negotiated (-4)
0:00:00.717716768 35351 0xaaaae919a400 INFO                     bin gstbin.c:2759:gst_bin_do_latency_func:<dstest4-pipeline> configured latency of 0:00:00.000000000
0:00:00.717736416 35351 0xaaaae9430a40 INFO        GST_ERROR_SYSTEM gstelement.c:2271:gst_element_message_full_with_details:<usb-cam-source> posting message: Internal data stream error.
0:00:00.717770720 35351 0xaaaae919a400 INFO              GST_STATES gstbin.c:2479:gst_bin_element_set_state:<nv3d-sink> current READY pending PAUSED, desired next PLAYING
0:00:00.717782016 35351 0xaaaae919a400 INFO              GST_STATES gstbin.c:2935:gst_bin_change_state_func:<dstest4-pipeline> child 'nv3d-sink' is changing state asynchronously to PLAYING
0:00:00.717783360 35351 0xaaaae9430a40 INFO        GST_ERROR_SYSTEM gstelement.c:2298:gst_element_message_full_with_details:<usb-cam-source> posted error message: Internal data stream error.
0:00:00.717800960 35351 0xaaaae919a400 INFO              GST_STATES gstbin.c:2479:gst_bin_element_set_state:<nvmsg-broker> current READY pending PAUSED, desired next PLAYING
0:00:00.717828672 35351 0xaaaae9430a40 INFO                    task gsttask.c:368:gst_task_func:<usb-cam-source:src> Task going to paused

Any clue on how to proceed?

videoconvert is useless and reduce performance.
Just need to copy the data from cpu to gpu and convert it to nv12, nvvideoconvert is enough

Try the following pipeline

gst-launch-1.0 v4l2src device=/dev/video4  !  'video/x-raw,format=YUY2,width=1920,height=1080,framerate=30/1'  ! nvvideoconvert  ! 'video/x-raw(memory:NVMM),format=NV12' ! mux.sink_0 nvstreammux name=mux width=1920 height=1080 batch-size=1 batched-push-timeout=33333 live-source=true ! nvvideoconvert ! nv3dsink

Try to modify the values ​​of batched-push-timeout and live-source in the code

I removed videoconvert as you asked, but I have the same exact errors as before. Just to clarify, my current pipeline looks like this:

v4l2src -> caps_v4l2src -> nvvidconvsrc -> caps_nvvidconv -> nvstreammux -> ...

I set the caps like this

    GstCaps *caps = gst_caps_from_string("video/x-raw,format=YUY2,width=1920,height=1080,framerate=30/1");
    g_object_set(G_OBJECT(caps_v4l2src), "caps", caps, NULL);
    
    GstCaps *caps1 = gst_caps_from_string("video/x-raw(memory:NVMM),format=NV12");
    g_object_set(G_OBJECT(caps_nvvidconvsrc), "caps", caps1, NULL);

    gst_caps_unref(caps);
    gst_caps_unref(caps1);

The whole code looks like this (starting from deepstream-test-1)

int
main (int argc, char *argv[])
{
  GMainLoop *loop = NULL;
  GstElement *pipeline = NULL, *source = NULL, *sink = NULL, *nvvidconvsrc = NULL, *pgie = NULL,
  *nvvidconv = NULL, *nvosd = NULL, *nvstreammux = NULL, *nvtracker = NULL;
  
  GstElement *caps_v4l2src = NULL, *caps_nvvidconvsrc = NULL;

  GstElement *msgconv = NULL, *msgbroker = NULL, *tee = NULL;
  GstElement *queue1 = NULL, *queue2 = NULL;
  GstBus *bus = NULL;
  guint bus_watch_id;
  GstPad *osd_sink_pad = NULL;
  GstPad *tee_render_pad = NULL;
  GstPad *tee_msg_pad = NULL;
  GstPad *sink_pad = NULL;
  GstPad *src_pad = NULL;
  GstPad *pgie_src_pad = NULL;
  GOptionContext *ctx = NULL;
  GOptionGroup *group = NULL;
  GError *error = NULL;
  NvDsGieType pgie_type = NVDS_GIE_PLUGIN_INFER;

  int current_device = -1;
  cudaGetDevice (&current_device);
  struct cudaDeviceProp prop;
  cudaGetDeviceProperties (&prop, current_device);

  ctx = g_option_context_new ("Nvidia DeepStream Test4");
  group = g_option_group_new ("test4", NULL, NULL, NULL, NULL);
  g_option_group_add_entries (group, entries);

  g_option_context_set_main_group (ctx, group);
  g_option_context_add_group (ctx, gst_init_get_option_group ());


  if (!g_option_context_parse (ctx, &argc, &argv, &error)) {
    g_option_context_free (ctx);
    g_printerr ("%s", error->message);
    return -1;
  }
  g_option_context_free (ctx);

  if (!proto_lib || !input_file) {
    if (argc > 1 && !IS_YAML (argv[1])) {
      g_printerr ("missing arguments\n");
      g_printerr ("Usage: %s <yml file>\n", argv[0]);
      g_printerr
          ("Usage: %s -i <H264 filename> -p <Proto adaptor library> --conn-str=<Connection string>\n",
          argv[0]);
      return -1;
    } else if (!argv[1]) {
      g_printerr ("missing arguments\n");
      g_printerr ("Usage: %s <yml file>\n", argv[0]);
      g_printerr
          ("Usage: %s -i <H264 filename> -p <Proto adaptor library> --conn-str=<Connection string>\n",
          argv[0]);
      return -1;
    }
  }

  loop = g_main_loop_new (NULL, FALSE);

  /* Parse inference plugin type */
  if (argc > 1 && IS_YAML(argv[1])) {
    g_print("%s", argv[1]);
    RETURN_ON_PARSER_ERROR(nvds_parse_gie_type(&pgie_type, argv[1],
                "primary-gie"));
  }

  /* Create gstreamer elements */
  /* Create Pipeline element that will form a connection of other elements */
  pipeline = gst_pipeline_new ("dstest4-pipeline");
  

  source = gst_element_factory_make("v4l2src", "usb-cam-source");
  caps_v4l2src = gst_element_factory_make("capsfilter", "v4l2src_caps");
  nvvidconvsrc = gst_element_factory_make("nvvideoconvert", "nvvidconvsrc");
  caps_nvvidconvsrc = gst_element_factory_make("capsfilter", "nvmm_caps");

  if (!caps_v4l2src || !nvvidconvsrc || !caps_nvvidconvsrc) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  nvstreammux = gst_element_factory_make ("nvstreammux", "nvstreammux");

  /* Use nvinfer or nvinferserver to run inferencing on decoder's output,
   * behaviour of inferencing is set through config file */
  if (pgie_type == NVDS_GIE_PLUGIN_INFER_SERVER) {
    pgie = gst_element_factory_make ("nvinferserver", "primary-nvinference-engine");
  } else {
    pgie = gst_element_factory_make ("nvinfer", "primary-nvinference-engine");
  }

  /* We need to have a tracker to track the identified objects */
  nvtracker = gst_element_factory_make ("nvtracker", "tracker");

  /* Use convertor to convert from NV12 to RGBA as required by nvosd */
  nvvidconv = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter");

  /* Create OSD to draw on the converted RGBA buffer */
  nvosd = gst_element_factory_make ("nvdsosd", "nv-onscreendisplay");

  /* Create msg converter to generate payload from buffer metadata */
  msgconv = gst_element_factory_make ("nvmsgconv", "nvmsg-converter");

  /* Create msg broker to send payload to server */
  msgbroker = gst_element_factory_make ("nvmsgbroker", "nvmsg-broker");

  /* Create tee to render buffer and send message simultaneously */
  tee = gst_element_factory_make ("tee", "nvsink-tee");

  /* Create queues */
  queue1 = gst_element_factory_make ("queue", "nvtee-que1");
  queue2 = gst_element_factory_make ("queue", "nvtee-que2");

  /* Finally render the osd output */
  if (display_off) {
    sink = gst_element_factory_make ("fakesink", "nvvideo-renderer");
  } else if (prop.integrated) {
    sink = gst_element_factory_make ("nv3dsink", "nv3d-sink");
  } else {
#ifdef __aarch64__
    sink = gst_element_factory_make ("nv3dsink", "nvvideo-renderer");
#else
    sink = gst_element_factory_make ("nveglglessink", "nvvideo-renderer");
#endif
  }

  if (!pipeline || !source || !caps_v4l2src || !nvvidconvsrc || !caps_nvvidconvsrc
       || !nvstreammux || !pgie || !nvtracker || !nvvidconv || !nvosd || !msgconv || !msgbroker || !tee
      || !queue1 || !queue2 || !sink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  if (!set_tracker_properties(nvtracker)) {
    g_printerr ("Failed to set tracker properties. Exiting.\n");
    return -1;
  }

  if (argc > 1 && IS_YAML (argv[1])) {
    // nvds_parse_*: GstElement * element, gchar * cfg_file_path, const char * group

    GstCaps *caps = gst_caps_from_string("video/x-raw,format=YUY2,width=1920,height=1080,framerate=30/1");
    g_object_set(G_OBJECT(caps_v4l2src), "caps", caps, NULL);
    
    GstCaps *caps1 = gst_caps_from_string("video/x-raw(memory:NVMM),format=NV12");
    g_object_set(G_OBJECT(caps_nvvidconvsrc), "caps", caps1, NULL);

    gst_caps_unref(caps);
    gst_caps_unref(caps1);
   
    RETURN_ON_PARSER_ERROR(nvds_parse_streammux(nvstreammux, argv[1], "streammux"));

    RETURN_ON_PARSER_ERROR(nvds_parse_gie(pgie, argv[1], "primary-gie"));

    g_object_set (G_OBJECT (msgconv), "config", "dstest4_msgconv_config.yml",
        NULL);

    RETURN_ON_PARSER_ERROR(nvds_parse_msgconv (msgconv, argv[1], "msgconv"));

    RETURN_ON_PARSER_ERROR(nvds_parse_msgbroker(msgbroker, argv[1], "msgbroker"));

    RETURN_ON_PARSER_ERROR(nvds_parse_tracker(nvtracker, argv[1], "tracker"));

    if (display_off) {
      RETURN_ON_PARSER_ERROR(nvds_parse_fake_sink (sink, argv[1], "sink"));
    }
    else if (prop.integrated) {
      RETURN_ON_PARSER_ERROR(nvds_parse_3d_sink (sink, argv[1], "sink"));
    }
    else {
#ifdef __aarch64__
      RETURN_ON_PARSER_ERROR(nvds_parse_3d_sink (sink, argv[1], "sink"));
#else
      RETURN_ON_PARSER_ERROR(nvds_parse_egl_sink (sink, argv[1], "sink"));
#endif
    }

    msg2p_meta = ds_test4_parse_meta_type(argv[1], "msgconv");
    g_print("msg2p_meta = %d\n", msg2p_meta);
  } else {
    /* we set the input filename to the source element */


    g_object_set (G_OBJECT (source), "device", "/dev/video4", NULL);
    g_object_set (G_OBJECT (source), "width", 1920, NULL);
    g_object_set (G_OBJECT (source), "heigth", 1080, NULL);

    g_object_set (G_OBJECT (nvstreammux), "batch-size", 1, NULL);

    g_object_set (G_OBJECT (nvstreammux), "width", MUXER_OUTPUT_WIDTH, NULL);
    g_object_set (G_OBJECT (nvstreammux), "height", MUXER_OUTPUT_HEIGHT, NULL);
    g_object_set (G_OBJECT (nvstreammux), "batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);
    g_object_set (G_OBJECT (nvstreammux), "live-source", TRUE, NULL);

    /* Set all the necessary properties of the nvinfer element,
     * the necessary ones are : */
    g_object_set (G_OBJECT (pgie), "config-file-path", PGIE_CONFIG_FILE, NULL);

    g_object_set (G_OBJECT (msgconv), "config", MSCONV_CONFIG_FILE, NULL);
    g_object_set (G_OBJECT (msgconv), "payload-type", schema_type, NULL);
    g_object_set (G_OBJECT (msgconv), "msg2p-newapi", msg2p_meta, NULL);
    g_object_set (G_OBJECT (msgconv), "frame-interval", frame_interval, NULL);

    g_object_set (G_OBJECT (msgbroker), "proto-lib", proto_lib,
        "conn-str", conn_str, "sync", FALSE, NULL);

    if (topic) {
      g_object_set (G_OBJECT (msgbroker), "topic", topic, NULL);
    }

    if (cfg_file) {
      g_object_set (G_OBJECT (msgbroker), "config", cfg_file, NULL);
    }

    g_object_set (G_OBJECT (sink), "sync", TRUE, NULL);
  }
  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* Set up the pipeline */
  /* we add all elements into the pipeline */

  gst_bin_add_many (GST_BIN (pipeline), source, caps_v4l2src,
    nvvidconvsrc, caps_nvvidconvsrc, nvstreammux, pgie, nvtracker,
    nvvidconv, nvosd, tee, queue1, queue2, msgconv, msgbroker, sink, NULL);

  sink_pad = gst_element_request_pad_simple (nvstreammux, "sink_0");
  
  if (!sink_pad) {
    g_printerr ("Streammux request sink pad failed. Exiting.\n");
    return -1;
  }

  src_pad = gst_element_get_static_pad (caps_nvvidconvsrc, "src");
  
  if (gst_pad_link (src_pad, sink_pad) != GST_PAD_LINK_OK) {
    g_printerr ("Failed to link caps_nvvidconvsrc to stream muxer. Exiting.\n");
    return -1;
  }

  if (!gst_element_link_many (source, caps_v4l2src, nvvidconvsrc, NULL)) {
    g_printerr ("Elements (source, caps_v4l2src, nvvidconvsrc) could not be linked. Exiting.\n");
    return -1;
  }

  if (!gst_element_link_many (nvstreammux, pgie, nvtracker, nvvidconv, nvosd, tee, NULL)) {
    g_printerr ("Elements could not be linked. Exiting.\n");
    return -1;
  }

  if (!gst_element_link_many (queue1, msgconv, msgbroker, NULL)) {
    g_printerr ("Elements could not be linked. Exiting.\n");
    return -1;
  }

  if (!gst_element_link (queue2, sink)) {
    g_printerr ("Elements could not be linked. Exiting.\n");
    return -1;
  }

  sink_pad = gst_element_get_static_pad (queue1, "sink");
  tee_msg_pad = gst_element_request_pad_simple (tee, "src_%u");
  tee_render_pad = gst_element_request_pad_simple (tee, "src_%u");

  if (!tee_msg_pad || !tee_render_pad) {
    g_printerr ("Unable to get request pads\n");
    return -1;
  }

  if (gst_pad_link (tee_msg_pad, sink_pad) != GST_PAD_LINK_OK) {
    g_printerr ("Unable to link tee and message converter\n");
    gst_object_unref (sink_pad);
    return -1;
  }

  gst_object_unref (sink_pad);

  sink_pad = gst_element_get_static_pad (queue2, "sink");
  if (gst_pad_link (tee_render_pad, sink_pad) != GST_PAD_LINK_OK) {
    g_printerr ("Unable to link tee and render\n");
    gst_object_unref (sink_pad);
    return -1;
  }

  gst_object_unref (sink_pad);

  pgie_src_pad = gst_element_get_static_pad (pgie, "src");
  /* Create Context for Object Encoding.
   * Takes GPU ID as a parameter. Passed by user through commandline.
   * Initialized as 0. */
  NvDsObjEncCtxHandle obj_ctx_handle = nvds_obj_enc_create_context (0);
  if (!obj_ctx_handle) {
    g_print ("Unable to create context\n");
    return -1;
  }
  if (!pgie_src_pad)
    g_print ("Unable to get src pad\n");
  else
    gst_pad_add_probe (pgie_src_pad, GST_PAD_PROBE_TYPE_BUFFER,
        pgie_src_pad_buffer_probe, (gpointer) obj_ctx_handle, NULL);
  gst_object_unref (pgie_src_pad);

  /* Lets add probe to get informed of the meta data generated, we add probe to
   * the sink pad of the osd element, since by that time, the buffer would have
   * had got all the metadata. */
  osd_sink_pad = gst_element_get_static_pad (nvosd, "sink");
  if (!osd_sink_pad)
    g_print ("Unable to get sink pad\n");
  else {
    if (msg2p_meta == 0) {        //generate payload using eventMsgMeta
      // g_print ("msg2p_meta == 0\n");
      gst_pad_add_probe (osd_sink_pad, GST_PAD_PROBE_TYPE_BUFFER,
          osd_sink_pad_buffer_metadata_probe, NULL, NULL);
    } else {                //generate payload using NVDS_CUSTOM_MSG_BLOB
      // g_print ("\nmsg2p_meta == 1, in teoria dovrebbe inviare\n");
      gst_pad_add_probe (osd_sink_pad, GST_PAD_PROBE_TYPE_BUFFER,
          osd_sink_pad_buffer_image_probe, (gpointer) obj_ctx_handle, NULL);
    }
  }
  gst_object_unref (osd_sink_pad);

  /* Set the pipeline to "playing" state */
  if (argc > 1 && IS_YAML (argv[1])) {
    g_print ("Using file: %s\n", argv[1]);
  } else {
    g_print ("Now playing: %s\n", input_file);
  }
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait till pipeline encounters an error or EOS */
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");

  nvds_obj_enc_destroy_context (obj_ctx_handle);
  g_free (cfg_file);
  g_free (input_file);
  g_free (topic);
  g_free (conn_str);
  g_free (proto_lib);

  /* Release the request pads from the tee, and unref them */
  gst_element_release_request_pad (tee, tee_msg_pad);
  gst_element_release_request_pad (tee, tee_render_pad);
  gst_object_unref (tee_msg_pad);
  gst_object_unref (tee_render_pad);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);
  return 0;
}

This is the part that worries me the most, and I think the problem might lie there, since it gets the Z16 format, instead of the YUV2 one I specified:

0:00:00.667316672 374363 0xaaaaf47dba40 INFO                    v4l2 gstv4l2object.c:1214:gst_v4l2_object_fill_format_list:<usb-cam-source:src> got 1 format(s):
0:00:00.667329888 374363 0xaaaaf4545400 INFO              GST_STATES gstbin.c:2928:gst_bin_change_state_func:<dstest4-pipeline> child 'nvtee-que2' changed state to 4(PLAYING) successfully
0:00:00.667350688 374363 0xaaaaf47dba40 INFO                    v4l2 gstv4l2object.c:1218:gst_v4l2_object_fill_format_list:<usb-cam-source:src>   Z16 

If you want to check the whole output with GST_DEBUG=4, here it is
output.txt (116.4 KB)

Does this command line work fine? If it works, I think the camera output is not Z16 format.

Try changing the width and height in caps to 640x480

Yes it works correctly, sorry I forgot to mention it in the previous message. It works as well with the resolution 640x480.

I changed the resolution in my code in the variablecaps_v4l2src to 640x480 as well, but it results in the same error.

I have another camera available, should I use it and see if it makes any difference?

You can try other cameras

Alternatively, you can use the following simple test program. I am not sure whether the above code can work properly.
If gst-launch-1.0 can run correctly, the following code should also work

gcc -o v4l2 v4l2.c `pkg-config --cflags --libs gstreamer-1.0`
#include <gst/gst.h>

int main(int argc, char *argv[]) {
  GstElement *pipeline;
  GError *error = NULL;

  // Initialize GStreamer
  gst_init(&argc, &argv);

  // Create the pipeline using gst_launch
  const gchar *pipeline_description =
      "v4l2src device=/dev/video4 ! "
      "video/x-raw,format=YUY2,width=1920,height=1080,framerate=30/1 ! "
      "nvvideoconvert ! "
      "video/x-raw(memory:NVMM),format=NV12 ! "
      "nvstreammux name=mux width=1920 height=1080 batch-size=1 "
      "batched-push-timeout=33333 live-source=true ! "
      "nvvideoconvert ! "
      "nv3dsink";

  pipeline = gst_parse_launch(pipeline_description, &error);

  if (error) {
    g_printerr("Error creating pipeline: %s\n", error->message);
    g_error_free(error);
    return -1;
  }

  // Start playing
  gst_element_set_state(pipeline, GST_STATE_PLAYING);

  // Wait until error or EOS
  GstBus *bus = gst_element_get_bus(pipeline);
  GstMessage *msg = gst_bus_timed_pop_filtered(
      bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  // Free resources
  if (msg != NULL) {
    GError *err;
    gchar *debug_info;

    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_ERROR:
      gst_message_parse_error(msg, &err, &debug_info);
      g_printerr("Error received from element %s: %s\n",
                 GST_OBJECT_NAME(msg->src), err->message);
      g_printerr("Debugging information: %s\n",
                 debug_info ? debug_info : "none");
      g_clear_error(&err);
      g_free(debug_info);
      break;
    case GST_MESSAGE_EOS:
      g_print("End-Of-Stream reached.\n");
      break;
    default:
      g_printerr("Unexpected message received.\n");
      break;
    }
    gst_message_unref(msg);
  }

  // Free the bus
  gst_object_unref(bus);

  // Free the pipeline
  gst_element_set_state(pipeline, GST_STATE_NULL);
  gst_object_unref(pipeline);

  return 0;
}

It works if I replace

"nvstreammux name=mux width=1920 height=1080 batch-size=1 "

with

"mux.sink_0 nvstreammux name=mux width=1920 height=1080 batch-size=1 "

but I would lose the capabilities I need for my task, i.e. being able to send frames via Kafka.

And again debugging with GST_DEBUG=4 shows no mention of Z16, which makes me further believe that’s the cause of the problem. I’m gonna investigate further and write if I have some news.

Ok, I see, this means the problem is not caused by the camera, please try to debug your code

The remaining elements do not have the correct link, causing the pipeline to not run properly.

I fixed it by parsing correctly the the configuration .yml file, and by linking caps_nvvidconvsrc. Thanks for your support!

This topic was automatically closed 14 days after the last reply. New replies are no longer allowed.