basic rtsp->decode->display pipeline not working

I am trying to implement a pipeline to read an RTSP stream from a URI and display the video content on the screen. I do so with a uridecodebin plugin, demuxing audio/video. The decoder nvv4l2decoder is loaded dynamically. The rest of the pipeline is supposed to convert the format of the video and push it to be displayed. At run time I get a “Warning: not negotiated WARNING from element capsfilter0: not negotiated” and the pipeline gets stuck on this.

I attach an example code to reproduce the issue.
I use Deepstream SDK 4.0
Could you please give me a hint of what I am doing wrong?

Many thanks in advance,


#include <gst/gst.h>

/* NVIDIA Decoder source pad memory feature. This feature signifies that source
 * pads having this capability will push GstBuffers containing cuda buffers. */

/* Bus watch callback installed via gst_bus_add_watch().
 *
 * Handles the messages the sample cares about:
 *   - EOS:     print a notice and quit the main loop;
 *   - WARNING: print element name and warning text (pipeline keeps running);
 *   - ERROR:   print element name, message and optional debug detail, then
 *              quit the main loop.
 *
 * Returns TRUE so the watch stays installed for subsequent messages.
 *
 * NOTE(review): the switch structure (case labels, braces, breaks) was lost
 * in the forum paste; reconstructed here following the standard
 * deepstream-test1 bus handler. */
static gboolean
bus_call (GstBus * bus, GstMessage * msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_WARNING:{
      gchar *debug;
      GError *error;
      gst_message_parse_warning (msg, &error, &debug);
      g_printerr ("WARNING from element %s: %s\n",
          GST_OBJECT_NAME (msg->src), error->message);
      g_free (debug);
      g_printerr ("Warning: %s\n", error->message);
      g_error_free (error);
      break;
    }
    case GST_MESSAGE_ERROR:{
      gchar *debug;
      GError *error;
      gst_message_parse_error (msg, &error, &debug);
      g_printerr ("ERROR from element %s: %s\n",
          GST_OBJECT_NAME (msg->src), error->message);
      if (debug)
        g_printerr ("Error details: %s\n", debug);
      g_free (debug);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

/* NVMM memory feature string advertised by NVIDIA decoder source pads.
 * NOTE(review): reconstructed — the original sample defines this near the
 * top of the file (see the header comment above about cuda buffers). */
#ifndef GST_CAPS_FEATURES_NVMM
#define GST_CAPS_FEATURES_NVMM "memory:NVMM"
#endif

/* "pad-added" handler for uridecodebin.
 *
 * Links the newly exposed decoder source pad to next_element's static
 * "sink" pad, but only when:
 *   - the pad carries video (caps name starts with "video/x-raw"), and
 *   - the caps advertise the NVMM memory feature, i.e. uridecodebin picked
 *     an NVIDIA hardware decoder producing GPU buffers.
 * Audio pads and software-decoded video pads are silently ignored.
 *
 * NOTE(review): gst_pad_get_current_caps() returns NULL while a pad has no
 * negotiated caps; the original code dereferenced both results blindly,
 * which can crash for the (unlinked) sink pad. Guarded here. */
static void
cb_newpad (GstElement * decodebin, GstPad * decoder_src_pad, GstElement * next_element)
{
  g_print ("In cb_newpad\n");

  GstPad *sink_pad = gst_element_get_static_pad (next_element, "sink");
  GstCaps *sink_caps = gst_pad_get_current_caps (sink_pad);
  GstCaps *src_caps = NULL;
  const gchar *sink_pad_type = "(not negotiated)";

  if (sink_caps != NULL) {
    g_print ("***** %d\n", gst_caps_get_size (sink_caps));
    const GstStructure *sink_str = gst_caps_get_structure (sink_caps, 0);
    sink_pad_type = gst_structure_get_name (sink_str);
  }

  src_caps = gst_pad_get_current_caps (decoder_src_pad);
  if (src_caps == NULL)
    goto done;                  /* new pad not negotiated yet; nothing to inspect */

  const GstStructure *src_str = gst_caps_get_structure (src_caps, 0);
  const gchar *new_pad_type = gst_structure_get_name (src_str);
  GstCapsFeatures *features = gst_caps_get_features (src_caps, 0);

  /* Need to check if the pad created by the decodebin is for video and not audio. */
  if (g_str_has_prefix (new_pad_type, "video/x-raw")) {
    /* Link the decodebin pad only if decodebin has picked the nvidia
     * decoder plugin. We do this by checking if the pad caps contain
     * NVMM memory features. */
    if (gst_caps_features_contains (features, GST_CAPS_FEATURES_NVMM)) {
      /* Attempt the link */
      GstPadLinkReturn ret = gst_pad_link (decoder_src_pad, sink_pad);
      if (GST_PAD_LINK_FAILED (ret))
        g_print ("Type is '%s' but link failed.\n", new_pad_type);
      else
        g_print ("Link succeeded (source is '%s' and sink is '%s').\n",
            new_pad_type, sink_pad_type);
    }
  }

done:
  /* Unreference the caps, if we got them */
  if (src_caps != NULL)
    gst_caps_unref (src_caps);
  if (sink_caps != NULL)
    gst_caps_unref (sink_caps);

  /* Unreference the sink pad */
  gst_object_unref (sink_pad);
}

/* Builds the pipeline
 *     uridecodebin -> videoconvert -> xvimagesink
 * linking converter and sink statically, and the decoder's video pad
 * dynamically from the "pad-added" signal (cb_newpad). Runs a GLib main
 * loop until EOS or an error is posted on the bus.
 *
 * NOTE(review): as resolved later in this thread, videoconvert cannot
 * consume video/x-raw(memory:NVMM) GPU buffers — hence the "not negotiated"
 * warning; nvvideoconvert is the element that accepts NVMM input.
 *
 * NOTE(review): the closing braces of the error-check blocks were lost in
 * the forum paste; reconstructed here. */
int
main (int argc, char *argv[])
{
  GstElement *pipeline = NULL, *source = NULL, *convert = NULL, *sink = NULL;
  GMainLoop *loop = NULL;
  GstBus *bus;
  GstStateChangeReturn ret;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* Create the elements */
  source = gst_element_factory_make ("uridecodebin", "source");
  convert = gst_element_factory_make ("videoconvert", "convert");
  sink = gst_element_factory_make ("xvimagesink", NULL);

  /* Create the empty pipeline */
  pipeline = gst_pipeline_new ("test-pipeline");

  /* Check whether all elements were created */
  if (!pipeline || !source || !convert || !sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Build the pipeline without linking source with the rest of the elements;
   * the source pad only appears at runtime, once the stream is decoded. */
  gst_bin_add_many (GST_BIN (pipeline), source, convert, sink, NULL);
  if (!gst_element_link (convert, sink)) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Set properties */
  g_object_set (source, "uri", "rtsp://", NULL);

  /* Connect to the pad-added signal */
  g_signal_connect (G_OBJECT (source), "pad-added", G_CALLBACK (cb_newpad), convert);

  /* Add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  guint bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);

  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Wait till pipeline encounters an error or EOS */
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);
  return 0;
}
Please use nvvideoconvert to construct the pipeline like:

$ gst-launch-1.0 uridecodebin uri=rtsp:// ! nvvideoconvert ! 'video/x-raw' ! xvimagesink

We also have nveglglessink and nvoverlaysink, which can render video/x-raw(memory:NVMM) directly, so please also try

$ gst-launch-1.0 uridecodebin uri=rtsp:// ! nvvideoconvert ! nvoverlaysink

Thanks, changing videoconvert to nvvideoconvert does the job!