How to create gstreamer pipeline with parallel branches having different FPS using tee plugin

• GeForce GTX 1080Ti
• DeepStream Version 5.1
• TensorRT Version 7.2.3
• NVIDIA GPU Driver Version 455

Hi I want to create a gstreamer pipeline with two branches having different FPS. The C++ code I wrote is given below

#include <iostream>
#include <string.h>
#include <gst/gst.h>
#include <gst/app/app.h>

using namespace std;

GstElement *src, *dbin, *conv, *tee, *mux, *parse, *pipeline;

GstElement *queue1,*videorate1, *conv1, *jenc1, *sink1;

GstElement *queue2,*videorate2, *conv2, *jenc2, *sink2;

GMainLoop *loop;

/* Bus watch: handles ERROR, WARNING and EOS messages from the pipeline.
 * Quits the main loop on error or end-of-stream so main() can tear down.
 * Always returns TRUE to keep the watch installed. */
static gboolean
message_cb (GstBus * bus, GstMessage * message, gpointer user_data)
{
  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR:{
      GError *err = NULL;
      gchar *name, *debug = NULL;

      name = gst_object_get_path_string (message->src);
      gst_message_parse_error (message, &err, &debug);

      g_printerr ("ERROR: from element %s: %s\n", name, err->message);
      if (debug != NULL)
        g_printerr ("Additional debug info:\n%s\n", debug);

      g_error_free (err);
      g_free (debug);
      g_free (name);

      /* Fatal: stop the main loop so the pipeline is shut down. */
      g_main_loop_quit (loop);
      break;
    }
    case GST_MESSAGE_WARNING:{
      GError *err = NULL;
      gchar *name, *debug = NULL;

      name = gst_object_get_path_string (message->src);
      gst_message_parse_warning (message, &err, &debug);

      /* Non-fatal: report but keep running. */
      g_printerr ("WARNING: from element %s: %s\n", name, err->message);
      if (debug != NULL)
        g_printerr ("Additional debug info:\n%s\n", debug);

      g_error_free (err);
      g_free (debug);
      g_free (name);
      break;
    }
    case GST_MESSAGE_EOS:
      g_print ("\nGot EOS\n");
      g_main_loop_quit (loop);
      break;
    default:
      break;
  }

  return TRUE;
}

/* qtdemux "pad-added" callback: links the demuxer's dynamically created
 * video pad to the (file-scope) h264parse element's sink pad.
 * Non-H.264 pads (e.g. audio) are ignored. */
static void pad_added_handler (GstElement *src, GstPad *new_pad, gpointer x)
{
    GstPad *sink_pad = gst_element_get_static_pad (parse, "sink");
    GstPadLinkReturn ret;
    GstCaps *new_pad_caps = NULL;
    GstStructure *new_pad_struct = NULL;
    const gchar *new_pad_type = NULL;

    /* If our parser is already linked, we have nothing to do here */
    if (gst_pad_is_linked (sink_pad)) {
        g_print ("We are already linked. Ignoring.\n");
        goto exit;
    }

    /* Check the new pad's type before linking. */
    new_pad_caps = gst_pad_get_current_caps (new_pad);
    new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
    new_pad_type = gst_structure_get_name (new_pad_struct);

    if (!g_str_has_prefix (new_pad_type, "video/x-h264")) {
        g_print ("It has type '%s' which is not H.264 video. Ignoring.\n", new_pad_type);
        goto exit;
    }

    ret = gst_pad_link (new_pad, sink_pad);
    if (GST_PAD_LINK_FAILED (ret)) {
        g_print ("Type is '%s' but link failed.\n", new_pad_type);
        goto exit;
    }

exit:
    /* Unreference the new pad's caps, if we got them */
    if (new_pad_caps != NULL)
        gst_caps_unref (new_pad_caps);

    /* Unreference the sink pad */
    gst_object_unref (sink_pad);
}

int main()
    gst_init (NULL, NULL);
    pipeline = gst_pipeline_new (NULL);
    src = gst_element_factory_make ("filesrc", NULL);
    mux = gst_element_factory_make("qtdemux",NULL);
    parse = gst_element_factory_make("h264parse",NULL);
    dbin = gst_element_factory_make ("nvv4l2decoder", NULL);
    conv = gst_element_factory_make ("nvvideoconvert", NULL);
    tee = gst_element_factory_make ("tee", NULL);

    std::string url = "VD19_peoplewalking.mp4";

    if (!pipeline || !src || !dbin || !conv || !tee || !mux || !parse) {
    g_error ("Failed to create elements");
    return -1;

    g_object_set (src, "location", url.c_str(), NULL);

    gst_bin_add_many (GST_BIN (pipeline), src, dbin, mux, parse, conv, tee, NULL);

    if (!gst_element_link_many(src,mux,NULL) || !gst_element_link_many(parse,dbin,conv, tee,NULL) )//|| !gst_element_link_many (conv, tee, NULL))
    g_error("Failed to link elements");
    return -3;
    g_signal_connect (mux, "pad-added", G_CALLBACK (pad_added_handler), NULL);

    //First Branch creation

    GstPadTemplate *templ;
    templ =
    gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (tee),

    GstPad *teepad1 = gst_element_request_pad (tee, templ, NULL, NULL);

    queue1 = gst_element_factory_make ("queue", NULL);
    videorate1 = gst_element_factory_make("videorate",NULL);
    conv1 = gst_element_factory_make ("nvvideoconvert", NULL);
    //jenc = gst_element_factory_make ("jpegenc",NULL);
    sink1 = gst_element_factory_make ("autovideosink", NULL);
    //sink = gst_element_factory_make ("appsink", NULL);

    g_object_set (G_OBJECT(videorate1), "rate", 1.0, NULL);

    gst_bin_add_many (GST_BIN (pipeline),  queue1, videorate1, conv1,  sink1, NULL);
    if (!gst_element_link_many ( queue1,   conv1,  videorate1, sink1, NULL))
    g_error ("Failed to link elements");

    GstPad *sinkpad = gst_element_get_static_pad ( queue1, "sink");
    gst_pad_link ( teepad1, sinkpad);
    gst_object_unref (sinkpad);
    //First Branch creation ends

    //Second Branc

    GstPadTemplate *templ2;
    templ2 =
    gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (tee),

    GstPad *teepad2 = gst_element_request_pad (tee, templ2, NULL, NULL);
    queue2 = gst_element_factory_make ("queue", NULL);
    videorate2 = gst_element_factory_make("videorate",NULL);
    conv2 = gst_element_factory_make ("nvvideoconvert", NULL);
    sink2 = gst_element_factory_make ("autovideosink", NULL);

    g_object_set (G_OBJECT(videorate2), "rate", 0.5, NULL);

    gst_bin_add_many (GST_BIN (pipeline),  queue2, videorate2, conv2,  sink2, NULL);
    if (!gst_element_link_many ( queue2,   conv2,  videorate2, sink2, NULL))
    g_error ("Failed to link elements");

    GstPad *sinkpad2 = gst_element_get_static_pad ( queue2, "sink");
    gst_pad_link ( teepad2, sinkpad2);
    gst_object_unref (sinkpad2);

    //Second brach creation ends 

    GstBus *bus;
    loop = g_main_loop_new (NULL, FALSE);
    bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_bus_add_signal_watch (bus);
    g_signal_connect (G_OBJECT (bus), "message", G_CALLBACK (message_cb), NULL);
    gst_object_unref (GST_OBJECT (bus));

    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    g_main_loop_run (loop);
    gst_element_set_state (pipeline, GST_STATE_NULL);

    g_main_loop_unref (loop);

    gst_object_unref (pipeline);


I am able to run the code but the streams are not played as expected. Both streams get stuck in between while running the code.

Am I missing something?

Sorry for the late response, we will do the investigation soon.

@kayccc Thanks for the response. Please see the command that I am running with multiple branches and with different fps from terminal.

gst-launch-1.0 filesrc location=VD19_peoplewalking.mp4 ! qtdemux ! h264parse ! nvv4l2decoder ! tee name=t ! queue ! videorate ! 'video/x-raw(ANY),framerate=1/1' ! nvvideoconvert ! autovideosink t. ! videorate ! 'video/x-raw(ANY),framerate=30/1' ! nvvideoconvert ! autovideosink

In my code I am setting the "rate" property of the videorate plugin. I could not find a "framerate" property for the videorate plugin (gst-inspect-1.0 videorate).

The following pipeline works well in my device.

gst-launch-1.0 filesrc location=/opt/nvidia/deepstream/deepstream-6.0/samples/streams/sample_1080p_h264.mp4 ! qtdemux ! h264parse ! nvv4l2decoder ! tee name=t t. ! queue ! videorate ! 'video/x-raw(memory:NVMM),framerate=2/1' ! nvvideoconvert ! nvegltransform ! nveglglessink t. ! videorate ! 'video/x-raw(memory:NVMM),framerate=15/1' ! nvvideoconvert ! nvegltransform ! nveglglessink

It has nothing to do with deepstream. Please make sure you are familiar with gstreamer before you start with deepstream.

@Fiona.Chen, Yes you are right, I am also able to run the command successfully through the terminal. But the C++ code is not running as expected; the parallel branches get stuck when I run the code. Could you please suggest any solutions?

Please check your code. Your code should be exactly the same as the "gst-launch-1.0" pipeline. Please debug your code by yourself.

Instead of “rate” property one should use “max-rate”

g_object_set (G_OBJECT (videorate1), "max-rate", 1, NULL);

Solves my issue. Thanks