Delay in video

Hello!
I have Jetson Nano computer with a camera e-CAM30_CUNANO connected by MIPI.
On the Jetson Nano side I run pipeline:
gst-launch-1.0 v4l2src device=/dev/video0 ! 'video/x-raw, format=(string)UYVY, width=(int)640, height=(int)480' ! nvvidconv ! 'video/x-raw(memory:NVMM), format=(string)I420' ! omxh264enc qp-range=35,35:35,35:-1,-1 ! mpegtsmux ! udpsink clients=192.168.223.103:5000 sync=false

On client side I wrote this code for receiving video:
JetsonVideoTransmissionV1.h

    /// Receives an MPEG-TS / H.264 video stream over UDP (sent by a Jetson
    /// Nano camera pipeline) and decodes it into cv::Mat frames through a
    /// GStreamer appsink.
    class JetsonVideoTransmissionV1 : public InputSource {
    public:
        JetsonVideoTransmissionV1() : InputSource(INPUT_KIND_VIDEO, "JetsonVideoTransmissionV1")
        {
            // All members are initialized in-class below; nothing to do here.
        }
        virtual unsigned getData(void *p, size_t size, time_processing_t& time);
        virtual void configure(Config::Section *sec, bool onfly);
        virtual void start();
        virtual void stop();
        virtual ~JetsonVideoTransmissionV1();
        /// Non-owning access to the queue element; used by the tsdemux
        /// "pad-added" callback to link the dynamic video pad.
        GstElement* getVideoQueue(){return m_video_queue;}
        /// Pulls one decoded frame from the appsink; returns its PTS (0 on failure).
        unsigned long long getVideo(cv::Mat &frame);
    private:
        int m_rotate = 0;
        unsigned m_show_video_not_point = 0;
        unsigned m_black_point_level = 0;
        unsigned m_white_point_level = 0;

        int m_cam_resolution = 0;        // index into CAM30_CUNANO_RESOLUTION

        std::string m_ip_address;        // udpsrc "address"
        int m_port = 0;                  // udpsrc "port"
        unsigned long m_timeout = 0;     // udpsrc "timeout", ns (0 = disabled)

        long m_max_buffer = 0;           // appsink "max-buffers" (0 = unlimited)
        bool m_video_sync = false;       // appsink "sync"
        bool m_video_async = false;      // appsink "async"
        bool m_video_drop = false;       // appsink "drop"

        // GStreamer elements; owned by m_pipeline once added to the bin.
        GstElement *m_pipeline = nullptr;
        GstStateChangeReturn m_state = GST_STATE_CHANGE_FAILURE;
        GstElement *m_src = nullptr;
        GstElement *m_demuxer = nullptr;
        GstElement *m_h264parse = nullptr;
        GstElement *m_avdec_h264 = nullptr;
        GstElement *m_videoconvert = nullptr;
        GstElement *m_video_queue = nullptr;
        GstElement *m_video_sink = nullptr;

        ACE_Thread_Mutex m_mutex;        // guards pipeline state transitions
        //ACE_Condition<ACE_Thread_Mutex> m_condition;

        cv::Mat m_frame;                 // last frame pulled from the appsink
    };

JetsonVideoTransmissionV1.cpp

#include "JetsonVideoTransmissionV1.h"
#include "filter.h"

#include <vector>
#include <algorithm>
#include <string>
#include <map>

// list of most possible data source for gstreamer
// Data sources a GStreamer pipeline is commonly built from; used to
// validate the configured "src_kind" value before constructing elements.
const std::vector<std::string> SOURCE_LIST = {
    "filesrc", "multifilesrc", "souphttpsrc", "giosrc",
    "rtspsrc", "fakesrc", "v4l2src", "udpsrc"
};

// https://www.e-consystems.com/ar0330-lowlight-cameramodule.asp

// Capture resolutions supported by the e-CAM30_CUNANO sensor.
enum RESOL_CAM30_CUNANO{CAM30_VGA, CAM30_HD, CAM30_FULLHD, CAM30_3_MP, CAM30_3_4_MP};

// Resolution index -> {width, height} in pixels.
const std::map<RESOL_CAM30_CUNANO, std::pair<int, int>> CAM30_CUNANO_RESOLUTION = {
    {CAM30_VGA, {640, 480}},
    {CAM30_HD, {1280, 720}},
    {CAM30_FULLHD, {1920, 1080}},
    {CAM30_3_MP, {2304, 1296}},
    {CAM30_3_4_MP, {2304, 1536}}
};

/// "pad-added" callback for tsdemux: links the dynamically-created demuxer
/// pad to the video queue's sink pad.
/// @param element  the demuxer that emitted the signal (unused)
/// @param pad      the newly created source pad
/// @param data     the owning JetsonVideoTransmissionV1 instance
static void on_pad_added_video(GstElement *element, GstPad *pad, gpointer data)
{
    JetsonVideoTransmissionV1 *jetVideo = static_cast<JetsonVideoTransmissionV1 *>(data);
    GstElement *video_queue = jetVideo->getVideoQueue();
    LOG_INFO("%s%i: Dynamic pad created, linking tsdemux/video_queue\n", jetVideo->getName(), jetVideo->index());

    GstPad *sinkpad = gst_element_get_static_pad(video_queue, "sink");
    if (sinkpad == nullptr)
    {
        LOG_ERROR("%s%i: video queue has no static sink pad\n", jetVideo->getName(), jetVideo->index());
        return;
    }
    // The link result was previously ignored; a failed link left the
    // pipeline silently producing no frames.
    if (GST_PAD_LINK_FAILED(gst_pad_link(pad, sinkpad)))
    {
        LOG_ERROR("%s%i: failed to link demuxer pad to video queue\n", jetVideo->getName(), jetVideo->index());
    }
    gst_object_unref(sinkpad);
}

// Releases the cached frame, then drives the pipeline to NULL and drops
// the reference taken at construction time.
JetsonVideoTransmissionV1::~JetsonVideoTransmissionV1()
{
    m_frame.release();
    if (m_pipeline == nullptr)
        return;
    m_state = gst_element_set_state(GST_ELEMENT(m_pipeline), GST_STATE_NULL);
    gst_object_unref(GST_OBJECT(m_pipeline));
    m_pipeline = nullptr;
}

/// Pipeline bus watch: prints errors and end-of-stream notifications,
/// and a generic marker for every other bus message.
/// Always returns TRUE so the watch stays installed.
static gboolean bus_call(GstBus *bus, GstMessage *message, gpointer data)
{
    const GstMessageType msg_type = GST_MESSAGE_TYPE(message);

    if (msg_type == GST_MESSAGE_ERROR)
    {
        GError *err = nullptr;
        gchar *debug_text = nullptr;
        gst_message_parse_error(message, &err, &debug_text);
        g_print("Error: %s\n", err->message);
        g_free(debug_text);
        g_error_free(err);
    }
    else if (msg_type == GST_MESSAGE_EOS)
    {
        g_print("End of stream\n");
    }
    else
    {
        g_print("Something other\n");
    }
    return TRUE;
}

/// Builds the receive pipeline
///   udpsrc ! tsdemux ! queue ! h264parse ! avdec_h264 ! videoconvert ! appsink
/// from the configuration section. tsdemux pads are linked dynamically in
/// on_pad_added_video(). Leaves the pipeline pre-rolled (PAUSED); start()
/// switches it to PLAYING.
void JetsonVideoTransmissionV1::configure(Config::Section *sec, bool onfly){

    Assert(sec);
    setIndex(sec);

    if (!gst_is_initialized())
    {
        GError *err_init = nullptr;
        if (!gst_init_check(nullptr, nullptr, &err_init))
        {
            LOG_ERROR("%s%i: gstreamer is not initialized with message: %s\n", this->getName(), this->index(), err_init->message);
        }
        if (err_init != nullptr)
        {
            g_error_free(err_init);
        }
    }

    m_ip_address = sec->getString("ip_address");
    if(m_ip_address == "")
    {
        InputSourcesException::raise(__FILE__, __LINE__, "IP address are not set!\n");
    }

    m_port = sec->getInt("port");
    if (!m_port)
    {
        InputSourcesException::raise(__FILE__, __LINE__, "Port are not set!\n");
    }

    m_timeout = (unsigned long)sec->getInt("timeout");
    if (!m_timeout)
    {
        LOG_INFO("%s%i: timeout is not set\n", this->getName(), this->index());
        m_timeout = 0;
    }

    m_max_buffer = sec->getInt("max_buffers");
    if (m_max_buffer < 0)
    {
        LOG_INFO("%s%i: max buffer is not set\n", this->getName(), this->index());
        m_max_buffer = 0;
    }

    std::string src_kind = sec->getString("src_kind"); // we think it is udpsrc
    if (std::find(SOURCE_LIST.begin(), SOURCE_LIST.end(), src_kind) == SOURCE_LIST.end())
    {
        InputSourcesException::raise(__FILE__, __LINE__, "Unsupported kind source data: %s\n", src_kind.c_str());
    }
    std::string pipeline_name = std::string(this->getName()) + std::to_string(this->index()) + "_pipeline";

    m_video_drop = sec->getBoolean("video_drop");

    m_pipeline = gst_pipeline_new(pipeline_name.c_str());
    m_src = gst_element_factory_make(src_kind.c_str(), nullptr);
    m_demuxer = gst_element_factory_make("tsdemux", nullptr);
    m_video_queue = gst_element_factory_make("queue", nullptr);
    m_h264parse = gst_element_factory_make("h264parse", nullptr);
    m_avdec_h264 = gst_element_factory_make("avdec_h264", nullptr);
    m_videoconvert = gst_element_factory_make("videoconvert", nullptr);
    m_video_sink = gst_element_factory_make("appsink", nullptr);

    // BUG fixed: the original tested !m_avdec_h264 twice and never checked
    // m_h264parse, so a missing h264parse plugin went undetected here.
    if (!m_pipeline || !m_src || !m_demuxer || !m_video_queue || !m_h264parse || !m_avdec_h264 || !m_videoconvert || !m_video_sink)
    {
        InputSourcesException::raise(__FILE__, __LINE__, "Could not construct pipeline %s!\n", gst_element_get_name(m_pipeline));
    }
    else
    {
        LOG_INFO("%s%i: pipeline %s is constructed\n", this->getName(), this->index(), gst_element_get_name(m_pipeline));
    }

    g_object_set(G_OBJECT(m_src), "address", m_ip_address.c_str(), nullptr);
    g_object_set(G_OBJECT(m_src), "port", m_port, nullptr);
    // udpsrc "timeout" is a guint64 property: pass a 64-bit value through
    // the varargs, otherwise the wrong number of bytes is consumed.
    g_object_set(G_OBJECT(m_src), "timeout", (guint64)m_timeout, nullptr);

    gchar *ip;
    g_object_get(G_OBJECT(m_src), "address", &ip, nullptr);
    LOG_INFO("%s%i: udpsrc IP address is set %s\n", this->getName(), this->index(), ip);
    g_free(ip); // g_object_get() returns a copy for string properties — was leaked

    gint port;
    g_object_get(G_OBJECT(m_src), "port", &port, nullptr);
    LOG_INFO("%s%i: udpsrc port is set %i\n", this->getName(), this->index(), port);

    // BUG fixed: reading a guint64 property into a guint corrupts the stack.
    guint64 timeout;
    g_object_get(G_OBJECT(m_src), "timeout", &timeout, nullptr);
    LOG_INFO("%s%i: udpsrc timeout is set %llu nsec\n", this->getName(), this->index(), (unsigned long long)timeout);

    gst_bin_add_many(GST_BIN(m_pipeline), m_src, m_demuxer, m_video_queue, m_h264parse, m_avdec_h264, m_videoconvert, m_video_sink, nullptr);

    if (!gst_element_link(m_src, m_demuxer))
    {
        LOG_ERROR("%s%i: udpsrc and tsdemux are not linked\n", this->getName(), this->index());
    }
    if (!gst_element_link_many(m_video_queue, m_h264parse, m_avdec_h264, m_videoconvert, m_video_sink, nullptr))
    {
        LOG_ERROR("%s%i: video_queue, h264parse, avdec_h264, videoconvert, video_sink are not linked\n", this->getName(), this->index());
    }

    g_signal_connect(m_demuxer, "pad-added", G_CALLBACK(on_pad_added_video), this);

    // "max-buffers" is a guint property: cast so the varargs carry 32 bits.
    g_object_set(m_video_sink, "emit-signals", TRUE, "max-buffers", (guint)m_max_buffer, nullptr);
    g_object_set(m_video_sink, "drop", m_video_drop, nullptr);

    gboolean emit_signal_v;
    g_object_get(G_OBJECT(m_video_sink), "emit-signals", &emit_signal_v, nullptr);
    LOG_INFO("%s%i: appsink video emit-signals is set %s\n", this->getName(), this->index(), (emit_signal_v ? "true" : "false"));

    guint max_buffers_v;
    g_object_get(G_OBJECT(m_video_sink), "max-buffers", &max_buffers_v, nullptr);
    LOG_INFO("%s%i: appsink video max-buffers is set %u\n", this->getName(), this->index(), max_buffers_v);

    // BUG fixed: the original declared a local `gboolean m_video_drop`
    // shadowing the member variable of the same name.
    gboolean drop_v;
    g_object_get(G_OBJECT(m_video_sink), "drop", &drop_v, nullptr);
    LOG_INFO("%s%i: appsink video drop is set %s\n", this->getName(), this->index(), (drop_v ? "true" : "false"));

    GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
    // NOTE(review): the watch id is not kept, so the watch is never removed
    // — confirm the pipeline lives for the whole process lifetime.
    gst_bus_add_watch(bus, bus_call, nullptr);
    gst_object_unref(bus);

    m_cam_resolution = sec->getInt("cam_resolution");
    if (m_cam_resolution < 0 || m_cam_resolution > 4)
    {
        m_cam_resolution = 1;
        LOG_WARNING("%s%i: error in cam_resolution setting, set default = 1\n", this->getName(), this->index());
    }
    RESOL_CAM30_CUNANO resol = static_cast<RESOL_CAM30_CUNANO>(m_cam_resolution);
    m_vConf.m_width = CAM30_CUNANO_RESOLUTION.at(resol).first;
    m_vConf.m_height = CAM30_CUNANO_RESOLUTION.at(resol).second;

    LOG_INFO("%s%i: width: %i\t height: %i\n", this->getName(), this->index(), m_vConf.m_width, m_vConf.m_height);

   /*
   some code
 */

    // Pre-roll: briefly go to PLAYING, then hold in PAUSED until start().
    m_state = gst_element_set_state(GST_ELEMENT(m_pipeline), GST_STATE_PLAYING);
    m_state = gst_element_set_state(GST_ELEMENT(m_pipeline), GST_STATE_PAUSED);
    return;
}

/// Switches the pipeline to PLAYING under the state mutex.
/// @throws InputSourcesException if the state change fails.
void JetsonVideoTransmissionV1::start(){

    m_mutex.acquire();
    m_state = gst_element_set_state(GST_ELEMENT(m_pipeline), GST_STATE_PLAYING);
    const bool failed = (m_state == GST_STATE_CHANGE_FAILURE);
    if (!failed)
    {
        m_started = true;
    }
    // BUG fixed: release BEFORE raising — the original left the mutex
    // locked forever when the state change failed.
    m_mutex.release();
    if (failed)
    {
        InputSourcesException::raise(__FILE__, __LINE__, "Can not start %s\n", gst_element_get_name(m_pipeline));
    }
    return;
}


void JetsonVideoTransmissionV1::stop(){
    
    m_state = gst_element_set_state(GST_ELEMENT(m_pipeline), GST_STATE_PAUSED);
    m_started = false;
    return;
}

// Pulls the next decoded frame from the appsink into m_frame and stamps the
// processing time. Returns the incremented frame id even on failure so the
// caller's sequence numbering stays monotonic. `p` and `size` are not used
// in the visible part of this function (possibly by the elided code).
unsigned JetsonVideoTransmissionV1::getData(void *p, size_t size, time_processing_t& time){

    cv::Mat gsFrame; // NOTE(review): unused in the visible code — presumably used by the elided section below; confirm
    
    unsigned long long time_pts = getVideo(m_frame); // buffer PTS; 0 on failure

    if (m_frame.empty())
    {
        LOG_INFO("%s%i::getData: Failed to read image\n", this->getName(), this->index());
        return ++m_frame_id;
    }

    /*
         some code
   */

    // Seconds elapsed since the HW manager start time.
    time = (ACE_OS::gettimeofday().get_msec() - HW_MANAGER->getStartTime()) / 1000.0;
    return ++m_frame_id;
}

/// Pulls one sample from the appsink, deep-copies the decoded I420 image
/// into `frame` (height*3/2 rows of 8-bit data) and returns the buffer PTS.
/// Returns 0 on any failure; `frame` is left untouched in that case.
unsigned long long JetsonVideoTransmissionV1::getVideo(cv::Mat &frame)
{
    GstSample *sample_video = gst_app_sink_pull_sample(GST_APP_SINK(m_video_sink));
    if (!sample_video)
    {
        LOG_ERROR("%s%i::getData: Failed to read image\n", this->getName(), this->index());
        return 0;
    }

    GstCaps *caps_video = gst_sample_get_caps(sample_video);
    if (!caps_video)
    {
        LOG_ERROR("%s%i::getData: could not get snapshot format\n", this->getName(), this->index());
        gst_sample_unref(sample_video); // was leaked on this path
        return 0;
    }
    GstStructure *structure_video = gst_caps_get_structure(caps_video, 0);

    /* we need to get the final caps on the buffer to get the size */
    int width = 0, height = 0;
    // BUG fixed: the original overwrote the width result with the height
    // result, so a missing "width" field went undetected.
    const bool res = gst_structure_get_int(structure_video, "width", &width)
                  && gst_structure_get_int(structure_video, "height", &height);
    if (!res)
    {
        LOG_ERROR("%s%i::getData: could not get snapshot dimension\n", this->getName(), this->index());
        gst_sample_unref(sample_video); // was leaked on this path
        return 0;
    }

    GstBuffer *buffer_video = gst_sample_get_buffer(sample_video);
    unsigned long long time_pts = buffer_video->pts;

    GstMapInfo map_info_video;
    if (!gst_buffer_map(buffer_video, &map_info_video, (GstMapFlags)(GST_MAP_READ)))
    {
        LOG_ERROR("%s%i::getData: could not map video buffer\n", this->getName(), this->index());
        gst_sample_unref(sample_video);
        return 0;
    }
    // BUG fixed: the original wrapped the mapped memory in `frame` and then
    // unmapped/unreffed it, leaving `frame` pointing at freed data.
    // Deep-copy before releasing the buffer.
    cv::Mat(cv::Size(width, height * 3 / 2), CV_8UC1,
            (char *)map_info_video.data, cv::Mat::AUTO_STEP).copyTo(frame);
    gst_buffer_unmap(buffer_video, &map_info_video);
    gst_sample_unref(sample_video);

    return time_pts;
}

This code works, but the received video has a delay of about 2-3 seconds.

Is it possible to fix delay by using gstreamer tools?
P.S.
gstreamer version 1.14.5
In OpenCV, using cv::VideoCapture with the same pipeline on the Jetson Nano side, there is no delay…

Hi,
It looks similar to


Please set small IDR interval for a try.

DaneLLL, thank you for your answer!

I found parameter “config-interval” for h264parse element by gst-inspect util.
When I set this parameter to 15 (and tried some other values as well), the delay stays about the same, ~2-3 s.

gst-inspect-1.0 h264parse
    ...
    Element Properties:
      name                : The name of the object
                        flags: readable, writable
                        String. Default: "h264parse0"
      parent              : The parent of the object
                        flags: readable, writable
                        Object of type "GstObject"
      disable-passthrough : Force processing (disables passthrough)
                        flags: readable, writable
                        Boolean. Default: false
      config-interval     : Send SPS and PPS Insertion Interval in seconds (sprop parameter sets will be multiplexed in the data stream when detected.) (0 = disabled, -1 = send with every IDR frame)
                        flags: readable, writable
                        Integer. Range: -1 - 3600 Default: 0 

May be there is something else parameters for varying?

Hi,
There is hardware decoder in Jetson Nano. Please run both server and client on Jetson Nano and check if there is less delay.
[Server]

$ gst-launch-1.0 v4l2src device=/dev/video0 ! 'video/x-raw, format=(string)UYVY, width=(int)640, height=(int)480' ! nvvidconv ! 'video/x-raw(memory:NVMM), format=(string)I420' ! nvv4l2h264enc maxperf-enable=1 insert-sps-pps=1 ! h264parse ! rtph264pay ! udpsink host=127.0.0.1 port=5000 sync=false

[Client]

$ gst-launch-1.0 udpsrc port=5000 ! 'application/x-rtp,encoding-name=H264,payload=96' ! rtph264depay ! h264parse ! nvv4l2decoder enable-max-performance=1 ! nvoverlaysink sync=false

A user has mentioned there is more delay in using tsdemux. FYR.
Udp h264 to nv3dsink latency optimization

DaneLLL, Thank you for your answer!

I saw OpenCV code for VideoCapture wrapping and wrote code like in OpenCV. Some main changes are:
In JetsonVideoTransmissionV1.cpp:

// some code
    if (!gst_is_initialized())
        {
            GError *err_init = nullptr;
            if (!gst_init_check(nullptr, nullptr, &err_init))
            {
                LOG_ERROR("%s%i: gstreamer is not initialized with message: %s\n", this->getName(), this->index(), err_init->message);
            }
            if (err_init != nullptr)
            {
                g_error_free(err_init);
            }
        }
        m_blocksize_udpsrc = sec->getInt("blocksize_udpsrc");
        m_num_buffers = sec->getInt("num_buffers");
        m_do_timestamp = sec->getBoolean("do_timestamp");
        m_port = sec->getInt("port");
        if (!m_port)
        {
            InputSourcesException::raise(__FILE__, __LINE__, "Port are not set!\n");
        }
        //m_multicast_iface = sec->getString("");
        /*m_uri = sec->getString("");
        if(m_uri == "")
        {
            m_uri = "udp://0.0.0.0:5004";
        }*/
        m_buffer_size = sec->getInt("buffer_size");
        m_timeout = (unsigned long)sec->getInt("timeout");
        m_skip_first_bytes = sec->getInt("skip_first_bytes");
        m_close_socket = sec->getBoolean("close_socket");
        m_auto_multicast = sec->getBoolean("auto_multicast");
        m_reuse = sec->getBoolean("reuse");
        m_ip_address = sec->getString("ip_address");
        if(m_ip_address == "")
        {
            InputSourcesException::raise(__FILE__, __LINE__, "IP address are not set!\n");
        }
        m_loop = sec->getBoolean("loop");
        m_retrieve_sender_address = sec->getBoolean("retrieve_sender_address");

        // tsdemux
        m_parse_private_sections = sec->getBoolean("parse_private_sections");
        m_program_number = sec->getInt("program_number");
        m_emit_stats = sec->getBoolean("emit_stats");

        // queue property; gst-inspect-1.0 queue
        //m_current_level_buffers = (unsigned)sec->getInt("current_level_buffers");
        //m_current_level_bytes = (unsigned)sec->getInt("current_level_bytes");
        //m_current_level_time = (unsigned long)sec->getInt("current_level_time");
        m_max_size_buffers = (unsigned)sec->getInt("max_size_buffers");
        m_max_size_bytes = (unsigned)sec->getInt("max_size_bytes");
        m_max_size_time = (unsigned long)sec->getInt("max_size_time");
        m_min_threshold_buffers = (unsigned)sec->getInt("min_threshold_buffers");
        m_min_threshold_bytes = (unsigned)sec->getInt("min_threshold_bytes");
        m_min_threshold_time = (unsigned long)sec->getInt("min_threshold_time");
        m_silent = sec->getBoolean("silent");
        m_flush_on_eos = sec->getBoolean("flush_on_eos");

        // h264parse
        m_disable_passthrough = sec->getBoolean("disable_passthrough");
        m_config_interval = sec->getInt("config_interval");

        // advec_h264
        m_direct_rendering = sec->getBoolean("direct_rendering");
        m_debug_mv = sec->getBoolean("debug_mv");
        m_max_threads = sec->getInt("max_threads");
        m_output_corrupt = sec->getBoolean("output_corrupt");

        // videoconvert
        m_qos_videoconvert = sec->getBoolean("qos_videoconvert");
        m_dither_quantization = (unsigned)sec->getInt("dither_quantization");
        m_alpha_value = (double)sec->getFloat("alpha_value");
        m_n_threads = (unsigned)sec->getInt("n_threads");

        // appsink
        m_video_sync = sec->getBoolean("video_sync");
        m_max_lateness = sec->getInt("max_lateness");
        m_qos_appsink = sec->getBoolean("qos_appsink");
        m_video_async = sec->getBoolean("video_async");
        m_ts_offset = sec->getInt("ts_offset");
        m_enable_last_sample = sec->getBoolean("enable_last_sample");
        m_blocksize_appsink = (unsigned)sec->getInt("blocksize_appsink");
        m_render_delay = (unsigned)sec->getInt("render_delay");
        m_throttle_time = (unsigned)sec->getInt("throttle_time");
        m_max_bitrate = (unsigned)sec->getInt("max_bitrate");
        //m_eos = sec->getBoolean("eos");
        m_emit_signals = sec->getBoolean("emit_signals");
        m_max_buffers = sec->getInt("max_buffers");
        if (m_max_buffers < 0)
        {
            m_max_buffers = 0;
        }
        m_video_drop = sec->getBoolean("video_drop");
        m_wait_on_eos = sec->getBoolean("wait_on_eos");
        m_buffer_list = sec->getBoolean("buffer_list");

        std::string pipeline_name = std::string(this->getName()) + std::to_string(this->index()) + "_pipeline";

        m_pipeline_description = sec->getString("pipeline_description");
        m_pipeline_description.erase(std::remove(m_pipeline_description.begin(), m_pipeline_description.end(), '\n'), m_pipeline_description.end());

        GError *err = nullptr;
        m_pipeline = gst_parse_launch(m_pipeline_description.c_str(), &err);

        if(err != nullptr)
        {
            LOG_ERROR("%s%i: error message: %s\n", this->getName(), this->index(), err->message);
            g_error_free(err);
            InputSourcesException::raise(__FILE__, __LINE__, "Pipeline %s did not constructed\n", pipeline_name.c_str());
        }else
        {
            g_object_set(G_OBJECT(m_pipeline), "name", pipeline_name.c_str(), NULL);
            LOG_INFO("%s%i: pipeline %s is constructed\n", this->getName(), this->index(), gst_element_get_name(m_pipeline));
        }

        // start parsing pipeline for finding separate elements
        GstIterator *it = gst_bin_iterate_elements(GST_BIN(m_pipeline));

        GstElement *element = nullptr;
        gboolean done = false;
        gchar *names = nullptr;
        std::vector<std::string> names_list;
        GValue value = G_VALUE_INIT;

        while (!done)
        {
            switch (gst_iterator_next(it, &value))
            {
            case GST_ITERATOR_OK:
                element = GST_ELEMENT(g_value_get_object(&value));
                names = gst_element_get_name(element);
                names_list.push_back(names);
                g_value_unset(&value);
                break;
            case GST_ITERATOR_RESYNC:
                gst_iterator_resync(it);
                break;
            case GST_ITERATOR_ERROR:
            case GST_ITERATOR_DONE:
                done = TRUE;
                break;
            }
        }
        gst_iterator_free(it);
        // end parsing pipeline
        
        std::vector<std::string> pipe_elem_list;
        std::string delimiter = " ! ";
        std::string m_pipeline_description_copy = m_pipeline_description;
        size_t pos = 0;
        std::string token;
        while ((pos = m_pipeline_description_copy.find(delimiter)) != std::string::npos)
        {
            pipe_elem_list.push_back(m_pipeline_description_copy.substr(0, pos));
            m_pipeline_description_copy.erase(0, pos + delimiter.length());
        }

        pipe_elem_list.push_back(m_pipeline_description_copy);
        
        if(pipe_elem_list.size() != names_list.size())
        {
            InputSourcesException::raise(__FILE__, __LINE__, "Different size of element in pipeline descriptor and element in pipeline after parsing!\n");
        }

        m_pipeline_element.resize(pipe_elem_list.size());


        for (size_t j = 0; j < pipe_elem_list.size(); j++)
        {
            for (size_t i = 0; i < names_list.size(); i++)
            {
                if (pipe_elem_list[j].find(names_list[i]) != std::string::npos)
                {
                    if (names_list[i] == "udpsrc0")
                    {
                        m_src = gst_bin_get_by_name(GST_BIN(m_pipeline), names_list[i].c_str());

                        g_object_set(G_OBJECT(m_src), "blocksize", m_blocksize_udpsrc, nullptr);
                        g_object_set(G_OBJECT(m_src), "num-buffers", m_num_buffers, nullptr);
                        g_object_set(G_OBJECT(m_src), "do-timestamp", m_do_timestamp, nullptr);
                        g_object_set(G_OBJECT(m_src), "port", m_port, nullptr);
                        g_object_set(G_OBJECT(m_src), "buffer-size", m_buffer_size, nullptr);
                        g_object_set(G_OBJECT(m_src), "timeout", m_timeout, nullptr);
                        g_object_set(G_OBJECT(m_src), "skip-first-bytes", m_skip_first_bytes, nullptr);
                        g_object_set(G_OBJECT(m_src), "close-socket", m_close_socket, nullptr);
                        g_object_set(G_OBJECT(m_src), "auto-multicast", m_auto_multicast, nullptr);
                        g_object_set(G_OBJECT(m_src), "reuse", m_reuse, nullptr);
                        g_object_set(G_OBJECT(m_src), "address", m_ip_address.c_str(), nullptr);
                        g_object_set(G_OBJECT(m_src), "loop", m_loop, nullptr);
                        g_object_set(G_OBJECT(m_src), "retrieve-sender-address", m_retrieve_sender_address, nullptr);
                        
                        printElementProperty(m_src);
                        break;
                    }else if(names_list[i] == "tsdemux0")
                    {
                        m_demuxer = gst_bin_get_by_name(GST_BIN(m_pipeline), names_list[i].c_str());

                        g_object_set(G_OBJECT(m_demuxer), "parse-private-sections", m_parse_private_sections, nullptr);
                        g_object_set(G_OBJECT(m_demuxer), "program-number", m_program_number, nullptr);
                        g_object_set(G_OBJECT(m_demuxer), "emit-stats", m_emit_stats, nullptr);
                        
                        printElementProperty(m_demuxer);
                        break;
                    }
                    else if (names_list[i] == "queue0")
                    {
                        m_video_queue = gst_bin_get_by_name(GST_BIN(m_pipeline), names_list[i].c_str());

                        //g_object_set(G_OBJECT(m_video_queue), "current-level-buffers", m_current_level_buffers, nullptr);
                        //g_object_set(G_OBJECT(m_video_queue), "current-level-bytes", m_current_level_bytes, nullptr);
                        //g_object_set(G_OBJECT(m_video_queue), "current-level-time", m_current_level_time, nullptr);
                        g_object_set(G_OBJECT(m_video_queue), "max-size-buffers", m_max_size_buffers, nullptr);
                        g_object_set(G_OBJECT(m_video_queue), "max-size-bytes", m_max_size_bytes, nullptr);
                        g_object_set(G_OBJECT(m_video_queue), "max-size-time", m_max_size_time, nullptr);
                        g_object_set(G_OBJECT(m_video_queue), "min-threshold-buffers", m_min_threshold_buffers, nullptr);
                        g_object_set(G_OBJECT(m_video_queue), "min-threshold-bytes", m_min_threshold_bytes, nullptr);
                        g_object_set(G_OBJECT(m_video_queue), "min-threshold-time", m_min_threshold_time, nullptr);
                        g_object_set(G_OBJECT(m_video_queue), "silent", m_silent, nullptr);
                        g_object_set(G_OBJECT(m_video_queue), "flush-on-eos", m_flush_on_eos, nullptr);

                        printElementProperty(m_video_queue);
                        break;
                    }
                    else if (names_list[i] == "h264parse0")
                    {
                        m_h264parse = gst_bin_get_by_name(GST_BIN(m_pipeline), names_list[i].c_str());

                        g_object_set(G_OBJECT(m_h264parse), "disable-passthrough", m_disable_passthrough, nullptr);
                        g_object_set(G_OBJECT(m_h264parse), "config-interval", m_config_interval, nullptr);
                        
                        printElementProperty(m_h264parse);
                        break;
                    }
                    else if (names_list[i] == "avdec_h264-0")
                    {
                        m_avdec_h264 = gst_bin_get_by_name(GST_BIN(m_pipeline), names_list[i].c_str());

                        g_object_set(G_OBJECT(m_avdec_h264), "direct-rendering", m_direct_rendering, nullptr);
                        g_object_set(G_OBJECT(m_avdec_h264), "debug-mv", m_debug_mv, nullptr);
                        g_object_set(G_OBJECT(m_avdec_h264), "max-threads", m_max_threads, nullptr);
                        g_object_set(G_OBJECT(m_avdec_h264), "output-corrupt", m_output_corrupt, nullptr);

                        printElementProperty(m_avdec_h264);
                        break;
                    }
                    else if (names_list[i] == "videoconvert0")
                    {
                        m_videoconvert = gst_bin_get_by_name(GST_BIN(m_pipeline), names_list[i].c_str());

                        g_object_set(G_OBJECT(m_videoconvert), "qos", m_qos_videoconvert, nullptr);
                        g_object_set(G_OBJECT(m_videoconvert), "dither-quantization", m_dither_quantization, nullptr);
                        g_object_set(G_OBJECT(m_videoconvert), "alpha-value", m_alpha_value, nullptr);
                        g_object_set(G_OBJECT(m_videoconvert), "n-threads", m_n_threads, nullptr);

                        printElementProperty(m_videoconvert);
                        break;
                    }
                    else if (names_list[i] == "appsink0")
                    {
                        m_video_sink = gst_bin_get_by_name(GST_BIN(m_pipeline), names_list[i].c_str());

                        g_object_set(m_video_sink, "sync", m_video_sync, nullptr);
                        g_object_set(m_video_sink, "max-lateness", m_max_lateness, nullptr);
                        g_object_set(m_video_sink, "qos", m_qos_appsink, nullptr);
                        g_object_set(m_video_sink, "async", m_video_async, nullptr);
                        g_object_set(m_video_sink, "ts-offset", m_ts_offset, nullptr);
                        g_object_set(m_video_sink, "enable-last-sample", m_enable_last_sample, nullptr);
                        g_object_set(m_video_sink, "blocksize", m_blocksize_appsink, nullptr);
                        g_object_set(m_video_sink, "render-delay", m_render_delay, nullptr);
                        g_object_set(m_video_sink, "throttle-time", m_throttle_time, nullptr);
                        g_object_set(m_video_sink, "max-bitrate", m_max_bitrate, nullptr);
                        //g_object_set(m_video_sink, "eos", m_eos, nullptr);
                        g_object_set(m_video_sink, "emit-signals", m_emit_signals, nullptr);
                        g_object_set(m_video_sink, "max-buffers", m_max_buffers, nullptr);
                        g_object_set(m_video_sink, "drop", m_video_drop, nullptr);
                        g_object_set(m_video_sink, "wait-on-eos", m_wait_on_eos, nullptr);
                        g_object_set(m_video_sink, "buffer-list", m_buffer_list, nullptr);

                        printElementProperty(m_video_sink);
                        break;
                    }
                    else
                    {
                        LOG_WARNING("%s%i: unknown element in pipeline description\n", this->getName(), this->index());
                        break;
                    }
                }
    }
}
// some code

In JetsonVideoTransmissionV1.h:

//some code

    // udpsrc property; gst-inspect-1.0 udpsrc
    int m_blocksize_udpsrc;                 // default 4096
    int m_num_buffers;                      // default -1
    bool m_do_timestamp;                    // default true
    int m_port;                             // default 5004
    std::string m_multicast_iface;          // default null
    std::string m_uri;                      // default "udp://0.0.0.0:5004"
    int m_buffer_size;                      // default 0
    unsigned long m_timeout;                // default 0 (0 = disabled) ns
    int m_skip_first_bytes;                 // default 0
    bool m_close_socket;                    // default true
    bool m_auto_multicast;                  // default true
    bool m_reuse;                           // default true
    std::string m_ip_address;               // default "0.0.0.0"
    bool m_loop;                            // default true
    bool m_retrieve_sender_address;         // default true

    // tsdemux property; gst-inspect-1.0 tsdemux
    bool m_parse_private_sections;          // default true
    int m_program_number;                   // default -1
    bool m_emit_stats;                      // default false

    // queue property; gst-inspect-1.0 queue
    unsigned m_current_level_buffers;       // default 0, readable
    unsigned m_current_level_bytes;         // default 0, readable
    unsigned long m_current_level_time;     // default 0, readable
    unsigned m_max_size_buffers;            // default 200
    unsigned m_max_size_bytes;              // default 10485760
    unsigned long m_max_size_time;          // default 1000000000 ns
    unsigned m_min_threshold_buffers;       // default 0
    unsigned m_min_threshold_bytes;         // default 0
    unsigned long m_min_threshold_time;     // default 0
    //_GstQueueLeaky m_leaky;
    bool m_silent;                          // default false
    bool m_flush_on_eos;                    // default false

    // h264parse property; gst-inspect-1.0 h264parse
    bool m_disable_passthrough;             // default false
    int m_config_interval;                  // default 0

    // advec_h264 property; gst-inspect-1.0 advec_h264
    bool m_direct_rendering;                // default true
    bool m_debug_mv;                        // default false
    int m_max_threads;                      // default 0
    bool m_output_corrupt;                  // default true

    // videoconvert property
    bool m_qos_videoconvert;                // default true
    unsigned m_dither_quantization;         // default 1
    double m_alpha_value;                   // default 1.0
    unsigned m_n_threads;                   // default 1

    // appsink property; gst-inspect-1.0 appsink
    bool m_video_sync;                      // default true
    long m_max_lateness;                    // default -1 (-1 = unlimited) ns
    bool m_qos_appsink;                     // default false
    bool m_video_async;                     // default true
    long m_ts_offset;                       // default 0 ns
    bool m_enable_last_sample;              // default true
    unsigned m_blocksize_appsink;           // default 4096
    unsigned long m_render_delay;           // default 0 ns
    unsigned long m_throttle_time;          // default 0 (0 = disabled)
    unsigned long m_max_bitrate;            // default 0 (0 = disabled)
    bool m_eos;                             // default true, readable
    bool m_emit_signals;                    // default true
    long m_max_buffers;                     // default 0 (0 = unlimited)
    bool m_video_drop;                      // default false
    bool m_wait_on_eos;                     // default true
    bool m_buffer_list;                     // default false

    std::vector<std::pair<GstElement *, std::string>> m_pipeline_element;

// some code

Using this version of the code I no longer get the 2-3 s delay I had before.
It is interesting that the pipeline built by parsing a description string works properly, while the one constructed manually does not…
Maybe some important detail is lost in the manual pipeline construction?

Thank you!