Camera streaming delay when using GStreamer decoding on Jetson Orin

Hello, we are trying to grab frames from a camera over RTSP using GStreamer on Jetson. We create a pipeline with an appsink and use a callback function to retrieve the data. The frames are displayed correctly, but the latency grows larger and larger while the frame rate stays relatively low. How can we use Jetson and GStreamer properly, and in which direction should we dig deeper?

The main code section follows:

```cpp
static GstFlowReturn new_sample (GstElement *sink, gpointer user_data) {
        ROS_INFO("callback start!");
        auto start = std::chrono::high_resolution_clock::now();
        GstSample *sample;
        GstBuffer *buffer;
        GstCaps *caps;
        GstStructure *s;
        gint width, height;	//The size of the image

        GstFlowReturn ret1;
        Buffer buffer1;

        ros::NodeHandle nh;
        it = new image_transport::ImageTransport(nh);
        Image_Pub = it->advertise("cpp_cv_img_pub", 1);
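        // NOTE: the node handle, image transport, and publisher above are
        // re-created on every frame; see the sketch after this code block
        // for creating them once.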

        /* Retrieve the buffer */
        g_signal_emit_by_name (sink, "pull-sample", &sample);
        if (sample){
            g_print ("*  ");
            caps = gst_sample_get_caps (sample);
            if (!caps) {
                g_print ("gst_sample_get_caps fail\n");
                gst_sample_unref (sample);
                return GST_FLOW_ERROR;
            }
            s = gst_caps_get_structure (caps, 0);
            gboolean res;
            res = gst_structure_get_int (s, "width", &width);		//Get the width of the image

            g_print ("width: %d,  ", width);
            res |= gst_structure_get_int (s, "height", &height);	//Obtain the height of the image
            g_print ("height: %d \n", height);

            if (!res) {
                g_print ("gst_structure_get_int fail\n");
                gst_sample_unref (sample);
                return GST_FLOW_ERROR;
            }
 

            buffer = gst_sample_get_buffer (sample);		
 
            if(!buffer){
                g_print ("gst_sample_get_buffer fail\n");
                gst_sample_unref (sample);
                return GST_FLOW_ERROR;
            }        
            GstMapInfo map;
            ROS_INFO("nvbufsurface start!");
            // NvBufSurface *surface = NULL;
            // memset (&map, 0, sizeof (map));

            if (gst_buffer_map (buffer, &map, GST_MAP_READ)) {
                g_print ("mapped frame size = %ld \n", map.size);
                // surface = (NvBufSurface *) map.data;
                // if (!surface) {
                //     ROS_INFO("surface null!");
                // }
                // cv::Mat src(cv::Size(surface->surfaceList[0].width, surface->surfaceList[0].height), CV_8UC3);
                cv::Mat src(cv::Size(width, height), CV_8UC3, (char *) map.data, cv::Mat::AUTO_STEP);
                buffer1.pushFrame(src);
                // cv::Mat in_mat =
                //     cv::Mat (surface->surfaceList[0].height, surface->surfaceList[0].width,
                //              CV_8UC4, surface->surfaceList[0].mappedAddr.addr[0],
                //              surface->surfaceList[0].pitch);
                ROS_INFO("in_mat finish!");
                // cv::cvtColor(in_mat, src, cv::COLOR_RGBA2BGR);
                // cv::cvtColor(src, src, cv::COLOR_RGBA2BGR);
                ROS_INFO("get src!");
                cv::Mat frame = buffer1.popFrame();
                msg = cv_bridge::CvImage(img_header, // std_msgs::Header()
                                         "bgr8",
                                         frame).toImageMsg();
                Image_Pub.publish(msg);
                ROS_INFO("callback finish");
                gst_buffer_unmap (buffer, &map);    // Unmap the buffer
            }
            auto end = std::chrono::high_resolution_clock::now();
            std::chrono::duration<double> elapsed = end - start;
            std::cout << "Callback runtime: " << elapsed.count() << " seconds" << std::endl;
            gst_sample_unref (sample);    // Release resources
            return GST_FLOW_OK;
        }
    return GST_FLOW_OK;
}
```
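One source of per-frame overhead in the callback above is that the `ros::NodeHandle`, `image_transport::ImageTransport`, and publisher are re-created on every sample. A minimal sketch of setting them up once instead (assuming the file-scope `it` and `Image_Pub` the callback already assigns to; the `init_image_publisher` helper is hypothetical):

```cpp
// Sketch: declare the publisher objects at file scope and initialize them once
// (e.g. from main() after ros::init()); the callback then only publishes.
image_transport::ImageTransport *it = nullptr;
image_transport::Publisher Image_Pub;

static void init_image_publisher () {
    ros::NodeHandle nh;                             // ImageTransport keeps its own copy
    it = new image_transport::ImageTransport (nh);  // created once for the process
    Image_Pub = it->advertise ("cpp_cv_img_pub", 1);
}
```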

```cpp
#define CAPS "video/x-raw,width=1920,height=1080,format=BGR,framerate=30/1"  // Set the video format for appsink output

int main(int argc, char *argv[]) {
    ros::init(argc, argv, "cpp_demo");
    ros::AsyncSpinner spinner(0);
    spinner.start();

    /* Configure appsink */
    CustomData data;
    GstBus *bus;
    GstMessage *msg;
    GstStateChangeReturn ret;
    gboolean terminate = FALSE;

    /* Initialize GStreamer */
    gst_init (&argc, &argv);
    g_printerr ("gst_init \n");

    /* Create the elements */
    data.source = gst_element_factory_make ("rtspsrc", "source");
    g_object_set (G_OBJECT (data.source), "latency", 0, NULL);
    g_object_set (G_OBJECT (data.source), "short-header", true, NULL);
    // g_object_set (G_OBJECT (data.source), "buffer-mode", sync, NULL);
    data.depay = gst_element_factory_make ("rtph264depay", "depay");
    data.queue = gst_element_factory_make ("queue", "Queue");
    data.parse = gst_element_factory_make ("h264parse", "parse");
    data.avdec = gst_element_factory_make ("nvv4l2decoder", "decoder");
    data.conv = gst_element_factory_make ("nvvidconv", "convert");
    // if (!data.conv) {
    //     g_printerr ("conv element could not be created.\n");
    //     return -1;
    // }
    // g_object_set (data.conv, "nvbuf-memory-type", 1, NULL);

    // data.caps = gst_caps_from_string ("video/x-raw(memory:NVMM), width=1920, height=1080, format=(string)RGBA");
    // data.cap_filter_nvvidconv = gst_element_factory_make ("capsfilter", "src_cap_filter_nvvidconv");
    // if (!data.cap_filter_nvvidconv) {
    //     g_printerr ("cap_filter_nvvidconv element could not be created.\n");
    //     return -1;
    // }
    // g_object_set (G_OBJECT (data.cap_filter_nvvidconv), "caps", data.caps, NULL);
    // gst_caps_unref (data.caps);

    data.convert = gst_element_factory_make ("videoconvert", "Videoconvert");
    data.sink = gst_element_factory_make ("appsink", "sink");
    g_printerr ("gst element factory make \n");


    GstCaps *video_caps;
    gchar *video_caps_text;
    video_caps_text = g_strdup_printf (CAPS);
    video_caps = gst_caps_from_string (video_caps_text);

    if (!video_caps) {
        g_printerr ("gst_caps_from_string fail\n");
        return -1;
    }

g_object_set (data.source, "location", "rtsp://admin:zpmc123456@192.168.1.64:554/h264/main/ch1/main/av_stream", NULL);
/* Connect to the pad-added signal */
g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);

g_object_set (data.sink, "caps", video_caps, NULL);

g_object_set (data.sink, "emit-signals", true, NULL);
g_object_set (data.sink, "sync", false, NULL);
g_object_set (data.sink, "async", true, NULL);
// g_object_set (data.sink, "max-buffers", 1, NULL);
// g_object_set (data.sink, "max-lateness", 10, NULL);
    g_signal_connect (data.sink, "new-sample", G_CALLBACK (new_sample), &data);
    g_printerr ("gst signal connect \n");

    data.pipeline = gst_pipeline_new ("test-pipeline");

    if (!data.pipeline || !data.source || !data.depay || !data.queue || !data.parse || !data.avdec || !data.conv || !data.convert || !data.sink) {
        g_printerr ("Not all elements could be created.\n");
        return -1;
    }

    /* Build the pipeline. Note that we are NOT linking the source at this
     * point; rtspsrc pads are linked later in pad_added_handler. */
    gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.depay, data.queue, data.parse, data.avdec, data.conv, data.convert, data.sink, NULL);

    if (!gst_element_link_many (data.depay, data.queue, data.parse, data.avdec, data.conv, data.convert, data.sink, NULL)) {
        g_printerr ("Elements could not be linked.\n");
        gst_object_unref (data.pipeline);
        return -1;
    }
    ROS_INFO("pipeline link finish");



    /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
    bus = gst_element_get_bus (data.pipeline);
    gst_bus_add_signal_watch (bus);
    g_signal_connect (G_OBJECT (bus), "message::error", (GCallback) error_cb, &data);
    gst_object_unref (bus);



    /* Start playing */
    ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr ("Unable to set the pipeline to the playing state.\n");
        gst_object_unref (data.pipeline);
        return -1;
    }



    /* Create a GLib main loop and set it to run */
    data.main_loop = g_main_loop_new (NULL, FALSE);
    g_main_loop_run (data.main_loop);




    /* Free resources (the bus was already unreffed after adding the signal watch) */
    gst_element_set_state (data.pipeline, GST_STATE_NULL);
    gst_object_unref (data.pipeline);

    // ros::waitForShutdown();
    return 0;
}
```
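For completeness, `pad_added_handler` and `error_cb` are referenced above but not shown. A typical `pad_added_handler` for this kind of pipeline links rtspsrc's dynamically created pad to the depayloader; a sketch, assuming `CustomData` has the `depay` member used above:

```cpp
// Sketch: link rtspsrc's dynamic source pad to the depayloader's sink pad.
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
    GstPad *sink_pad = gst_element_get_static_pad (data->depay, "sink");
    if (!gst_pad_is_linked (sink_pad)) {
        if (GST_PAD_LINK_FAILED (gst_pad_link (new_pad, sink_pad)))
            g_printerr ("Failed to link rtspsrc pad to depayloader.\n");
    }
    gst_object_unref (sink_pad);
}
```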

In the above code, I used ROS to publish images.

I would be extremely grateful if you could help me.

Hello,

This topic might get better exposure in the Jetson forums. I will move it over for you.

Hi,
Please run the script to enable maximum performance mode on the system:
VPI - Vision Programming Interface: Performance Benchmark
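(The commands on that page are, as far as I know, `sudo nvpmodel -m 0` followed by `sudo jetson_clocks`.)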

And set these properties on nvv4l2decoder:

  disable-dpb         : Set to disable DPB buffer for low latency
                        flags: readable, writable
                        Boolean. Default: false
  enable-max-performance: Set to enable max performance
                        flags: readable, writable
                        Boolean. Default: false
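
In the code above, setting those on the decoder element would look like this sketch:

```cpp
// Sketch: enable nvv4l2decoder's low-latency options (properties listed above).
g_object_set (G_OBJECT (data.avdec), "disable-dpb", TRUE, NULL);
g_object_set (G_OBJECT (data.avdec), "enable-max-performance", TRUE, NULL);
```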

This topic was automatically closed 14 days after the last reply. New replies are no longer allowed.