Hello NVIDIA Engineers,
I am trying to use the GStreamer video conversion plugin nvvidconv to convert 1920x1080 YUV422 (8-bit) frames from five cameras into RGBA format. However, the conversions do not appear to run in parallel: when I check the results, the timestamps of corresponding frames from the five cameras are spaced about 4-5 milliseconds apart.
Is it possible to perform this frame format conversion for five streams in parallel? If so, could you provide guidance on how to achieve this with the Jetson AGX Orin?
Thank you!
code:
#include <signal.h>

#include <algorithm>
#include <string>

#include <gst/gst.h>
#include <gst/app/gstappsink.h>

#include <opencv2/opencv.hpp>

#include <cv_bridge/cv_bridge.h>
#include <image_transport/image_transport.h>
#include <ros/ros.h>
#include <sensor_msgs/Image.h>
const char *video_dev;
GstFlowReturn new_sample_callback(GstAppSink *appsink, gpointer user_data) {
auto t = ros::Time::now(); //Get system timestamp
GstSample *sample = gst_app_sink_pull_sample(appsink);
if (sample) {
GstBuffer *buffer = gst_sample_get_buffer(sample);
GstMapInfo map;
if(!gst_buffer_map(buffer, &map, GST_MAP_READ)) {
g_printerr("%s: %s: %d: Failed to map buffer.\n", __FILE__, __FUNCTION__, __LINE__);
}
cv::Mat rgba_frame(1080, 1920, CV_8UC4, map.data);
sensor_msgs::ImagePtr msg = cv_bridge::CvImage(std_msgs::Header(), "rgba8", rgba_frame).toImageMsg();
msg->header.stamp = t;
auto pub = (ros::Publisher *)user_data;
pub->publish(msg);
gst_buffer_unmap(buffer, &map);
gst_sample_unref(sample);
}
return GST_FLOW_OK;
}
int main(int argc, char *argv[]) {
std::string node_name = std::string("camera_sdk_") + argv[1];
std::replace(node_name.begin(), node_name.end(), '/', '_');
ros::init(argc, argv, node_name);
ros::NodeHandle nh;
ros::Publisher pub = nh.advertise<sensor_msgs::Image>(argv[2], 1);
gst_init(&argc, &argv);
//auto video_dev = "/dev/video6";
video_dev = argv[1];
GstElement *pipeline = gst_pipeline_new("pipeline");
GstElement *source = gst_element_factory_make("v4l2src", "source");
GstElement *filter1 = gst_element_factory_make("capsfilter", "filter1");
GstElement *nvconv = gst_element_factory_make("nvvidconv", "nvconv");
GstElement *filter2 = gst_element_factory_make("capsfilter", "filter2");
GstElement *sink = gst_element_factory_make("appsink", "sink");
if (!pipeline || !source || !filter1 || !nvconv || !filter2 || !sink) {
g_printerr("%s: %s: %d: Failed to create elements\n", __FILE__, __FUNCTION__, __LINE__);
return -1;
}
g_object_set(G_OBJECT(source), "device", video_dev, NULL);
g_print("Bind device: %s\n", video_dev);
GstCaps *caps1 = gst_caps_new_simple("video/x-raw",
"width", G_TYPE_INT, 1920,
"height", G_TYPE_INT, 1080,
"format", G_TYPE_STRING, "UYVY",
"framerate", GST_TYPE_FRACTION, 30, 1,
NULL);
g_object_set(G_OBJECT(filter1), "caps", caps1, NULL);
gst_caps_unref(caps1);
GstCaps *caps2 = gst_caps_new_simple("video/x-raw",
"format", G_TYPE_STRING, "RGBA",
"width", G_TYPE_INT, 1920,
"height", G_TYPE_INT, 1080,
NULL);
g_object_set(G_OBJECT(filter2), "caps", caps2, NULL);
gst_caps_unref(caps2);
gst_bin_add_many(GST_BIN(pipeline), source, filter1, nvconv, filter2, sink, NULL);
if (!gst_element_link_many(source, filter1, nvconv, filter2, sink, NULL)) {
g_printerr("%s: %s: %d: Elements could not be linked.\n", __FILE__, __FUNCTION__, __LINE__);
gst_object_unref(pipeline);
return -1;
}
GstAppSinkCallbacks callbacks = {NULL, NULL, new_sample_callback};
gst_app_sink_set_callbacks(GST_APP_SINK(sink), &callbacks, (gpointer)&pub, NULL);
GstBus *bus = gst_element_get_bus(pipeline);
bool pipe_start = false;
while (ros::ok()) {
GstMessage *msg = gst_bus_timed_pop_filtered(
bus, 1 * GST_MSECOND,
(GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE(msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error(msg, &err, &debug_info);
g_printerr("%s: %s: %d: Error: %s.\n", __FILE__, __FUNCTION__, __LINE__, err->message);
g_clear_error(&err);
g_free(debug_info);
break;
case GST_MESSAGE_EOS:
g_print("End of stream.\n");
break;
default:
break;
}
gst_message_unref(msg);
}
if (pipe_start == false && pub.getNumSubscribers() > 0) {
pipe_start = true;
auto ret = gst_element_set_state(pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("%s: %s: %d: Failed to start the pipeline.\n", __FILE__, __FUNCTION__, __LINE__);
gst_object_unref(pipeline);
return -1;
}
}
else if (pipe_start && pub.getNumSubscribers() < 1) {
pipe_start = false;
auto ret = gst_element_set_state(pipeline, GST_STATE_PAUSED);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("%s: %s: %d: Failed to pause the pipeline.\n", __FILE__, __FUNCTION__, __LINE__);
gst_object_unref(pipeline);
return -1;
}
}
ros::spinOnce();
}
gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(pipeline);
gst_object_unref(bus);
g_print("Program exit.\n");
return 0;
}
Running results:


