Hi, I am using the Jetson nvv4l2h264enc element to encode cv::Mat images (in BGR format) and push them to an RTMP server. The stream itself works, but the properties I set on nvv4l2h264enc do not seem to take effect.
For example, when I set "profile" to "high", the RTMP stream I receive is still "baseline", and the "qp" and "gop" related parameters are ignored as well.
Any help would be appreciated.
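In case it is useful, here is a sketch of the kind of check I mean by "the stream is still baseline" (dumping the negotiated caps after h264parse once the pipeline is playing; this is an illustration, not code from my application):
// Illustration only: after the pipeline reaches PLAYING, the negotiated caps
// on h264parse's src pad should show the profile that is actually in the stream.
GstPad *parse_src = gst_element_get_static_pad(h264parse, "src");
GstCaps *outcaps = gst_pad_get_current_caps(parse_src);
if (outcaps != NULL)
{
    gchar *desc = gst_caps_to_string(outcaps);
    g_print("h264parse output caps: %s\n", desc); // contains profile=... level=...
    g_free(desc);
    gst_caps_unref(outcaps);
}
gst_object_unref(parse_src);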
The code is as follows:
GstElement *pipeline, *appsrc, *nvvidconv, *nvv4l2h264enc, *flvmux, *rtmpsink, *h264parse;
gst_init(nullptr, nullptr);
pipeline = gst_pipeline_new("appsrc-example");
appsrc = gst_element_factory_make("appsrc", "appsrc");
nvvidconv = gst_element_factory_make("nvvidconv", "nvvidconv");
nvv4l2h264enc = gst_element_factory_make("nvv4l2h264enc", "nvv4l2h264enc");
h264parse = gst_element_factory_make("h264parse", "h264parse");
flvmux = gst_element_factory_make("flvmux", "flvmux");
rtmpsink = gst_element_factory_make("rtmpsink", "rtmpsink");
GstCaps *caps;
caps = gst_caps_new_simple("video/x-raw",
                           "format", G_TYPE_STRING, "I420",
                           "width", G_TYPE_INT, 1920,
                           "height", G_TYPE_INT, 1080,
                           "framerate", GST_TYPE_FRACTION, 30, 1,
                           NULL);
g_object_set(appsrc,
             "caps", caps,
             "format", GST_FORMAT_TIME,
             "is-live", TRUE,
             "do-timestamp", TRUE,
             "max-bytes", (guint64)300000, // max-bytes is a guint64 property
             NULL);
g_object_set(nvvidconv, "gpu-id", 0, NULL);
g_object_set(rtmpsink, "location", rtmp_location.c_str(), NULL);
// These are the encoder settings that do not seem to take effect:
g_object_set(nvv4l2h264enc,
             "gpu-id", 0,
             "speed-preset", "ultrafast",
             "insert-sps-pps", TRUE,
             "key-int-max", 10,
             "tune", "zerolatency",
             "maxperf-enable", TRUE,
             "profile", "baseline",
             NULL);
g_object_set(flvmux, "streamable", TRUE, NULL);
gst_bin_add_many(GST_BIN(pipeline), appsrc, nvvidconv, nvv4l2h264enc, h264parse, flvmux, rtmpsink, NULL);
if (!gst_element_link_many(appsrc, nvvidconv, nvv4l2h264enc, h264parse, flvmux, rtmpsink, NULL))
{
    gst_object_unref(GST_OBJECT(pipeline));
    return;
}
gst_element_set_state(pipeline, GST_STATE_PLAYING);
GstClockTime timestamp = 0;
GstBuffer *buffer;
GstFlowReturn ret;
const int fps = 30; // frame rate used for the buffer duration below
while (1)
{
    // frame_copy is a cv::Mat image (in BGR format)
    cv::cvtColor(frame_copy, frame_copy, cv::COLOR_BGR2YUV_I420);
    buffer = gst_buffer_new_allocate(NULL, frame_copy.total() * frame_copy.elemSize(), NULL);
    gst_buffer_fill(buffer, 0, frame_copy.data, frame_copy.total() * frame_copy.elemSize());
    GST_BUFFER_PTS(buffer) = timestamp;
    GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale_int(1, GST_SECOND, fps);
    timestamp += GST_BUFFER_DURATION(buffer);
    g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret);
    gst_buffer_unref(buffer); // push-buffer takes its own ref, so drop ours
    g_usleep(1000000 / 30); // fps is 30
}
// Cleanup (never reached while the loop above runs forever; 'bus' comes from code not shown here)
gst_caps_unref(caps);
gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(GST_OBJECT(pipeline));
gst_object_unref(GST_OBJECT(bus));
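To check whether the values are even being applied, I am thinking of reading them back right after the g_object_set() call on the encoder; a quick sketch of what I mean (not in the code above yet):
// Sketch: read the encoder properties back to see what actually got applied.
// If a property name is wrong, GLib should also print a "has no property" warning.
gint applied_profile = -1;
gboolean applied_sps_pps = FALSE;
g_object_get(G_OBJECT(nvv4l2h264enc),
             "profile", &applied_profile,
             "insert-sps-pps", &applied_sps_pps,
             NULL);
g_print("profile=%d insert-sps-pps=%d\n", applied_profile, applied_sps_pps);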
I'm wondering whether the problem is the image format (BGR converted to I420), whether the fps/timestamp handling is wrong, or whether there is simply no way to set nvv4l2h264enc properties when the input comes from appsrc.
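In case I am simply using the wrong property names (or setting enum properties with strings incorrectly), this is what I plan to try next. The names and values below are my assumptions based on the gst-inspect-1.0 nvv4l2h264enc output, so please correct me if any of them are wrong:
// Assumption: "profile" is an enum, so set it via gst_util_set_object_arg(),
// which parses enum values from strings; 4 should correspond to High if I read
// the gst-inspect output correctly.
gst_util_set_object_arg(G_OBJECT(nvv4l2h264enc), "profile", "4");
// Assumption: GOP length and QP are controlled by "iframeinterval" and "qp-range"
// on this encoder, not by x264enc-style "key-int-max" / "qp" properties.
g_object_set(nvv4l2h264enc,
             "iframeinterval", 10,
             "insert-sps-pps", TRUE,
             "maxperf-enable", TRUE,
             "bitrate", 4000000, // bits per second, example value
             NULL);
g_object_set(nvv4l2h264enc, "qp-range", "20,35:20,35:20,35", NULL); // format taken from gst-inspect, unverified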