Please provide complete information as applicable to your setup.
• Hardware Platform (Jetson / GPU) GPU
• DeepStream Version 6.2
• JetPack Version (valid for Jetson only)
• TensorRT Version 8.5.2.2
• NVIDIA GPU Driver Version (valid for GPU only) 550.54.15
• Issue Type (questions, new requirements, bugs)
My pipeline is multi_src_bin → nvvideoconvert → nvstreammux → … (the rest does not matter).
The nvvideoconvert element uses src-crop to crop a part of the original stream. The issue is that after nvvideoconvert, the buffer's metadata (width, height) is not updated to the cropped frame's dimensions. For example, if the original frame is 1920x1080 and src-crop is 0:0:1920:480, I would expect the GstBuffer mapped to NvBufSurface to have a 1920x480 resolution, not 1920x1080. This matters because I want to use the new nvstreammux so that my pipeline can carry frames of different resolutions.
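For reference, the crop itself is configured on the converter roughly like this (a minimal sketch; nvvidconv_crop is the same element used in the linking function below, and the src-crop string follows the left:top:width:height format):

/* Crop the top 1920x480 strip of the 1920x1080 input. */
g_object_set (G_OBJECT (nvvidconv_crop), "src-crop", "0:0:1920:480", NULL);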
I am trying to solve this by adding a probe to the nvvideoconvert source pad and modifying NvBufSurface -> NvBufSurfaceParams there, but I'd like to keep this as a last resort because it seems like a dirty workaround.
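Roughly, the probe I have in mind looks like this (a minimal sketch only: the hard-coded 1920x480 stands in for the actual crop size, and it leaves pitch/planeParams untouched, which is part of why it feels fragile):

#include <gst/gst.h>
#include "nvbufsurface.h"

static GstPadProbeReturn
update_buffer_metadata_probe (GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
  GstBuffer *buf = GST_PAD_PROBE_INFO_BUFFER (info);
  GstMapInfo map;

  if (gst_buffer_map (buf, &map, GST_MAP_READ)) {
    /* The mapped data of a DeepStream buffer is an NvBufSurface. */
    NvBufSurface *surf = (NvBufSurface *) map.data;
    guint i;

    /* Overwrite the per-surface dimensions with the cropped size. */
    for (i = 0; i < surf->numFilled; i++) {
      surf->surfaceList[i].width = 1920;   /* assumed crop width */
      surf->surfaceList[i].height = 480;   /* assumed crop height */
    }
    gst_buffer_unmap (buf, &map);
  }
  return GST_PAD_PROBE_OK;
}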
BTW, I know about the nvdspreprocess plugin, but for certain reasons I need this setup to work.
Here is the code I have modified to insert an nvvideoconvert element before nvstreammux.
gboolean
link_element_to_streammux_sink_pad_roi (GstElement *streammux, GstElement *elem,
    GstElement *nvvidconv_crop, gint index) // with roi
{
  gboolean ret = FALSE;
  GstPad *mux_sink_pad = NULL;
  // GstPad *mux_src_pad = NULL;
  GstPad *src_pad = NULL;
  GstPad *nvvidconv_src_pad = NULL;
  GstPad *nvvidconv_sink_pad = NULL;
  gchar pad_name[16];

  nvvidconv_src_pad = gst_element_get_static_pad (nvvidconv_crop, "src");
  if (!nvvidconv_src_pad) {
    NVGSTDS_ERR_MSG_V ("Failed to get src pad from nvvideoconvert");
    goto done;
  }
  // gst_pad_add_probe (nvvidconv_src_pad, GST_PAD_PROBE_TYPE_BUFFER,
  //     (GstPadProbeCallback) update_buffer_metadata_probe, NULL, NULL);

  nvvidconv_sink_pad = gst_element_get_static_pad (nvvidconv_crop, "sink");
  if (!nvvidconv_sink_pad) {
    NVGSTDS_ERR_MSG_V ("Failed to get sink pad from nvvideoconvert");
    goto done;
  }

  /* Request a streammux sink pad, by explicit index or by template name. */
  if (index >= 0) {
    g_snprintf (pad_name, 16, "sink_%u", index);
    pad_name[15] = '\0';
  } else {
    strcpy (pad_name, "sink_%u");
  }
  mux_sink_pad = gst_element_get_request_pad (streammux, pad_name);
  if (!mux_sink_pad) {
    NVGSTDS_ERR_MSG_V ("Failed to get sink pad from streammux");
    goto done;
  }

  // mux_src_pad = gst_element_get_static_pad (streammux, "src");
  // if (!mux_src_pad) {
  //   NVGSTDS_ERR_MSG_V ("Failed to get src pad from streammux");
  //   goto done;
  // }
  // gst_pad_add_probe (mux_src_pad, GST_PAD_PROBE_TYPE_BUFFER,
  //     (GstPadProbeCallback) update_buffer_metadata_probe, NULL, NULL);

  src_pad = gst_element_get_static_pad (elem, "src");
  if (!src_pad) {
    NVGSTDS_ERR_MSG_V ("Failed to get src pad from '%s'",
        GST_ELEMENT_NAME (elem));
    goto done;
  }

  /* Link: elem -> nvvidconv_crop -> streammux.sink_%u */
  if (gst_pad_link (src_pad, nvvidconv_sink_pad) != GST_PAD_LINK_OK) {
    NVGSTDS_ERR_MSG_V ("Nvvidconv sinkpad Failed to link '%s' and '%s'",
        GST_ELEMENT_NAME (nvvidconv_crop), GST_ELEMENT_NAME (elem));
    goto done;
  }

  if (gst_pad_link (nvvidconv_src_pad, mux_sink_pad) != GST_PAD_LINK_OK) {
    NVGSTDS_ERR_MSG_V ("Nvvidconv srcpad Failed to link '%s' and '%s'",
        GST_ELEMENT_NAME (nvvidconv_crop), GST_ELEMENT_NAME (streammux));
    goto done;
  }

  ret = TRUE;

done:
  if (mux_sink_pad)
    gst_object_unref (mux_sink_pad);
  if (src_pad)
    gst_object_unref (src_pad);
  if (nvvidconv_src_pad)
    gst_object_unref (nvvidconv_src_pad);
  if (nvvidconv_sink_pad)
    gst_object_unref (nvvidconv_sink_pad);
  return ret;
}
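For completeness, this is roughly how the function is called per source (a sketch only; pipeline, src_elem and the index are placeholders for my actual multi_src_bin setup code):

GstElement *nvvidconv_crop =
    gst_element_factory_make ("nvvideoconvert", "nvvidconv_crop_0");
g_object_set (G_OBJECT (nvvidconv_crop), "src-crop", "0:0:1920:480", NULL);
gst_bin_add (GST_BIN (pipeline), nvvidconv_crop);

if (!link_element_to_streammux_sink_pad_roi (streammux, src_elem, nvvidconv_crop, 0)) {
  NVGSTDS_ERR_MSG_V ("Failed to link source 0 to streammux through nvvideoconvert");
}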