Thank you very much. That example cannot do what I need. In the example, the call:
"nvds_add_display_meta_to_frame(frame_meta, display_meta);"
is used to draw the lines on the display.
However, I want to save the full frame, with the drawn lines, as a ".jpg" file inside the function
"gst_fenceplugin_transform_ip".
I tried to do it the same way, but it failed.
My code is below:
/**
 * my_save_mat:
 * Scales/converts one surface of the batched input buffer into a freshly
 * allocated RGBA NvBufSurface, copies it to the CPU, converts it to BGR with
 * OpenCV and writes it out as "fenceplugin_save.jpg".
 *
 * @fenceplugin       owning element (gpu_id / cuda_stream are read from it).
 * @input_buf         batched input NvBufSurface from the incoming GstBuffer.
 * @idx               index of the surface (frame) within the batch to save.
 * @crop_rect_params  only validated for non-zero dimensions; the transform
 *                    itself currently uses the hard-coded full-frame rect.
 * @ratio             out-param: scaling ratio (always 1.0 here since the
 *                    destination size equals the source size).
 * @input_width/@input_height  currently unused — kept for interface
 *                    compatibility with existing callers.
 *
 * Returns GST_FLOW_OK on success, GST_FLOW_ERROR on any failure.
 * All resources (map + surface) are released on every exit path.
 */
static GstFlowReturn
my_save_mat (GstFencePlugin * fenceplugin, NvBufSurface *input_buf, gint idx,
    NvOSD_RectParams * crop_rect_params, gdouble & ratio, gint input_width,
    gint input_height)
{
  GstFlowReturn flow_ret = GST_FLOW_ERROR;
  NvBufSurfTransform_Error err;
  NvBufSurfTransformConfigParams transform_config_params;
  NvBufSurfTransformParams transform_params;
  NvBufSurfTransformRect src_rect;
  NvBufSurfTransformRect dst_rect;
  NvBufSurface ip_surf;
  NvBufSurface *nvbuf = NULL;          /* destroyed in cleanup on all paths */
  NvBufSurfaceCreateParams create_params;
  cv::Mat in_mat, out_mat;
  gboolean buf_mapped = FALSE;         /* tracks whether nvbuf needs UnMap  */
  gint src_left = 0;
  gint src_top = 0;
  /* NOTE(review): hard-coded full-frame size — presumably matches the
   * streammux output resolution; confirm against the pipeline config. */
  gint src_width = 1920;
  gint src_height = 1080;
  guint dest_width = (guint) src_width;
  guint dest_height = (guint) src_height;

  /* Make a single-surface view onto the batched buffer, selecting idx. */
  ip_surf = *input_buf;
  ip_surf.numFilled = ip_surf.batchSize = 1;
  ip_surf.surfaceList = &(input_buf->surfaceList[idx]);

  /* Zero-init so fields not set below are defined. */
  memset (&create_params, 0, sizeof (create_params));
  create_params.gpuId = fenceplugin->gpu_id;
  create_params.width = dest_width;
  create_params.height = dest_height;
  create_params.size = 0;
  create_params.colorFormat = NVBUF_COLOR_FORMAT_RGBA;
  create_params.layout = NVBUF_LAYOUT_PITCH;
#ifdef aarch64
  create_params.memType = NVBUF_MEM_DEFAULT;
#else
  create_params.memType = NVBUF_MEM_CUDA_UNIFIED;
#endif

  /* Bug fix: the create result was previously ignored. */
  if (NvBufSurfaceCreate (&nvbuf, 1, &create_params) != 0) {
    GST_ELEMENT_ERROR (fenceplugin, STREAM, FAILED,
        ("NvBufSurfaceCreate failed"), (NULL));
    goto error;
  }

  transform_config_params.compute_mode = NvBufSurfTransformCompute_Default;
  transform_config_params.gpu_id = fenceplugin->gpu_id;
  transform_config_params.cuda_stream = fenceplugin->cuda_stream;
  err = NvBufSurfTransformSetSessionParams (&transform_config_params);
  if (err != NvBufSurfTransformError_Success) {
    GST_ELEMENT_ERROR (fenceplugin, STREAM, FAILED,
        ("NvBufSurfTransformSetSessionParams failed with error %d", err),
        (NULL));
    goto error;
  }

  /* Scaling ratio while maintaining aspect ratio (1.0: dest == src here). */
  ratio = MIN (1.0 * dest_width / src_width, 1.0 * dest_height / src_height);

  if ((crop_rect_params->width == 0) || (crop_rect_params->height == 0)) {
    /* Bug fix: the paste lost the underscores of __func__. */
    GST_ELEMENT_ERROR (fenceplugin, STREAM, FAILED,
        ("%s: crop_rect_params dimensions are zero", __func__), (NULL));
    goto error;
  }

#ifdef aarch64
  if (ratio <= 1.0 / 16 || ratio >= 16.0) {
    /* Jetson VIC cannot scale by ratio > 16 or < 1/16. */
    goto error;
  }
#endif

  /* NvBufSurfTransformRect field order is {top, left, width, height}. */
  src_rect = { (guint) src_top, (guint) src_left,
      (guint) src_width, (guint) src_height };
  dst_rect = { 0, 0, dest_width, dest_height };

  transform_params.src_rect = &src_rect;
  transform_params.dst_rect = &dst_rect;
  transform_params.transform_flag = NVBUFSURF_TRANSFORM_FILTER |
      NVBUFSURF_TRANSFORM_CROP_SRC | NVBUFSURF_TRANSFORM_CROP_DST;
  transform_params.transform_filter = NvBufSurfTransformInter_Default;

  /* Clear the destination so padding outside dst_rect is black. */
  NvBufSurfaceMemSet (nvbuf, 0, 0, 0);

  GST_DEBUG_OBJECT (fenceplugin, "Scaling and converting input buffer\n");
  err = NvBufSurfTransform (&ip_surf, nvbuf, &transform_params);
  if (err != NvBufSurfTransformError_Success) {
    GST_ELEMENT_ERROR (fenceplugin, STREAM, FAILED,
        ("NvBufSurfTransform failed with error %d while converting buffer",
            err), (NULL));
    goto error;
  }

  /* Map for CPU access. Read-only is sufficient: we never write back to the
   * surface, which also makes the old SyncForDevice call unnecessary. */
  if (NvBufSurfaceMap (nvbuf, 0, 0, NVBUF_MAP_READ) != 0) {
    goto error;
  }
  buf_mapped = TRUE;
  NvBufSurfaceSyncForCpu (nvbuf, 0, 0);

  /* Wrap the mapped pitch-linear RGBA plane without copying. */
  in_mat = cv::Mat (dest_height, dest_width, CV_8UC4,
      nvbuf->surfaceList[0].mappedAddr.addr[0],
      nvbuf->surfaceList[0].pitch);
  cv::cvtColor (in_mat, out_mat, cv::COLOR_RGBA2BGR);

  /* Bug fix: imwrite can fail (bad path / codec); report it. */
  if (!cv::imwrite ("fenceplugin_save.jpg", out_mat)) {
    GST_ELEMENT_ERROR (fenceplugin, STREAM, FAILED,
        ("cv::imwrite failed for fenceplugin_save.jpg"), (NULL));
    goto error;
  }

#ifdef aarch64
  if (USE_EGLIMAGE) {
    /* NOTE(review): original code referenced dsexample->inter_buf (an
     * undefined identifier copied from the dsexample plugin); corrected to
     * this element's inter_buf. Verify this EGL round-trip is still needed. */
    if (NvBufSurfaceMapEglImage (fenceplugin->inter_buf, 0) != 0) {
      goto error;
    }
    NvBufSurfaceUnMapEglImage (fenceplugin->inter_buf, 0);
  }
#endif

  flow_ret = GST_FLOW_OK;

error:
  /* Bug fix: the original leaked nvbuf (and its CPU mapping) on every
   * error path; release everything exactly once here. */
  if (buf_mapped)
    NvBufSurfaceUnMap (nvbuf, 0, 0);
  if (nvbuf)
    NvBufSurfaceDestroy (nvbuf);
  return flow_ret;
}
/**
 * Called when the element receives an input buffer from the upstream element.
 */
/**
 * gst_fenceplugin_transform_ip:
 * In-place transform callback. For every frame in the batch it queries the
 * per-camera fence configuration, processes the frame through the fence
 * library, draws the configured areas, and saves the annotated full frame
 * via my_save_mat().
 *
 * Returns GST_FLOW_OK on success; GST_FLOW_ERROR on any failure. The input
 * buffer mapping and the output timestamp are handled on all exit paths.
 */
static GstFlowReturn
gst_fenceplugin_transform_ip (GstBaseTransform * btrans, GstBuffer * inbuf)
{
  GstFencePlugin *fenceplugin = GST_FENCEPLUGIN (btrans);
  GstMapInfo in_map_info;
  GstFlowReturn flow_ret = GST_FLOW_ERROR;
  gdouble scale_ratio = 1.0;
  /* Bug fix: was uninitialized yet passed to attach_metadata_object()
   * before FencePluginProcess() ever assigned it — see NOTE below. */
  FencePluginOutput *output = NULL;
  NvBufSurface *surface = NULL;
  NvDsBatchMeta *batch_meta = NULL;
  NvDsFrameMeta *frame_meta = NULL;
  NvDsMetaList *l_frame = NULL;
  NvDsMetaList *l_obj = NULL;
  NvDsObjectMeta *obj_meta = NULL;
  SQLDataInfo SqlData = { 0, 0, { 0 }, { 0 } };
  guint i = 0;

  fenceplugin->frame_num++;
  CHECK_CUDA_STATUS (cudaSetDevice (fenceplugin->gpu_id),
      "Unable to set cuda device");

  memset (&in_map_info, 0, sizeof (in_map_info));
  if (!gst_buffer_map (inbuf, &in_map_info, GST_MAP_READ)) {
    g_print ("Error: Failed to map gst buffer\n");
    goto error;
  }

  nvds_set_input_system_timestamp (inbuf, GST_ELEMENT_NAME (fenceplugin));
  surface = (NvBufSurface *) in_map_info.data;
  GST_DEBUG_OBJECT (fenceplugin,
      "Processing Frame %" G_GUINT64_FORMAT " Surface %p\n",
      fenceplugin->frame_num, surface);

  if (CHECK_NVDS_MEMORY_AND_GPUID (fenceplugin, surface))
    goto error;

  batch_meta = gst_buffer_get_nvds_batch_meta (inbuf);
  if (batch_meta == nullptr) {
    GST_ELEMENT_ERROR (fenceplugin, STREAM, FAILED,
        ("NvDsBatchMeta not found for input buffer."), (NULL));
    /* Bug fix: was a bare `return GST_FLOW_ERROR`, which skipped
     * gst_buffer_unmap() and the output timestamp — buffer leak. */
    goto error;
  }

  for (l_frame = batch_meta->frame_meta_list; l_frame != NULL;
      l_frame = l_frame->next) {
    frame_meta = (NvDsFrameMeta *) (l_frame->data);
    NvOSD_RectParams rect_params;

    /* Per-camera fence configuration (source_id is 0-based, DB is 1-based). */
    SqlData = Check_DATA_SQL (frame_meta->source_id + 1, CAMERA_TYPE_FENCE);

    for (l_obj = frame_meta->obj_meta_list; l_obj != NULL;
        l_obj = l_obj->next) {
      obj_meta = (NvDsObjectMeta *) (l_obj->data);
      /* NOTE(review): output is NULL here on the first frame — it is only
       * assigned by FencePluginProcess() below. Confirm whether
       * attach_metadata_object() tolerates NULL or whether this call
       * should move after FencePluginProcess(). */
      attach_metadata_object (fenceplugin, obj_meta, output);
      if (my_save_mat (fenceplugin, surface, frame_meta->batch_id,
              &obj_meta->rect_params, scale_ratio,
              fenceplugin->video_info.width,
              fenceplugin->video_info.height) != GST_FLOW_OK) {
        /* Error in conversion, skip processing on this object. */
        continue;
      }
    }

    output = FencePluginProcess (fenceplugin->fencepluginlib_ctx,
        (unsigned char *)
        fenceplugin->inter_buf->surfaceList[0].mappedAddr.addr[0]);

    DrawArea (fenceplugin, frame_meta, scale_ratio, output, i,
        SqlData.nArea, SqlData.nPoint, SqlData.PointData,
        SqlData.SummaryInfo);

    /* Full-frame rect for saving the annotated frame. */
    rect_params.left = 0;
    rect_params.top = 0;
    rect_params.width = fenceplugin->video_info.width;
    rect_params.height = fenceplugin->video_info.height;

    /* Scale, convert and save the frame. */
    if (my_save_mat (fenceplugin, surface, i, &rect_params,
            scale_ratio, fenceplugin->video_info.width,
            fenceplugin->video_info.height) != GST_FLOW_OK) {
      goto error;
    }

    i++;
    free (output);
    output = NULL;   /* avoid dangling pointer on the next iteration */
  }

  flow_ret = GST_FLOW_OK;

error:
  nvds_set_output_system_timestamp (inbuf, GST_ELEMENT_NAME (fenceplugin));
  gst_buffer_unmap (inbuf, &in_map_info);
  return flow_ret;
}