Drawing lines in dsexample and save it with the frame

Please provide complete information as applicable to your setup.

**• Hardware Platform (Jetson / GPU):** GPU
**• DeepStream Version:** 6.1
**• JetPack Version (valid for Jetson only):**
**• TensorRT Version:** 8.4
**• NVIDIA GPU Driver Version (valid for GPU only):** 450
• Issue Type( questions, new requirements, bugs)
• How to reproduce the issue ? (This is for bugs. Including which sample app is using, the configuration files content, the command line used and other details for reproducing)
• Requirement details( This is for new requirement. Including the module name-for which plugin or for which sample application, the function description)

Dear professor:

 Thank you for helping me all the time. 
 My aim is:
 (1) draw a fence in dsexample.
 (2) save the frame as jpg, with the detected boundary box and the drawing lines.
I have implemented that with the following method:
(1) I wrote a fenceplugin by imitating dsexample. Dsexample is used to draw the fence; fenceplugin is used to save the picture as a jpg.
(2) Dsexample is inserted into the pipeline at its default location, and fenceplugin is inserted into the pipeline after the osd plugin, so I can save both the bounding boxes and the lines.

I think my method is too complex. I hope to implement those functions in one plugin. And I hope the code is in the C++, because we could use some c++ SDK.
I can change the plugin location in the pipeline.
Could you help me? Thank you very much.

Could you refer to the “Image as Metadata example”?

C/C++ Sample Apps Source Details — DeepStream 6.1 Release documentation (nvidia.com)

Thank you very much. This example can not do that. In this example, the code:
" nvds_add_display_meta_to_frame(frame_meta, display_meta);"
is used to show the lines in the display.

However, I hope to save the full frame with the drawn lines as a “.jpg” in function of
“gst_fenceplugin_transform_ip”

I have tried the same approach, but it failed.
My code as below:

/**
 * Scale/convert one frame of a batched input surface into a temporary RGBA
 * buffer, download it to the CPU, convert it to BGR with OpenCV and write it
 * to disk as "fenceplugin_save.jpg".
 *
 * @param fenceplugin      plugin instance; supplies gpu_id and cuda_stream
 * @param input_buf        batched input NvBufSurface from upstream
 * @param idx              batch index of the frame to save
 * @param crop_rect_params object/frame rectangle; only validated for non-zero
 *                         size here (the full frame is what gets saved)
 * @param ratio            [out] scaling ratio applied (1.0 here, since the
 *                         destination has the same size as the source)
 * @param input_width      width of the input frame in pixels
 * @param input_height     height of the input frame in pixels
 * @return GST_FLOW_OK on success, GST_FLOW_ERROR on any failure.
 */
static GstFlowReturn
my_save_mat (GstFencePlugin * fenceplugin, NvBufSurface *input_buf, gint idx,
NvOSD_RectParams * crop_rect_params, gdouble & ratio, gint input_width,
gint input_height)
{
NvBufSurfTransform_Error err;
NvBufSurfTransformConfigParams transform_config_params;
NvBufSurfTransformParams transform_params;
NvBufSurfTransformRect src_rect;
NvBufSurfTransformRect dst_rect;
NvBufSurface ip_surf;
NvBufSurface *nvbuf = NULL;
cv::Mat in_mat, out_mat;

/* View a single frame of the batch as a standalone one-frame surface. */
ip_surf = *input_buf;
ip_surf.numFilled = ip_surf.batchSize = 1;
ip_surf.surfaceList = &(input_buf->surfaceList[idx]);

/* Save the full frame. Use the actual stream dimensions instead of the
 * previously hard-coded 1920x1080 so other resolutions work as well. */
gint src_left = 0;
gint src_top = 0;
gint src_width = input_width;
gint src_height = input_height;

guint dest_width = (guint) src_width;
guint dest_height = (guint) src_height;

/* Zero-initialize so fields we do not set explicitly (e.g. isContiguous)
 * do not contain garbage. */
NvBufSurfaceCreateParams create_params = {0};
create_params.gpuId = fenceplugin->gpu_id;
create_params.width = dest_width;
create_params.height = dest_height;
create_params.size = 0;
create_params.colorFormat = NVBUF_COLOR_FORMAT_RGBA;
create_params.layout = NVBUF_LAYOUT_PITCH;

/* NOTE(review): dsexample uses __aarch64__ here; confirm which macro your
 * build actually defines before relying on this branch on Jetson. */
#ifdef aarch64
create_params.memType = NVBUF_MEM_DEFAULT;
#else
create_params.memType = NVBUF_MEM_CUDA_UNIFIED;
#endif

if (NvBufSurfaceCreate (&nvbuf, 1, &create_params) != 0)
{
GST_ELEMENT_ERROR (fenceplugin, STREAM, FAILED,
("NvBufSurfaceCreate failed"), (NULL));
goto error;
}

transform_config_params.compute_mode = NvBufSurfTransformCompute_Default;
transform_config_params.gpu_id = fenceplugin->gpu_id;
transform_config_params.cuda_stream = fenceplugin->cuda_stream;
err = NvBufSurfTransformSetSessionParams (&transform_config_params);

if (err != NvBufSurfTransformError_Success)
{
GST_ELEMENT_ERROR (fenceplugin, STREAM, FAILED,
("NvBufSurfTransformSetSessionParams failed with error %d", err), (NULL));
goto error;
}

/* Calculate scaling ratio while maintaining aspect ratio (1.0 here since
 * destination size equals source size). */
ratio = MIN (1.0 * dest_width / src_width, 1.0 * dest_height / src_height);

if ((crop_rect_params->width == 0) || (crop_rect_params->height == 0))
{
GST_ELEMENT_ERROR (fenceplugin, STREAM, FAILED,
("%s:crop_rect_params dimensions are zero", __func__), (NULL));
goto error;
}

#ifdef aarch64
if (ratio <= 1.0 / 16 || ratio >= 16.0)
{
// Currently cannot scale by ratio > 16 or < 1/16 for Jetson
goto error;
}
#endif

/* Set the transform ROIs for source and destination.
 * NvBufSurfTransformRect field order is {top, left, width, height}. */
src_rect = {(guint)src_top, (guint)src_left, (guint)src_width, (guint)src_height};
dst_rect = {0, 0, dest_width, dest_height};

// Set the transform parameters
transform_params.src_rect = &src_rect;
transform_params.dst_rect = &dst_rect;
transform_params.transform_flag =
NVBUFSURF_TRANSFORM_FILTER |
NVBUFSURF_TRANSFORM_CROP_SRC |
NVBUFSURF_TRANSFORM_CROP_DST;
transform_params.transform_filter = NvBufSurfTransformInter_Default;

// Memset the destination memory before transforming into it
NvBufSurfaceMemSet (nvbuf, 0, 0, 0);

GST_DEBUG_OBJECT (fenceplugin, "Scaling and converting input buffer\n");

err = NvBufSurfTransform (&ip_surf, nvbuf, &transform_params);
if (err != NvBufSurfTransformError_Success)
{
GST_ELEMENT_ERROR (fenceplugin, STREAM, FAILED,
("NvBufSurfTransform failed with error %d while converting buffer", err),
(NULL));
goto error;
}

// Map the buffer so that it can be accessed by the CPU
if (NvBufSurfaceMap (nvbuf, 0, 0, NVBUF_MAP_READ_WRITE) != 0)
{
goto error;
}

// Make GPU writes visible to the CPU before wrapping the memory in a Mat
NvBufSurfaceSyncForCpu (nvbuf, 0, 0);

/* Wrap the mapped RGBA plane (no copy) and convert to 3-channel BGR,
 * which is what cv::imwrite expects. */
in_mat = cv::Mat (dest_height, dest_width,
CV_8UC4, nvbuf->surfaceList[0].mappedAddr.addr[0],
nvbuf->surfaceList[0].pitch);
out_mat = cv::Mat (cv::Size(dest_width, dest_height), CV_8UC3);

cv::cvtColor (in_mat, out_mat, cv::COLOR_RGBA2BGR);
NvBufSurfaceSyncForDevice (nvbuf, 0, 0);

cv::imwrite("fenceplugin_save.jpg", out_mat);

if (NvBufSurfaceUnMap (nvbuf, 0, 0))
{
goto error;
}
NvBufSurfaceDestroy (nvbuf);
nvbuf = NULL;

#ifdef aarch64
if (USE_EGLIMAGE)
{
/* NOTE(review): the original referenced dsexample->inter_buf here, which
 * belongs to a different element; fenceplugin->inter_buf is intended. */
if (NvBufSurfaceMapEglImage (fenceplugin->inter_buf, 0) != 0)
{
goto error;
}
NvBufSurfaceUnMapEglImage (fenceplugin->inter_buf, 0);
}
#endif
return GST_FLOW_OK;

error:
/* Free the intermediate surface on every failure path (it used to leak). */
if (nvbuf)
NvBufSurfaceDestroy (nvbuf);
return GST_FLOW_ERROR;
}

/**
 * Called when the element receives an input buffer from the upstream element.
 *
 * For every frame in the batch: fetches the fence configuration from the
 * database, saves a crop per detected object, runs the fence-plugin library
 * on the intermediate buffer, draws the configured area, and finally saves
 * the full frame via my_save_mat().
 *
 * @param btrans the base-transform instance (our GstFencePlugin)
 * @param inbuf  the input GstBuffer wrapping a batched NvBufSurface
 * @return GST_FLOW_OK on success, GST_FLOW_ERROR on failure.
 */
static GstFlowReturn
gst_fenceplugin_transform_ip (GstBaseTransform * btrans, GstBuffer * inbuf)
{
GstFencePlugin *fenceplugin = GST_FENCEPLUGIN (btrans);
GstMapInfo in_map_info;
GstFlowReturn flow_ret = GST_FLOW_ERROR;
gdouble scale_ratio = 1.0;
/* Initialize: the original passed this to attach_metadata_object() below
 * while it was still uninitialized. */
FencePluginOutput *output = NULL;
NvBufSurface *surface = NULL;
NvDsBatchMeta *batch_meta = NULL;
NvDsFrameMeta *frame_meta = NULL;
NvDsMetaList *l_frame = NULL;
guint i = 0;

fenceplugin->frame_num++;
CHECK_CUDA_STATUS (cudaSetDevice (fenceplugin->gpu_id), "Unable to set cuda device");

memset (&in_map_info, 0, sizeof (in_map_info));
if (!gst_buffer_map (inbuf, &in_map_info, GST_MAP_READ))
{
g_print ("Error: Failed to map gst buffer\n");
goto error;
}

nvds_set_input_system_timestamp (inbuf, GST_ELEMENT_NAME (fenceplugin));
surface = (NvBufSurface *) in_map_info.data;
GST_DEBUG_OBJECT (fenceplugin,
"Processing Frame %" G_GUINT64_FORMAT " Surface %p\n",
fenceplugin->frame_num, surface);

if (CHECK_NVDS_MEMORY_AND_GPUID (fenceplugin, surface))
goto error;

batch_meta = gst_buffer_get_nvds_batch_meta (inbuf);
if (batch_meta == nullptr)
{
GST_ELEMENT_ERROR (fenceplugin, STREAM, FAILED,
("NvDsBatchMeta not found for input buffer."), (NULL));
/* goto error (instead of returning directly) so the mapped buffer is
 * unmapped on this path too. */
goto error;
}

{
NvDsMetaList *l_obj = NULL;
NvDsObjectMeta *obj_meta = NULL;
SQLDataInfo SqlData = {0, 0, {0}, {0}};
for (l_frame = batch_meta->frame_meta_list; l_frame != NULL; l_frame = l_frame->next)
{
frame_meta = (NvDsFrameMeta *) (l_frame->data);
NvOSD_RectParams rect_params;
/* Fence/area configuration for this camera, read from the database. */
SqlData = Check_DATA_SQL (frame_meta->source_id + 1, CAMERA_TYPE_FENCE);

    for (l_obj = frame_meta->obj_meta_list; l_obj != NULL; l_obj = l_obj->next)
    {
        obj_meta = (NvDsObjectMeta *) (l_obj->data);
        /* NOTE(review): output is not produced until FencePluginProcess()
         * below; it is NULL here (previously it was uninitialized). Confirm
         * attach_metadata_object() tolerates a NULL output. */
        attach_metadata_object (fenceplugin, obj_meta, output);
        if (my_save_mat (fenceplugin,
                surface, frame_meta->batch_id, &obj_meta->rect_params,
                scale_ratio, fenceplugin->video_info.width,
                fenceplugin->video_info.height) != GST_FLOW_OK)
        {
            /* Error in conversion, skip processing on this object. */
            continue;
        }
    }

    output = FencePluginProcess (fenceplugin->fencepluginlib_ctx,
        (unsigned char *) fenceplugin->inter_buf->surfaceList[0].mappedAddr.addr[0]);
    DrawArea (fenceplugin, frame_meta, scale_ratio, output, i,
        SqlData.nArea, SqlData.nPoint, SqlData.PointData, SqlData.SummaryInfo);

    /* Scale and convert (and save) the full frame. */
    rect_params.left = 0;
    rect_params.top = 0;
    rect_params.width = fenceplugin->video_info.width;
    rect_params.height = fenceplugin->video_info.height;
    if (my_save_mat (fenceplugin, surface, i, &rect_params,
            scale_ratio, fenceplugin->video_info.width,
            fenceplugin->video_info.height) != GST_FLOW_OK)
    {
        goto error;
    }
    i++;
    free (output);
    output = NULL;   /* avoid dangling pointer in the next iteration */
}
}
flow_ret = GST_FLOW_OK;

error:
nvds_set_output_system_timestamp (inbuf, GST_ELEMENT_NAME (fenceplugin));
gst_buffer_unmap (inbuf, &in_map_info);
return flow_ret;
}

Hi @yangyi , Sorry for late response, can you let us know if it is still an issue? Thanks.

I’m closing this topic due to there is no update from you for a period, assuming this issue was resolved.
If still need the support, please open a new topic. Thanks

This topic was automatically closed 14 days after the last reply. New replies are no longer allowed.