How to save images from DeepStream 360D Smart parking application?

Software environment

  1. DeepStream 5.0
  2. Linux version 5.4.0-53-generic (buildd@lgw01-amd64-046) (gcc version 7.5.0 (Ubuntu 7.5.0-3ubuntu1~18.04)) #59~18.04.1-Ubuntu

New requirements

Question

I am very interested in the DeepStream 360D Smart Parking application, so I have reproduced the project. Now I want to make some changes to it.

  1. I have also reproduced deepstream_image_meta_test. I want to save the images of the objects detected by the PGIE in the smart parking application, using the approach deepstream_image_meta_test demonstrates, but I don't know how to do it!
  2. I made some attempts, but I get a corrupted JPEG like this: img.jpg. The car crops are not saved as planned. Here is my code below; please show me how to do it. Thank you very much!

PROBE

    // NVGSTDS_ELEM_ADD_PROBE(pipeline->common_elements.primary_bbox_buffer_probe_id,
    //                        pipeline->common_elements.primary_gie_bin.bin, "src",
    //                        gie_primary_processing_done_buf_prob, GST_PAD_PROBE_TYPE_BUFFER,
    //                        pipeline->common_elements.appCtx);
    // Instead, I register a probe myself:
    GstPad *pgie_src_pad = gst_element_get_static_pad(pipeline->common_elements.primary_gie_bin.bin, "src");
    gst_pad_add_probe(pgie_src_pad, GST_PAD_PROBE_TYPE_BUFFER, jpeg_stream_process,
                      (gpointer)pipeline->common_elements.appCtx->obj_ctx_handle, NULL);
    gst_object_unref(pgie_src_pad);
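
One caveat with this registration: gst_element_get_static_pad() returns NULL if the bin does not expose a "src" ghost pad at this point. A slightly more defensive sketch (the NULL check is the only change from the code above):

    GstPad *pgie_src_pad =
        gst_element_get_static_pad(pipeline->common_elements.primary_gie_bin.bin, "src");
    if (!pgie_src_pad)
    {
      NVGSTDS_ERR_MSG_V("could not get src pad of primary_gie_bin");
    }
    else
    {
      gst_pad_add_probe(pgie_src_pad, GST_PAD_PROBE_TYPE_BUFFER, jpeg_stream_process,
                        (gpointer)pipeline->common_elements.appCtx->obj_ctx_handle, NULL);
      gst_object_unref(pgie_src_pad);
    }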

jpeg_stream_process():

This is essentially the same as pgie_src_pad_buffer_probe in deepstream_image_meta_test.

static GstPadProbeReturn
jpeg_stream_process(GstPad *pad, GstPadProbeInfo *info, gpointer ctx)
{
  GstBuffer *buf = (GstBuffer *)info->data;
  GstMapInfo inmap = GST_MAP_INFO_INIT;
  if (!gst_buffer_map(buf, &inmap, GST_MAP_READ))
  {
    NVGSTDS_ERR_MSG_V("input buffer mapinfo failed");
    return GST_PAD_PROBE_DROP;
  }
  NvBufSurface *ip_surf = (NvBufSurface *)inmap.data;
  gst_buffer_unmap(buf, &inmap);

  NvDsObjectMeta *obj_meta = NULL;
  guint vehicle_count = 0;
  guint person_count = 0;
  NvDsMetaList *l_frame = NULL;
  NvDsMetaList *l_obj = NULL;
  NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta(buf);

  for (l_frame = batch_meta->frame_meta_list; l_frame != NULL; l_frame = l_frame->next)
  {
    NvDsFrameMeta *frame_meta = (NvDsFrameMeta *)(l_frame->data);
    guint num_rects = 0;
    for (l_obj = frame_meta->obj_meta_list; l_obj != NULL; l_obj = l_obj->next)
    {
      obj_meta = (NvDsObjectMeta *)(l_obj->data);

      if (obj_meta->class_id == 0)
      {
        vehicle_count++;
        num_rects++;
        NVGSTDS_INFO_MSG_V("info: class_id=%d class label=%s confidence=%f", obj_meta->class_id, obj_meta->obj_label, obj_meta->confidence);
      }
      if (obj_meta->class_id == 1)
      {
        person_count++;
        num_rects++;
      }
      /* Condition that selects which detected objects to encode. Here, only
       * vehicles (class_id == 0) are encoded.
       */
      if (obj_meta->class_id == 0)
      {
        NvDsObjEncUsrArgs userData = {0};
        /* To be set by user */
        userData.saveImg = save_img;
        userData.attachUsrMeta = attach_user_meta;
        /* Preset */
        userData.objNum = num_rects;
        /*Main Function Call */
        nvds_obj_enc_process((NvDsObjEncCtxHandle)ctx, &userData, ip_surf, obj_meta, frame_meta);
      }
    }
  }
  nvds_obj_enc_finish((NvDsObjEncCtxHandle)ctx);
  return GST_PAD_PROBE_OK;
}
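
Two notes on this function. First, save_img and attach_user_meta are not defined in the snippet; in deepstream_image_meta_test they are globals parsed from the command line. Second, when attachUsrMeta is set, the encoded JPEG is attached to the object as NVDS_CROP_IMAGE_META user meta and is written to disk in a later probe in that sample. A minimal sketch of both, assuming TRUE for the two flags and a hypothetical filename parameter:

    /* Requires <stdio.h>, "gstnvdsmeta.h" and "nvds_obj_encode.h". */

    /* Stand-ins for the globals referenced above; in deepstream_image_meta_test
     * they are parsed from argv. TRUE/TRUE here is an assumption. */
    static gboolean save_img = TRUE;         /* nvds_obj_enc_process() writes the .jpg itself */
    static gboolean attach_user_meta = TRUE; /* JPEG is also attached as object user meta */

    /* Sketch of writing the attached JPEG downstream, modeled on
     * osd_sink_pad_buffer_probe in deepstream_image_meta_test.
     * `filename` is a hypothetical parameter for illustration. */
    static void
    write_attached_jpegs(NvDsObjectMeta *obj_meta, const char *filename)
    {
      NvDsUserMetaList *l;
      for (l = obj_meta->obj_user_meta_list; l != NULL; l = l->next)
      {
        NvDsUserMeta *user_meta = (NvDsUserMeta *)l->data;
        if (user_meta->base_meta.meta_type == NVDS_CROP_IMAGE_META)
        {
          NvDsObjEncOutParams *enc = (NvDsObjEncOutParams *)user_meta->user_meta_data;
          FILE *fp = fopen(filename, "wb");
          if (fp)
          {
            fwrite(enc->outBuffer, sizeof(uint8_t), enc->outLen, fp);
            fclose(fp);
          }
        }
      }
    }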

obj_ctx_handle


gboolean
create_pipeline(AppCtx *appCtx,
                bbox_generated_callback bbox_generated_post_analytics_cb,
                bbox_generated_callback all_bbox_generated_cb,
                perf_callback perf_cb,
                overlay_graphics_callback overlay_graphics_cb)
{
  gboolean ret = FALSE;
  NvDsPipeline *pipeline = &appCtx->pipeline;
  NvDsConfig *config = &appCtx->config;
  GstBus *bus;
  GstElement *last_elem;
  GstElement *tmp_elem1;
  GstElement *tmp_elem2;
  guint i;
  GstPad *fps_pad;
  gulong latency_probe_id;

  _dsmeta_quark = g_quark_from_static_string(NVDS_META_STRING);

  appCtx->all_bbox_generated_cb = all_bbox_generated_cb;
  appCtx->bbox_generated_post_analytics_cb = bbox_generated_post_analytics_cb;
  appCtx->overlay_graphics_cb = overlay_graphics_cb;

/* Here I create my obj_ctx_handle: */
  NvDsObjEncCtxHandle obj_ctx_handle = nvds_obj_enc_create_context();
  if (!obj_ctx_handle)
  {
    NVGSTDS_ERR_MSG_V("erro");
    goto done;
  }
  appCtx->obj_ctx_handle = obj_ctx_handle;

  // ... the rest of create_pipeline() is omitted here
}
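
The context created here should also be released at teardown with nvds_obj_enc_destroy_context(); a minimal sketch (placing it in destroy_pipeline() is an assumption):

    /* In destroy_pipeline() (placement is an assumption), release the
     * object-encoder context created in create_pipeline(). */
    if (appCtx->obj_ctx_handle)
    {
      nvds_obj_enc_destroy_context(appCtx->obj_ctx_handle);
      appCtx->obj_ctx_handle = NULL;
    }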

obj_ctx_handle definition

struct _AppCtx
{
  gboolean version;
  gboolean cintr;
  gboolean show_bbox_text;
  gboolean seeking;
  gboolean quit;
  gint person_class_id;
  gint car_class_id;
  gint return_value;
  guint index;

  GMutex app_lock;
  GCond app_cond;

  NvDsPipeline pipeline;
  NvDsConfig config;
  NvDsInstanceData instance_data[MAX_SOURCE_BINS];
  NvDsAppPerfStructInt perf_struct;
  bbox_generated_callback bbox_generated_post_analytics_cb;
  bbox_generated_callback all_bbox_generated_cb;
  overlay_graphics_callback overlay_graphics_cb;
  NvDsFrameLatencyInfo *latency_info;
  GMutex latency_lock;

  // This is my new addition:
  NvDsObjEncCtxHandle obj_ctx_handle;
};
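
Since _AppCtx now holds an NvDsObjEncCtxHandle, the header defining the struct needs the object-encoder include (the header name matches the one used by deepstream_image_meta_test):

    /* Near the top of deepstream_app.h, with the other includes: */
    #include "nvds_obj_encode.h"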

Please refer to the API guide: NVIDIA DeepStream SDK API Reference: Main Page