Save cropped image from sgie

Please provide complete information as applicable to your setup.

• Hardware Platform (Jetson / GPU) : GPU
• DeepStream Version : 6.1
• Issue Type (questions, new requirements, bugs) : Question: while performing ANPR, how do I save the license plate image cropped by the SGIE detector?
Using the standard deepstream-image-meta-test, which only has a pgie, I am able to save the cropped image. However, how do I apply the same approach to ANPR, where I have a pgie, sgie1, and sgie2? I am getting a segfault.
Here is the code:

static GstPadProbeReturn
sgie_src_pad_buffer_probe(GstPad *pad, GstPadProbeInfo *info, gpointer u_data)
{
    GstBuffer *buf = (GstBuffer *)info->data;
    NvDsObjectMeta *obj_meta = NULL;
    // guint vehicle_count = 0;
    // guint person_count = 0;
    // guint lp_count = 0;
    guint num_rects = 0;
    // guint label_i = 0;
    NvDsMetaList *l_frame = NULL;
    NvDsMetaList *l_obj = NULL;
    NvDsMetaList *l_class = NULL;
    NvDsMetaList *l_label = NULL;
    NvDsDisplayMeta *display_meta = NULL;
    NvDsClassifierMeta *class_meta = NULL;
    NvDsLabelInfo *label_info = NULL;
    GstClockTime now;
    //   perf_measure *perf = (perf_measure *)(u_data);

    NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta(buf);
    int current_device = -1;
    cudaGetDevice(&current_device);
    struct cudaDeviceProp prop;
    cudaGetDeviceProperties(&prop, current_device);

    // now = g_get_monotonic_time();

    //   if (perf->pre_time == GST_CLOCK_TIME_NONE)
    //   {
    //     perf->pre_time = now;
    //     perf->total_time = GST_CLOCK_TIME_NONE;
    //   }
    //   else
    //   {
    //     if (perf->total_time == GST_CLOCK_TIME_NONE)
    //     {
    //       perf->total_time = (now - perf->pre_time);
    //     }
    //     else
    //     {
    //       perf->total_time += (now - perf->pre_time);
    //     }
    //     perf->pre_time = now;
    //     perf->count++;
    //   }

    for (l_frame = batch_meta->frame_meta_list; l_frame != NULL; l_frame = l_frame->next)
    {
        NvDsFrameMeta *frame_meta = (NvDsFrameMeta *)(l_frame->data);
        int offset = 0;
        if (!frame_meta)
        {
            continue;
        }
        for (l_obj = frame_meta->obj_meta_list; l_obj != NULL; l_obj = l_obj->next)
        {
            obj_meta = (NvDsObjectMeta *)(l_obj->data);

            if (!obj_meta)
            {
                continue;
            }

            /* Check that the object has been detected by the primary detector
             * and that the class id is that of vehicles/persons. */
            // if (obj_meta->unique_component_id == PRIMARY_DETECTOR_UID)
            // {
            //     if (obj_meta->class_id == 0 || obj_meta->class_id == 9 || obj_meta->class_id == 10 || obj_meta->class_id == 16 || obj_meta->class_id == 17)
            //     {
            //         vehicle_count++;
            //     }
            //     // if (obj_meta->class_id == PGIE_CLASS_ID_PERSON)
            //     //   person_count++;
            // }

            /* To verify  encoded metadata of cropped objects, we iterate through the
             * user metadata of each object and if a metadata of the type
             * 'NVDS_CROP_IMAGE_META' is found then we write that to a file as
             * implemented below. */
            char fileNameString[FILE_NAME_SIZE];
            const char *osd_string = "OSD";
            int obj_res_width = (int)obj_meta->rect_params.width;
            int obj_res_height = (int)obj_meta->rect_params.height;
            if (prop.integrated)
            {
                obj_res_width = GST_ROUND_DOWN_2(obj_res_width);
                obj_res_height = GST_ROUND_DOWN_2(obj_res_height);
            }

            snprintf(fileNameString, FILE_NAME_SIZE, "./output/%s_%d_%d_%d_%s_%dx%d.jpg",
                     osd_string, frame_number, frame_meta->source_id, num_rects,
                     obj_meta->obj_label, obj_res_width, obj_res_height);
            /* For demonstration purposes we write the encoded JPEG only for
             * objects with class_id == 0. The generated files have an 'OSD'
             * prefix. */
            if (obj_meta->class_id == 0)
            {
                NvDsUserMetaList *usrMetaList = obj_meta->obj_user_meta_list;
                FILE *file;
                while (usrMetaList != NULL)
                {
                    NvDsUserMeta *usrMetaData = (NvDsUserMeta *)usrMetaList->data;
                    if (usrMetaData->base_meta.meta_type == NVDS_CROP_IMAGE_META)
                    {
                        NvDsObjEncOutParams *enc_jpeg_image =
                            (NvDsObjEncOutParams *)usrMetaData->user_meta_data;
                        /* Write the encoded JPEG to file */
                        file = fopen(fileNameString, "wb");
                        if (file)
                        {
                            fwrite(enc_jpeg_image->outBuffer, sizeof(uint8_t),
                                   enc_jpeg_image->outLen, file);
                            fclose(file);
                        }
                        usrMetaList = NULL;
                    }
                    else
                    {
                        usrMetaList = usrMetaList->next;
                    }
                }
            }
        }

    }
    // g_print("Frame Number = %d Vehicle Count = %d Person Count = %d"" License Plate Count = %d\n",
    //         frame_number, vehicle_count, person_count, lp_count);
    frame_number++;
    // total_plate_number += lp_count;
    return GST_PAD_PROBE_OK;
}

Next, I attach another probe:

static GstPadProbeReturn
osd_src_pad_buffer_probe(GstPad *pad, GstPadProbeInfo *info, gpointer ctx)
{
    GstBuffer *buf = (GstBuffer *)info->data;
    GstMapInfo inmap = GST_MAP_INFO_INIT;
    if (!gst_buffer_map(buf, &inmap, GST_MAP_READ))
    {
        GST_ERROR("input buffer mapinfo failed");
        return GST_PAD_PROBE_OK;
    }
    NvDsObjectMeta *obj_meta = NULL;
    // guint vehicle_count = 0;
    guint person_count = 0;
    // guint lp_count = 0;
    guint num_rects = 0;
    guint label_i = 0;
    NvDsMetaList *l_frame = NULL;
    NvDsMetaList *l_obj = NULL;
    NvDsMetaList *l_class = NULL;
    NvDsMetaList *l_label = NULL;
    NvBufSurface *ip_surf = (NvBufSurface *) inmap.data;
    gst_buffer_unmap (buf, &inmap);
    NvDsObjectMeta *obj = NULL;
    NvDsDisplayMeta *display_meta = NULL;
    NvDsClassifierMeta *class_meta = NULL;
    NvDsLabelInfo *label_info = NULL;
    GstClockTime now;
    //   perf_measure *perf = (perf_measure *)(u_data);

    NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta(buf);

    now = g_get_monotonic_time();

    //   if (perf->pre_time == GST_CLOCK_TIME_NONE)
    //   {
    //     perf->pre_time = now;
    //     perf->total_time = GST_CLOCK_TIME_NONE;
    //   }
    //   else
    //   {
    //     if (perf->total_time == GST_CLOCK_TIME_NONE)
    //     {
    //       perf->total_time = (now - perf->pre_time);
    //     }
    //     else
    //     {
    //       perf->total_time += (now - perf->pre_time);
    //     }
    //     perf->pre_time = now;
    //     perf->count++;
    //   }

    for (l_frame = batch_meta->frame_meta_list; l_frame != NULL; l_frame = l_frame->next)
    {
        NvDsFrameMeta *frame_meta = (NvDsFrameMeta *)(l_frame->data);
        int offset = 0;
        if (!frame_meta)
        {
            continue;
        }
        for (l_obj = frame_meta->obj_meta_list; l_obj != NULL; l_obj = l_obj->next)
        {
            obj_meta = (NvDsObjectMeta *)(l_obj->data);

            // if (!obj_meta)
            // {
            //     continue;
            // }

            /* Check that the object has been detected by the primary detector
             * and that the class id is that of vehicles/persons. */
            if (obj_meta->unique_component_id == PRIMARY_DETECTOR_UID)
            {
                if (obj_meta->class_id == 0 || obj_meta->class_id == 9 || obj_meta->class_id == 10 || obj_meta->class_id == 16 || obj_meta->class_id == 17)
                {
                    vehicle_count++;
                }
                // if (obj_meta->class_id == PGIE_CLASS_ID_PERSON)
                //   person_count++;
                // NvDsObjEncUsrArgs userData = {0};
                // /* To be set by user */
                // userData.saveImg = 0;
                // userData.attachUsrMeta = 1;
                // /* Set if Image scaling Required */
                // userData.scaleImg = FALSE;
                // userData.scaledWidth = 0;
                // userData.scaledHeight = 0;
                // /* Preset */
                // userData.objNum = num_rects;
                // /* Quality */
                // userData.quality = 80;
                // /*Main Function Call */
                // nvds_obj_enc_process((NvDsObjEncCtxHandle)ctx, &userData, ip_surf, obj, frame_meta);
            }

            if (obj_meta->unique_component_id == SECONDARY_DETECTOR_UID)
            {
                if (obj_meta->class_id == SGIE_CLASS_ID_LPD)
                {
                    lp_count++;

                    /* Print this info only when operating in secondary model. */
                    if (obj_meta->parent)
                    {
                        //   g_print("License plate found for parent object %p (type=%s)\n",
                        //           obj_meta->parent, pgie_classes_str[obj_meta->parent->class_id]);
                        g_print("License plate found for parent object %p\n", obj_meta->parent);
                    }
                    if (obj_meta->class_id == PGIE_CLASS_ID_PERSON)
                    {
                        person_count++;
                    }

                    NvDsObjEncUsrArgs userData = {0};
                    /* To be set by user */
                    userData.saveImg = 0;
                    userData.attachUsrMeta = 1;
                    /* Set if Image scaling Required */
                    userData.scaleImg = FALSE;
                    userData.scaledWidth = 0;
                    userData.scaledHeight = 0;
                    /* Preset */
                    userData.objNum = num_rects;
                    /* Quality */
                    userData.quality = 80;
                    /* Main function call: encode this object's crop and attach it
                     * as NVDS_CROP_IMAGE_META. The object meta of the current
                     * object must be passed here; passing the never-assigned NULL
                     * pointer 'obj' leads to a crash. */
                    nvds_obj_enc_process((NvDsObjEncCtxHandle)ctx, &userData, ip_surf, obj_meta, frame_meta);

                    obj_meta->text_params.set_bg_clr = 1;
                    obj_meta->text_params.text_bg_clr.red = 0.0;
                    obj_meta->text_params.text_bg_clr.green = 0.0;
                    obj_meta->text_params.text_bg_clr.blue = 0.0;
                    obj_meta->text_params.text_bg_clr.alpha = 0.0;

                    obj_meta->text_params.font_params.font_color.red = 1.0;
                    obj_meta->text_params.font_params.font_color.green = 1.0;
                    obj_meta->text_params.font_params.font_color.blue = 0.0;
                    obj_meta->text_params.font_params.font_color.alpha = 1.0;
                    obj_meta->text_params.font_params.font_size = 12;
                }
            }

            //try here the logic for trigger line and speed
            for (l_class = obj_meta->classifier_meta_list; l_class != NULL; l_class = l_class->next)
            {
                class_meta = (NvDsClassifierMeta *)(l_class->data);
                if (!class_meta)
                {
                    continue;
                }
                if (class_meta->unique_component_id == SECONDARY_CLASSIFIER_UID)
                {
                    for (label_i = 0, l_label = class_meta->label_info_list; label_i < class_meta->num_labels && l_label;
                         label_i++, l_label = l_label->next)
                    {
                        label_info = (NvDsLabelInfo *)(l_label->data);
                        if (label_info)
                        {
                            if (label_info->label_id == 0 && label_info->result_class_id == 1)
                            {
                                g_print("Plate License %s\n", label_info->result_label);
                            }
                        }
                    }
                }
            }
        }

        display_meta = nvds_acquire_display_meta_from_pool(batch_meta);
        NvOSD_TextParams *txt_params = &display_meta->text_params[0];
        display_meta->num_labels = 1;
        txt_params->display_text = (char *)g_malloc0(MAX_DISPLAY_LEN);
        offset = snprintf(txt_params->display_text, MAX_DISPLAY_LEN, "Person = %d ", person_count);
        offset += snprintf(txt_params->display_text + offset, MAX_DISPLAY_LEN - offset, "Vehicle = %d ", vehicle_count);

        /* Now set the offsets where the string should appear */
        txt_params->x_offset = 10;
        txt_params->y_offset = 12;

        /* Font, font color and font size. Use a string literal rather than a
         * stack buffer, since the OSD reads font_name after this probe returns. */
        txt_params->font_params.font_name = (char *)"Serif";
        txt_params->font_params.font_size = 10;
        txt_params->font_params.font_color.red = 1.0;
        txt_params->font_params.font_color.green = 1.0;
        txt_params->font_params.font_color.blue = 1.0;
        txt_params->font_params.font_color.alpha = 1.0;

        /* Text background color */
        txt_params->set_bg_clr = 1;
        txt_params->text_bg_clr.red = 0.0;
        txt_params->text_bg_clr.green = 0.0;
        txt_params->text_bg_clr.blue = 0.0;
        txt_params->text_bg_clr.alpha = 1.0;

        nvds_add_display_meta_to_frame(frame_meta, display_meta);
    }
    g_print("Frame Number = %d Vehicle Count = %d Person Count = %d"" License Plate Count = %d\n",
            frame_number, vehicle_count, person_count, lp_count);
    nvds_obj_enc_finish ((NvDsObjEncCtxHandle)ctx);
    frame_number++;
    // total_plate_number += lp_count;
    return GST_PAD_PROBE_OK;
}

Finally, in main():

if (!gst_element_link_many(streammux, queue1, pgie, queue2, tracker, queue3, sgie1, queue4,
                                   sgie2, queue5, nvdslogger, queue6, nvvidconv, queue7, nvosd, queue8, sink, NULL))
        {
            g_printerr("Elements could not be linked. Exiting.\n");
            return -1;
        }

And finally, the probe attachment:

/* Add a probe to be informed of the generated metadata. We add the probe to
     * the src pad of the osd element, since by that time the buffer carries
     * all the metadata. */
    osd_src_pad = gst_element_get_static_pad(nvosd, "src");
    if (!osd_src_pad)
        g_print("Unable to get src pad\n");
    else
        gst_pad_add_probe(osd_src_pad, GST_PAD_PROBE_TYPE_BUFFER,
                        osd_src_pad_buffer_probe, (gpointer) obj_ctx_handle, NULL);
    gst_object_unref(osd_src_pad);

    if (save_img == 1)
    {
        /* Add a probe to read back the encoded crop metadata. Note that despite
         * the variable name, this probe is attached to the sink pad of the osd
         * element, not to the sgie's src pad. */
        sgie_src_pad = gst_element_get_static_pad(nvosd, "sink");
        if (!sgie_src_pad)
            g_print("Unable to get sink pad\n");
        else
            gst_pad_add_probe(sgie_src_pad, GST_PAD_PROBE_TYPE_BUFFER,
                              sgie_src_pad_buffer_probe, (gpointer) obj_ctx_handle, NULL);
        gst_object_unref(sgie_src_pad);
    }

Can anybody please help? I would be highly obliged.

Can you share why you need to add a probe for both the pgie and the sgie?

For deepstream-image-meta-test, nvds_obj_enc_process is used in the pgie's src pad probe to encode the detected object into a JPEG.

This function will add metadata of type NVDS_CROP_IMAGE_META.
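
The encode side could look something like the sketch below (untested, just to show the idea). It assumes the probe is attached to the src pad of the last sgie, that the probe's user data is the NvDsObjEncCtxHandle created in main(), and it reuses the SECONDARY_DETECTOR_UID / SGIE_CLASS_ID_LPD constants from your code; the name lp_encode_probe is just a placeholder.

#include <gst/gst.h>
#include "gstnvdsmeta.h"
#include "nvds_obj_encode.h"
#include "nvbufsurface.h"

static GstPadProbeReturn
lp_encode_probe(GstPad *pad, GstPadProbeInfo *info, gpointer u_data)
{
    GstBuffer *buf = GST_PAD_PROBE_INFO_BUFFER(info);
    GstMapInfo inmap = GST_MAP_INFO_INIT;

    if (!gst_buffer_map(buf, &inmap, GST_MAP_READ))
    {
        return GST_PAD_PROBE_OK;
    }
    NvBufSurface *ip_surf = (NvBufSurface *)inmap.data;
    gst_buffer_unmap(buf, &inmap);

    NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta(buf);
    if (!batch_meta)
    {
        return GST_PAD_PROBE_OK;
    }

    for (NvDsMetaList *l_frame = batch_meta->frame_meta_list; l_frame != NULL; l_frame = l_frame->next)
    {
        NvDsFrameMeta *frame_meta = (NvDsFrameMeta *)l_frame->data;
        for (NvDsMetaList *l_obj = frame_meta->obj_meta_list; l_obj != NULL; l_obj = l_obj->next)
        {
            NvDsObjectMeta *obj_meta = (NvDsObjectMeta *)l_obj->data;

            /* Encode only the license plates found by the secondary detector.
             * SECONDARY_DETECTOR_UID and SGIE_CLASS_ID_LPD are assumed to be
             * the same constants used in the code above. */
            if (obj_meta->unique_component_id != SECONDARY_DETECTOR_UID ||
                obj_meta->class_id != SGIE_CLASS_ID_LPD)
            {
                continue;
            }

            NvDsObjEncUsrArgs userData = {0};
            userData.saveImg = FALSE;      /* do not let the library write the file itself */
            userData.attachUsrMeta = TRUE; /* attach NVDS_CROP_IMAGE_META to the object */
            userData.scaleImg = FALSE;
            userData.quality = 80;
            nvds_obj_enc_process((NvDsObjEncCtxHandle)u_data, &userData,
                                 ip_surf, obj_meta, frame_meta);
        }
    }
    /* Make sure all queued encode jobs have finished before the buffer flows on. */
    nvds_obj_enc_finish((NvDsObjEncCtxHandle)u_data);
    return GST_PAD_PROBE_OK;
}

Attach it with gst_pad_add_probe on the sgie src pad and pass obj_ctx_handle as the user data, the same way you already do for nvosd.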

For your code, you need to do the encoding in the pgie/sgie probe and then read the encoded data at the osd probe.
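
For the read-back side, something along these lines at the osd probe (again an untested sketch): it assumes the encode probe above ran upstream with attachUsrMeta enabled and reuses FILE_NAME_SIZE from your code; the file name pattern and the name lp_save_probe are only examples.

#include <stdio.h>
#include <stdint.h>
#include <gst/gst.h>
#include "gstnvdsmeta.h"
#include "nvds_obj_encode.h"

static GstPadProbeReturn
lp_save_probe(GstPad *pad, GstPadProbeInfo *info, gpointer u_data)
{
    GstBuffer *buf = GST_PAD_PROBE_INFO_BUFFER(info);
    NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta(buf);
    if (!batch_meta)
    {
        return GST_PAD_PROBE_OK;
    }

    for (NvDsMetaList *l_frame = batch_meta->frame_meta_list; l_frame != NULL; l_frame = l_frame->next)
    {
        NvDsFrameMeta *frame_meta = (NvDsFrameMeta *)l_frame->data;
        for (NvDsMetaList *l_obj = frame_meta->obj_meta_list; l_obj != NULL; l_obj = l_obj->next)
        {
            NvDsObjectMeta *obj_meta = (NvDsObjectMeta *)l_obj->data;

            /* Look for the JPEG that nvds_obj_enc_process() attached upstream. */
            for (NvDsUserMetaList *l_usr = obj_meta->obj_user_meta_list; l_usr != NULL; l_usr = l_usr->next)
            {
                NvDsUserMeta *user_meta = (NvDsUserMeta *)l_usr->data;
                if (user_meta->base_meta.meta_type != NVDS_CROP_IMAGE_META)
                {
                    continue;
                }

                NvDsObjEncOutParams *enc_jpeg = (NvDsObjEncOutParams *)user_meta->user_meta_data;
                char fileName[FILE_NAME_SIZE];
                snprintf(fileName, FILE_NAME_SIZE, "./output/plate_src%u_frame%d_obj%lu.jpg",
                         frame_meta->source_id, frame_meta->frame_num,
                         (unsigned long)obj_meta->object_id);

                FILE *fp = fopen(fileName, "wb");
                if (fp)
                {
                    fwrite(enc_jpeg->outBuffer, sizeof(uint8_t), enc_jpeg->outLen, fp);
                    fclose(fp);
                }
            }
        }
    }
    return GST_PAD_PROBE_OK;
}

The key point is the ordering: the encode probe must sit upstream of the probe that reads NVDS_CROP_IMAGE_META, otherwise the user meta is not there yet.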

OK, thanks for your time. I had already figured it out.
Best,
