• Hardware Platform (Jetson / GPU)
Jetson TX2
• DeepStream Version
5.1
• JetPack Version (valid for Jetson only)
4.5.1
• TensorRT Version
7.1
I am trying to modify the deepstream-test2 app for face detection and other classifiers. I am able to get the label metadata information on each frame based on the object id of the detected person in the frame. I want to write the metadata to a JSON schema that I have already prepared whenever a face is lost from the frame.
I am not able to find out where in the pipeline I can detect that a face has been lost (i.e., a previously tracked object id no longer appears in the frame's metadata). Can anyone help me with this?
/* NOTE(review): this is a fragment of a GStreamer pad-probe callback (it ends
 * with `return GST_PAD_PROBE_OK;`).  The function signature and the
 * declarations of l_frame, l_obj, l_classifier, l_label, obj_meta, class_meta,
 * label_info, display_meta, face_count, num_rects, frame_number, batch_meta,
 * etc. are outside this view.
 *
 * NOTE(review): face_count and num_rects are incremented below but never reset
 * inside this span — presumably they accumulate across frames/batches unless
 * they are re-initialized before this loop; verify against the full function. */

/* Walk every frame in the current batch. */
for (l_frame = batch_meta->frame_meta_list; l_frame != NULL;
l_frame = l_frame->next) {
NvDsFrameMeta *frame_meta = (NvDsFrameMeta *) (l_frame->data);
int offset = 0;
/* Walk every detected object in this frame. */
for (l_obj = frame_meta->obj_meta_list; l_obj != NULL;
l_obj = l_obj->next) {
obj_meta = (NvDsObjectMeta *) (l_obj->data);
/* Only objects classified as faces by the primary detector are counted. */
if (obj_meta->class_id == PGIE_CLASS_ID_FACE) {
face_count++;
num_rects++;
// if (obj_meta->class_id == PGIE_CLASS_ID_VEHICLE) {
// vehicle_count++;
// num_rects++;
// New ***
/* Tracker-assigned id of this face; used to correlate prints across frames. */
int id = obj_meta->object_id;
/* Each secondary classifier attaches its own NvDsClassifierMeta;
 * each of those carries one or more NvDsLabelInfo results. */
for(l_classifier = obj_meta->classifier_meta_list; l_classifier != NULL;
l_classifier = l_classifier->next) {
class_meta = (NvDsClassifierMeta *)(l_classifier->data);
for(l_label = class_meta->label_info_list; l_label != NULL;
l_label = l_label->next) {
label_info = (NvDsLabelInfo *) (l_label->data);
/* Debug dump: frame number, object id, and classifier label. */
g_print ("******************FRAME START********************************* \n");
g_print ("FrameNo:%d \n",frame_number);
g_print("%d", id);
g_print("-------");
g_print ("%s\n", label_info->result_label);
// g_print("Timestamp: %d\n",(int)time(NULL));
/* Wall-clock timestamp in milliseconds.
 * NOTE(review): (tv.tv_sec) * 1000 is computed in integer arithmetic
 * before the conversion to double — fine with 64-bit time_t, but it
 * overflows if time_t is 32-bit on the target; confirm for Jetson TX2. */
struct timeval tv;
gettimeofday(&tv, NULL);
double time_in_mill_lastframe =
(tv.tv_sec) * 1000 + (tv.tv_usec) / 1000 ;
g_print("Timestamp Milli : %0.0f\n",time_in_mill_lastframe);
// int Atttime = time_in_mill_lastframe - time_in_mill_first;
// g_print("Att time : %f\n",Atttime);
// g_print("EBDID : %s\n",EBDID);
// g_print("\nCAMID : %s\n",camID);
// time_t now;
// struct tm *tm;
// now = time(0);
// if ((tm = localtime (&now)) == NULL) {
// printf ("Error extracting time stuff\n");
// return 1;
// }
// printf ("Converted Date&Time: %04d-%02d-%02d %02d:%02d:%02d\n",
// tm->tm_year+1900, tm->tm_mon+1, tm->tm_mday,
// tm->tm_hour, tm->tm_min, tm->tm_sec);
}
}
// ***
}
// if (obj_meta->class_id == PGIE_CLASS_ID_PERSON) {
// person_count++;
// num_rects++;
// obj_meta->text_params.font_params.font_size = 24;
// }
}
/* Build the on-screen-display overlay text for this frame. */
display_meta = nvds_acquire_display_meta_from_pool(batch_meta);
NvOSD_TextParams *txt_params = &display_meta->text_params[0];
display_meta->num_labels = 1;
txt_params->display_text = g_malloc0 (MAX_DISPLAY_LEN);
// offset = snprintf(txt_params->display_text, MAX_DISPLAY_LEN, "Person = %d ", person_count);
/* NOTE(review): offset is always 0 here (the "Person" line above is commented
 * out).  If it is ever re-enabled, the size argument below must become
 * MAX_DISPLAY_LEN - offset, otherwise the second snprintf may write past the
 * end of the MAX_DISPLAY_LEN buffer. */
offset = snprintf(txt_params->display_text + offset , MAX_DISPLAY_LEN, "Face = %d ", face_count);
/* Now set the offsets where the string should appear */
txt_params->x_offset = 10;
txt_params->y_offset = 12;
/* Font , font-color and font-size */
txt_params->font_params.font_name = "Serif";
txt_params->font_params.font_size = 10;
txt_params->font_params.font_color.red = 1.0;
txt_params->font_params.font_color.green = 1.0;
txt_params->font_params.font_color.blue = 1.0;
txt_params->font_params.font_color.alpha = 1.0;
/* Text background color */
txt_params->set_bg_clr = 1;
txt_params->text_bg_clr.red = 0.0;
txt_params->text_bg_clr.green = 0.0;
txt_params->text_bg_clr.blue = 0.0;
txt_params->text_bg_clr.alpha = 1.0;
/* Attach the overlay text so the OSD element renders it on this frame. */
nvds_add_display_meta_to_frame(frame_meta, display_meta);
}
/* NOTE(review): the poster's "face lost" question — a face disappearing could
 * be detected here by comparing the object_ids seen this batch against those
 * seen in the previous batch; the commented-out check below only detects the
 * case where NO faces remain at all. */
// if(face_count == 0)
// {
// g_print("Face Lost");
// }
/* Per-batch summary; %d with (int)time(NULL) truncates time_t to int. */
g_print ("Frame Number = %d Number of objects = %d "
"Face Count = %d Timestamp = %d \n",
frame_number, num_rects, face_count, (int)time(NULL));
g_print ("**********************FRAME END***************************** \n");
// g_print ("Frame Number = %d Number of objects = %d "
// "Vehicle Count = %d Person Count = %d\n",
// frame_number, num_rects, vehicle_count, person_count);
frame_number++;
return GST_PAD_PROBE_OK;
}