Displaying custom data on frames

Please provide complete information as applicable to your setup.

• Hardware Platform (Jetson / GPU): GPU
• DeepStream Version: DeepStream 6.2

Sun Feb 11 18:53:16 2024
+-----------------------------------------------------------------------------+
| NVIDIA-SMI 525.147.05    Driver Version: 525.147.05    CUDA Version: 12.0   |
|-------------------------------+----------------------+----------------------+
| GPU  Name        Persistence-M| Bus-Id        Disp.A | Volatile Uncorr. ECC |
| Fan  Temp  Perf  Pwr:Usage/Cap|         Memory-Usage | GPU-Util  Compute M. |
|                               |                      |               MIG M. |
|===============================+======================+======================|
|   0  NVIDIA GeForce ...  Off  | 00000000:01:00.0 Off |                  N/A |
| N/A   41C    P0    N/A /  80W |      6MiB /  6144MiB |      0%      Default |
|                               |                      |                  N/A |
+-------------------------------+----------------------+----------------------+

+-----------------------------------------------------------------------------+
| Processes:                                                                  |
|  GPU   GI   CI        PID   Type   Process name                  GPU Memory |
|        ID   ID                                                   Usage      |
|=============================================================================|
|    0   N/A  N/A      2910      G   /usr/lib/xorg/Xorg                  4MiB |
+-----------------------------------------------------------------------------+

I am using the DeepStream test5 application. In deepstream_test5_app_main.c, I added a custom function to estimate the speed of each object. I can already receive the estimated speed in the Kafka messages via generate_event_msg_meta; however, I also want the estimated speed displayed on the frame for each object. Kindly guide me on how to do that. I am sharing the relevant code below.

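For reference, the bookkeeping that estimateSpeed() relies on looks roughly like this (a sketch: the struct fields follow the comments below, and the MAX_TRACK_ID value is only illustrative):

#include <math.h> /* for sqrt() in estimateSpeed() */

/* Hypothetical upper bound on tracker IDs. Note that DeepStream tracker IDs
 * grow without bound over a run, so in practice you may want to index the
 * table with object_id % MAX_TRACK_ID. */
#define MAX_TRACK_ID 1024

typedef struct {
  float prevX, prevY; /* object centroid in the previously processed frame */
  float currX, currY; /* object centroid in the current frame */
} ObjectPosition;

static ObjectPosition objectPositions[MAX_TRACK_ID];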
// Speed Estimation Function
// Assumes the global ObjectPosition objectPositions[MAX_TRACK_ID] above, where
// MAX_TRACK_ID is the maximum expected number of unique track IDs.
// frameRate is the rate at which frames are captured (e.g., 30 frames per second).
// ppm is the Pixels Per Meter ratio for the camera setup.
float
estimateSpeed (int trackId, float frameRate, float ppm)
{
  ObjectPosition *pos = &objectPositions[trackId];

  // Calculate the pixel displacement of the centroid between frames
  float deltaX = pos->currX - pos->prevX;
  float deltaY = pos->currY - pos->prevY;
  float pixelDisplacement = sqrt (deltaX * deltaX + deltaY * deltaY);

  // Convert pixel displacement to meters
  float meterDisplacement = pixelDisplacement / ppm;

  // Calculate the time interval between frames (in seconds)
  float timeInterval = 1.0 / frameRate;

  // Estimate the speed (meters per second)
  float speed = meterDisplacement / timeInterval;

  // Update previous positions for the next speed calculation
  pos->prevX = pos->currX;
  pos->prevY = pos->currY;

  return speed;
}

static void
generate_event_msg_meta (AppCtx * appCtx, gpointer data, gint class_id,
    gboolean useTs, GstClockTime ts, gchar * src_uri, gint stream_id,
    guint sensor_id, NvDsObjectMeta * obj_params,
    AnalyticsUserMeta * obj_params1, float scaleW, float scaleH,
    NvDsFrameMeta * frame_meta)
{
  NvDsEventMsgMeta *meta = (NvDsEventMsgMeta *) data;
  GstClockTime ts_generated = 0;

  meta->objType = NVDS_OBJECT_TYPE_UNKNOWN; /**< object unknown */
  /* The sensor_id is parsed from the source group name which has the format
   * [source<sensor-id>]. */
  meta->sensorId = sensor_id;
  meta->placeId = sensor_id;
  meta->moduleId = sensor_id;
  meta->frameId = frame_meta->frame_num;
  meta->ts = (gchar *) g_malloc0 (MAX_TIME_STAMP_LEN + 1);
  meta->objectId = (gchar *) g_malloc0 (MAX_LABEL_SIZE);

  strncpy (meta->objectId, obj_params->obj_label, MAX_LABEL_SIZE);

  /** INFO: This API is called once for every 30 frames (now) */
  if (useTs && src_uri) {
    ts_generated =
        generate_ts_rfc3339_from_ts (meta->ts, MAX_TIME_STAMP_LEN, ts, src_uri,
        stream_id);
  } else {
    generate_ts_rfc3339 (meta->ts, MAX_TIME_STAMP_LEN);
  }

  /**
   * Valid attributes in the metadata sent over nvmsgbroker:
   * a) Sensor ID (shall be configured in nvmsgconv config file)
   * b) bbox info (meta->bbox) <- obj_params->rect_params (attr_info have sgie info)
   * c) tracking ID (meta->trackingId) <- obj_params->object_id
   */

  /** bbox - resolution is scaled by nvinfer back to
   * the resolution provided by streammux;
   * we have to scale it back to the original stream resolution */
  meta->bbox.left = obj_params->rect_params.left * scaleW;
  meta->bbox.top = obj_params->rect_params.top * scaleH;
  meta->bbox.width = obj_params->rect_params.width * scaleW;
  meta->bbox.height = obj_params->rect_params.height * scaleH;

  /** tracking ID */
  meta->trackingId = obj_params->object_id;

  /** sensor ID when streams are added using nvmultiurisrcbin REST API */
  NvDsSensorInfo *sensorInfo = get_sensor_info (appCtx, stream_id);
  if (sensorInfo) {
    /** this stream was added using REST API; we have Sensor Info! */
    LOGD ("this stream [%d:%s] was added using REST API; we have Sensor Info\n",
        sensorInfo->source_id, sensorInfo->sensor_id);
    meta->sensorStr = g_strdup (sensorInfo->sensor_id);
  }

  (void) ts_generated;
  //meta->type = NVDS_EVENT_ENTRY;
  //meta->objType = NVDS_OBJECT_TYPE_PERSON;
  //meta->objClassId = PERSON_ID;
  //meta->objClassId = Green_Vest;
  meta->occupancy = obj_params1->lccum_cnt;
  meta->lccum_cnt_entry = obj_params1->lcc_cnt_entry;
  meta->lccum_cnt_exit = obj_params1->lcc_cnt_exit;
  meta->source_id = obj_params1->source_id;

  frame_meta->occupancy = obj_params1->lccum_cnt;
  frame_meta->lccum_cnt_entry = obj_params1->lcc_cnt_entry;
  frame_meta->lccum_cnt_exit = obj_params1->lcc_cnt_exit;
  frame_meta->source_id = obj_params1->source_id;

  // g_print ("framenumber: %d\n", frame_meta->frame_num);
  // g_print ("source id: %d, track_id: %lu\n", meta->source_id, meta->trackingId);
  // g_print ("bbox-left: %f, bbox-top: %f, bbox-width: %f, bbox-height: %f\n",
  //     meta->bbox.left, meta->bbox.top, meta->bbox.width, meta->bbox.height);
  // g_print ("occupancy: %d, Enter: %d, Exit: %d\n", meta->occupancy,
  //     meta->lccum_cnt_entry, meta->lccum_cnt_exit);
  // g_print ("occupancy: %d, Enter1: %d, Exit1: %d\n", meta->occupancy,
  //     obj_params1->lcc_cnt_entry, obj_params1->lcc_cnt_exit);

  g_print ("framenumber: %d\n", frame_meta->frame_num);
  // g_print ("*********Frame-META-DATA\n");
  // g_print ("occupancy: %d, Enter: %d, Exit: %d\n", frame_meta->occupancy,
  //     frame_meta->lccum_cnt_entry, frame_meta->lccum_cnt_exit);

  //float frameRate = 30.0; // Example frame rate
  float frameRate = fps_avg[stream_id];
  //float ppm = 50.0; // Example Pixels Per Meter
  float ppm = 387.6;

  // Update the current centroid position for this track ID
  objectPositions[obj_params->object_id].currX =
      meta->bbox.left + (meta->bbox.width / 2);
  objectPositions[obj_params->object_id].currY =
      meta->bbox.top + (meta->bbox.height / 2);

  // Call the speed estimation function (frameRate and ppm are defined above)
  float speed = estimateSpeed (obj_params->object_id, frameRate, ppm);
  g_print ("fps: %.2f\n", fps_avg[stream_id]);
  g_print ("stream: %d\n", stream_id);
  // Use the speed for further processing or logging
  printf ("Speed of object %lu: %.2f meters/second\n",
      (gulong) obj_params->object_id, speed);
  obj_params->speed = speed;

  /*
   * This demonstrates how to attach custom objects.
   * Any custom object as per requirement can be generated and attached
   * like NvDsVehicleObject / NvDsPersonObject. Then that object should
   * be handled in gst-nvmsgconv component accordingly.
   */
  if (model_used == APP_CONFIG_ANALYTICS_RESNET_PGIE_3SGIE_TYPE_COLOR_MAKE) {
    if (class_id == RESNET10_PGIE_3SGIE_TYPE_COLOR_MAKECLASS_ID_CAR) {
      meta->type = NVDS_EVENT_MOVING;
      meta->objType = NVDS_OBJECT_TYPE_VEHICLE;
      meta->objClassId = RESNET10_PGIE_3SGIE_TYPE_COLOR_MAKECLASS_ID_CAR;

      NvDsVehicleObject *obj =
          (NvDsVehicleObject *) g_malloc0 (sizeof (NvDsVehicleObject));
      schema_fill_sample_sgie_vehicle_metadata (obj_params, obj);

      meta->extMsg = obj;
      meta->extMsgSize = sizeof (NvDsVehicleObject);
    }
#ifdef GENERATE_DUMMY_META_EXT
    else if (class_id == RESNET10_PGIE_3SGIE_TYPE_COLOR_MAKECLASS_ID_PERSON) {
      meta->type = NVDS_EVENT_ENTRY;
      meta->objType = NVDS_OBJECT_TYPE_PERSON;
      meta->objClassId = RESNET10_PGIE_3SGIE_TYPE_COLOR_MAKECLASS_ID_PERSON;

      NvDsPersonObject *obj =
          (NvDsPersonObject *) g_malloc0 (sizeof (NvDsPersonObject));
      generate_person_meta (obj);

      meta->extMsg = obj;
      meta->extMsgSize = sizeof (NvDsPersonObject);
    }
#endif /**< GENERATE_DUMMY_META_EXT */
  }
}

You can add a probe function on the sink pad of the nvdsosd plugin and attach the relevant display meta there, similar to osd_sink_pad_buffer_probe in deepstream_test1_app.c. A minimal sketch follows. It reuses the objectPositions table, estimateSpeed(), fps_avg[] and the 387.6 pixels-per-meter value from your code above, so adjust the names to your code base; and if you compute the speed here on every frame, drop the duplicate estimateSpeed() call in generate_event_msg_meta so prevX/prevY are not advanced twice.
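static GstPadProbeReturn
osd_sink_pad_buffer_probe (GstPad * pad, GstPadProbeInfo * info,
    gpointer u_data)
{
  GstBuffer *buf = (GstBuffer *) info->data;
  NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta (buf);
  if (!batch_meta)
    return GST_PAD_PROBE_OK;

  for (NvDsMetaList *l_frame = batch_meta->frame_meta_list; l_frame != NULL;
      l_frame = l_frame->next) {
    NvDsFrameMeta *frame_meta = (NvDsFrameMeta *) l_frame->data;

    for (NvDsMetaList *l_obj = frame_meta->obj_meta_list; l_obj != NULL;
        l_obj = l_obj->next) {
      NvDsObjectMeta *obj_meta = (NvDsObjectMeta *) l_obj->data;

      /* Update the tracked centroid and estimate the speed for this object */
      objectPositions[obj_meta->object_id].currX =
          obj_meta->rect_params.left + (obj_meta->rect_params.width / 2);
      objectPositions[obj_meta->object_id].currY =
          obj_meta->rect_params.top + (obj_meta->rect_params.height / 2);
      float speed = estimateSpeed (obj_meta->object_id,
          fps_avg[frame_meta->source_id], 387.6);

      /* Draw the speed text just above the object's bounding box */
      NvDsDisplayMeta *display_meta =
          nvds_acquire_display_meta_from_pool (batch_meta);
      display_meta->num_labels = 1;
      NvOSD_TextParams *txt_params = &display_meta->text_params[0];
      txt_params->display_text = g_strdup_printf ("%.2f m/s", speed);
      txt_params->x_offset = (guint) obj_meta->rect_params.left;
      txt_params->y_offset =
          (guint) MAX (obj_meta->rect_params.top - 20, 0);
      txt_params->font_params.font_name = "Serif";
      txt_params->font_params.font_size = 10;
      txt_params->font_params.font_color = (NvOSD_ColorParams) {1.0, 1.0, 1.0, 1.0};
      txt_params->set_bg_clr = 1;
      txt_params->text_bg_clr = (NvOSD_ColorParams) {0.0, 0.0, 0.0, 1.0};
      nvds_add_display_meta_to_frame (frame_meta, display_meta);
    }
  }
  return GST_PAD_PROBE_OK;
}

Then register it on the nvdsosd sink pad once you have a handle to the element (how you look the element up depends on how your pipeline is built):

GstPad *osd_sink_pad = gst_element_get_static_pad (nvosd, "sink");
gst_pad_add_probe (osd_sink_pad, GST_PAD_PROBE_TYPE_BUFFER,
    osd_sink_pad_buffer_probe, NULL, NULL);
gst_object_unref (osd_sink_pad);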