• Hardware Platform: Jetson Orin NX 16GB
• DeepStream Version: 7.0
• JetPack Version: 6.0 (L4T 36.3)
I'm working on an AI-NVR app. I've deployed DeepStream 7.0 with YOLOv8s as the PGIE and LPDNet/LPRNet/VehicleMakeNet as SGIEs. My question is: how can I save the license-plate and vehicle-make information in a single vehicle object instead of separate ones?
The screenshot above shows information for the same vehicle, but it is saved as separate objects.
Here is the deepstream-test5 app code; I've changed the parts where the data is saved:
static void
generate_event_msg_meta(AppCtx *appCtx, gpointer data, gint class_id, gboolean useTs,
                        GstClockTime ts, gchar *src_uri, gint stream_id, guint sensor_id,
                        NvDsObjectMeta *obj_params, float scaleW, float scaleH,
                        NvDsFrameMeta *frame_meta)
{
    NvDsEventMsgMeta *meta = (NvDsEventMsgMeta *)data;
    GstClockTime ts_generated = 0;

    meta->objType = NVDS_OBJECT_TYPE_UNKNOWN; /**< object unknown */
    /* The sensor_id is parsed from the source group name which has the format
     * [source<sensor-id>]. */
    meta->sensorId = sensor_id;
    meta->placeId = sensor_id;
    meta->moduleId = sensor_id;
    meta->frameId = frame_meta->frame_num;
    meta->ts = (gchar *)g_malloc0(MAX_TIME_STAMP_LEN + 1);
    meta->objectId = (gchar *)g_malloc0(MAX_LABEL_SIZE);

    strncpy(meta->objectId, obj_params->obj_label, MAX_LABEL_SIZE);

    /** INFO: This API is called once for every 30 frames (now) */
    if ((useTs && src_uri) ||
        appCtx->config.source_attr_all_config.type == NV_DS_SOURCE_IPC)
    {
        ts_generated =
            generate_ts_rfc3339_from_ts(meta->ts, MAX_TIME_STAMP_LEN, ts, src_uri,
                                        stream_id);
    }
    else
    {
        generate_ts_rfc3339(meta->ts, MAX_TIME_STAMP_LEN);
    }

    meta->bbox.left = obj_params->rect_params.left * scaleW;
    meta->bbox.top = obj_params->rect_params.top * scaleH;
    meta->bbox.width = obj_params->rect_params.width * scaleW;
    meta->bbox.height = obj_params->rect_params.height * scaleH;

    /** tracking ID */
    meta->trackingId = obj_params->object_id;

    /** sensor ID when streams are added using nvmultiurisrcbin REST API */
    NvDsSensorInfo *sensorInfo = get_sensor_info(appCtx, stream_id);
    if (sensorInfo)
    {
        /** this stream was added using the REST API; we have sensor info */
        LOGD("this stream [%d:%s] was added using REST API; we have Sensor Info\n",
             sensorInfo->source_id, sensorInfo->sensor_id);
        meta->sensorStr = g_strdup(sensorInfo->sensor_id);
    }

    (void)ts_generated;

    if (model_used == APP_CONFIG_ANALYTICS_YOLOV8S_PGIE_3SGIE)
    {
        /* Treat unlabelled LPD detections and the vehicle class ids
         * (2, 3, 5, 7: car, motorcycle, bus, truck for a COCO-trained YOLOv8s)
         * as vehicle events. Check the label for NULL before calling strcmp. */
        if ((class_id == LPD_CLASS_ID &&
             (obj_params->obj_label == NULL || strcmp(obj_params->obj_label, "") == 0)) ||
            class_id == 2 || class_id == 3 || class_id == 5 || class_id == 7)
        {
            meta->type = NVDS_EVENT_MOVING;
            meta->objType = NVDS_OBJECT_TYPE_VEHICLE;
            meta->objClassId = RESNET10_PGIE_3SGIE_TYPE_COLOR_MAKECLASS_ID_CAR;

            NvDsVehicleObject *obj =
                (NvDsVehicleObject *)g_malloc0(sizeof(NvDsVehicleObject));
            schema_fill_sample_sgie_vehicle_metadata(obj_params, obj);

            meta->extMsg = obj;
            meta->extMsgSize = sizeof(NvDsVehicleObject);
        }
    }
}
static void
schema_fill_sample_sgie_vehicle_metadata(NvDsObjectMeta *obj_params,
                                         NvDsVehicleObject *obj)
{
    if (!obj_params || !obj)
    {
        return;
    }

    /** Per the schema, the classification fields (type, color, make, ...)
     * stay NULL (unknown) if the corresponding SGIE did not provide a label. */
    obj->type = NULL;
    obj->make = NULL;
    obj->model = NULL;
    obj->color = NULL;
    obj->license = NULL;
    obj->region = NULL;

    GList *l;
    for (l = obj_params->classifier_meta_list; l != NULL; l = l->next)
    {
        NvDsClassifierMeta *classifierMeta = (NvDsClassifierMeta *)(l->data);
        g_print("Component id (SGIE) %d\n", classifierMeta->unique_component_id);

        switch (classifierMeta->unique_component_id)
        {
        case SECONDARY_GIE_VEHICLE_TYPE_UNIQUE_ID:
            obj->type = get_first_result_label(classifierMeta);
            break;
        case SECONDARY_GIE_VEHICLE_LICENSE_PLATE_UNIQUE_ID:
            obj->license = get_first_result_label(classifierMeta);
            break;
        case SECONDARY_GIE_VEHICLE_MAKE_UNIQUE_ID:
            obj->make = get_first_result_label(classifierMeta);
            break;
        case SECONDARY_GIE_VEHICLE_COLOR_UNIQUE_ID:
            obj->color = get_first_result_label(classifierMeta);
            break;
        default:
            break;
        }
    }
}
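For reference, get_first_result_label() isn't shown above; I'm assuming it is the stock deepstream-test5 helper, which just returns a copy of the first non-empty label attached to the classifier meta, roughly:
static gchar *
get_first_result_label(NvDsClassifierMeta *classifierMeta)
{
    GList *n;
    for (n = classifierMeta->label_info_list; n != NULL; n = n->next)
    {
        NvDsLabelInfo *labelInfo = (NvDsLabelInfo *)(n->data);
        /* hand back a copy of the first non-empty result label */
        if (labelInfo->result_label[0] != '\0')
        {
            return g_strdup(labelInfo->result_label);
        }
    }
    return NULL;
}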
Here’s my config:
[primary-gie]
enable=1
gpu-id=0
gie-unique-id=1
nvbuf-memory-type=4
config-file=config_infer_primary_yoloV8_nx16.txt
model-engine-file=/yolov8s/model_b4_gpu0_int8.engine
batch-size=4
bbox-border-color0=1;0;0;1
bbox-border-color1=0;1;1;1
bbox-border-color2=0;0;1;1
bbox-border-color3=0;1;0;1
interval=0
[tracker]
enable=1
tracker-width=960
tracker-height=544
ll-lib-file=/opt/nvidia/deepstream/deepstream/lib/libnvds_nvmultiobjecttracker.so
ll-config-file=config_tracker_NvDCF_PNv2.6_Interval_1_PVA.yml;config_tracker_NvDCF_PNv2.6_Interval_1_PVA.yml
sub-batches=2:2
gpu-id=0
display-tracking-id=1
[secondary-gie0]
enable=1
gpu-id=0
batch-size=4
gie-unique-id=4
operate-on-gie-id=1
config-file=lpd_yolov4-tiny_us.txt
[secondary-gie1]
enable=1
gpu-id=0
batch-size=4
gie-unique-id=5
operate-on-gie-id=4
operate-on-class-ids=0;
config-file=lpr_config_sgie_us.txt
[secondary-gie2]
enable=1
gpu-id=0
batch-size=4
gie-unique-id=6
operate-on-gie-id=1
config-file=config_infer_secondary_vehicleMake.txt
This code saves the data, but I need the license plate and vehicle make to end up in the same vehicle object. Is there a way to do that?
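To make the question concrete, this is the direction I'm considering (a rough, untested sketch; merge_plate_from_child() is my own hypothetical helper, and the component ids 4 and 5 are just the gie-unique-id values of LPDNet and LPRNet in my config). Since LPRNet operates on the plate objects produced by LPDNet, I assume its label ends up on the license-plate NvDsObjectMeta whose parent points at the vehicle, so it could be copied into the vehicle's NvDsVehicleObject like this:
/* Rough, untested sketch (my own helper, not from the sample app):
 * walk the frame's objects, find the license-plate objects whose parent
 * is this vehicle, and copy the LPR classifier label into obj->license. */
static void
merge_plate_from_child(NvDsFrameMeta *frame_meta,
                       NvDsObjectMeta *vehicle_obj, NvDsVehicleObject *obj)
{
    GList *l_obj;
    for (l_obj = frame_meta->obj_meta_list; l_obj != NULL; l_obj = l_obj->next)
    {
        NvDsObjectMeta *child = (NvDsObjectMeta *)(l_obj->data);

        /* only plate objects detected by LPDNet (gie-unique-id=4 in my
         * config) whose parent is this particular vehicle */
        if (child->parent != vehicle_obj || child->unique_component_id != 4)
        {
            continue;
        }

        GList *l_cls;
        for (l_cls = child->classifier_meta_list; l_cls != NULL; l_cls = l_cls->next)
        {
            NvDsClassifierMeta *cmeta = (NvDsClassifierMeta *)(l_cls->data);
            /* LPRNet is gie-unique-id=5 in my config */
            if (cmeta->unique_component_id == 5)
            {
                g_free(obj->license);
                obj->license = get_first_result_label(cmeta);
            }
        }
    }
}
I would call it right after schema_fill_sample_sgie_vehicle_metadata(), since frame_meta is already passed into generate_event_msg_meta(). Is that a reasonable approach, or is there a built-in way to get both results into one message object?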
I've also tried changing the LPD model's configuration to operate on the VehicleMake model, so the chain would be YOLOv8s (PGIE) → VehicleMakeNet (SGIE0) → LPDNet (SGIE1) → LPRNet (SGIE2), but I'm not getting any output from LPDNet or LPRNet. Here are my SGIE configs:
LPDnet config
[property]
gpu-id=0
net-scale-factor=0.0039215697906911373
#offsets=103.939;116.779;123.68
model-color-format=0
labelfile-path=/yolov8s/yolov4/usa_lpd_label.txt
model-engine-file=/yolov8s/model/LPDNet_usa_pruned_tao5.onnx_b40_gpu0_int8.engine
onnx-file=/yolov8s/model/LPDNet_usa_pruned_tao5.onnx
int8-calib-file=/yolov8s/model/usa_cal_8.5.3.bin
tlt-model-key=nvidia_tlt
infer-dims=3;480;640
uff-input-dims=3;480;640;0
uff-input-order=0
uff-input-blob-name=input_1
batch-size=16
## 0=FP32, 1=INT8, 2=FP16 mode
#process-mode=1
network-mode=1
num-detected-classes=1
interval=0
gie-unique-id=1
network-type=0
operate-on-gie-id=1
#operate-on-class-ids=0
cluster-mode=3
output-blob-names=output_cov/Sigmoid;output_bbox/BiasAdd
input-object-min-height=30
input-object-min-width=40
[class-attrs-all]
pre-cluster-threshold=0.01
roi-top-offset=0
roi-bottom-offset=0
detected-min-w=0
detected-min-h=0
detected-max-w=0
detected-max-h=0
VehicleMakeNet config
[property]
gpu-id=0
net-scale-factor=1
tlt-model-key=tlt_encode
tlt-encoded-model=/opt/nvidia/deepstream/deepstream-7.0/samples/models/Secondary_VehicleMake/resnet18_vehiclemakenet.etlt
model-engine-file=/yolov8s/Secondary_VehicleMake/resnet18_vehiclemakenet.etlt_b40_gpu0_int8.engine
labelfile-path=/yolov8s/Secondary_VehicleMake/labels.txt
int8-calib-file=/opt/nvidia/deepstream/deepstream-7.0/samples/models/Secondary_VehicleMake/cal_trt.bin
force-implicit-batch-dim=1
batch-size=4
# 0=FP32 and 1=INT8 mode
network-mode=1
network-type=1
#input-object-min-width=64
#input-object-min-height=64
model-color-format=0
gpu-id=0
gie-unique-id=2
operate-on-gie-id=1
#operate-on-class-ids=0
#is-classifier=1
uff-input-blob-name=input_1
output-blob-names=predictions/Softmax
classifier-async-mode=1
classifier-threshold=0.5
process-mode=2
#scaling-filter=0
#scaling-compute-hw=0
infer-dims=3;224;224
So is it possible to make this pipeline work?
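For debugging the cascaded setup, here is a minimal probe sketch (untested; the name print_objects_probe and the attachment point are my own choices) that I would attach to the src pad of the LPDNet nvinfer element via gst_pad_add_probe() with GST_PAD_PROBE_TYPE_BUFFER, just to check whether LPDNet emits any plate objects at all in this arrangement:
/* Rough debugging sketch (untested): print every object flowing past the
 * probe together with the unique_component_id of the GIE that created it,
 * to see whether LPDNet adds any plate objects in the cascaded setup. */
static GstPadProbeReturn
print_objects_probe(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
    GstBuffer *buf = (GstBuffer *)info->data;
    NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta(buf);
    NvDsMetaList *l_frame, *l_obj;

    (void)pad;
    (void)user_data;

    if (!batch_meta)
    {
        return GST_PAD_PROBE_OK;
    }

    for (l_frame = batch_meta->frame_meta_list; l_frame != NULL; l_frame = l_frame->next)
    {
        NvDsFrameMeta *frame_meta = (NvDsFrameMeta *)(l_frame->data);
        for (l_obj = frame_meta->obj_meta_list; l_obj != NULL; l_obj = l_obj->next)
        {
            NvDsObjectMeta *obj = (NvDsObjectMeta *)(l_obj->data);
            g_print("frame %d: component %d class %d label '%s'\n",
                    (int)frame_meta->frame_num, obj->unique_component_id,
                    obj->class_id, obj->obj_label);
        }
    }
    return GST_PAD_PROBE_OK;
}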