Secondary classifier labels are missing from output

Please provide complete information as applicable to your setup.

• Hardware Platform (Jetson / GPU) - T4
• DeepStream Version - 5
• TensorRT Version - 7.0
• NVIDIA GPU Driver Version (valid for GPU only) - 440.82

I am using a .c file to create the deepstream-app configuration. I am getting correct output up to the tracker, but not for the classification part.
My pipeline is FaceDetector → tracker → classifier, and for 4 faces I get output such as
face 4, face 3, face 2, face 1. My detector's labels.txt file has only one label, i.e. face, but I can't seem to get the secondary classifier's labels. Below is the deepstream-app config in a .c file:

#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
#include <math.h>
#include <unistd.h>
#include <limits.h>   /* PATH_MAX, used by get_absolute_file_path() */
#include <iostream>
#include <string>     /* std::string, std::stoi */
#include <cstring>    /* strlen, strncmp */
#include <cstdlib>    /* std::atoi, std::getenv, realpath */
#include "gstnvdsmeta.h"

/* The muxer output resolution must be set if the input streams will be of
 * different resolution. The muxer will scale all the input frames to this
 * resolution. */
#define MUXER_OUTPUT_WIDTH 1920
#define MUXER_OUTPUT_HEIGHT 1080

/* Muxer batch formation timeout in microseconds. Should ideally be set
 * based on the fastest source's framerate. */
#define MUXER_BATCH_TIMEOUT_USEC 4000000

#define TILED_OUTPUT_WIDTH 1920
#define TILED_OUTPUT_HEIGHT 1080

#define SGIE1_CONFIG_FILE "beard_sgie_config.txt"

#define TRACKER_CONFIG_FILE "tracker_config.txt"
#define MAX_TRACKING_ID_LEN 16


/* Tracker config parsing */

#define CHECK_ERROR(error) \
    if (error) { \
        g_printerr ("Error while parsing config file: %s\n", error->message); \
        goto done; \
    }

#define CONFIG_GROUP_TRACKER "tracker"
#define CONFIG_GROUP_TRACKER_WIDTH "tracker-width"
#define CONFIG_GROUP_TRACKER_HEIGHT "tracker-height"
#define CONFIG_GROUP_TRACKER_LL_CONFIG_FILE "ll-config-file"
#define CONFIG_GROUP_TRACKER_LL_LIB_FILE "ll-lib-file"
#define CONFIG_GROUP_TRACKER_ENABLE_BATCH_PROCESS "enable-batch-process"
#define CONFIG_GPU_ID "gpu-id"

static gchar *
get_absolute_file_path (gchar *cfg_file_path, gchar *file_path)
{
  gchar abs_cfg_path[PATH_MAX + 1];
  gchar *abs_file_path;
  gchar *delim;

  if (file_path && file_path[0] == '/') {
    return file_path;
  }

  if (!realpath (cfg_file_path, abs_cfg_path)) {
    g_free (file_path);
    return NULL;
  }

  // Return absolute path of config file if file_path is NULL.
  if (!file_path) {
    abs_file_path = g_strdup (abs_cfg_path);
    return abs_file_path;
  }

  delim = g_strrstr (abs_cfg_path, "/");
  *(delim + 1) = '\0';

  abs_file_path = g_strconcat (abs_cfg_path, file_path, NULL);
  g_free (file_path);

  return abs_file_path;
}

static gboolean
set_tracker_properties (GstElement *nvtracker)
{
  gboolean ret = FALSE;
  GError *error = NULL;
  gchar **keys = NULL;
  gchar **key = NULL;
  GKeyFile *key_file = g_key_file_new ();

  if (!g_key_file_load_from_file (key_file, TRACKER_CONFIG_FILE, G_KEY_FILE_NONE,
          &error)) {
    g_printerr ("Failed to load config file: %s\n", error->message);
    return FALSE;
  }

  keys = g_key_file_get_keys (key_file, CONFIG_GROUP_TRACKER, NULL, &error);
  CHECK_ERROR (error);

  for (key = keys; *key; key++) {
    if (!g_strcmp0 (*key, CONFIG_GROUP_TRACKER_WIDTH)) {
      gint width =
          g_key_file_get_integer (key_file, CONFIG_GROUP_TRACKER,
          CONFIG_GROUP_TRACKER_WIDTH, &error);
      CHECK_ERROR (error);
      g_object_set (G_OBJECT (nvtracker), "tracker-width", width, NULL);
    } else if (!g_strcmp0 (*key, CONFIG_GROUP_TRACKER_HEIGHT)) {
      gint height =
          g_key_file_get_integer (key_file, CONFIG_GROUP_TRACKER,
          CONFIG_GROUP_TRACKER_HEIGHT, &error);
      CHECK_ERROR (error);
      g_object_set (G_OBJECT (nvtracker), "tracker-height", height, NULL);
    } else if (!g_strcmp0 (*key, CONFIG_GPU_ID)) {
      guint gpu_id =
          g_key_file_get_integer (key_file, CONFIG_GROUP_TRACKER,
          CONFIG_GPU_ID, &error);
      CHECK_ERROR (error);
      g_object_set (G_OBJECT (nvtracker), "gpu_id", gpu_id, NULL);
    } else if (!g_strcmp0 (*key, CONFIG_GROUP_TRACKER_LL_CONFIG_FILE)) {
      char* ll_config_file = get_absolute_file_path (TRACKER_CONFIG_FILE,
                g_key_file_get_string (key_file,
                    CONFIG_GROUP_TRACKER,
                    CONFIG_GROUP_TRACKER_LL_CONFIG_FILE, &error));
      CHECK_ERROR (error);
      g_object_set (G_OBJECT (nvtracker), "ll-config-file", ll_config_file, NULL);
    } else if (!g_strcmp0 (*key, CONFIG_GROUP_TRACKER_LL_LIB_FILE)) {
      char* ll_lib_file = get_absolute_file_path (TRACKER_CONFIG_FILE,
                g_key_file_get_string (key_file,
                    CONFIG_GROUP_TRACKER,
                    CONFIG_GROUP_TRACKER_LL_LIB_FILE, &error));
      CHECK_ERROR (error);
      g_object_set (G_OBJECT (nvtracker), "ll-lib-file", ll_lib_file, NULL);
    } else if (!g_strcmp0 (*key, CONFIG_GROUP_TRACKER_ENABLE_BATCH_PROCESS)) {
      gboolean enable_batch_process =
          g_key_file_get_integer (key_file, CONFIG_GROUP_TRACKER,
          CONFIG_GROUP_TRACKER_ENABLE_BATCH_PROCESS, &error);
      CHECK_ERROR (error);
      g_object_set (G_OBJECT (nvtracker), "enable_batch_process",
                    enable_batch_process, NULL);
    } else {
      g_printerr ("Unknown key '%s' for group [%s]", *key,
          CONFIG_GROUP_TRACKER);
    }
  }

  ret = TRUE;
done:
  if (error) {
    g_error_free (error);
  }
  if (keys) {
    g_strfreev (keys);
  }
  if (!ret) {
    g_printerr ("%s failed", __func__);
  }
  return ret;
}


static gboolean
bus_call (GstBus * bus, GstMessage * msg, gpointer data) {
    GMainLoop *loop = (GMainLoop *) data;
    switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
        g_print ("End of stream\n");
        g_main_loop_quit (loop);
        break;
    case GST_MESSAGE_ERROR: {
        gchar *debug;
        GError *error;
        gst_message_parse_error (msg, &error, &debug);
        g_printerr ("ERROR from element %s: %s\n",
                    GST_OBJECT_NAME (msg->src), error->message);
        if (debug)
            g_printerr ("Error details: %s\n", debug);
        g_free (debug);
        g_error_free (error);
        g_main_loop_quit (loop);
        break;
    }
    default:
        break;
    }
    return TRUE;
}

static void printUsage(const char* cmd) {
    g_printerr ("\tUsage: %s -c pgie_config_file -i <H264 or JPEG filename> [-b BATCH] [-d]\n", cmd);
    g_printerr ("-h: \n\tprint help info \n");
    g_printerr ("-c: \n\tpgie config file, e.g. pgie_frcnn_tlt_config.txt  \n");
    g_printerr ("-i: \n\tH264 or JPEG input file  \n");
    g_printerr ("-b: \n\tbatch size, this will override the value of \"baitch-size\" in pgie config file  \n");
    g_printerr ("-d: \n\tenable display, otherwise dump to output H264 or JPEG file  \n");
}
int
main (int argc, char *argv[]) {
    GMainLoop *loop = NULL;
    GstElement *pipeline = NULL, *source = NULL, *parser = NULL,
               *decoder = NULL, *streammux = NULL, *sink = NULL,
               *pgie = NULL, *nvvidconv = NULL, *nvdsosd = NULL,
               *parser1 = NULL, *nvvidconv1 = NULL, *enc = NULL,
               *tiler = NULL, *tee = NULL, *sgie1 = NULL, *sgie2 = NULL, 
               *sgie3 = NULL, *nvtracker = NULL;

#ifdef PLATFORM_TEGRA
    GstElement *transform = NULL;
#endif
    GstBus *bus = NULL;
    guint bus_watch_id;
    GstPad *osd_sink_pad = NULL;

    gboolean isH264 = FALSE;
    gboolean useDisplay = FALSE;
    guint tiler_rows, tiler_cols;
    guint batchSize = 1;
    guint pgie_batch_size;
    guint c;
    const char* optStr = "b:c:dhi:";
    std::string pgie_config;
    std::string input_file;

    while ((c = getopt(argc, argv, optStr)) != -1) {
        switch (c) {
            case 'b':
                batchSize = std::atoi(optarg);
                batchSize = batchSize == 0 ? 1:batchSize;
                break;
            case 'c':
                pgie_config.assign(optarg);
                break;
            case 'd':
                useDisplay = TRUE;
                break;
            case 'i':
                input_file.assign(optarg);
                break;
            case 'h':
            default:
                printUsage(argv[0]);
                return -1;
          }
     }

    /* Check input arguments */
    if (argc == 1) {
        printUsage(argv[0]);
        return -1;
    }

    const gchar *p_end = input_file.c_str() + strlen(input_file.c_str());
    if(!strncmp(p_end - strlen("h264"), "h264", strlen("h264"))) {
        isH264 = TRUE;
    } else if(!strncmp(p_end - strlen("jpg"), "jpg", strlen("jpg")) || !strncmp(p_end - strlen("jpeg"), "jpeg", strlen("jpeg"))) {
        isH264 = FALSE;
    } else {
        g_printerr("input file only support H264 and JPEG\n");
        return -1;
    }

    const char* use_display = std::getenv("USE_DISPLAY");
    if(use_display != NULL && std::stoi(use_display) == 1) {
        useDisplay = true;
    }

    const char* batch_size = std::getenv("BATCH_SIZE");
    if(batch_size != NULL ) {
        batchSize = std::stoi(batch_size);
        g_printerr("batch size is %d \n", batchSize);
    }

    /* Standard GStreamer initialization */
    gst_init (&argc, &argv);
    loop = g_main_loop_new (NULL, FALSE);

    /* Create gstreamer elements */
    /* Create Pipeline element that will form a connection of other elements */
    pipeline = gst_pipeline_new ("ds-custom-pipeline");

    /* Source element for reading from the file */
    source = gst_element_factory_make ("filesrc", "file-source");

    /* Since the data format in the input file is elementary h264 stream,
     * we need a h264parser */
    if(isH264 == TRUE) {
        parser = gst_element_factory_make ("h264parse", "h264-parser");
    } else {
        parser = gst_element_factory_make ("jpegparse", "jpeg-parser");
    }

    /* Use nvdec_h264 for hardware accelerated decode on GPU */
    decoder = gst_element_factory_make ("nvv4l2decoder", "nvv4l2-decoder");

    /* Create nvstreammux instance to form batches from one or more sources. */
    streammux = gst_element_factory_make ("nvstreammux", "stream-muxer");

    if (!pipeline || !streammux) {
        g_printerr ("One element could not be created. Exiting.\n");
        return -1;
    }

    /* Use nvinfer to run inferencing on decoder's output,
     * behaviour of inferencing is set through config file */
    pgie = gst_element_factory_make ("nvinfer", "primary-nvinference-engine");

    /* We need to have a tracker to track the identified objects */
    nvtracker = gst_element_factory_make ("nvtracker", "tracker");

    /* We need three secondary gies so lets create 3 more instances of
     nvinfer */
    sgie1 = gst_element_factory_make ("nvinfer", "secondary1-nvinference-engine");

    /* Use convertor to convert from NV12 to RGBA as required by nvdsosd */
    nvvidconv = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter");

    /* Create OSD to draw on the converted RGBA buffer */
    nvdsosd = gst_element_factory_make ("nvdsosd", "nv-onscreendisplay");

    tee = gst_element_factory_make("tee", "tee");
    tiler = gst_element_factory_make ("nvmultistreamtiler", "nvtiler");

    /* Finally render the osd output */
#ifdef PLATFORM_TEGRA
    transform = gst_element_factory_make ("nvegltransform", "nvegl-transform");
#endif
    if(useDisplay == FALSE) {
        if(isH264 == TRUE){
            parser1 = gst_element_factory_make ("h264parse", "h264-parser1");
            enc = gst_element_factory_make ("nvv4l2h264enc", "h264-enc");
        } else{
            parser1 = gst_element_factory_make ("jpegparse", "jpeg-parser1");
            enc = gst_element_factory_make ("jpegenc", "jpeg-enc");
        }
        nvvidconv1 = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter1");
        sink = gst_element_factory_make ("filesink", "file-sink");
        if (!source || !parser || !parser1 || !decoder || !tee || !pgie
                || !nvtracker || !sgie1 || !tiler || !nvvidconv || !nvvidconv1 || !nvdsosd || !enc || !sink) {
            g_printerr ("One element could not be created. Exiting.\n");
            return -1;
        }
    } else {
        sink = gst_element_factory_make ("nveglglessink", "nvvideo-renderer");
        if (!source || !parser || !decoder || !tee || !pgie
                || !nvtracker || !sgie1 || !tiler || !nvvidconv || !nvdsosd || !sink) {
            g_printerr ("One element could not be created. Exiting.\n");
            return -1;
        }
    }

#ifdef PLATFORM_TEGRA
    if(!transform) {
        g_printerr ("One tegra element could not be created. Exiting.\n");
        return -1;
    }
#endif

    /* we set the input filename to the source element */
    g_object_set (G_OBJECT (source), "location", input_file.c_str(), NULL);

    //save the file to local dir
    if(useDisplay == FALSE) {
        if(isH264 == TRUE)
            g_object_set (G_OBJECT (sink), "location", "./out.h264", NULL);
        else
            g_object_set (G_OBJECT (sink), "location", "./out.jpg", NULL);
    }
    g_object_set (G_OBJECT (streammux), "width", MUXER_OUTPUT_WIDTH, "height",
                  MUXER_OUTPUT_HEIGHT, "batch-size", batchSize,
                  "batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);

    /* Set all the necessary properties of the nvinfer element,
     * the necessary ones are : */
    g_object_set (G_OBJECT (pgie), "config-file-path", pgie_config.c_str(), NULL);
    
    g_object_set (G_OBJECT (sgie1), "config-file-path", SGIE1_CONFIG_FILE, NULL);

    /* Override the batch-size set in the config file with the number of sources. */
    g_object_get (G_OBJECT (pgie), "batch-size", &pgie_batch_size, NULL);
    if (pgie_batch_size != batchSize) {
    g_printerr
        ("WARNING: Overriding infer-config batch-size (%d) with number of sources (%d)\n",
        pgie_batch_size, batchSize);
    g_object_set (G_OBJECT (pgie), "batch-size", batchSize, NULL);
    g_object_set (G_OBJECT (sgie1), "batch-size", batchSize, NULL);
    }
    
    /* Set necessary properties of the tracker element. */
    if (!set_tracker_properties(nvtracker)) {
        g_printerr ("Failed to set tracker properties. Exiting.\n");
        return -1;
    }

    tiler_rows = (guint) sqrt (batchSize);
    tiler_cols = (guint) ceil (1.0 * batchSize / tiler_rows);
    /* we set the tiler properties here */
    g_object_set (G_OBJECT (tiler), "rows", tiler_rows, "columns", tiler_cols,
      "width", TILED_OUTPUT_WIDTH, "height", TILED_OUTPUT_HEIGHT, NULL);

    /* we add a message handler */
    bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
    gst_object_unref (bus);

    /* Set up the pipeline */
    /* we add all elements into the pipeline */
    if(useDisplay == FALSE) {
        gst_bin_add_many (GST_BIN (pipeline),
                          source, parser, decoder, tee, streammux, pgie, nvtracker, sgie1, tiler,
                          nvvidconv, nvdsosd, nvvidconv1, enc, parser1, sink, NULL);
    } else {
#ifdef PLATFORM_TEGRA
        gst_bin_add_many (GST_BIN (pipeline),
                          source, parser, decoder, tee, streammux, pgie,nvtracker, sgie1,
                          tiler, nvvidconv, nvdsosd, transform, sink, NULL);
#else
        gst_bin_add_many (GST_BIN (pipeline),
                          source, parser, decoder, tee, streammux, pgie,nvtracker, sgie1,
                          tiler, nvvidconv, nvdsosd, sink, NULL);
#endif
    }

    for(guint i = 0; i < batchSize; i++) {
        GstPad *sinkpad, *srcpad;
        gchar pad_name_sink[16] = {};
        gchar pad_name_src[16] = {};

        g_snprintf (pad_name_sink, 15, "sink_%u", i);
        g_snprintf (pad_name_src, 15, "src_%u", i);
        sinkpad = gst_element_get_request_pad (streammux, pad_name_sink);
        if (!sinkpad) {
            g_printerr ("Streammux request sink pad failed. Exiting.\n");
            return -1;
        }

        srcpad = gst_element_get_request_pad(tee, pad_name_src);
        if (!srcpad) {
            g_printerr ("tee request src pad failed. Exiting.\n");
            return -1;
        }

        if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
            g_printerr ("Failed to link tee to stream muxer. Exiting.\n");
            return -1;
        }

        gst_object_unref (sinkpad);
        gst_object_unref (srcpad);
    }
    /* We link the elements together */
    /* file-source -> h264-parser -> nvv4l2decoder ->
     * nvinfer -> nvvideoconvert -> nvdsosd -> video-renderer */

    if (!gst_element_link_many (source, parser, decoder, tee, NULL)) {
        g_printerr ("Elements could not be linked: 1. Exiting.\n");
        return -1;
    }
    if (useDisplay == FALSE) {
        if (!gst_element_link_many (streammux, pgie, nvtracker, sgie1, tiler,
                                    nvvidconv, nvdsosd, nvvidconv1, enc, parser1, sink, NULL)) {
            g_printerr ("Elements could not be linked: 2. Exiting.\n");
            return -1;
        }
    } else {
#ifdef PLATFORM_TEGRA
        if (!gst_element_link_many (streammux, pgie,  nvtracker, sgie1,tiler,
                                    nvvidconv, nvdsosd, transform, sink, NULL)) {
            g_printerr ("Elements could not be linked: 2. Exiting.\n");
            return -1;
        }
#else
        if (!gst_element_link_many (streammux, pgie, nvtracker, sgie1, tiler,
                                    nvvidconv, nvdsosd, sink, NULL)) {
            g_printerr ("Elements could not be linked: 2. Exiting.\n");
            return -1;
        }
#endif
    }
    /* Set the pipeline to "playing" state */
    g_print ("Now playing: %s\n", pgie_config.c_str());
    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    /* Wait till pipeline encounters an error or EOS */
    g_print ("Running...\n");
    g_main_loop_run (loop);

    /* Out of the main loop, clean up nicely */
    g_print ("Returned, stopping playback\n");
    gst_element_set_state (pipeline, GST_STATE_NULL);
    g_print ("Deleting pipeline\n");
    gst_object_unref (GST_OBJECT (pipeline));
    g_source_remove (bus_watch_id);
    g_main_loop_unref (loop);
    return 0;
}

and the secondary classifier's config file:

[property]
gpu-id=0
net-scale-factor=1
model-engine-file=./tensorrt_engines_awsT4/beard_resnet18.engine
labelfile-path=./beard_labels.txt
force-implicit-batch-dim=1
batch-size=1
# 0=FP32, 1=INT8, 2=FP16 mode
network-mode=2
input-object-min-width=224
input-object-min-height=224
process-mode=2
model-color-format=0
gpu-id=0
gie-unique-id=2
operate-on-gie-id=1
operate-on-class-ids=0
is-classifier=1
output-blob-names=prob
classifier-async-mode=1
classifier-threshold=0.51
process-mode=2
#scaling-filter=0
#scaling-compute-hw=0

Below is the terminal output:

./deepstream-custom -c retinaface_pgie_config.txt -i download.jpeg 
Now playing: retinaface_pgie_config.txt
WARNING: ../nvdsinfer/nvdsinfer_func_utils.cpp:34 [TRT]: Current optimization profile is: 0. Please ensure there are no enqueued operations pending in this context prior to switching profiles
0:00:02.041383090   220 0x55c4bb290360 INFO                 nvinfer gstnvinfer.cpp:602:gst_nvinfer_logger:<secondary1-nvinference-engine> NvDsInferContext[UID 2]: Info from NvDsInferContextImpl::deserializeEngineAndBackend() <nvdsinfer_context_impl.cpp:1577> [UID = 2]: deserialized trt engine from :/opt/nvidia/deepstream/deepstream-5.0/sources/apps/deepstream-retinaface/tensorrt_engines_awsT4/beard_resnet18.engine
INFO: ../nvdsinfer/nvdsinfer_model_builder.cpp:685 [Implicit Engine Info]: layers num: 2
0   INPUT  kFLOAT data            3x224x224       
1   OUTPUT kFLOAT prob            2x1x1           

0:00:02.041475711   220 0x55c4bb290360 INFO                 nvinfer gstnvinfer.cpp:602:gst_nvinfer_logger:<secondary1-nvinference-engine> NvDsInferContext[UID 2]: Info from NvDsInferContextImpl::generateBackendContext() <nvdsinfer_context_impl.cpp:1681> [UID = 2]: Use deserialized engine model: /opt/nvidia/deepstream/deepstream-5.0/sources/apps/deepstream-retinaface/tensorrt_engines_awsT4/beard_resnet18.engine
0:00:02.042334763   220 0x55c4bb290360 INFO                 nvinfer gstnvinfer_impl.cpp:311:notifyLoadModelStatus:<secondary1-nvinference-engine> [UID 2]: Load new model:beard_sgie_config.txt sucessfully
gstnvtracker: Loading low-level lib at /opt/nvidia/deepstream/deepstream-5.0/lib/libnvds_mot_klt.so
gstnvtracker: Optional NvMOT_RemoveStreams not implemented
gstnvtracker: Batch processing is OFF
WARNING: ../nvdsinfer/nvdsinfer_func_utils.cpp:34 [TRT]: Current optimization profile is: 0. Please ensure there are no enqueued operations pending in this context prior to switching profiles
0:00:02.428758247   220 0x55c4bb290360 INFO                 nvinfer gstnvinfer.cpp:602:gst_nvinfer_logger:<primary-nvinference-engine> NvDsInferContext[UID 1]: Info from NvDsInferContextImpl::deserializeEngineAndBackend() <nvdsinfer_context_impl.cpp:1577> [UID = 1]: deserialized trt engine from :/opt/nvidia/deepstream/deepstream-5.0/sources/apps/deepstream-retinaface/tensorrt_engines_awsT4/retina_r50.engine
INFO: ../nvdsinfer/nvdsinfer_model_builder.cpp:685 [Implicit Engine Info]: layers num: 2
0   INPUT  kFLOAT data            3x640x1088      
1   OUTPUT kFLOAT prob            428401x1x1      

0:00:02.428832403   220 0x55c4bb290360 INFO                 nvinfer gstnvinfer.cpp:602:gst_nvinfer_logger:<primary-nvinference-engine> NvDsInferContext[UID 1]: Info from NvDsInferContextImpl::generateBackendContext() <nvdsinfer_context_impl.cpp:1681> [UID = 1]: Use deserialized engine model: /opt/nvidia/deepstream/deepstream-5.0/sources/apps/deepstream-retinaface/tensorrt_engines_awsT4/retina_r50.engine
0:00:02.452104069   220 0x55c4bb290360 INFO                 nvinfer gstnvinfer_impl.cpp:311:notifyLoadModelStatus:<primary-nvinference-engine> [UID 1]: Load new model:retinaface_pgie_config.txt sucessfully
Running...
KLT Tracker Init
End of stream
Returned, stopping playback
Deleting pipeline

Below is the output from the model

@y14uc339

Do you mean the label names are not shown on the classification output image?
Or that you cannot get classification results from the metadata?

@ersheng So do I need to explicitly extract them from the metadata? I thought this was handled by DeepStream and that the labels are rendered over the image automatically.
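
(For reference, reading the classifier results directly from the metadata would look roughly like the sketch below. This is only an illustrative sketch against the DeepStream 5 metadata API declared in gstnvdsmeta.h, not code from this thread; sgie_meta_print_probe is a made-up name, and the probe could be attached to the nvdsosd sink pad with gst_pad_add_probe().)

/* Illustrative sketch: walk batch -> frame -> object -> classifier metadata
 * and print the labels attached by the secondary classifier(s). */
static GstPadProbeReturn
sgie_meta_print_probe (GstPad *pad, GstPadProbeInfo *info, gpointer u_data)
{
    GstBuffer *buf = (GstBuffer *) info->data;
    NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta (buf);
    if (!batch_meta)
        return GST_PAD_PROBE_OK;

    for (NvDsMetaList *l_frame = batch_meta->frame_meta_list; l_frame; l_frame = l_frame->next) {
        NvDsFrameMeta *frame_meta = (NvDsFrameMeta *) l_frame->data;
        for (NvDsMetaList *l_obj = frame_meta->obj_meta_list; l_obj; l_obj = l_obj->next) {
            NvDsObjectMeta *obj_meta = (NvDsObjectMeta *) l_obj->data;
            /* Each SGIE that classified this object attaches one NvDsClassifierMeta. */
            for (NvDsMetaList *l_cls = obj_meta->classifier_meta_list; l_cls; l_cls = l_cls->next) {
                NvDsClassifierMeta *cls_meta = (NvDsClassifierMeta *) l_cls->data;
                for (NvDsMetaList *l_label = cls_meta->label_info_list; l_label; l_label = l_label->next) {
                    NvDsLabelInfo *label = (NvDsLabelInfo *) l_label->data;
                    g_print ("object %" G_GUINT64_FORMAT " (%s): gie %u -> %s (prob %.2f)\n",
                             obj_meta->object_id, obj_meta->obj_label,
                             cls_meta->unique_component_id,
                             label->result_label, label->result_prob);
                }
            }
        }
    }
    return GST_PAD_PROBE_OK;
}

/* Attached in main(), for example: */
osd_sink_pad = gst_element_get_static_pad (nvdsosd, "sink");
gst_pad_add_probe (osd_sink_pad, GST_PAD_PROBE_TYPE_BUFFER,
                   sgie_meta_print_probe, NULL, NULL);
gst_object_unref (osd_sink_pad);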

@y14uc339

I think you can refer to the configuration options of Gst-nvdsosd here:

https://docs.nvidia.com/metropolis/deepstream/plugin-manual/index.html#page/DeepStream%20Plugins%20Development%20Guide/deepstream_plugin_details.3.06.html#

Sample

[osd]
enable=1
gpu-id=0
border-width=1
text-size=15
text-color=1;1;1;1;
text-bg-color=0.3;0.3;0.3;1
font=Serif
show-clock=0
clock-x-offset=800
clock-y-offset=820
clock-text-size=12
clock-color=1;0;0;0
nvbuf-memory-type=0
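
Note that the [osd] group above is deepstream-app config-file syntax. Since the app in this thread builds its pipeline in code, the rough equivalent is setting properties on the nvdsosd element directly; the sketch below is an assumption based on the properties reported by gst-inspect-1.0 nvdsosd, so please verify the names on your DeepStream version:

/* Hedged sketch: make sure text drawing is enabled on the OSD element. */
g_object_set (G_OBJECT (nvdsosd), "display-text", TRUE, "display-clock", FALSE, NULL);

Per-object text appearance (font, size, content) is controlled through the text_params of each NvDsObjectMeta in the metadata rather than through element properties.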

@ersheng So, I was referring to deepstream-test2; even in that app the classifier labels are not rendered in the output. Can you tell me how to do it in deepstream-test2? There are three classifiers used in that app.