Hello,
I would like to draw a red box on video source0, a blue box on video source1, and no box on video source2. Starting from deepstream-test3-app, I attach an NvDsLineMeta and select the drawing based on the stream id. However, the stream id does not seem to be read one by one in the input order source0, source1, source2, so my drawing sometimes shows up on the wrong stream. How can I overcome this?
Regards,
Nero
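To make the problem concrete, here is the core of what I am doing, condensed from the full listing below (attach_lines_for_frame is only a name I use for this sketch; in the real code this logic lives inside tiler_src_pad_buffer_probe):

/* Condensed sketch of my approach, using the same DeepStream metadata API as
 * the full listing below. generate_line_meta() fills the NvDsLineMeta with
 * red lines for id 0, another color for id 1, and nothing for other ids. */
static void
attach_lines_for_frame (GstBuffer *buf, NvDsFrameMeta *frame_meta)
{
  /* Pick the drawing based on the stream id reported for this frame. I
   * expected stream_id 0/1/2 to follow the order source0/source1/source2. */
  NvDsLineMeta *line_meta = (NvDsLineMeta *) g_malloc0 (sizeof (NvDsLineMeta));
  generate_line_meta (line_meta, frame_meta->stream_id);

  /* Attach the line meta to the batched buffer so that nvosd draws it. */
  NvDsMeta *gst_line_meta = gst_buffer_add_nvds_meta (buf, (void *) line_meta,
      (GDestroyNotify) nvds_free_line_info);
  if (gst_line_meta)
    gst_line_meta->meta_type = NVDS_META_LINE_INFO;
}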
/*
* Copyright (c) 2018 NVIDIA Corporation. All rights reserved.
*
* NVIDIA Corporation and its licensors retain all intellectual property
* and proprietary rights in and to this software, related documentation
* and any modifications thereto. Any use, reproduction, disclosure or
* distribution of this software and related documentation without an express
* license agreement from NVIDIA Corporation is strictly prohibited.
*
*/
#include <gst/gst.h>
#include <glib.h>
#include <math.h>
#include <string.h>
#include <sys/time.h>
#include "gstnvdsmeta.h"
#include "gstnvstreammeta.h"
#include "gst-nvmessage.h"
#define MAX_DISPLAY_LEN 64
#define PGIE_CLASS_ID_VEHICLE 0
#define PGIE_CLASS_ID_PERSON 2
/* The muxer output resolution must be set if the input streams will be of
* different resolution. The muxer will scale all the input frames to this
* resolution. */
#define MUXER_OUTPUT_WIDTH 1920
#define MUXER_OUTPUT_HEIGHT 1080
/* Muxer batch formation timeout, in microseconds. Should ideally be set
* based on the fastest source's framerate. */
#define MUXER_BATCH_TIMEOUT_USEC 4000000
#define TILED_OUTPUT_WIDTH 1920
#define TILED_OUTPUT_HEIGHT 1080
/* NVIDIA Decoder source pad memory feature. This feature signifies that source
* pads having this capability will push GstBuffers containing cuda buffers. */
#define GST_CAPS_FEATURES_NVMM "memory:NVMM"
gchar pgie_classes_str[4][32] = {"Vehicle", "TwoWheeler", "Person",
"RoadSign"};
#define FPS_PRINT_INTERVAL 300
static struct timeval start_time = {};
static guint probe_counter = 0;
/* generate_line_meta fills the given NvDsLineMeta with four connected line
* segments: red lines for stream id 0, yellow lines for stream id 1, and
* nothing for any other stream id. */
static void generate_line_meta(gpointer data, guint id)
{
guint num_lines;
NvOSD_LineParams *l_params = NULL;
NvDsLineMeta *meta = (NvDsLineMeta *)data;
if (id == 0)
{
meta->num_lines = 4;
meta->batch_id = 0;
meta->line_params = (NvOSD_LineParams *)g_malloc0((meta->num_lines) * sizeof(NvOSD_LineParams));
l_params = (NvOSD_LineParams *)&meta->line_params[0];
l_params->x1 = 100;
l_params->y1 = 100;
l_params->x2 = 400;
l_params->y2 = 150;
l_params->line_width = 10;
l_params->line_color.red = 1.0;
l_params->line_color.green = 0.0;
l_params->line_color.blue = 0.0;
l_params->line_color.alpha = 0.5;
l_params = (NvOSD_LineParams *)&meta->line_params[1];
l_params->x1 = 400;
l_params->y1 = 150;
l_params->x2 = 500;
l_params->y2 = 550;
l_params->line_width = 10;
l_params->line_color.red = 1.0;
l_params->line_color.green = 0.0;
l_params->line_color.blue = 0.0;
l_params->line_color.alpha = 0.5;
l_params = (NvOSD_LineParams *)&meta->line_params[2];
l_params->x1 = 500;
l_params->y1 = 550;
l_params->x2 = 150;
l_params->y2 = 500;
l_params->line_width = 10;
l_params->line_color.red = 1.0;
l_params->line_color.green = 0.0;
l_params->line_color.blue = 0.0;
l_params->line_color.alpha = 0.5;
l_params = (NvOSD_LineParams *)&meta->line_params[3];
l_params->x1 = 150;
l_params->y1 = 500;
l_params->x2 = 100;
l_params->y2 = 100;
l_params->line_width = 10;
l_params->line_color.red = 1.0;
l_params->line_color.green = 0.0;
l_params->line_color.blue = 0.0;
l_params->line_color.alpha = 0.5;
}
if (id == 1)
{
meta->num_lines = 4;
meta->batch_id = 1;
meta->line_params = (NvOSD_LineParams *)g_malloc0((meta->num_lines) * sizeof(NvOSD_LineParams));
l_params = (NvOSD_LineParams *)&meta->line_params[0];
l_params->x1 = 100;
l_params->y1 = 100;
l_params->x2 = 400;
l_params->y2 = 150;
l_params->line_width = 10;
l_params->line_color.red = 1.0;
l_params->line_color.green = 1.0;
l_params->line_color.blue = 0.0;
l_params->line_color.alpha = 0.5;
l_params = (NvOSD_LineParams *)&meta->line_params[1];
l_params->x1 = 400;
l_params->y1 = 150;
l_params->x2 = 500;
l_params->y2 = 550;
l_params->line_width = 10;
l_params->line_color.red = 1.0;
l_params->line_color.green = 1.0;
l_params->line_color.blue = 0.0;
l_params->line_color.alpha = 0.5;
l_params = (NvOSD_LineParams *)&meta->line_params[2];
l_params->x1 = 500;
l_params->y1 = 550;
l_params->x2 = 150;
l_params->y2 = 500;
l_params->line_width = 10;
l_params->line_color.red = 1.0;
l_params->line_color.green = 1.0;
l_params->line_color.blue = 0.0;
l_params->line_color.alpha = 0.5;
l_params = (NvOSD_LineParams *)&meta->line_params[3];
l_params->x1 = 150;
l_params->y1 = 500;
l_params->x2 = 100;
l_params->y2 = 100;
l_params->line_width = 10;
l_params->line_color.red = 1.0;
l_params->line_color.green = 1.0;
l_params->line_color.blue = 0.0;
l_params->line_color.alpha = 0.5;
//g_print("ID = %d, Yellow\n", id);
}
}
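/* tiler_src_pad_buffer_probe extracts the inference metadata from each
* batched buffer, rewrites the text params of the detected objects, and
* attaches an NvDsLineMeta per frame based on frame_meta->stream_id. */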
static GstPadProbeReturn
tiler_src_pad_buffer_probe(GstPad *pad, GstPadProbeInfo *info,
gpointer u_data)
{
GstMeta *gst_meta = NULL;
NvDsMeta *nvdsmeta = NULL;
gpointer state = NULL;
static GQuark _nvdsmeta_quark = 0;
GstBuffer *buf = (GstBuffer *)info->data;
NvDsFrameMeta *frame_meta = NULL;
guint num_rects = 0, rect_index = 0;
NvDsObjectParams *obj_meta = NULL;
NvOSD_TextParams *txt_params = NULL;
GstNvStreamMeta *streammeta = NULL;
if (!_nvdsmeta_quark)
_nvdsmeta_quark = g_quark_from_static_string(NVDS_META_STRING);
if (probe_counter == 0)
{
gettimeofday(&start_time, NULL);
}
else if (probe_counter == FPS_PRINT_INTERVAL)
{
struct timeval cur_time;
gettimeofday(&cur_time, NULL);
g_print("FPS for the last %d batches: %.2f\n", FPS_PRINT_INTERVAL,
FPS_PRINT_INTERVAL / ((cur_time.tv_sec - start_time.tv_sec) +
(cur_time.tv_usec - start_time.tv_usec) / 1e6));
probe_counter = 0;
start_time = cur_time;
}
probe_counter++;
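/* Retrieve the stream meta attached by nvstreammux; it carries per-frame
* batch information such as the original frame number of each frame. */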
streammeta = gst_buffer_get_nvstream_meta(buf);
while ((gst_meta = gst_buffer_iterate_meta(buf, &state)))
{
if (gst_meta_api_type_has_tag(gst_meta->info->api, _nvdsmeta_quark))
{
nvdsmeta = (NvDsMeta *)gst_meta;
/* We are interested only in intercepting Meta of type
* "NVDS_META_FRAME_INFO" as they are from our infer elements. */
if (nvdsmeta->meta_type == NVDS_META_FRAME_INFO)
{
guint vehicle_count = 0;
guint person_count = 0;
frame_meta = (NvDsFrameMeta *)nvdsmeta->meta_data;
if (frame_meta == NULL)
{
g_print("NvDS Meta contained NULL meta \n");
return GST_PAD_PROBE_OK;
}
/* We reset num_strings here as we plan to iterate through the detected
* objects and form our own strings.
* The strings generated by the pipeline shall be discarded.
*/
frame_meta->num_strings = 0;
num_rects = frame_meta->num_rects;
/* This means we have num_rects entries in frame_meta->obj_params;
* now let's iterate through them. */
for (rect_index = 0; rect_index < num_rects; rect_index++)
{
/* Using the above information, form the text to be displayed on top of
* the bounding box. */
obj_meta = (NvDsObjectParams *)&frame_meta->obj_params[rect_index];
txt_params = &(obj_meta->text_params);
if (txt_params->display_text)
g_free(txt_params->display_text);
txt_params->display_text = g_malloc0(MAX_DISPLAY_LEN);
g_snprintf(txt_params->display_text, MAX_DISPLAY_LEN, "%s ",
pgie_classes_str[obj_meta->class_id]);
if (obj_meta->class_id == PGIE_CLASS_ID_VEHICLE)
vehicle_count++;
if (obj_meta->class_id == PGIE_CLASS_ID_PERSON)
person_count++;
/* Now set the offsets where the string should appear */
txt_params->x_offset = obj_meta->rect_params.left;
txt_params->y_offset = obj_meta->rect_params.top - 25;
/* Font , font-color and font-size */
txt_params->font_params.font_name = "Arial";
txt_params->font_params.font_size = 10;
txt_params->font_params.font_color.red = 1.0;
txt_params->font_params.font_color.green = 1.0;
txt_params->font_params.font_color.blue = 1.0;
txt_params->font_params.font_color.alpha = 1.0;
/* Text background color */
txt_params->set_bg_clr = 1;
txt_params->text_bg_clr.red = 0.0;
txt_params->text_bg_clr.green = 0.0;
txt_params->text_bg_clr.blue = 0.0;
txt_params->text_bg_clr.alpha = 1.0;
frame_meta->num_strings++;
}
//g_print("Source ID = %d\n", frame_meta->stream_id);
/* Enable this print by setting environment variable GST_DEBUG=4
* before running the app. */
GST_INFO("Source %d Frame Number = %lu Number of objects = %d "
"Vehicle Count = %d Person Count = %d",
frame_meta->stream_id,
streammeta->stream_frame_num[frame_meta->batch_id],
num_rects, vehicle_count, person_count);
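/* Allocate a per-frame NvDsLineMeta, fill it according to this frame's
* stream id, and attach it to the batched buffer so that nvosd draws the
* lines. */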
NvDsMeta *gst_line_meta = NULL;
NvDsLineMeta *line_meta = (NvDsLineMeta *)g_malloc0(sizeof(NvDsLineMeta));
generate_line_meta(line_meta, frame_meta->stream_id);
//g_print ("line = %d\n", line_meta->line_parasms->x1);
gst_line_meta = gst_buffer_add_nvds_meta(buf, (void *)line_meta, (GDestroyNotify)nvds_free_line_info);
if (gst_line_meta)
{
gst_line_meta->meta_type = NVDS_META_LINE_INFO;
}
else
{
g_print("Error in attaching event meta to buffer\n");
}
}
}
}
return GST_PAD_PROBE_OK;
}
static gboolean
bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
GMainLoop *loop = (GMainLoop *)data;
switch (GST_MESSAGE_TYPE(msg))
{
case GST_MESSAGE_EOS:
g_print("End of stream\n");
g_main_loop_quit(loop);
break;
case GST_MESSAGE_WARNING:
{
gchar *debug;
GError *error;
gst_message_parse_warning(msg, &error, &debug);
g_printerr("WARNING from element %s: %s\n",
GST_OBJECT_NAME(msg->src), error->message);
g_free(debug);
g_printerr("Warning: %s\n", error->message);
g_error_free(error);
break;
}
case GST_MESSAGE_ERROR:
{
gchar *debug;
GError *error;
gst_message_parse_error(msg, &error, &debug);
g_printerr("ERROR from element %s: %s\n",
GST_OBJECT_NAME(msg->src), error->message);
if (debug)
g_printerr("Error details: %s\n", debug);
g_free(debug);
g_error_free(error);
g_main_loop_quit(loop);
break;
}
case GST_MESSAGE_ELEMENT:
{
if (gst_nvmessage_is_stream_eos(msg))
{
guint stream_id;
if (gst_nvmessage_parse_stream_eos(msg, &stream_id))
{
g_print("Got EOS from stream %d\n", stream_id);
}
}
break;
}
default:
break;
}
return TRUE;
}
static void
cb_newpad(GstElement *decodebin, GstPad *decoder_src_pad, gpointer data)
{
GstCaps *caps = gst_pad_query_caps(decoder_src_pad, NULL);
const GstStructure *str = gst_caps_get_structure(caps, 0);
const gchar *name = gst_structure_get_name(str);
GstElement *source_bin = (GstElement *)data;
GstCapsFeatures *features = gst_caps_get_features(caps, 0);
/* Need to check if the pad created by the decodebin is for video and not
* audio. */
if (!strncmp(name, "video", 5))
{
/* Link the decodebin pad only if decodebin has picked nvidia
* decoder plugin nvdec_*. We do this by checking if the pad caps contain
* NVMM memory features. */
if (gst_caps_features_contains(features, GST_CAPS_FEATURES_NVMM))
{
/* Get the source bin ghost pad */
GstPad *bin_ghost_pad = gst_element_get_static_pad(source_bin, "src");
if (!gst_ghost_pad_set_target(GST_GHOST_PAD(bin_ghost_pad),
decoder_src_pad))
{
g_printerr("Failed to link decoder src pad to source bin ghost pad\n");
}
gst_object_unref(bin_ghost_pad);
}
else
{
g_printerr("Error: Decodebin did not pick nvidia decoder plugin.\n");
}
}
}
static GstElement *
create_source_bin(guint index, gchar *uri)
{
GstElement *bin = NULL, *uri_decode_bin = NULL;
gchar bin_name[16] = {};
g_snprintf(bin_name, 15, "source-bin-%02d", index);
/* Create a source GstBin to abstract this bin's content from the rest of the
* pipeline */
bin = gst_bin_new(bin_name);
/* Source element for reading from the uri.
* We will use decodebin and let it figure out the container format of the
* stream and the codec and plug the appropriate demux and decode plugins. */
uri_decode_bin = gst_element_factory_make("uridecodebin", "uri-decode-bin");
if (!bin || !uri_decode_bin)
{
g_printerr("One element in source bin could not be created.\n");
return NULL;
}
/* We set the input uri to the source element */
g_object_set(G_OBJECT(uri_decode_bin), "uri", uri, NULL);
/* Connect to the "pad-added" signal of the decodebin, which generates a
* callback once a new pad for raw data has been created by the decodebin. */
g_signal_connect(G_OBJECT(uri_decode_bin), "pad-added",
G_CALLBACK(cb_newpad), bin);
gst_bin_add(GST_BIN(bin), uri_decode_bin);
/* We need to create a ghost pad for the source bin which will act as a proxy
* for the video decoder src pad. The ghost pad will not have a target right
* now. Once the decode bin creates the video decoder and generates the
* cb_newpad callback, we will set the ghost pad target to the video decoder
* src pad. */
if (!gst_element_add_pad(bin, gst_ghost_pad_new_no_target("src",
GST_PAD_SRC)))
{
g_printerr("Failed to add ghost pad in source bin\n");
return NULL;
}
return bin;
}
int main(int argc, char *argv[])
{
GMainLoop *loop = NULL;
GstElement *pipeline = NULL, *streammux = NULL, *sink = NULL, *pgie = NULL,
*nvvidconv = NULL, *nvosd = NULL, *tiler = NULL, *filter = NULL;
GstBus *bus = NULL;
guint bus_watch_id;
GstPad *tiler_src_pad = NULL;
guint i, num_sources;
guint tiler_rows, tiler_columns;
guint pgie_batch_size;
/* Check input arguments */
if (argc < 2)
{
g_printerr("Usage: %s <uri1> [uri2] ... [uriN] \n", argv[0]);
return -1;
}
num_sources = argc - 1;
/* Standard GStreamer initialization */
gst_init(&argc, &argv);
loop = g_main_loop_new(NULL, FALSE);
/* Create gstreamer elements */
/* Create Pipeline element that will form a connection of other elements */
pipeline = gst_pipeline_new("dstest3-pipeline");
/* Create nvstreammux instance to form batches from one or more sources. */
streammux = gst_element_factory_make("nvstreammux", "stream-muxer");
if (!pipeline || !streammux)
{
g_printerr("One element could not be created. Exiting.\n");
return -1;
}
gst_bin_add(GST_BIN(pipeline), streammux);
for (i = 0; i < num_sources; i++)
{
GstPad *sinkpad, *srcpad;
gchar pad_name[16] = {};
GstElement *source_bin = create_source_bin(i, argv[i + 1]);
if (!source_bin)
{
g_printerr("Failed to create source bin. Exiting.\n");
return -1;
}
gst_bin_add(GST_BIN(pipeline), source_bin);
g_snprintf(pad_name, 15, "sink_%u", i);
sinkpad = gst_element_get_request_pad(streammux, pad_name);
if (!sinkpad)
{
g_printerr("Streammux request sink pad failed. Exiting.\n");
return -1;
}
srcpad = gst_element_get_static_pad(source_bin, "src");
if (!srcpad)
{
g_printerr("Failed to get src pad of source bin. Exiting.\n");
return -1;
}
if (gst_pad_link(srcpad, sinkpad) != GST_PAD_LINK_OK)
{
g_printerr("Failed to link source bin to stream muxer. Exiting.\n");
return -1;
}
gst_object_unref(srcpad);
gst_object_unref(sinkpad);
}
/* Use nvinfer to infer on batched frame. */
pgie = gst_element_factory_make("nvinfer", "primary-nvinference-engine");
/* Add the bboxfilter2 element to the pipeline after inference. */
filter = gst_element_factory_make("bboxfilter2", "myfilter");
/* Use nvtiler to composite the batched frames into a 2D tiled array based
* on the source of the frames. */
tiler = gst_element_factory_make("nvmultistreamtiler", "nvtiler");
/* Use convertor to convert from NV12 to RGBA as required by nvosd */
nvvidconv = gst_element_factory_make("nvvidconv", "nvvideo-converter");
/* Create OSD to draw on the converted RGBA buffer */
nvosd = gst_element_factory_make("nvosd", "nv-onscreendisplay");
/* Finally render the osd output */
sink = gst_element_factory_make("nveglglessink", "nvvideo-renderer");
if (!pgie || !filter || !tiler || !nvvidconv || !nvosd || !sink)
{
g_printerr("One element could not be created. Exiting.\n");
return -1;
}
g_object_set(G_OBJECT(streammux), "width", MUXER_OUTPUT_WIDTH, "height",
MUXER_OUTPUT_HEIGHT, "batch-size", num_sources,
"batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);
/* Configure the nvinfer element using the nvinfer config file. */
g_object_set(G_OBJECT(pgie),
"config-file-path", "dstest3_pgie_config.txt", NULL);
/* Override the batch-size set in the config file with the number of sources. */
g_object_get(G_OBJECT(pgie), "batch-size", &pgie_batch_size, NULL);
if (pgie_batch_size != num_sources)
{
g_printerr("WARNING: Overriding infer-config batch-size (%d) with number of sources (%d)\n",
pgie_batch_size, num_sources);
g_object_set(G_OBJECT(pgie), "batch-size", num_sources, NULL);
}
tiler_rows = (guint)sqrt(num_sources);
tiler_columns = (guint)ceil(1.0 * num_sources / tiler_rows);
/* we set the tiler properties here */
g_object_set(G_OBJECT(tiler), "rows", tiler_rows, "columns", tiler_columns,
"width", TILED_OUTPUT_WIDTH, "height", TILED_OUTPUT_HEIGHT, NULL);
/* we set the osd properties here */
g_object_set(G_OBJECT(nvosd), "font-size", 15, NULL);
/* we add a message handler */
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
bus_watch_id = gst_bus_add_watch(bus, bus_call, loop);
gst_object_unref(bus);
/* Set up the pipeline */
/* we add all elements into the pipeline */
gst_bin_add_many(GST_BIN(pipeline), pgie, filter, tiler, nvvidconv, nvosd, sink,
NULL);
/* We link the elements together:
* nvstreammux -> nvinfer -> bboxfilter2 -> nvtiler -> nvvidconv -> nvosd -> video-renderer */
if (!gst_element_link_many(streammux, pgie, filter, tiler, nvvidconv, nvosd, sink,
NULL))
{
g_printerr("Elements could not be linked. Exiting.\n");
return -1;
}
/* Let's add a probe to get informed of the generated metadata. We add the
* probe to the src pad of the nvinfer element, since by that time the buffer
* already carries the inference metadata. */
tiler_src_pad = gst_element_get_static_pad(pgie, "src");
if (!tiler_src_pad)
g_print("Unable to get src pad\n");
else
gst_pad_add_probe(tiler_src_pad, GST_PAD_PROBE_TYPE_BUFFER,
tiler_src_pad_buffer_probe, NULL, NULL);
/* Set the pipeline to "playing" state */
g_print("Now playing:");
for (i = 0; i < num_sources; i++)
{
g_print(" %s,", argv[i + 1]);
}
g_print("\n");
gst_element_set_state(pipeline, GST_STATE_PLAYING);
/* Wait till pipeline encounters an error or EOS */
g_print("Running...\n");
g_main_loop_run(loop);
/* Out of the main loop, clean up nicely */
g_print("Returned, stopping playback\n");
gst_element_set_state(pipeline, GST_STATE_NULL);
g_print("Deleting pipeline\n");
gst_object_unref(GST_OBJECT(pipeline));
g_source_remove(bus_watch_id);
g_main_loop_unref(loop);
return 0;
}