I have bought a camera (Dahua) that is not UVC-compliant (it is a GenICam camera). In order to make it work with DeepStream SDK 4, I have installed a new source element named “aravissrc”, and then I successfully connected it to DeepStream's YOLO by running the following GStreamer command:
Thank you very much indeed for your response. I have spent several days on implementing this. However, I have not been successful. It's mostly because the
deepstream_source_bin.c
uses so many functions to produce the Gstreamer code and also because I have not written the deepstream myself. Is there any quick and dirty way to do this?
Hi,
We don’t have this kind of camera. You may see if other users can share experience. Since the applications are all open source, users may use similar cameras and can share you tips/guidance. We encourage users on customization and integration.
I have changed the code deepstream_custom.c to read frames from my camera (using aravissrc) and show them in a window, which runs successfully:
/*
* Copyright (c) 2019 NVIDIA Corporation. All rights reserved.
*
* NVIDIA Corporation and its licensors retain all intellectual property
* and proprietary rights in and to this software, related documentation
* and any modifications thereto. Any use, reproduction, disclosure or
* distribution of this software and related documentation without an express
* license agreement from NVIDIA Corporation is strictly prohibited.
*
*/
#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
#include "gstnvdsmeta.h"
/* The muxer output resolution must be set if the input streams will be of
* different resolution. The muxer will scale all the input frames to this
* resolution. */
#define MUXER_OUTPUT_WIDTH 1920
#define MUXER_OUTPUT_HEIGHT 1080
/* Muxer batch formation timeout in microseconds (4000000 us = 4 s here).
* Should ideally be set based on the fastest source's framerate. */
#define MUXER_BATCH_TIMEOUT_USEC 4000000
/* Bus watch callback: quits the main loop on end-of-stream or on a
 * pipeline error, printing the error (and any debug detail) first.
 * Always returns TRUE so the watch stays installed. */
static gboolean
bus_call (GstBus * bus, GstMessage * msg, gpointer data) {
  GMainLoop *main_loop = (GMainLoop *) data;
  GstMessageType type = GST_MESSAGE_TYPE (msg);

  if (type == GST_MESSAGE_EOS) {
    g_print ("End of stream\n");
    g_main_loop_quit (main_loop);
  } else if (type == GST_MESSAGE_ERROR) {
    GError *err = NULL;
    gchar *dbg_info = NULL;

    /* Both out-parameters are allocated by the parse call; free below. */
    gst_message_parse_error (msg, &err, &dbg_info);
    g_printerr ("ERROR from element %s: %s\n",
        GST_OBJECT_NAME (msg->src), err->message);
    if (dbg_info)
      g_printerr ("Error details: %s\n", dbg_info);
    g_free (dbg_info);
    g_error_free (err);
    g_main_loop_quit (main_loop);
  }

  return TRUE;
}
/* Camera-preview sample: links aravissrc directly to the EGL sink through an
 * RGB caps filter. argv[1] is printed as the "config file" but is only passed
 * to the (unlinked) nvinfer element; argv[2] is the GenICam camera name set
 * on aravissrc's "camera-name" property.
 * NOTE(review): h264parser, decoder, streammux, pgie, nvvidconv and nvdsosd
 * are created (and on the non-Tegra path added to the bin) but never carry
 * data — only the source -> sink link is active. */
int
main (int argc, char *argv[]) {
GMainLoop *loop = NULL;
GstElement *pipeline = NULL, *source = NULL, *h264parser = NULL,
*decoder = NULL, *streammux = NULL, *sink = NULL,
*pgie = NULL, *nvvidconv = NULL, *nvdsosd = NULL;
#ifdef PLATFORM_TEGRA
GstElement *transform = NULL;
#endif
GstBus *bus = NULL;
guint bus_watch_id;
/* NOTE(review): osd_sink_pad is declared but never used in this function. */
GstPad *osd_sink_pad = NULL;
gboolean link_ok;
/* Check input arguments */
if (argc != 3) {
g_printerr ("Usage: %s config_file <H264 filename>\n", argv[0]);
return -1;
}
/* Standard GStreamer initialization */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Create gstreamer elements */
/* Create Pipeline element that will form a connection of other elements */
pipeline = gst_pipeline_new ("ds-custom-pipeline");
/* GenICam camera source (Aravis); element name kept from the file sample. */
source = gst_element_factory_make ("aravissrc", "file-source");
// GstCaps *vid_caps = gst_caps_new_simple("video/x-raw(memory:NVMM)",
// "format", G_TYPE_STRING, "RGB","width", G_TYPE_INT, 1280,"height", G_TYPE_INT, 720, NULL);
/* Raw RGB caps at 2592x1944 — presumably the camera's native output;
 * TODO confirm against the device. */
GstCaps *vid_caps = gst_caps_new_simple("video/x-raw",
"format", G_TYPE_STRING, "RGB","width", G_TYPE_INT, 2592,"height", G_TYPE_INT, 1944 , NULL);
/* Since the data format in the input file is elementary h264 stream,
* we need a h264parser */
h264parser = gst_element_factory_make ("h264parse", "h264-parser");
/* Use nvdec_h264 for hardware accelerated decode on GPU */
decoder = gst_element_factory_make ("nvv4l2decoder", "nvv4l2-decoder");
g_object_set (G_OBJECT (decoder), "bufapi-version", 1, NULL);
/* Create nvstreammux instance to form batches from one or more sources. */
streammux = gst_element_factory_make ("nvstreammux", "stream-muxer");
if (!pipeline || !streammux) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
/* Use nvinfer to run inferencing on decoder's output,
* behaviour of inferencing is set through config file */
pgie = gst_element_factory_make ("nvinfer", "primary-nvinference-engine");
/* Use convertor to convert from NV12 to RGBA as required by nvdsosd */
nvvidconv = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter");
/* Create OSD to draw on the converted RGBA buffer */
nvdsosd = gst_element_factory_make ("nvdsosd", "nv-onscreendisplay");
/* Finally render the osd output */
#ifdef PLATFORM_TEGRA
transform = gst_element_factory_make ("nvegltransform", "nvegl-transform");
#endif
sink = gst_element_factory_make ("nveglglessink", "nvvideo-renderer");
// if (!source || !h264parser || !decoder || !pgie
// || !nvvidconv || !nvdsosd || !sink) {
// g_printerr ("One element could not be created. Exiting.\n");
// return -1;
// }
if (!source ) {
g_printerr ("source could not be created. Exiting.\n");
return -1;
}
#ifdef PLATFORM_TEGRA
if(!transform) {
g_printerr ("One tegra element could not be created. Exiting.\n");
return -1;
}
#endif
/* argv[2] carries the camera name (not a filename) for aravissrc. */
g_object_set (G_OBJECT (source), "camera-name", argv[2], NULL);
g_object_set (G_OBJECT (streammux), "width", MUXER_OUTPUT_WIDTH, "height",
MUXER_OUTPUT_HEIGHT, "batch-size", 1,
"batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);
/* Set all the necessary properties of the nvinfer element,
* the necessary ones are : */
g_object_set (G_OBJECT (pgie),
"config-file-path", argv[1], NULL);
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/* Set up the pipeline */
/* we add all elements into the pipeline */
#ifdef PLATFORM_TEGRA
gst_bin_add_many (GST_BIN (pipeline),
source, sink, NULL);
#else
gst_bin_add_many (GST_BIN (pipeline),
source, h264parser, decoder, streammux, pgie,
nvvidconv, nvdsosd, sink, NULL);
#endif
/* Direct source -> sink link constrained to the RGB caps built above. */
link_ok = gst_element_link_filtered(source,sink, vid_caps);
gst_caps_unref (vid_caps);
if (!link_ok) {
g_warning ("Failed to link peyman caps!");
}
/* NOTE(review): the decoder -> streammux pad link below is dead wiring in
 * this preview build — on Tegra neither element was added to the bin, and
 * nothing feeds the decoder on either path. Verify before reusing. */
GstPad *sinkpad, *srcpad;
gchar pad_name_sink[16] = "sink_0";
gchar pad_name_src[16] = "src";
sinkpad = gst_element_get_request_pad (streammux, pad_name_sink);
if (!sinkpad) {
g_printerr ("Streammux request sink pad failed. Exiting.\n");
return -1;
}
srcpad = gst_element_get_static_pad (decoder, pad_name_src);
if (!srcpad) {
g_printerr ("Decoder request src pad failed. Exiting.\n");
return -1;
}
if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
g_printerr ("Failed to link decoder to stream muxer. Exiting.\n");
return -1;
}
gst_object_unref (sinkpad);
gst_object_unref (srcpad);
//if (!gst_element_link_many ( source,nvvidconv, NULL)) {
// g_printerr ("Elements peyman source, nvvidconv could not be linked: 2. Exiting.\n");
// return -1;
// }
// if (!gst_element_link_many ( nvvidconv, sink, NULL)) {
// g_printerr ("Elements peyman nvvidconv, sink, could not be linked: 2. Exiting.\n");
// return -1;
//}
/* Set the pipeline to "playing" state */
g_print ("Now playing: %s\n", argv[1]);
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Wait till pipeline encounters an error or EOS */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref (loop);
return 0;
}
now I would like to add nvinfer to be able to detect objects, using yololite (I have changed some of the codes related to yolov2-tiny to make deepstream sdk4 work with yololite, and it is successful).
again, I have modified the deepstream_custom.c to implement this. My code is as follows (please pay more attention to the bolded parts):
/*
* Copyright (c) 2019 NVIDIA Corporation. All rights reserved.
*
* NVIDIA Corporation and its licensors retain all intellectual property
* and proprietary rights in and to this software, related documentation
* and any modifications thereto. Any use, reproduction, disclosure or
* distribution of this software and related documentation without an express
* license agreement from NVIDIA Corporation is strictly prohibited.
*
*/
#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
#include "gstnvdsmeta.h"
/* The muxer output resolution must be set if the input streams will be of
* different resolution. The muxer will scale all the input frames to this
* resolution. */
#define MUXER_OUTPUT_WIDTH 2592
#define MUXER_OUTPUT_HEIGHT 2048
/* Muxer batch formation timeout in microseconds (4000000 us = 4 s here).
* Should ideally be set based on the fastest source's framerate. */
#define MUXER_BATCH_TIMEOUT_USEC 4000000
/* Pipeline bus handler. Terminates the main loop when the stream ends or an
 * element posts an error; the error message and optional debug string are
 * printed to stderr. Returning TRUE keeps the watch alive. */
static gboolean
bus_call (GstBus * bus, GstMessage * msg, gpointer data) {
  GMainLoop *run_loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR: {
      GError *err = NULL;
      gchar *dbg = NULL;

      gst_message_parse_error (msg, &err, &dbg);
      g_printerr ("ERROR from element %s: %s\n",
          GST_OBJECT_NAME (msg->src), err->message);
      if (dbg)
        g_printerr ("Error details: %s\n", dbg);
      /* parse_error transfers ownership of both out-params to us. */
      g_free (dbg);
      g_error_free (err);
      g_main_loop_quit (run_loop);
      break;
    }
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (run_loop);
      break;
    default:
      break;
  }
  return TRUE;
}
/* Link element1 -> element2, constrained to either raw RGB frames in system
 * memory or NV12 frames in NVMM (device) memory, both at 2592x1944.
 * Returns TRUE on success; logs a warning and returns FALSE otherwise.
 *
 * Fix: "memory:NVMM" is a caps *feature*, not part of the media-type name.
 * Passing "video/x-raw(memory:NVMM)" to gst_structure_new() creates an
 * invalid structure name; the feature must be attached with
 * gst_caps_set_features() (as the final working pipeline does). */
static gboolean
link_elements_with_filter_complex (GstElement *element1, GstElement *element2)
{
  gboolean link_ok;
  GstCaps *caps;

  caps = gst_caps_new_full (
      gst_structure_new ("video/x-raw",
          "format", G_TYPE_STRING, "RGB",
          "width", G_TYPE_INT, 2592, "height", G_TYPE_INT, 1944, NULL),
      gst_structure_new ("video/x-raw",
          "format", G_TYPE_STRING, "NV12",
          "width", G_TYPE_INT, 2592, "height", G_TYPE_INT, 1944, NULL),
      NULL);
  /* Mark the second structure as NVMM device memory; the caps take
   * ownership of the features object. */
  gst_caps_set_features (caps, 1, gst_caps_features_new ("memory:NVMM", NULL));

  link_ok = gst_element_link_filtered (element1, element2, caps);
  gst_caps_unref (caps);
  if (!link_ok) {
    g_warning ("Failed to link element1 and element2!");
  }
  return link_ok;
}
/* Link element1 -> element2 constrained to NV12 2592x1944 @ 25 fps in NVMM
 * (device) memory. Returns TRUE on success; warns and returns FALSE on
 * failure.
 *
 * Fix: "memory:NVMM" must be attached as a GstCapsFeatures, not embedded in
 * the media-type string passed to gst_caps_new_simple() — the embedded form
 * produces caps that never match. */
static gboolean
link_elements_with_filter_simple (GstElement *element1, GstElement *element2)
{
  gboolean link_ok;
  GstCaps *caps;

  caps = gst_caps_new_simple ("video/x-raw",
      "format", G_TYPE_STRING, "NV12",
      "width", G_TYPE_INT, 2592, "height", G_TYPE_INT, 1944,
      "framerate", GST_TYPE_FRACTION, 25, 1, NULL);
  /* Caps take ownership of the features object. */
  gst_caps_set_features (caps, 0, gst_caps_features_new ("memory:NVMM", NULL));

  link_ok = gst_element_link_filtered (element1, element2, caps);
  gst_caps_unref (caps);
  if (!link_ok) {
    g_warning ("Failed to link element1 and element2!");
  }
  return link_ok;
}
/* Build and run the camera -> inference -> display pipeline:
 *   aravissrc -> nvvideoconvert -> capsfilter -> nvstreammux -> nvinfer
 *     -> nvmultistreamtiler -> nvvideoconvert -> nvdsosd
 *     [-> nvegltransform on Tegra] -> nveglglessink
 *
 * argv[1]: nvinfer configuration file path
 * argv[2]: GenICam camera name for aravissrc's "camera-name" property
 * Returns 0 on clean shutdown, -1 on any setup failure.
 *
 * Fixes vs. the pasted version: removed the stray BBCode [b]/[/b] markers
 * that broke compilation, gave the non-Tegra build its own add/link path
 * (it previously referenced the Tegra-only `transform` element and never
 * added capsfilter to the bin), and cleaned up debug-residue messages. */
int
main (int argc, char *argv[]) {
  GMainLoop *loop = NULL;
  GstElement *pipeline = NULL, *source = NULL, *qtdemux = NULL,
      *capsfilter = NULL, *nvvidconv2 = NULL, *nvmultistreamtiler = NULL,
      *h264parser = NULL, *decoder = NULL, *streammux = NULL, *sink = NULL,
      *pgie = NULL, *nvvidconv = NULL, *nvdsosd = NULL;
#ifdef PLATFORM_TEGRA
  GstElement *transform = NULL;
#endif
  GstBus *bus = NULL;
  guint bus_watch_id;

  /* Check input arguments */
  if (argc != 3) {
    g_printerr ("Usage: %s config_file <H264 filename>\n", argv[0]);
    return -1;
  }

  /* Standard GStreamer initialization */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  pipeline = gst_pipeline_new ("ds-custom-pipeline");

  /* GenICam camera source (Aravis). */
  source = gst_element_factory_make ("aravissrc", "file-source");

  /* Caps filter forcing NV12 at the muxer resolution between nvvideoconvert
   * and nvstreammux.
   * NOTE(review): these caps lack the memory:NVMM feature; the final working
   * version attaches it via gst_caps_features_new()/gst_caps_set_features()
   * — confirm which form your converter/muxer combination requires. */
  capsfilter = gst_element_factory_make ("capsfilter", "caps");
  g_object_set (capsfilter, "caps",
      gst_caps_new_simple ("video/x-raw",
          "format", G_TYPE_STRING, "NV12",
          "width", G_TYPE_INT, 2592,
          "height", G_TYPE_INT, 2048, NULL),
      NULL);

  /* Leftovers from the file-based sample: created (and the decoder
   * configured) but never linked into the camera data path. */
  qtdemux = gst_element_factory_make ("qtdemux", "qtdemux-1");
  h264parser = gst_element_factory_make ("h264parse", "h264-parser");
  decoder = gst_element_factory_make ("nvv4l2decoder", "nvv4l2-decoder");
  g_object_set (G_OBJECT (decoder), "bufapi-version", 1, NULL);

  /* 1x1 tiler: a single stream composited at full muxer resolution. */
  nvmultistreamtiler =
      gst_element_factory_make ("nvmultistreamtiler", "nvmultistreamtiler-1");
  g_object_set (G_OBJECT (nvmultistreamtiler), "rows", 1, "columns", 1,
      "width", MUXER_OUTPUT_WIDTH, "height", MUXER_OUTPUT_HEIGHT, NULL);

  /* Create nvstreammux instance to form batches from one or more sources. */
  streammux = gst_element_factory_make ("nvstreammux", "stream-muxer");
  if (!pipeline || !streammux) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  /* Inference element; its behaviour comes from the config file (argv[1]). */
  pgie = gst_element_factory_make ("nvinfer", "primary-nvinference-engine");
  /* Converters: camera output -> muxer input, and NV12 -> RGBA for the OSD. */
  nvvidconv = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter");
  nvvidconv2 = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter_1");
  nvdsosd = gst_element_factory_make ("nvdsosd", "nv-onscreendisplay");
#ifdef PLATFORM_TEGRA
  transform = gst_element_factory_make ("nvegltransform", "nvegl-transform");
#endif
  sink = gst_element_factory_make ("nveglglessink", "nvvideo-renderer");
  g_object_set (G_OBJECT (sink), "sync", 0, NULL);

  if (!source) {
    g_printerr ("source could not be created. Exiting.\n");
    return -1;
  }
  if (!qtdemux) {
    g_printerr ("qtdemux could not be created. Exiting.\n");
    return -1;
  }
  if (!capsfilter) {
    g_printerr ("caps filter could not be created. Exiting.\n");
    return -1;
  }
#ifdef PLATFORM_TEGRA
  if (!transform) {
    g_printerr ("One tegra element could not be created. Exiting.\n");
    return -1;
  }
#endif

  /* argv[2] carries the camera name (e.g. "U3V-00D24866386"). */
  g_object_set (G_OBJECT (source), "camera-name", argv[2], NULL);
  g_object_set (G_OBJECT (streammux), "width", MUXER_OUTPUT_WIDTH, "height",
      MUXER_OUTPUT_HEIGHT, "batch-size", 1,
      "batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);
  g_object_set (G_OBJECT (pgie), "config-file-path", argv[1], NULL);

  /* Watch the bus so bus_call() can stop the loop on EOS/error. */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* Add every element that is linked below; only the Tegra build inserts
   * nvegltransform between the OSD and the EGL sink. */
#ifdef PLATFORM_TEGRA
  gst_bin_add_many (GST_BIN (pipeline),
      source, nvvidconv, capsfilter, streammux, pgie,
      nvmultistreamtiler, nvvidconv2, nvdsosd, transform, sink, NULL);
#else
  gst_bin_add_many (GST_BIN (pipeline),
      source, nvvidconv, capsfilter, streammux, pgie,
      nvmultistreamtiler, nvvidconv2, nvdsosd, sink, NULL);
#endif

  /* nvstreammux only exposes request pads, so link capsfilter:src to
   * streammux:sink_0 by hand. */
  GstPad *sinkpad, *srcpad;
  gchar pad_name_sink[16] = "sink_0";
  gchar pad_name_src[16] = "src";
  sinkpad = gst_element_get_request_pad (streammux, pad_name_sink);
  if (!sinkpad) {
    g_printerr ("Streammux request sink pad failed. Exiting.\n");
    return -1;
  }
  srcpad = gst_element_get_static_pad (capsfilter, pad_name_src);
  if (!srcpad) {
    g_printerr ("Capsfilter static src pad failed. Exiting.\n");
    return -1;
  }
  if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
    g_printerr ("Failed to link capsfilter to stream muxer. Exiting.\n");
    return -1;
  }
  gst_object_unref (sinkpad);
  gst_object_unref (srcpad);

  if (!gst_element_link_many (source, nvvidconv, capsfilter, NULL)) {
    g_printerr ("Elements source, nvvidconv, capsfilter could not be linked: 2. Exiting.\n");
    return -1;
  }
#ifdef PLATFORM_TEGRA
  if (!gst_element_link_many (streammux, pgie, nvmultistreamtiler, nvvidconv2,
          nvdsosd, transform, sink, NULL)) {
    g_printerr ("Elements streammux,pgie,multistreamtiler,vidconv2,osd could not be linked: 2. Exiting.\n");
    return -1;
  }
#else
  if (!gst_element_link_many (streammux, pgie, nvmultistreamtiler, nvvidconv2,
          nvdsosd, sink, NULL)) {
    g_printerr ("Elements streammux,pgie,multistreamtiler,vidconv2,osd could not be linked: 2. Exiting.\n");
    return -1;
  }
#endif

  /* Set the pipeline to "playing" state */
  g_print ("Now playing: %s\n", argv[1]);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait till pipeline encounters an error or EOS */
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);
  return 0;
}
although I think the above code should be correct, when I run it, it opens up the display window, but it quickly closes and gives me the following error:
$ ./deepstream-custom /opt/nvidia/deepstream/deepstream-4.0/sources/deepstream_4.x_apps_another_copy/config_infer_primary_yoloV2_tiny.txt "U3V-00D24866386"
Now playing: /opt/nvidia/deepstream/deepstream-4.0/sources/deepstream_4.x_apps_another_copy/config_infer_primary_yoloV2_tiny.txt
Using winsys: x11
Creating LL OSD context new
Running...
(deepstream-custom:13866): GLib-GObject-WARNING **: 09:15:33.386: g_object_set_is_valid_property: object class 'ArvUvStream' has no property named 'packet-resend'
Creating LL OSD context new
ERROR from element file-source: Internal data stream error.
Error details: gstbasesrc.c(3055): gst_base_src_loop (): /GstPipeline:ds-custom-pipeline/GstAravis:file-source:
streaming stopped, reason error (-5)
Returned, stopping playback
Deleting pipeline
What do you think I am missing here? (Please note that the GStreamer command shown earlier runs successfully.)
I finally solved it!
I needed to set the format of the input caps; it now works well.
This can work with a variety of different cameras such as Dahua, Hikvision, Basler, etc.
/*
* Copyright (c) 2019 NVIDIA Corporation. All rights reserved.
*
* NVIDIA Corporation and its licensors retain all intellectual property
* and proprietary rights in and to this software, related documentation
* and any modifications thereto. Any use, reproduction, disclosure or
* distribution of this software and related documentation without an express
* license agreement from NVIDIA Corporation is strictly prohibited.
*
*/
#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
#include "gstnvdsmeta.h"
/* The muxer output resolution must be set if the input streams will be of
* different resolution. The muxer will scale all the input frames to this
* resolution. */
#define MUXER_OUTPUT_WIDTH 2592
#define MUXER_OUTPUT_HEIGHT 2048
/* Muxer batch formation timeout in microseconds (400000000 us = 400 s here).
* Should ideally be set based on the fastest source's framerate. */
#define MUXER_BATCH_TIMEOUT_USEC 400000000
/* Bus watch: stops the main loop when the pipeline reports EOS or an error.
 * The error branch prints the failing element, message, and optional debug
 * detail before quitting. Always returns TRUE to keep the watch installed. */
static gboolean
bus_call (GstBus * bus, GstMessage * msg, gpointer data) {
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
/* gst_message_parse_error() allocates both out-params; freed below. */
gst_message_parse_error (msg, &error, &debug);
g_printerr ("ERROR from element %s: %s\n",
GST_OBJECT_NAME (msg->src), error->message);
if (debug)
g_printerr ("Error details: %s\n", debug);
g_free (debug);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
int
main (int argc, char *argv[]) {
GMainLoop *loop = NULL;
GstElement *pipeline = NULL, *source = NULL,*capsfilter=NULL ,*nvvidconv2=NULL,*nvmultistreamtiler=NULL , *streammux = NULL, *sink = NULL,*pgie = NULL, *nvvidconv = NULL, *nvdsosd = NULL;
#ifdef PLATFORM_TEGRA
GstElement *transform = NULL;
#endif
GstBus *bus = NULL;
guint bus_watch_id;
GstPad *osd_sink_pad = NULL;
gboolean link_ok;
GstCaps *caps = NULL;
GstCaps *lnvcaps = NULL;
GstCapsFeatures *feature = NULL;
/* Check input arguments */
if (argc != 3) {
g_printerr ("Usage: %s config_file <H264 filename>\n", argv[0]);
return -1;
}
/* Standard GStreamer initialization */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Create gstreamer elements */
/* Create Pipeline element that will form a connection of other elements */
pipeline = gst_pipeline_new ("ds-custom-pipeline");
/* Source element for reading from the file */
source = gst_element_factory_make ("aravissrc", "camera");
/* creating suitable capsfilter */
capsfilter = gst_element_factory_make("capsfilter", "caps");
caps = gst_caps_new_simple("video/x-raw","format", G_TYPE_STRING, "RGB",
"width", G_TYPE_INT, MUXER_OUTPUT_WIDTH, "height", G_TYPE_INT, MUXER_OUTPUT_HEIGHT,
"framerate", GST_TYPE_FRACTION, 30,1, NULL);
lnvcaps = gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING,"NV12",
"width", G_TYPE_INT, MUXER_OUTPUT_WIDTH, "height", G_TYPE_INT, MUXER_OUTPUT_HEIGHT,
"framerate", GST_TYPE_FRACTION, 30,1, NULL);
feature = gst_caps_features_new ("memory:NVMM", NULL);
gst_caps_set_features (lnvcaps, 0, feature);
g_object_set (G_OBJECT (capsfilter), "caps", lnvcaps, NULL);
/* creating multi-stream-tiler */
nvmultistreamtiler=gst_element_factory_make ("nvmultistreamtiler", "nvmultistreamtiler-1");
g_object_set (G_OBJECT (nvmultistreamtiler),"rows",1,"columns",1 ,"width", MUXER_OUTPUT_WIDTH, "height",
MUXER_OUTPUT_HEIGHT, NULL);
/* Create nvstreammux instance to form batches from one or more sources. */
streammux = gst_element_factory_make ("nvstreammux", "stream-muxer");
if (!pipeline || !streammux) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
/* creating primary classifier */
pgie = gst_element_factory_make ("nvinfer", "primary-nvinference-engine");
/* Use convertor to convert from NV12 to RGBA as required by nvdsosd */
nvvidconv = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter");
nvvidconv2 = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter_1");
/* Create OSD to draw on the converted RGBA buffer */
nvdsosd = gst_element_factory_make ("nvdsosd", "nv-onscreendisplay");
/* create transform element for tegra */
#ifdef PLATFORM_TEGRA
transform = gst_element_factory_make ("nvegltransform", "nvegl-transform");
#endif
/* create sink element */
sink = gst_element_factory_make ("nveglglessink", "nvvideo-renderer");
g_object_set (G_OBJECT (sink), "sync", 0, NULL);
/* check source creation */
if (!source ) {
g_printerr ("source could not be created. Exiting.\n");
return -1;
}
/* check capsfilter creation */
if (!capsfilter ) {
g_printerr ("caps filter could not be created. Exiting.\n");
return -1;
}
/* check transform creation */
#ifdef PLATFORM_TEGRA
if(!transform) {
g_printerr ("One tegra element could not be created. Exiting.\n");
return -1;
}
#endif
/* we set the input camera name to the source element */
g_object_set (G_OBJECT (source), "camera-name", argv[2], NULL);
g_object_set (G_OBJECT (streammux), "width", MUXER_OUTPUT_WIDTH, "height",
MUXER_OUTPUT_HEIGHT, "batch-size", 1,
"batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);
/* Set all the necessary properties of the nvinfer element,
* the necessary ones are : */
g_object_set (G_OBJECT (pgie),
"config-file-path", argv[1], NULL);
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/* Set up the pipeline */
/* we add all elements into the pipeline */
#ifdef PLATFORM_TEGRA
gst_bin_add_many (GST_BIN (pipeline),
source ,nvvidconv,capsfilter ,streammux,pgie, nvmultistreamtiler,nvvidconv2,nvdsosd,transform,sink, NULL);
#endif
/* add pad from capsfilter to streammux (streammux requires pad in its input) */
GstPad *sinkpad, *srcpad;
gchar pad_name_sink[16] = "sink_0";
gchar pad_name_src[16] = "src";
sinkpad = gst_element_get_request_pad (streammux, pad_name_sink);
if (!sinkpad) {
g_printerr ("Streammux request sink pad failed. Exiting.\n");
return -1;
}
srcpad = gst_element_get_static_pad (capsfilter, pad_name_src);
if (!srcpad) {
g_printerr ("Decoder request src pad failed. Exiting.\n");
return -1;
}
if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
g_printerr ("Failed to link capsfilter to stream muxer. Exiting.\n");
return -1;
}
gst_object_unref (sinkpad);
gst_object_unref (srcpad);
if (!gst_element_link_many ( source ,nvvidconv,capsfilter , NULL)) {
g_printerr ("Elements combination source->nvvidconv->capsfilter could not be created: 2. Exiting.\n");
return -1;
}
if (!gst_element_link_many (streammux,pgie, nvmultistreamtiler,nvvidconv2,nvdsosd,transform,sink , NULL)) {
g_printerr ("Elements combination streammux->pgie->nvmultistreamtiler->nvvidconv2->nvdsosd->transform->sink could not be created: 2. Exiting.\n");
return -1;
}
/* Set the pipeline to "playing" state */
g_print ("Now playing: %s\n", argv[1]);
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Wait till pipeline encounters an error or EOS */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref (loop);
return 0;
}