deepstream on nvidia v4l2src

Hi,

I am having hard time converting my pipeline to an application. I am a newbie so please bear with me on this. I have my pipeline like this,

gst-launch-1.0 v4l2src ! capsfilter caps= ‘video/x-raw,format=(string)YUY2’ ! nvvidconv ! capsfilter caps= ‘video/x-raw(memory:NVMM),format=(string)NV12’ ! nvvidconv ! capsfilter caps= ‘video/x-raw,format=(string)NV12’ ! nvvideoconvert ! capsfilter caps=‘video/x-raw(memory:NVMM),format=(string)NV12’ ! mux.sink_0 nvstreammux live-source=1 name=mux batch-size=1 width=1280 height=720 ! nvinfer config-file-path=/opt/nvidia/deepstream/deepstream-4.0/sources/apps/sample_apps/deepstream-test1/dstest1_pgie_config.txt batch-size=1 ! nvmultistreamtiler rows=1 columns=1 width=1280 height=720 ! nvvideoconvert ! nvdsosd ! nvegltransform ! nveglglessink

I am trying to get the video source directly from the camera source that I attached to nvidia directly. And I wrote a c-application as below for the above pipeline.

#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
#include “gstnvdsmeta.h”

#define MAX_DISPLAY_LEN 64

#define PGIE_CLASS_ID_VEHICLE 0
#define PGIE_CLASS_ID_PERSON 2

gint frame_number = 0;
gchar pgie_classes_str[4][32] = { “Vehicle”, “TwoWheeler”, “Person”,
“Roadsign”
};

static GstPadProbeReturn osd_sink_pad_buffer_probe(GstPad *pad, GstPadProbeInfo *info, gpointer u_data)
{
GstBuffer *buf=(GstBuffer *)info->data;
guint num_rects =0;
NvDsObjectMeta *obj_meta = NULL;
guint vehicle_count = 0;
guint person_count = 0;
NvDsMetaList * l_frame = NULL;
NvDsMetaList * l_obj = NULL;
NvDsDisplayMeta *display_meta = NULL;

NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta (buf);

for (l_frame = batch_meta->frame_meta_list; l_frame != NULL;
  l_frame = l_frame->next) {
    NvDsFrameMeta *frame_meta = (NvDsFrameMeta *) (l_frame->data);
    int offset = 0;
    for (l_obj = frame_meta->obj_meta_list; l_obj != NULL;
            l_obj = l_obj->next) {
        obj_meta = (NvDsObjectMeta *) (l_obj->data);
        if (obj_meta->class_id == PGIE_CLASS_ID_VEHICLE) {
            vehicle_count++;
            num_rects++;
        }
        if (obj_meta->class_id == PGIE_CLASS_ID_PERSON) {
            person_count++;
            num_rects++;
        }
    }
    display_meta = nvds_acquire_display_meta_from_pool(batch_meta);
    NvOSD_TextParams *txt_params  = &display_meta->text_params[0];
    display_meta->num_labels = 1;
    txt_params->display_text = g_malloc0 (MAX_DISPLAY_LEN);
    offset = snprintf(txt_params->display_text, MAX_DISPLAY_LEN, "Person = %d ", person_count);
    offset = snprintf(txt_params->display_text + offset , MAX_DISPLAY_LEN, "Vehicle = %d ", vehicle_count);

    /* Now set the offsets where the string should appear */
    txt_params->x_offset = 10;
    txt_params->y_offset = 12;

    /* Font , font-color and font-size */
    txt_params->font_params.font_name = "Serif";
    txt_params->font_params.font_size = 10;
    txt_params->font_params.font_color.red = 1.0;
    txt_params->font_params.font_color.green = 1.0;
    txt_params->font_params.font_color.blue = 1.0;
    txt_params->font_params.font_color.alpha = 1.0;

    /* Text background color */
    txt_params->set_bg_clr = 1;
    txt_params->text_bg_clr.red = 0.0;
    txt_params->text_bg_clr.green = 0.0;
    txt_params->text_bg_clr.blue = 0.0;
    txt_params->text_bg_clr.alpha = 1.0;

    nvds_add_display_meta_to_frame(frame_meta, display_meta);
}

g_print ("Frame Number = %d Number of objects = %d "
        "Vehicle Count = %d Person Count = %d\n",
        frame_number, num_rects, vehicle_count, person_count);
frame_number++;
return GST_PAD_PROBE_OK;

}

int main(int argc, char *argv)
{
GstElement *pipeline, *source, *filter1, *convert,*filter2, *filter3, *vidconv, *filter4, *mux, *infer, *tiler, *osd, *transform , *sink, *bin, *convert2 , *vidconv2;
GMainLoop *loop;
GstCaps *caps1, *caps2, *caps3, *caps4;
GstPad *osd_sink_pad =NULL, *srcpad, *sinkpad;

loop = g_main_loop_new(NULL,FALSE);
gst_init(&argc, &argv);
pipeline = gst_pipeline_new(“nv_pipeline”);
gchar *string1 = “video/x-raw(memory:NVMM),format=(string)NV12”;

source = gst_element_factory_make(“v4l2src”, “source”);
filter1 = gst_element_factory_make(“capsfilter”, “filter1”);
convert = gst_element_factory_make(“nvvidconv”, “convert”);
filter2 = gst_element_factory_make(“capsfilter”, “filter2”);
filter3 = gst_element_factory_make(“capsfilter”, “filter3”);
filter4 = gst_element_factory_make(“capsfilter”, “filter4”);
vidconv = gst_element_factory_make(“nvvideoconvert”, “vidconv”);
mux = gst_element_factory_make(“nvstreammux”, “mux”);
infer = gst_element_factory_make(“nvinfer”, “infer”);
tiler = gst_element_factory_make(“nvmultistreamtiler”, “tiler”);
osd = gst_element_factory_make(“nvosd”, “osd”);
transform = gst_element_factory_make(“nvegltransform”, “transform”);
sink = gst_element_factory_make(“nveglglessink”, “sink”);
convert2 = gst_element_factory_make(“nvvidconv”, “convert2”);
vidconv2 = gst_element_factory_make(“nvvideoconvert”, “vidconv2”);

gst_bin_add_many(GST_BIN(pipeline), source,filter1,convert,filter2, convert2,filter3,vidconv, filter4,mux,infer, tiler,vidconv2, osd,transform,sink,NULL);

gst_element_link_many(source,filter1,convert,filter2, convert2,filter3, vidconv, filter4,mux,infer, tiler,vidconv2, osd,transform,sink,NULL);

osd_sink_pad = gst_element_get_static_pad(osd, “sink”);
gst_pad_add_probe(osd_sink_pad, GST_PAD_PROBE_TYPE_BUFFER, osd_sink_pad_buffer_probe, NULL, NULL);

caps1 = gst_caps_new_simple(“video/x-raw”, “format”,G_TYPE_STRING,“YUY2”,NULL);
caps2 = gst_caps_from_string(string1);
caps3 = gst_caps_new_simple(“video/x-raw”, “format”, G_TYPE_STRING,“NV12”, NULL);
caps4 = gst_caps_from_string(string1);

g_object_set(G_OBJECT(filter1), “caps”, caps1, NULL);
g_object_set(G_OBJECT(filter2), “caps”, caps2, NULL);
g_object_set(G_OBJECT(filter3), “caps”, caps3, NULL);
g_object_set(G_OBJECT(filter4), “caps”, caps4, NULL);

g_object_set(G_OBJECT(mux), “live-source”, 1, “name”, “mux”, “batch-size”, 1, “width”, 1280, “height”, 720, NULL);
g_object_set(G_OBJECT(infer), “config-file-path”,“/opt/nvidia/deepstream/deepstream-4.0/sources/apps/sample_apps/deepstream-test1/dstest1_pgie_config.txt”,NULL);
g_object_set(G_OBJECT(infer), “batch-size”, 1, NULL);
g_object_set(G_OBJECT(tiler), “rows”, 1, “columns”, 1, “width”, 1280, “height”, 720, NULL);

gst_caps_unref(caps1);
gst_caps_unref(caps2);
gst_caps_unref(caps3);
gst_caps_unref(caps4);

gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_print(“Running …\n”);
g_main_loop_run(loop);

gst_element_set_state(pipeline,GST_STATE_NULL);
gst_object_unref(pipeline);
return 0;
}

I am not sure where I should apply pads, and bins. Please help. Thanks!