/*
 * If stale plugins are picked up, clear the GStreamer registry cache:
 *   sudo rm -rf ~/.cache/gstreamer-1.0
 *   sudo rm ${HOME}/.cache/gstreamer-1.0/registry.aarch64.bin
 *
 * Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <math.h>
#include <unistd.h>
#include <sys/time.h>

#include "deepstream_app.h"
//#include "deepstream_config_file_parser.h"
#include "nvds_version.h"
#include "gstnvdsmeta.h"
#include "nvdsmeta_schema.h"
#include "deepstream_test5_app.h"

#define MAX_DISPLAY_LEN (64)

#define INFERENCE "inference.txt"
#define LABELSFILE "labels.txt"
#define ANALYTICS "analytics.txt"
#define MESSAGE "messages.txt"
#define KAFKA "kafka.txt"

#define LIB_TRACKER "/opt/nvidia/deepstream/deepstream-5.0/lib/libnvds_mot_klt.so"
#define PROTO_LIB "/opt/nvidia/deepstream/deepstream-5.0/lib/libnvds_kafka_proto.so"
#define MSG2P_LIB "/opt/nvidia/deepstream/deepstream-5.0/sources/libs/nvmsgconv/libnvds_msgconv.so"

#define CONN_STR "192.168.0.21;9092;quickstart-events"
#define TOPIC "quickstart-events"

#define FONT_CLOCK "Arial"
#define CLOCK_COLOR 0xff0000ff

#define DEFAULT_X_WINDOW_WIDTH 1280
#define DEFAULT_X_WINDOW_HEIGHT 720

#define TRACKER_WIDTH 480
#define TRACKER_HEIGHT 288

/* Muxer batch formation timeout, e.g. 40 millisec. Should ideally be set
 * based on the fastest source's framerate. */
#define MUXER_BATCH_TIMEOUT_USEC 40000

#define PGIE_CLASS_ID_PERSON 2

GST_DEBUG_CATEGORY (NVDS_APP);

static gboolean display_off = FALSE;
static gint schema_type = 1;

/* Original deepstream-test5 signature kept for reference:
 * generate_event_msg_meta (gpointer data, gint class_id, gboolean useTs,
 *     GstClockTime ts, MyUserMeta * obj_params, float scaleW, float scaleH,
 *     NvDsFrameMeta * frame_meta)
 */
static void
generate_event_msg_meta (gpointer data, gint class_id, MyUserMeta * obj_params)
{
  NvDsEventMsgMeta *meta = (NvDsEventMsgMeta *) data;

  /* Copy the nvdsanalytics counters into the event message. */
  meta->occupancy = obj_params->lccum_cnt;
  meta->lccum_cnt_entry = obj_params->lcc_cnt_entry;
  meta->lccum_cnt_exit = obj_params->lcc_cnt_exit;
}
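/* A minimal, hedged sketch, not wired into the probe below: the stock
 * deepstream-test4/test5 samples pair NvDsEventMsgMeta user meta with copy
 * and release callbacks assigned to base_meta.copy_func / release_func so
 * that downstream elements can duplicate and free the meta safely. Assuming
 * only the scalar counters set in generate_event_msg_meta() are populated,
 * a shallow copy is sufficient. */
static gpointer
meta_copy_func (gpointer data, gpointer user_data)
{
  NvDsUserMeta *user_meta = (NvDsUserMeta *) data;
  NvDsEventMsgMeta *src_meta = (NvDsEventMsgMeta *) user_meta->user_meta_data;

  /* Shallow copy: no heap-allocated members are used in this application. */
  return g_memdup (src_meta, sizeof (NvDsEventMsgMeta));
}

static void
meta_free_func (gpointer data, gpointer user_data)
{
  NvDsUserMeta *user_meta = (NvDsUserMeta *) data;

  g_free (user_meta->user_meta_data);
  user_meta->user_meta_data = NULL;
}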
/**
 * Probe callback attached downstream of nvdsanalytics (on the OSD sink pad),
 * i.e. once inference, tracking and analytics are done. This is the
 * opportunity to read or modify the metadata: here the line-crossing counts
 * produced by nvdsanalytics are read from the frame user meta and attached
 * as NvDsEventMsgMeta, which nvmsgconv converts into a payload and
 * nvmsgbroker publishes. It should be modified according to the network
 * classes and use case, or removed altogether if not required.
 */

/* Defined in a companion translation unit: walks the frame user-meta list
 * produced by nvdsanalytics and fills the line-crossing counters of
 * MyUserMeta (see deepstream_test5_app.h). */
void my_custom_parse_nvdsanalytics_meta_data (NvDsMetaList * l_user,
    MyUserMeta * data);

static GstPadProbeReturn
bbox_generated_probe_after_analytics (GstPad * pad, GstPadProbeInfo * info,
    gpointer u_data)
{
  NvDsObjectMeta *obj_meta = NULL;
  GstBuffer *buf = (GstBuffer *) info->data;
  NvDsFrameMeta *frame_meta = NULL;
  GstClockTime buffer_pts = 0;
  guint32 stream_id = 0;
  float scaleW = 0;
  float scaleH = 0;
  GList *l;
  NvDsMetaList *l_myuser;

  NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta (buf);
  if (!batch_meta) {
    /* No batch meta attached. */
    return GST_PAD_PROBE_OK;
  }

  for (NvDsMetaList * l_frame = batch_meta->frame_meta_list; l_frame;
      l_frame = l_frame->next) {
    frame_meta = (NvDsFrameMeta *) l_frame->data;
    if (frame_meta == NULL) {
      /* Ignore Null frame meta. */
      continue;
    }
    stream_id = frame_meta->source_id;
    l_myuser = frame_meta->frame_user_meta_list;
    int frameCount = 0;

    /* Copy the nvdsanalytics data attached to this frame into user_data. */
    MyUserMeta *user_data = (MyUserMeta *) g_malloc0 (sizeof (MyUserMeta));
    if (l_myuser != NULL) {
      my_custom_parse_nvdsanalytics_meta_data (l_myuser, user_data);
    }

    for (l = frame_meta->obj_meta_list; l; l = l->next) {
      obj_meta = (NvDsObjectMeta *) l->data;
      if (obj_meta == NULL) {
        /* Ignore Null object. */
        continue;
      }

      /* The frequency of messages to be sent depends on the use case. Here
       * an event carrying the current counts is attached for every detected
       * object in every frame; throttle this as required. */
      NvDsEventMsgMeta *msg_meta =
          (NvDsEventMsgMeta *) g_malloc0 (sizeof (NvDsEventMsgMeta));
      generate_event_msg_meta (msg_meta, PGIE_CLASS_ID_PERSON, user_data);

      NvDsUserMeta *user_event_meta =
          nvds_acquire_user_meta_from_pool (batch_meta);
      if (user_event_meta) {
        user_event_meta->user_meta_data = (void *) msg_meta;
        user_event_meta->base_meta.batch_meta = batch_meta;
        user_event_meta->base_meta.meta_type = NVDS_EVENT_MSG_META;
        nvds_add_user_meta_to_frame (frame_meta, user_event_meta);
      } else {
        g_print ("Error in attaching event meta to buffer\n");
        g_free (msg_meta);
      }
    }
    g_free (user_data);
  }
  return GST_PAD_PROBE_OK;
}

/****************************************************************/
/* Original deepstream-test5 call kept for reference:
 * generate_event_msg_meta (msg_meta, 2, TRUE,   useTs NOTE: pass FALSE for
 *     files without base-timestamp in URI
 *     buffer_pts, user_data, scaleW, scaleH, frame_meta);
 */
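/*
 * The parser declared above is assumed to live in a companion C++
 * translation unit (nvds_analytics_meta.h is a C++ header), built with g++
 * and exported with extern "C" linkage. A minimal sketch is shown here only
 * as a comment: NVDS_USER_FRAME_META_NVDSANALYTICS and the objLCCumCnt field
 * come from the nvdsanalytics plugin headers, while the "Entry"/"Exit"
 * labels and the occupancy arithmetic are assumptions that must match the
 * line-crossing names configured in analytics.txt.
 *
 *   #include "nvds_analytics_meta.h"
 *   #include "deepstream_test5_app.h"
 *
 *   extern "C" void
 *   my_custom_parse_nvdsanalytics_meta_data (NvDsMetaList *l_user,
 *       MyUserMeta *data)
 *   {
 *     for (; l_user != NULL; l_user = l_user->next) {
 *       NvDsUserMeta *user_meta = (NvDsUserMeta *) l_user->data;
 *       if (user_meta->base_meta.meta_type !=
 *           NVDS_USER_FRAME_META_NVDSANALYTICS)
 *         continue;
 *       NvDsAnalyticsFrameMeta *meta =
 *           (NvDsAnalyticsFrameMeta *) user_meta->user_meta_data;
 *       data->lcc_cnt_entry = meta->objLCCumCnt["Entry"];
 *       data->lcc_cnt_exit  = meta->objLCCumCnt["Exit"];
 *       data->lccum_cnt     = data->lcc_cnt_entry - data->lcc_cnt_exit;
 *     }
 *   }
 */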
static gboolean
bus_call (GstBus * bus, GstMessage * msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR:{
      gchar *debug;
      GError *error;
      gst_message_parse_error (msg, &error, &debug);
      g_printerr ("ERROR from element %s: %s\n",
          GST_OBJECT_NAME (msg->src), error->message);
      if (debug)
        g_printerr ("Error details: %s\n", debug);
      g_free (debug);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

/***************************************************************/

int
main (int argc, char *argv[])
{
  /* Initialisation */
  GMainLoop *loop = NULL;
  GstElement *pipeline = NULL, *source = NULL, *h264parser = NULL,
      *decoder = NULL, *nvstreammux = NULL, *sink = NULL, *pgie = NULL,
      *nvvidconv = NULL, *nvosd = NULL;
  GstElement *msgconv = NULL, *msgbroker = NULL, *analytics = NULL,
      *tracker = NULL, *tee = NULL;
  GstElement *queue1 = NULL, *queue2 = NULL;
  GstPad *osd_sink_pad = NULL;
  GstPad *src_pad = NULL;
  GstPad *tee_msg_pad = NULL;
  GstPad *tee_render_pad = NULL;
  GstPad *sink_pad = NULL;
  int i;
  GstBus *bus = NULL;
  guint bus_watch_id;
#ifdef PLATFORM_TEGRA
  GstElement *transform = NULL;
#endif

  /* Check input arguments */
  if (argc != 2) {
    g_printerr ("Usage: %s <H264 filename>\n", argv[0]);
    return -1;
  }

  /* Standard GStreamer initialization */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* Create gstreamer elements */
  /* Create Pipeline element that will form a connection of other elements */
  pipeline = gst_pipeline_new ("dstest1-pipeline");

  /* Source element for reading from the file */
  source = gst_element_factory_make ("filesrc", "file-source");

  /* Since the data format in the input file is elementary h264 stream,
   * we need a h264parser */
  h264parser = gst_element_factory_make ("h264parse", "h264-parser");

  /* Use nvv4l2decoder for hardware accelerated decode on GPU */
  decoder = gst_element_factory_make ("nvv4l2decoder", "nvv4l2-decoder");

  /* Create nvstreammux instance to form batches from one or more sources. */
  nvstreammux = gst_element_factory_make ("nvstreammux", "stream-muxer");

  if (!pipeline || !nvstreammux) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  /* Use nvinfer to run inferencing on decoder's output,
   * behaviour of inferencing is set through config file */
  pgie = gst_element_factory_make ("nvinfer", "primary-nvinference-engine");

  /* Use convertor to convert from NV12 to RGBA as required by nvosd */
  nvvidconv = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter");

  /* analytics */
  analytics = gst_element_factory_make ("nvdsanalytics", "analytics");

  /* tracker */
  tracker = gst_element_factory_make ("nvtracker", "tracker");

  /* Create OSD to draw on the converted RGBA buffer */
  nvosd = gst_element_factory_make ("nvdsosd", "nv-onscreendisplay");

  /* Create msg converter to generate payload from buffer metadata */
  msgconv = gst_element_factory_make ("nvmsgconv", "nvmsg-converter");

  /* Create msg broker to send payload to server */
  msgbroker = gst_element_factory_make ("nvmsgbroker", "nvmsg-broker");

  /* Create tee to render buffer and send message simultaneously */
  tee = gst_element_factory_make ("tee", "nvsink-tee");

  /* Create queues */
  queue1 = gst_element_factory_make ("queue", "nvtee-que1");
  queue2 = gst_element_factory_make ("queue", "nvtee-que2");

  /****************************************************************/
  /* Finally render the osd output */
#ifdef PLATFORM_TEGRA
  transform = gst_element_factory_make ("nvegltransform", "nvegl-transform");
#endif
  sink = gst_element_factory_make ("nveglglessink", "nvvideo-renderer");

  if (!source || !h264parser || !decoder || !pgie || !nvvidconv || !analytics
      || !tracker || !nvosd || !msgconv || !msgbroker || !tee || !queue1
      || !queue2 || !sink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }
#ifdef PLATFORM_TEGRA
  if (!transform) {
    g_printerr ("One tegra element could not be created. Exiting.\n");
    return -1;
  }
#endif

  /* Set the input filename on the source element */
  g_object_set (G_OBJECT (source), "location", argv[1], NULL);

  g_object_set (G_OBJECT (nvstreammux), "batch-size", 1, NULL);
  g_object_set (G_OBJECT (nvstreammux), "width", DEFAULT_X_WINDOW_WIDTH,
      "height", DEFAULT_X_WINDOW_HEIGHT,
      "batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);

  /* Set all the necessary properties of the nvinfer element,
   * the necessary ones are: */
  g_object_set (G_OBJECT (pgie), "config-file-path", INFERENCE, NULL);
  g_object_set (G_OBJECT (pgie), "batch-size", 4, NULL);

  g_object_set (G_OBJECT (analytics), "config-file", ANALYTICS, NULL);

  g_object_set (G_OBJECT (tracker), "tracker-width", TRACKER_WIDTH,
      "tracker-height", TRACKER_HEIGHT, "ll-lib-file", LIB_TRACKER, NULL);

  g_object_set (G_OBJECT (sink), "sync", TRUE, NULL);

  g_object_set (G_OBJECT (msgconv), "config", MESSAGE,
      "payload-type", schema_type, "comp-id", 2,
      "msg2p-lib", MSG2P_LIB, NULL);

  /* msgbroker parameters (conn-str, proto-lib) */
  g_object_set (G_OBJECT (msgbroker), "topic", TOPIC, "proto-lib", PROTO_LIB,
      "conn-str", CONN_STR, "config", KAFKA, NULL);

  /* osd parameters */
  g_object_set (G_OBJECT (nvosd), "clock-font", FONT_CLOCK,
      "display-clock", 0, "x-clock-offset", 800, "y-clock-offset", 820,
      "clock-color", CLOCK_COLOR, "clock-font-size", 15, NULL);

  /* Add a message handler: the bus allows the application to receive
   * GstMessage packets from the pipeline. */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* Set up the pipeline: add all elements into the pipeline */
#ifdef PLATFORM_TEGRA
  gst_bin_add_many (GST_BIN (pipeline), source, h264parser, decoder,
      nvstreammux, pgie, nvvidconv, transform, analytics, tracker, nvosd,
      tee, queue1, queue2, msgconv, msgbroker, sink, NULL);
#else
  gst_bin_add_many (GST_BIN (pipeline), source, h264parser, decoder,
      nvstreammux, pgie, nvvidconv, analytics, tracker, nvosd,
      tee, queue1, queue2, msgconv, msgbroker, sink, NULL);
#endif

  sink_pad = gst_element_get_request_pad (nvstreammux, "sink_0");
  if (!sink_pad) {
    g_printerr ("Streammux request sink pad failed. Exiting.\n");
    return -1;
  }

  src_pad = gst_element_get_static_pad (decoder, "src");
  if (!src_pad) {
    g_printerr ("Decoder request src pad failed. Exiting.\n");
    return -1;
  }

  if (gst_pad_link (src_pad, sink_pad) != GST_PAD_LINK_OK) {
    g_printerr ("Failed to link decoder to stream muxer. Exiting.\n");
    return -1;
  }

  gst_object_unref (sink_pad);
  gst_object_unref (src_pad);

  /* Link the elements together:
   * file-source -> h264-parser -> nvv4l2-decoder -> nvstreammux ->
   * nvinfer -> nvtracker -> nvdsanalytics -> nvvidconv -> nvosd -> tee
   *                                                               |-> queue2 -> video-renderer
   *                                                               |-> queue1 -> msgconv -> msgbroker
   */
  if (!gst_element_link_many (source, h264parser, decoder, NULL)) {
    g_printerr ("Elements could not be linked. Exiting.\n");
    return -1;
  }

  if (!gst_element_link_many (nvstreammux, pgie, tracker, analytics,
          nvvidconv, nvosd, tee, NULL)) {
    g_printerr ("Elements could not be linked2. Exiting.\n");
    return -1;
  }

  if (!gst_element_link_many (queue1, msgconv, msgbroker, NULL)) {
    g_printerr ("Elements could not be linked3. Exiting.\n");
    return -1;
  }

#ifdef PLATFORM_TEGRA
  if (!display_off) {
    if (!gst_element_link_many (queue2, transform, sink, NULL)) {
      g_printerr ("Elements could not be linked4. Exiting.\n");
      return -1;
    }
  } else {
    if (!gst_element_link (queue2, sink)) {
      g_printerr ("Elements could not be linked5. Exiting.\n");
      return -1;
    }
  }
#else
  if (!gst_element_link (queue2, sink)) {
    g_printerr ("Elements could not be linked6. Exiting.\n");
    return -1;
  }
#endif

  sink_pad = gst_element_get_static_pad (queue1, "sink");
  tee_msg_pad = gst_element_get_request_pad (tee, "src_%u");
  tee_render_pad = gst_element_get_request_pad (tee, "src_%u");
  if (!tee_msg_pad || !tee_render_pad) {
    g_printerr ("Unable to get request pads\n");
    return -1;
  }

  if (gst_pad_link (tee_msg_pad, sink_pad) != GST_PAD_LINK_OK) {
    g_printerr ("Unable to link tee and message converter\n");
    gst_object_unref (sink_pad);
    return -1;
  }
  gst_object_unref (sink_pad);

  sink_pad = gst_element_get_static_pad (queue2, "sink");
  if (gst_pad_link (tee_render_pad, sink_pad) != GST_PAD_LINK_OK) {
    g_printerr ("Unable to link tee and render\n");
    gst_object_unref (sink_pad);
    return -1;
  }
  gst_object_unref (sink_pad);

  /* Add a probe to get informed of the generated metadata. The probe is
   * added to the sink pad of the osd element, since by that time the buffer
   * will have all the metadata. */
  osd_sink_pad = gst_element_get_static_pad (nvosd, "sink");
  if (!osd_sink_pad)
    g_print ("Unable to get sink pad\n");
  else
    gst_pad_add_probe (osd_sink_pad, GST_PAD_PROBE_TYPE_BUFFER,
        bbox_generated_probe_after_analytics, NULL, NULL);
  gst_object_unref (osd_sink_pad);

  /* Set the pipeline to "playing" state */
  g_print ("Now playing: %s\n", argv[1]);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait till pipeline encounters an error or EOS */
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");

  /* Release the request pads from the tee, and unref them */
  gst_element_release_request_pad (tee, tee_msg_pad);
  gst_element_release_request_pad (tee, tee_render_pad);
  gst_object_unref (tee_msg_pad);
  gst_object_unref (tee_render_pad);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);
  return 0;
}