I am using deepstream-test3. The code is below:
#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
#include <math.h>
#include <string.h>
#include <sys/time.h>
#include <gst/rtsp-server/rtsp-server.h>
#include "gstnvdsmeta.h"
#ifndef PLATFORM_TEGRA
#include "gst-nvmessage.h"
#endif
#define MAX_DISPLAY_LEN 64
#define PGIE_CLASS_ID_VEHICLE 0
#define PGIE_CLASS_ID_PERSON 2
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <errno.h>
#include <unistd.h>
#include <stdlib.h>
#include <event.h>
#include <event2/util.h>
#define SOURCE_NUM 1
/* The muxer output resolution must be set if the input streams will be of
 * different resolution. The muxer will scale all the input frames to this
 * resolution. */
#define MUXER_OUTPUT_WIDTH 1920
#define MUXER_OUTPUT_HEIGHT 1080
/* Muxer batch formation timeout, for e.g. 40 millisec. Should ideally be set
 * based on the fastest source's framerate. */
#define MUXER_BATCH_TIMEOUT_USEC 4000000
#define TILED_OUTPUT_WIDTH 1920
#define TILED_OUTPUT_HEIGHT 1080
/* NVIDIA Decoder source pad memory feature. This feature signifies that source
 * pads having this capability will push GstBuffers containing cuda buffers. */
#define GST_CAPS_FEATURES_NVMM "memory:NVMM"
gchar pgie_classes_str[4][32] = { "Vehicle", "TwoWheeler", "Person",
"RoadSign"
};
gchar rtsp_source[1][80] = {"rtsp://admin:admin@192.168.1.90:554/videoinput_1:0/h264_1/onvif.stm"};
#define FPS_PRINT_INTERVAL 300
//static struct timeval start_time = { };
//static guint probe_counter = 0;
/* tiler_sink_pad_buffer_probe will extract metadata received on OSD sink pad
 * and update params for drawing rectangle, object information etc. */
int create_pipeline();
int call_cpp_OpenCamera(int index);
static gboolean
checkVideoIsOnline (gpointer arg)
{
g_printerr ("\nperiod_1\n");
if (call_cpp_OpenCamera (0) == 1) {
g_printerr ("STREAM ON\n");
create_pipeline ();
g_printerr ("STREAM AGAIN\n");
return FALSE;
}
else {
g_printerr ("STREAM OFF\n");
return TRUE;
}
}
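/* Note: checkVideoIsOnline has the GSourceFunc shape (return TRUE to keep a
 * timer firing, FALSE to remove it), but it is not registered anywhere in
 * this listing. A minimal hookup, assuming a 5-second poll (my assumption,
 * not part of the original code), would be:
 *
 *   g_timeout_add_seconds (5, checkVideoIsOnline, NULL);
 */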
static gboolean
bus_call (GstBus * bus, GstMessage * msg, gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_WARNING:
{
gchar *debug;
GError *error;
gst_message_parse_warning (msg, &error, &debug);
g_printerr ("WARNING from element %s: %s\n",
GST_OBJECT_NAME (msg->src), error->message);
g_free (debug);
g_printerr ("Warning: %s\n", error->message);
g_error_free (error);
break;
}
case GST_MESSAGE_ERROR:
{
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_printerr ("ERROR from element %s: %s\n",
GST_OBJECT_NAME (msg->src), error->message);
if (debug)
g_printerr ("Error details: %s\n", debug);
g_free (debug);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
#ifndef PLATFORM_TEGRA
case GST_MESSAGE_ELEMENT:
{
if (gst_nvmessage_is_stream_eos (msg)) {
guint stream_id;
if (gst_nvmessage_parse_stream_eos (msg, &stream_id)) {
g_print ("Got EOS from stream %d\n", stream_id);
}
}
break;
}
#endif
default:
break;
}
return TRUE;
}
static void
cb_newpad (GstElement * decodebin, GstPad * decoder_src_pad, gpointer data)
{
g_print ("In cb_newpad\n");
GstCaps *caps = gst_pad_get_current_caps (decoder_src_pad);
const GstStructure *str = gst_caps_get_structure (caps, 0);
const gchar *name = gst_structure_get_name (str);
GstElement *source_bin = (GstElement *) data;
GstCapsFeatures *features = gst_caps_get_features (caps, 0);
/* Need to check if the pad created by the decodebin is for video and not
 * audio. */
if (!strncmp (name, "video", 5)) {
/* Link the decodebin pad only if decodebin has picked nvidia
 * decoder plugin nvdec_*. We do this by checking if the pad caps contain
 * NVMM memory features. */
if (gst_caps_features_contains (features, GST_CAPS_FEATURES_NVMM)) {
/* Get the source bin ghost pad */
GstPad *bin_ghost_pad = gst_element_get_static_pad (source_bin, "src");
if (!gst_ghost_pad_set_target (GST_GHOST_PAD (bin_ghost_pad),
decoder_src_pad)) {
g_printerr ("Failed to link decoder src pad to source bin ghost pad\n");
}
gst_object_unref (bin_ghost_pad);
} else {
g_printerr ("Error: Decodebin did not pick nvidia decoder plugin.\n");
}
}
}
static void
decodebin_child_added (GstChildProxy * child_proxy, GObject * object,
gchar * name, gpointer user_data)
{
g_print ("Decodebin child added: %s\n", name);
if (g_strrstr (name, "decodebin") == name) {
g_signal_connect (G_OBJECT (object), "child-added",
G_CALLBACK (decodebin_child_added), user_data);
}
if (g_strstr_len (name, -1, "nvv4l2decoder") == name) {
g_print ("Setting bufapi_version\n");
g_object_set (object, "bufapi-version", TRUE, NULL);
}
}
static GstElement *
create_source_bin (guint index, gchar * uri)
{
GstElement *bin = NULL, *uri_decode_bin = NULL;
gchar bin_name[16] = { };
g_snprintf (bin_name, 15, "source-bin-%02d", index);
/* Create a source GstBin to abstract this bin's content from the rest of the
 * pipeline */
bin = gst_bin_new (bin_name);
/* Source element for reading from the uri.
 * We will use decodebin and let it figure out the container format of the
 * stream and the codec and plug the appropriate demux and decode plugins. */
uri_decode_bin = gst_element_factory_make ("uridecodebin", "uri-decode-bin");
if (!bin || !uri_decode_bin) {
g_printerr ("One element in source bin could not be created.\n");
return NULL;
}
/* We set the input uri to the source element */
g_object_set (G_OBJECT (uri_decode_bin), "uri", uri, "async-handling", TRUE, NULL);
/* Connect to the "pad-added" signal of the decodebin which generates a
 * callback once a new pad for raw data has been created by the decodebin */
g_signal_connect (G_OBJECT (uri_decode_bin), "pad-added",
G_CALLBACK (cb_newpad), bin);
g_signal_connect (G_OBJECT (uri_decode_bin), "child-added",
G_CALLBACK (decodebin_child_added), bin);
gst_bin_add (GST_BIN (bin), uri_decode_bin);
/* We need to create a ghost pad for the source bin which will act as a proxy
 * for the video decoder src pad. The ghost pad will not have a target right
 * now. Once the decode bin creates the video decoder and generates the
 * cb_newpad callback, we will set the ghost pad target to the video decoder
 * src pad. */
if (!gst_element_add_pad (bin, gst_ghost_pad_new_no_target ("src",
GST_PAD_SRC))) {
g_printerr ("Failed to add ghost pad in source bin\n");
return NULL;
}
return bin;
}
static gboolean
start_rtsp_streaming (guint rtsp_port_num, guint udpsink_port_num)
{
GstRTSPServer *server;
GstRTSPMountPoints *mounts;
GstRTSPMediaFactory *factory;
char udpsrc_pipeline[512];
char port_num_Str[64] = { 0 };
char *encoder_name;
encoder_name = "H264";
sprintf (udpsrc_pipeline,
"( udpsrc name=pay0 port=%d caps=\"application/x-rtp, media=video, "
"clock-rate=90000, encoding-name=%s, payload=96\" )",
udpsink_port_num, encoder_name);
sprintf (port_num_Str, "%d", rtsp_port_num);
server = gst_rtsp_server_new ();
g_object_set (server, "service", port_num_Str, NULL);
mounts = gst_rtsp_server_get_mount_points (server);
factory = gst_rtsp_media_factory_new ();
gst_rtsp_media_factory_set_shared (factory, TRUE);
gst_rtsp_media_factory_set_launch (factory, udpsrc_pipeline);
gst_rtsp_mount_points_add_factory (mounts, "/ds-test", factory);
g_object_unref (mounts);
gst_rtsp_server_attach (server, NULL);
g_print
("\n *** DeepStream: Launched RTSP Streaming at rtsp://localhost:%d/ds-test ***\n\n",
rtsp_port_num);
return TRUE;
}
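/* Design note: create_pipeline() below ends in udpsink, which pushes the
 * encoded H264 RTP packets to UDP port 5403; the RTSP media factory's launch
 * string above re-serves that UDP stream to clients. The udpsink "port"
 * property must therefore match the udpsink_port_num passed to
 * start_rtsp_streaming(). */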
int create_pipeline()
{
GstElement *pipeline = NULL;
GMainLoop *loop = NULL;
GstBus *bus = NULL;
guint bus_watch_id;
GstElement *streammux = NULL, *sink = NULL, *pgie = NULL,
*nvvidconv = NULL, *nvosd = NULL, *encoder = NULL,
*rtppay = NULL, *transform = NULL, *cap_filter = NULL;
GstCaps *caps = NULL;
guint i;
loop = g_main_loop_new (NULL, FALSE);
pipeline = gst_pipeline_new ("dstest3-pipeline");
streammux = gst_element_factory_make ("nvstreammux", "stream-muxer");
if (!pipeline || !streammux) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
gst_bin_add (GST_BIN (pipeline), streammux);
for (i = 0; i < SOURCE_NUM; i++) {
GstPad *sinkpad, *srcpad;
gchar pad_name[16] = { };
GstElement *source_bin = create_source_bin (i, rtsp_source[i]);
if (!source_bin) {
g_printerr ("Failed to create source bin. Exiting.\n");
return -1;
}
gst_bin_add (GST_BIN (pipeline), source_bin);
g_snprintf (pad_name, 15, "sink_%u", i);
sinkpad = gst_element_get_request_pad (streammux, pad_name);
if (!sinkpad) {
g_printerr ("Streammux request sink pad failed. Exiting.\n");
return -1;
}
srcpad = gst_element_get_static_pad (source_bin, "src");
if (!srcpad) {
g_printerr ("Failed to get src pad of source bin. Exiting.\n");
return -1;
}
if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
g_printerr ("Failed to link source bin to stream muxer. Exiting.\n");
return -1;
}
gst_object_unref (srcpad);
gst_object_unref (sinkpad);
}
pgie = gst_element_factory_make ("nvinfer", "primary-nvinference-engine");
nvvidconv = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter");
nvosd = gst_element_factory_make ("nvdsosd", "nv-onscreendisplay");
transform = gst_element_factory_make ("nvvideoconvert", "transform");
cap_filter = gst_element_factory_make ("capsfilter", "filter");
encoder = gst_element_factory_make ("nvv4l2h264enc", "h264-encoder");
rtppay = gst_element_factory_make ("rtph264pay", "rtppay-h264");
sink = gst_element_factory_make ("udpsink", "udpsink");
if (!pgie || !nvvidconv || !nvosd || !transform || !cap_filter
|| !encoder || !rtppay || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
caps = gst_caps_from_string ("video/x-raw(memory:NVMM), format=I420");
g_object_set (G_OBJECT (cap_filter), "caps", caps, NULL);
g_object_set (G_OBJECT (encoder), "bitrate", 4000000, NULL);
g_object_set (G_OBJECT (encoder), "profile", 4, NULL);
g_object_set (G_OBJECT (sink), "host", "224.224.255.255", "port",
5403, "async", TRUE, "sync", 0, NULL);
g_object_set (G_OBJECT (streammux), "width", MUXER_OUTPUT_WIDTH, "height",
MUXER_OUTPUT_HEIGHT, "batch-size", SOURCE_NUM,
"batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);
g_object_set (G_OBJECT (pgie),
"config-file-path", "dstest3_pgie_config.txt", NULL);
g_object_set (G_OBJECT (pgie), "batch-size", 1, NULL);
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
gst_bin_add_many (GST_BIN (pipeline), pgie, nvvidconv, nvosd,
transform, cap_filter, encoder, rtppay, sink, NULL);
if (!gst_element_link_many (streammux, pgie, nvvidconv, nvosd,
transform, cap_filter, encoder, rtppay, sink, NULL)) {
g_printerr ("Elements could not be linked. Exiting.\n");
return -1;
}
g_print ("\n0000\n");
gboolean ret = TRUE;
ret = start_rtsp_streaming (8557, 5403);
if (ret != TRUE) {
g_print ("%s: start_rtsp_streaming function failed\n", __func__);
}
g_print ("Now playing:");
gst_element_set_state (pipeline, GST_STATE_PLAYING);
g_print ("Running...\n");
GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (pipeline),
GST_DEBUG_GRAPH_SHOW_ALL, "ds-app-playing");
g_main_loop_run (loop);
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref (loop);
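/* Caution: per the GStreamer documentation, gst_deinit() is intended to be
 * called once, just before the process exits, and GStreamer should not be
 * used again afterwards; re-running gst_init()/create_pipeline() later, as
 * main() does, is not guaranteed to be safe. */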
gst_deinit ();
return 0;
}
typedef struct sockaddr SA;
int tcp_connect_server(const char* server_ip, int port)
{
int sockfd, status, save_errno;
struct sockaddr_in server_addr;
memset(&server_addr, 0, sizeof(server_addr) );
server_addr.sin_family = AF_INET;
server_addr.sin_port = htons(port);
status = inet_aton(server_ip, &server_addr.sin_addr);
if( status == 0 ) // server_ip is not a valid address
{
errno = EINVAL;
return -1;
}
sockfd = socket(PF_INET, SOCK_STREAM, 0);
if( sockfd == -1 )
return sockfd;
status = connect(sockfd, (SA*)&server_addr, sizeof(server_addr) );
if( status == -1 )
{
save_errno = errno;
close(sockfd);
errno = save_errno; // close() may clobber errno
return -1;
}
return sockfd;
}
int
main (int argc, char *argv[])
{
gst_init (&argc, &argv);
if(call_cpp_OpenCamera(0) == 1){
create_pipeline();
}
while(1)
{
g_print ("\n while \n");
sleep (5);
int fd = tcp_connect_server ("192.168.1.90", 554);
if (fd != -1) {
close (fd); /* only probing that the camera's RTSP port is reachable */
g_print ("\n PIPE AGAIN \n");
gst_init (&argc, &argv);
create_pipeline ();
}
else {
g_print ("\n NO STREAM \n");
}
}
return 0;
}
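For reference, a build command along these lines should work; the DeepStream paths, the source file name, and the extra -levent (for the libevent headers) are my assumptions for a default install, so adjust them to your setup:

gcc -o deepstream-test3-app deepstream_test3_app.c \
-I /opt/nvidia/deepstream/deepstream/sources/includes \
$(pkg-config --cflags --libs gstreamer-1.0 gstreamer-rtsp-server-1.0) \
-L /opt/nvidia/deepstream/deepstream/lib -lnvdsgst_meta -lnvds_meta -levent \
-Wl,-rpath,/opt/nvidia/deepstream/deepstream/lib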