#include #include #include #include #define DEFAULT_RTSP_PORT "554" #define DEFAULT_WIDTH 1920 #define DEFAULT_HEIGHT 1080 static char *port = (char *) DEFAULT_RTSP_PORT; static int width = DEFAULT_WIDTH; static int height = DEFAULT_HEIGHT; static int system_quit = 0; static int live_view_connected = 0; static int main_video_connected = 0; static int second_video_connected = 0; static guint s_pipe_cfg_timer = 0; static GOptionEntry entries[] = { {"port", 'p', 0, G_OPTION_ARG_STRING, &port, "Port to listen on (default: " DEFAULT_RTSP_PORT ")", "PORT"}, {"width", 'w', 0, G_OPTION_ARG_INT, &width, "Width of camera reading", "WIDTH"}, {"height", 'h', 0, G_OPTION_ARG_INT, &height, "Height of camera reading", "HEIGHT"}, {NULL} }; static GstElement *s_argus = NULL; static GstPipeline *sink; static GstPipeline *liveview; static gboolean bus_callback(GstBus * bus, GstMessage * message, gpointer data) { switch (GST_MESSAGE_TYPE (message)) { case GST_MESSAGE_INFO:{ break; } case GST_MESSAGE_WARNING:{ break; } case GST_MESSAGE_ERROR:{ g_print ("ERROR message\n"); break; } case GST_MESSAGE_STATE_CHANGED:{ GstState oldstate, newstate; gst_message_parse_state_changed (message, &oldstate, &newstate, NULL); g_print("Change state from %d to %d\n", oldstate, newstate); switch (newstate) { case GST_STATE_PLAYING: g_print("State: playing...\n"); GST_DEBUG_BIN_TO_DOT_FILE(data, GST_DEBUG_GRAPH_SHOW_ALL, "playing"); break; case GST_STATE_PAUSED: g_print("State: paused...\n"); break; case GST_STATE_READY: g_print("State: Ready...\n"); break; case GST_STATE_NULL: g_print("State: NULL...\n"); break; default: break; } break; } case GST_MESSAGE_EOS:{ g_print ("Received EOS. Exiting ...\n"); break; } default: break; } return TRUE; } static void _intr_handler (int signum) { struct sigaction action; g_printerr("User Interrupted.. \n"); memset (&action, 0, sizeof (action)); action.sa_handler = SIG_DFL; sigaction (SIGINT, &action, NULL); system_quit = 1; } static void pipe_live_need_data_callback (GstElement* object, guint arg0, gpointer user_data) { // g_print("%s() element %p, arg0 %d\r\n", __FUNCTION__, object, arg0); } static void new_session_callback (GstRTSPClient * client, GstRTSPSession * session, gpointer user_data) { GstRTSPConnection* conn; GstRTSPUrl* url; gchar* uri; g_print("RTSP client new session %p available\n", session); guint default_session_timeout = gst_rtsp_session_get_timeout(session); g_print("RTSP default client's session timeout is %d seconds\n", default_session_timeout); g_print("Set RTSP client's session timeout to %d seconds\n", 2); gst_rtsp_session_set_timeout (session, 2); conn = gst_rtsp_client_get_connection(client); url = gst_rtsp_connection_get_url(conn); uri = gst_rtsp_url_get_request_uri(url); g_print("RTSP client %p request URI: %s\n", client, uri); // config = (NvDsSinkEncoderConfig*)user_data; // config->rtsp_client_connected_cb(config->rtsp_cb_pri, session, uri); g_free(uri); } static void session_removed_callback (GstRTSPSessionPool * pool, GstRTSPSession * session, gpointer user_data) { g_print("RTSP client session %p is removed\n", session); // config = (NvDsSinkEncoderConfig*)user_data; // config->rtsp_client_disconnected_cb(config->rtsp_cb_pri, session); } static void rtsp_client_closed_callback (GstRTSPClient * client, gpointer user_data) { g_print("RTSP client %p closed\n", client); g_signal_handlers_disconnect_by_func(client, G_CALLBACK (new_session_callback), user_data); g_signal_handlers_disconnect_by_func(client, G_CALLBACK (rtsp_client_closed_callback), user_data); } static GstRTSPFilterResult rtsp_client_filter_func (GstRTSPServer * server, GstRTSPClient * client, gpointer user_data) { GList* session_list = gst_rtsp_client_session_filter(client, NULL, NULL); if(!session_list){ return GST_RTSP_FILTER_REMOVE; } g_list_free_full(session_list, g_object_unref); return GST_RTSP_FILTER_KEEP; } static gboolean cleanup_session_pool_callback(GstRTSPSessionPool * pool, gpointer user_data) { g_print("cleanup_session_pool_callback\n"); gst_rtsp_session_pool_cleanup(pool); gst_rtsp_server_client_filter (user_data, rtsp_client_filter_func, NULL); return TRUE; } static void rtsp_client_connected_callback (GstRTSPServer * server, GstRTSPClient * client, gpointer user_data) { g_print("RTSP client %p connected\n", client); g_signal_connect (client, "new-session", G_CALLBACK (new_session_callback), user_data); g_signal_connect (client, "closed", G_CALLBACK (rtsp_client_closed_callback), user_data); } static void new_state_callback (GstRTSPMedia * self, gint object, gpointer user_data) { g_print("%s() media %p, object %d\r\n", __FUNCTION__, self, object); if (NULL == user_data) { if (object == 4) main_video_connected = 1; else if (object == 1) main_video_connected = 0; } else { if (object == 4) second_video_connected = 1; else if (object == 1) second_video_connected = 0; } } static void new_stream_callback (GstRTSPMedia * self, GstRTSPStream * object, gpointer user_data) { g_print("%s() media %p, object %p\r\n", __FUNCTION__, self, object); } static void removed_stream_callback (GstRTSPMedia * self, GstRTSPStream * object, gpointer user_data) { g_print("%s() media %p, object %p\r\n", __FUNCTION__, self, object); } static void target_state_callback (GstRTSPMedia * self, gint object, gpointer user_data) { g_print("%s() media %p, object %d\r\n", __FUNCTION__, self, object); } static void unprepared_callback (GstRTSPMedia * self, gpointer user_data) { g_print("%s() media %p\r\n", __FUNCTION__, self); } static void on_configure_cb (GstRTSPMediaFactory * self, GstRTSPMedia * object, gpointer user_data) { g_print("%s() factory %p, media %p\r\n", __FUNCTION__, self, object); } static void on_constructed_cb (GstRTSPMediaFactory * self, GstRTSPMedia * object, gpointer user_data) { GstElement *pipesrc; GstElement *pipeline; g_print("%s() factory %p, media %p\r\n", __FUNCTION__, self, object); pipeline = gst_rtsp_media_get_element(object); pipesrc = gst_bin_get_by_name(pipeline, "intersrc1"); if (!pipesrc) pipesrc = gst_bin_get_by_name(pipeline, "intersrc2"); if (pipesrc) { g_signal_connect (pipesrc, "need-data", G_CALLBACK (pipe_live_need_data_callback), NULL); } // Set callback for media object g_signal_connect (object, "unprepared", G_CALLBACK (unprepared_callback), NULL); g_signal_connect (object, "target-state", G_CALLBACK (target_state_callback), NULL); g_signal_connect (object, "removed-stream", G_CALLBACK (removed_stream_callback), NULL); g_signal_connect (object, "new-stream", G_CALLBACK (new_stream_callback), NULL); g_signal_connect (object, "new-state", G_CALLBACK (new_state_callback), NULL); } int main (int argc, char *argv[]) { struct sigaction action; GMainLoop *loop; GstRTSPServer *server; GstRTSPMountPoints *mounts; GstRTSPMediaFactory *factory; GstRTSPMediaFactory *factory2; GOptionContext *optctx; GError *error = NULL; char input_str[512] = {0}; GstState state; int ret; GstBus *bus; GstRTSPSessionPool *pool; GSource* g_source; GstElement *shmsink; gchar *out_uname = "User1"; gchar *out_passwd = "TestUser1"; optctx = g_option_context_new ("Test camera input and rtsp stream"); g_option_context_add_main_entries (optctx, entries, NULL); g_option_context_add_group (optctx, gst_init_get_option_group ()); if (!g_option_context_parse (optctx, &argc, &argv, &error)) { g_printerr ("Error parsing options: %s\n", error->message); g_option_context_free (optctx); g_clear_error (&error); return -1; } g_option_context_free (optctx); loop = g_main_loop_new (NULL, FALSE); g_clear_error (&error); if (width < 3840) { sink = GST_PIPELINE (gst_parse_launch ("nvarguscamerasrc name=argussrc sensor-mode=0 exposuretimerange=\"27000 33333000\" gainrange=\"1 16\" ispdigitalgainrange=\"1 256\" tnr-mode=0 ee-mode=0 aeantibanding=0 silent=0 ! video/x-raw(memory:NVMM),width=1932,height=1090,format=NV12,framerate=30/1 " " ! nvvidconv left=6 right=1926 top=6 bottom=1086 ! video/x-raw(memory:NVMM),width=1920,height=1080,format=NV12 " " ! queue leaky=2 max-size-buffers=3 " " ! interpipesink name=camsrc max-lateness=250000000 drop=true async=true sync=true forward-eos=true forward-events=true", &error)); } else { sink = GST_PIPELINE (gst_parse_launch ("nvarguscamerasrc name=argussrc sensor-mode=3 exposuretimerange=\"27000 33333000\" gainrange=\"1 16\" ispdigitalgainrange=\"1 256\" tnr-mode=0 ee-mode=0 aeantibanding=0 silent=0 ! video/x-raw(memory:NVMM),width=3864,height=2180,format=NV12,framerate=30/1 " " ! nvvidconv left=12 right=3852 top=10 bottom=2170 ! video/x-raw(memory:NVMM),width=3840,height=2160,format=NV12 " " ! queue leaky=2 max-size-buffers=3 " " ! interpipesink name=camsrc max-lateness=250000000 drop=true async=true sync=true forward-eos=true forward-events=true", &error)); } if (error) { printf("ERR create sink pipeline failed: %s\n", error->message); } g_clear_error (&error); liveview = GST_PIPELINE(gst_parse_launch ("interpipesrc name=intersrc0 is-live=true emit-signals=true do-timestamp=true format=3 max-bytes=54525952 listen-to=camsrc" " ! capsfilter caps=video/x-raw(memory:NVMM),format=NV12 " " ! nvvidconv ! video/x-raw(memory:NVMM),format=I420,width=1280,height=720 " " ! videorate ! video/x-raw(memory:NVMM),framerate=5/1 " " ! nvv4l2vp8enc bitrate=1000000 ! rtpvp8pay ! fakesink ", &error)); if (error) { printf("ERROR create liveview pipeline: %s\n", error->message); } shmsink = gst_bin_get_by_name(liveview, "intersrc0"); g_signal_connect (shmsink, "need-data", G_CALLBACK (pipe_live_need_data_callback), NULL); if (GST_STATE_CHANGE_FAILURE == gst_element_set_state (GST_ELEMENT (sink), GST_STATE_PLAYING)) { printf("ERROR: change sink state to PLAYING...\n"); } if (GST_STATE_CHANGE_FAILURE == gst_element_get_state (GST_ELEMENT (sink), &state, NULL, GST_CLOCK_TIME_NONE)) { printf("ERROR: wait for sink state failed\n"); } else { printf("Sink state %d\n", state); } // sink_start(); if (GST_STATE_CHANGE_FAILURE == gst_element_set_state (GST_ELEMENT (liveview), GST_STATE_PLAYING)) { printf("ERROR: change liveview state to PLAYING...\n"); } if (GST_STATE_CHANGE_FAILURE == gst_element_get_state (GST_ELEMENT (liveview), &state, NULL, GST_CLOCK_TIME_NONE)) { printf("ERROR: wait for liveview state failed\n"); } else { printf("liveview state %d\n", state); } s_argus = gst_bin_get_by_name(sink, "argussrc"); /* create a server instance */ server = gst_rtsp_server_new (); g_object_set (server, "service", port, NULL); g_signal_connect (server, "client-connected", G_CALLBACK (rtsp_client_connected_callback), NULL); /* get the mount points for this server, every server has a default object * that be used to map uri mount points to media factories */ mounts = gst_rtsp_server_get_mount_points (server); /* make a media factory for a test stream. The default media factory can use * gst-launch syntax to create pipelines. * any launch line works as long as it contains elements named pay%d. Each * element with pay%d names will be a stream */ factory = gst_rtsp_media_factory_new (); if (width < 3840) { gst_rtsp_media_factory_set_launch (factory, "interpipesrc name=intersrc1 listen-to=camsrc is-live=true allow-renegotiation=true enable-sync=true do-timestamp=true format=3 max-bytes=54525952 ! queue " " ! capsfilter caps=video/x-raw(memory:NVMM),format=NV12,framerate=30/1 " " ! nvv4l2h264enc bitrate=16000000 insert-sps-pps=true profile=4 ! rtph264pay name=pay0"); } else { gst_rtsp_media_factory_set_launch (factory, "interpipesrc name=intersrc1 listen-to=camsrc is-live=true allow-renegotiation=true enable-sync=true do-timestamp=true format=3 max-bytes=54525952 ! queue " " ! capsfilter caps=video/x-raw(memory:NVMM),format=NV12,framerate=30/1 " " ! nvv4l2h264enc bitrate=40000000 insert-sps-pps=true profile=4 ! rtph264pay name=pay0"); } gst_rtsp_media_factory_set_shared (factory, TRUE); gst_rtsp_mount_points_add_factory (mounts, "/video1", factory); g_signal_connect(factory, "media-configure", G_CALLBACK(on_configure_cb), factory); g_signal_connect(factory, "media-constructed", G_CALLBACK(on_constructed_cb), factory); /* don't need the ref to the mapper anymore */ g_object_unref (mounts); // Allow 'pellet' use to access this resource gst_rtsp_media_factory_add_role (factory, out_uname, GST_RTSP_PERM_MEDIA_FACTORY_ACCESS, G_TYPE_BOOLEAN, TRUE, GST_RTSP_PERM_MEDIA_FACTORY_CONSTRUCT, G_TYPE_BOOLEAN, TRUE, NULL); gst_rtsp_media_factory_add_role (factory, "anonymous", GST_RTSP_PERM_MEDIA_FACTORY_ACCESS, G_TYPE_BOOLEAN, TRUE, GST_RTSP_PERM_MEDIA_FACTORY_CONSTRUCT, G_TYPE_BOOLEAN, FALSE, NULL); GstRTSPAuth *auth = gst_rtsp_auth_new(); GstRTSPToken *token = gst_rtsp_token_new (GST_RTSP_TOKEN_MEDIA_FACTORY_ROLE, G_TYPE_STRING, "anonymous", NULL); gst_rtsp_auth_set_default_token (auth, token); gst_rtsp_token_unref (token); token = gst_rtsp_token_new (GST_RTSP_TOKEN_MEDIA_FACTORY_ROLE, G_TYPE_STRING, out_uname, NULL); gchar *basic = gst_rtsp_auth_make_basic(out_uname, out_passwd); gst_rtsp_auth_add_basic(auth, basic, token); g_free (basic); gst_rtsp_token_unref (token); gst_rtsp_server_set_auth(server, auth); g_object_unref (auth); pool = gst_rtsp_server_get_session_pool (server); g_signal_connect(pool, "session-removed", G_CALLBACK(session_removed_callback), NULL); g_source = gst_rtsp_session_pool_create_watch(pool); g_print("RTSP session pool cleanup source %p is created\n", g_source); g_source_set_callback(g_source, (GSourceFunc)cleanup_session_pool_callback, server, NULL); g_source_attach(g_source, NULL); g_object_unref(pool); g_object_set_data(server, "session_pool_cleanup_source", g_source); /* attach the server to the default maincontext */ gst_rtsp_server_attach (server, NULL); /* start serving */ g_print ("stream %dx%d ready at rtsp://:@0.0.0.0:%s/video1\n", width, height, port); memset (&action, 0, sizeof (action)); action.sa_handler = _intr_handler; sigaction (SIGINT, &action, NULL); guint count = 0; guint count10s = 0; do { usleep(1000); g_main_iteration(FALSE); } while(!system_quit); printf("=======QUIT=========\r\n"); gst_element_set_state (GST_ELEMENT (sink), GST_STATE_NULL); gst_element_set_state (GST_ELEMENT (liveview), GST_STATE_NULL); g_main_loop_quit(loop); return 0; }