GStreamer appsrc

Hello,
I’m trying to use the RTSP sink plugin with the GStreamer appsrc plugin on a Xavier NX, with the following GStreamer class:

#define MAPPING "/live"
#define SERVICE "8000"

GstRtspStreamer::GstRtspStreamer()
{
    gst_init(nullptr, nullptr);
}

GstRtspStreamer::~GstRtspStreamer()
{
}
gpointer GstRtspStreamer::start(gpointer data)
{
    GMainLoop *loop;

    loop = g_main_loop_new (NULL, TRUE);
    g_main_loop_run (loop);
    g_main_loop_unref (loop);
    return NULL;
}

void GstRtspStreamer::init()
{
    gint ret = -1;
    GstCaps *caps;

    std::cout << "initGst\n";

    pipeline = gst_pipeline_new ("GstRtspSink");

    /* Creating the different elements in the pipeline */
    src = gst_element_factory_make ("appsrc", "appsrc");
    g_object_set (src, "is-live", TRUE, NULL);
    g_object_set (src, "format", GST_FORMAT_TIME, NULL);
    /* this instructs appsrc that we will be dealing with timed buffer */
    //gst_util_set_object_arg (G_OBJECT (app->src), "format", "GST_FORMAT_TIME");
    //g_object_set (app->src, "do-timestamp", false, "is-live", true, NULL);

    capsInput = gst_element_factory_make ("capsfilter", "capsInput");
    caps = gst_caps_from_string ("video/x-raw, format=BGR");
    g_object_set (capsInput, "caps", caps, NULL);
    gst_caps_unref (caps);

    videoconvert = gst_element_factory_make ("videoconvert", "videoconvert");

    capsOutput = gst_element_factory_make ("capsfilter", "capsOutput");
    caps = gst_caps_from_string ("video/x-raw, format=BGRx");
    g_object_set (capsOutput, "caps", caps, NULL);
    gst_caps_unref (caps);

    converter = gst_element_factory_make ("nvvidconv", "nvvidconv");

    encoder = gst_element_factory_make ("nvv4l2h264enc", "nvv4l2h264enc");
    g_object_set (encoder, "maxperf-enable", 1, "bitrate", 10000000, "iframeinterval", 40,
                  "preset-level", 1, "control-rate", 1, "insert-sps-pps", 1, "insert-vui", 1, NULL);

    capsfilter = gst_element_factory_make ("capsfilter", "capsfilter");
    caps = gst_caps_from_string ("video/x-h264, stream-format=byte-stream, mapping=" MAPPING);
    g_object_set (capsfilter, "caps", caps, NULL);
    gst_caps_unref (caps);

    rtsp = gst_element_factory_make ("rtspsink", "rtspsink");
    g_object_set (rtsp, "service", SERVICE, NULL);

    /* Linking all the elements together */
    gst_bin_add_many (GST_BIN (pipeline), src, capsInput, videoconvert, capsOutput, converter, encoder, capsfilter, rtsp, NULL);

    gst_element_link_many (src, capsInput, videoconvert, capsOutput, converter, encoder, capsfilter, rtsp, NULL);

    /* Playing the pipeline */
    gst_element_set_state (pipeline, GST_STATE_PLAYING);
    g_print ("New RTSP stream started at rtsp://127.0.0.1:" SERVICE MAPPING "\n");
}

void GstRtspStreamer::push_buffer(const void* data, size_t size)
{
    GstBuffer* buffer = gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_READONLY,
                                                    const_cast<void*>(data),
                                                    size,
                                                    0,
                                                    size,
                                                    nullptr,
                                                    nullptr);
    // Set buffer timestamp
    GST_BUFFER_TIMESTAMP(buffer) = gst_util_uint64_scale(video_frame_count_, GST_SECOND, 30);

    // Push buffer to appsrc
    GstFlowReturn ret;
    g_signal_emit_by_name(src, "push-buffer", buffer, &ret);

    // Increment frame counter
    video_frame_count_++;
}
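
For context, here is a minimal sketch of how the class gets driven from a capture loop (the OpenCV capture, the header name, and the fixed ~30 fps pacing are illustrative placeholders only):

// Illustrative driver for GstRtspStreamer; any source of raw BGR frames would do.
#include "GstRtspStreamer.h"   // placeholder header name
#include <opencv2/opencv.hpp>
#include <chrono>
#include <thread>

int main()
{
    GstRtspStreamer streamer;
    streamer.init();

    // Run the GLib main loop in a background thread.
    std::thread loop_thread([&streamer] { streamer.start(nullptr); });

    cv::VideoCapture cap(0);   // placeholder camera source
    cv::Mat frame;
    while (cap.read(frame)) {
        // push_buffer() wraps the data without copying, so the frame
        // must stay valid until appsrc has consumed it.
        streamer.push_buffer(frame.data, frame.total() * frame.elemSize());
        std::this_thread::sleep_for(std::chrono::milliseconds(33)); // ~30 fps
    }

    loop_thread.join();
    return 0;
}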

But I never get any video over RTSP. I tried with videotestsrc and it works fine.
Do you have any idea where my error is?
Where can I find a proper example of a GStreamer C++ class using appsrc and an RTSP sink?

Thanks

Hi,
Please try this sample:
Using RTSP encode (with H264) and stream images at device memory on Xavier - #6 by Appletree

If you use a JetPack 4 release, please try the 1.14.5 sample:
gst-rtsp-server/test-appsrc.c at 1.14.5 · GStreamer/gst-rtsp-server · GitHub
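
For orientation, the test-appsrc sample boils down to this pattern: the RTSP media factory builds the pipeline from a launch string containing a named appsrc, and the media-configure callback sets full caps on that appsrc and hooks need-data to push timestamped buffers. Below is a trimmed sketch of that pattern, not the exact sample; the caps, framerate, launch string, and solid-colour test frames are placeholders:

#include <gst/gst.h>
#include <gst/rtsp-server/rtsp-server.h>

typedef struct
{
    GstClockTime timestamp;   // running PTS for the pushed buffers
} StreamContext;

static void need_data (GstElement *appsrc, guint unused, StreamContext *ctx)
{
    // Produce one dummy 384x288 RGB16 frame (solid colour) per request.
    guint size = 384 * 288 * 2;
    GstBuffer *buffer = gst_buffer_new_allocate (NULL, size, NULL);
    gst_buffer_memset (buffer, 0, 0xff, size);

    GST_BUFFER_PTS (buffer) = ctx->timestamp;
    GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 30);
    ctx->timestamp += GST_BUFFER_DURATION (buffer);

    GstFlowReturn ret;
    g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
    gst_buffer_unref (buffer);
}

static void media_configure (GstRTSPMediaFactory *factory, GstRTSPMedia *media, gpointer user_data)
{
    // Grab the appsrc created by the launch string and give it full caps.
    GstElement *element = gst_rtsp_media_get_element (media);
    GstElement *appsrc = gst_bin_get_by_name_recurse_up (GST_BIN (element), "mysrc");

    gst_util_set_object_arg (G_OBJECT (appsrc), "format", "time");
    g_object_set (G_OBJECT (appsrc), "caps",
        gst_caps_new_simple ("video/x-raw",
            "format", G_TYPE_STRING, "RGB16",
            "width", G_TYPE_INT, 384,
            "height", G_TYPE_INT, 288,
            "framerate", GST_TYPE_FRACTION, 30, 1, NULL),
        NULL);

    StreamContext *ctx = g_new0 (StreamContext, 1);
    g_object_set_data_full (G_OBJECT (media), "stream-context", ctx, (GDestroyNotify) g_free);
    g_signal_connect (appsrc, "need-data", (GCallback) need_data, ctx);

    gst_object_unref (appsrc);
    gst_object_unref (element);
}

int main (int argc, char *argv[])
{
    gst_init (&argc, &argv);
    GMainLoop *loop = g_main_loop_new (NULL, FALSE);

    GstRTSPServer *server = gst_rtsp_server_new ();
    GstRTSPMountPoints *mounts = gst_rtsp_server_get_mount_points (server);

    GstRTSPMediaFactory *factory = gst_rtsp_media_factory_new ();
    gst_rtsp_media_factory_set_launch (factory,
        "( appsrc name=mysrc ! videoconvert ! x264enc ! rtph264pay name=pay0 pt=96 )");
    g_signal_connect (factory, "media-configure", (GCallback) media_configure, NULL);

    gst_rtsp_mount_points_add_factory (mounts, "/live", factory);
    g_object_unref (mounts);

    gst_rtsp_server_attach (server, NULL);
    g_print ("stream ready at rtsp://127.0.0.1:8554/live\n");
    g_main_loop_run (loop);
    return 0;
}

On a Jetson, the software x264enc in the launch string would usually be replaced with nvvidconv ! nvv4l2h264enc, as in the first link above.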
