My final application will render to a texture which I want to stream using gstreamer’s RTP plugins. The jetson nano will be running headless and streaming to a surface pro.
I managed to get the following pipeline working (when the jetson was plugged into a screen).
Jetson:
gst-launch-1.0 -v ximagesrc ! nvvidconv ! nvv4l2h264enc ! h264parse ! video/x-h264, stream-format=byte-stream ! rtph264pay name=pay0 pt=96 ! udpsink port=5000 host=192.168.1.176
Surface pro:
gst-launch-1.0 -v udpsrc port=5000 caps='application/x-rtp, media=(string)video, encoding-name=(string)H264, payload=(int)96, framerate=(fraction)25/1' ! queue ! rtph264depay ! h264parse ! queue ! avdec_h264 ! queue ! videoscale ! videoconvert ! ximagesink sync=false async=false -e
I now want to move away from gst-launch on the nano and write the following pipeline in C++:
* appsrc !
* nvvidconv !
* nvv4l2h264enc !
* h264parse !
* video/x-h264, stream-format=byte-stream !
* rtph264pay name=pay0 pt=96 !
* udpsink port=5000 host=192.168.1.176
I have the following code which should alternate between a blue and green screen.
#include <cassert>
#include <functional>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <stdexcept>
#include <string>
#include <vector>
#include <iostream>
#include <EGL/egl.h>
using namespace std;
using namespace std::placeholders;
#define all(n) (n).begin(), (n).end()
typedef struct {
GstPipeline *pipeline;
GstElement *src, *convert, *encode, *capsfilter, *pay, *udpsink;
GMainLoop *loop;
guint sourceid;
FILE *file;
}gst_app_t;
#define WIDTH 384
#define HEIGHT 288
#define CHANNELS 3
#define BUFFER_SIZE ((WIDTH)*(HEIGHT)*(CHANNELS))
vector<array<unsigned char, 3>> green;
vector<array<unsigned char, 3>> blue;
static gboolean read_data(gst_app_t *app)
{
static gboolean white = FALSE;
static GstClockTime timestamp = 0;
GstBuffer *buffer;
guint size;
GstFlowReturn ret;
size = green.size() * CHANNELS;
buffer = gst_buffer_new_wrapped_full((GstMemoryFlags) 0, (gpointer)(white? green.data(): blue.data()), size, 0, size, NULL, NULL );
white = !white;
GST_BUFFER_PTS (buffer) = timestamp;
GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 1);
timestamp += GST_BUFFER_DURATION (buffer);
ret = gst_app_src_push_buffer((GstAppSrc*) app->src, buffer);
if (ret != GST_FLOW_OK) {
g_warning("pushing buffer fucked up");
g_main_loop_quit (app->loop);
}
return true;
}
static void start_feed (GstElement * pipeline, guint size, gst_app_t *app)
{
assert(pipeline); assert(size);
if (app->sourceid == 0) {
GST_DEBUG ("start feeding");
app->sourceid = g_idle_add ((GSourceFunc) read_data, app);
}
}
static void stop_feed (GstElement * pipeline, gst_app_t *app)
{
assert(pipeline);
if (app->sourceid != 0) {
GST_DEBUG ("stop feeding");
g_source_remove (app->sourceid);
app->sourceid = 0;
}
}
static gboolean bus_callback(GstBus *bus, GstMessage *message, gpointer *ptr)
{
assert(bus); assert(ptr);
gst_app_t *app = (gst_app_t*)ptr;
switch(GST_MESSAGE_TYPE(message)){
case GST_MESSAGE_ERROR:{
gchar *debug;
GError *err;
gst_message_parse_error(message, &err, &debug);
cerr << "Error from " << message->src << ": " << err->message << '\n'
<< "\tdebug info: " << debug << '\n';
g_error_free(err);
g_free(debug);
g_main_loop_quit(app->loop);
}
break;
case GST_MESSAGE_EOS:
cerr << "End of stream, quitting main loop\n";
g_main_loop_quit(app->loop);
break;
default:
// g_print("got message %s\n",
// gst_message_type_get_name (GST_MESSAGE_TYPE (message)));
break;
}
return TRUE;
}
static const EGLint configAttribs[] = {
EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
EGL_BLUE_SIZE, 8,
EGL_GREEN_SIZE, 8,
EGL_RED_SIZE, 8,
EGL_DEPTH_SIZE, 8,
EGL_RENDERABLE_TYPE, EGL_OPENGL_BIT,
EGL_NONE
};
static const EGLint pbufferAttribs[] = {
EGL_WIDTH, WIDTH,
EGL_HEIGHT, HEIGHT,
EGL_NONE,
};
/*
* gst-launch-1.0 -v ximagesrc !
* nvvidconv !
* nvv4l2h264enc !
* h264parse !
* video/x-h264, stream-format=byte-stream !
* rtph264pay name=pay0 pt=96 !
* udpsink port=5000 host=192.168.1.176
*/
int main()
{
EGLDisplay egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
EGLint major, minor;
eglInitialize(egl_display, &major, &minor);
assert(eglGetError() == EGL_SUCCESS);
cerr << "EGL version: " << major << "." << minor << '\n';
EGLint numConfigs; EGLConfig eglCfg;
eglChooseConfig(egl_display, configAttribs, &eglCfg, 1, &numConfigs);
EGLSurface eglSurf = eglCreatePbufferSurface(egl_display, eglCfg, pbufferAttribs);
assert(eglGetError() == EGL_SUCCESS);
eglBindAPI(EGL_OPENGL_API);
EGLContext eglCtx = eglCreateContext(egl_display, eglCfg, EGL_NO_CONTEXT, nullptr);
assert(eglGetError() == EGL_SUCCESS);
eglMakeCurrent(egl_display, eglSurf, eglSurf, eglCtx);
assert(eglGetError() == EGL_SUCCESS);
gst_app_t* app = new gst_app_t();
GstBus *bus;
GstStateChangeReturn state_ret;
gst_init(nullptr, nullptr);
app->src = gst_element_factory_make("appsrc", "appsrc");
app->convert = gst_element_factory_make("nvvidconv", "nvvidconv");
app->encode = gst_element_factory_make("nvv4l2h264enc", "nvv4l2h264enc");
app->capsfilter = gst_element_factory_make("capsfilter", "capsfilter");
app->pay = gst_element_factory_make("rtph264pay", "rtph264pay");
app->udpsink = gst_element_factory_make("udpsink", "udpsink");
assert(app->src);
assert(app->convert);
assert(app->encode);
assert(app->capsfilter);
assert(app->pay);
assert(app->udpsink);
g_object_set (G_OBJECT (app->src),
"caps", gst_caps_new_simple ("video/x-raw",
"format", G_TYPE_STRING, "RGB",
"width", G_TYPE_INT, WIDTH,
"height", G_TYPE_INT, HEIGHT,
"framerate", GST_TYPE_FRACTION, 0, 1,
nullptr),
"stream-type", 0, // GST_APP_STREAM_TYPE_STREAM
"format", GST_FORMAT_TIME,
"is-live", TRUE,
nullptr);
g_object_set(G_OBJECT(app->capsfilter),
"caps", gst_caps_new_simple("video/x-h264",
"stream-format", G_TYPE_STRING, "byte-stream",
nullptr),
nullptr);
g_object_set(G_OBJECT(app->pay),
"name", G_TYPE_STRING, "pay0",
"pt", G_TYPE_INT, 96,
nullptr);
g_object_set(G_OBJECT(app->udpsink),
"port", G_TYPE_INT, 5000,
"host", "192.168.1.176",
nullptr);
app->pipeline = (GstPipeline*) gst_pipeline_new("test");
gst_bin_add_many(GST_BIN(app->pipeline),
app->src, app->convert, app->encode, app->capsfilter, app->pay, app->udpsink, nullptr);
if (not gst_element_link_many(app->src, app->convert, app->encode, app->capsfilter, app->pay, app->udpsink, nullptr)) {
gst_object_unref(app->pipeline);
delete app;
eglTerminate(egl_display);
throw runtime_error("Linking pipeline failed");
}
bus = gst_pipeline_get_bus(app->pipeline);
gst_bus_add_watch(bus, (GstBusFunc)bus_callback, &app);
gst_object_unref(bus);
g_signal_connect(app->src, "need-data", G_CALLBACK(start_feed), app);
g_signal_connect(app->src, "enough-data", G_CALLBACK(stop_feed), app);
green.resize(BUFFER_SIZE);
blue.resize(BUFFER_SIZE);
for (int i = 0; i < BUFFER_SIZE; i++) {
blue[i] = {0, 0, 0xff};
green[i] = {0, 0xff, 0};
}
state_ret = gst_element_set_state((GstElement*)app->pipeline, GST_STATE_PLAYING);
if (state_ret == GST_STATE_CHANGE_FAILURE) {
gst_object_unref(app->pipeline);
delete app;
eglTerminate(egl_display);
throw runtime_error("Could not change pipeline state to playing");
}
app->loop = g_main_loop_new(nullptr, FALSE);
cerr << "Running main loop\n";
g_main_loop_run(app->loop);
state_ret = gst_element_set_state((GstElement*)app->pipeline, GST_STATE_NULL);
gst_object_unref(app->pipeline);
delete app;
eglTerminate(egl_display);
return 0;
}
When I run on my normal machine (without nvidia plugins), I predictably fail as nvvidconvert cannot be made. However, when running on the jetson, I get a segfault. There seems to be a problem with the EGL library. I am linking against EGL and OpenGL. When running under GDB the segfault occurs at random lines but always after I initialise EGL and before I finish checking that all the elements were in fact created.
What am I doing wrong?