Problem with C++ GStreamer pipeline

My final application will render to a texture, which I want to stream using GStreamer's RTP plugins. The Jetson Nano will be running headless and streaming to a Surface Pro.

I managed to get the following pipelines working (while the Jetson was plugged into a screen).

Jetson:

gst-launch-1.0 -v ximagesrc ! nvvidconv ! nvv4l2h264enc ! h264parse ! video/x-h264, stream-format=byte-stream ! rtph264pay name=pay0 pt=96 ! udpsink port=5000 host=192.168.1.176

Surface pro:

gst-launch-1.0 -v udpsrc port=5000 caps='application/x-rtp, media=(string)video, encoding-name=(string)H264, payload=(int)96, framerate=(fraction)25/1' ! queue  ! rtph264depay ! h264parse ! queue  ! avdec_h264 ! queue  ! videoscale  ! videoconvert  ! ximagesink sync=false async=false -e

I now want to move away from gst-launch on the Nano and write the following pipeline in C++:

appsrc !
    nvvidconv !
    nvv4l2h264enc !
    h264parse !
    video/x-h264, stream-format=byte-stream !
    rtph264pay name=pay0 pt=96 !
    udpsink port=5000 host=192.168.1.176

I have the following code which should alternate between a blue and green screen.

#include <array>
#include <cassert>
#include <functional>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <stdexcept>
#include <string>
#include <vector>
#include <iostream>
#include <EGL/egl.h>

using namespace std;
using namespace std::placeholders;

#define all(n) (n).begin(), (n).end()

typedef struct {
  GstPipeline *pipeline;
  GstElement  *src, *convert, *encode, *capsfilter, *pay, *udpsink;
  GMainLoop *loop;
  guint sourceid;
  FILE *file;
} gst_app_t;

#define WIDTH 384
#define HEIGHT 288
#define CHANNELS 3
#define BUFFER_SIZE ((WIDTH)*(HEIGHT)*(CHANNELS))

vector<array<unsigned char, 3>> green;
vector<array<unsigned char, 3>> blue;


static gboolean read_data(gst_app_t *app)
{
    static gboolean use_green = FALSE;
    static GstClockTime timestamp = 0;
    GstBuffer *buffer;
    guint size;
    GstFlowReturn ret;

    // Bytes per frame: one 3-byte RGB pixel per vector element.
    size = green.size() * CHANNELS;

    buffer = gst_buffer_new_wrapped_full((GstMemoryFlags) 0, (gpointer)(use_green ? green.data() : blue.data()), size, 0, size, NULL, NULL);

    use_green = !use_green;

    GST_BUFFER_PTS (buffer) = timestamp;
    GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 1);

    timestamp += GST_BUFFER_DURATION (buffer);

    ret = gst_app_src_push_buffer((GstAppSrc*) app->src, buffer);

    if (ret != GST_FLOW_OK) {
      g_warning("pushing buffer fucked up");
       g_main_loop_quit (app->loop);
    }
    return TRUE;
}

static void start_feed (GstElement *source, guint size, gst_app_t *app)
{
  assert(source); assert(size);
  if (app->sourceid == 0) {
    GST_DEBUG ("start feeding");
    app->sourceid = g_idle_add ((GSourceFunc) read_data, app);
  }
}

static void stop_feed (GstElement *source, gst_app_t *app)
{
  assert(source);
  if (app->sourceid != 0) {
    GST_DEBUG ("stop feeding");
    g_source_remove (app->sourceid);
    app->sourceid = 0;
  }
}

static gboolean bus_callback(GstBus *bus, GstMessage *message, gpointer ptr)
{
  assert(bus); assert(ptr);
  gst_app_t *app = (gst_app_t*)ptr;

  switch(GST_MESSAGE_TYPE(message)){

  case GST_MESSAGE_ERROR:{
    gchar *debug;
    GError *err;

    gst_message_parse_error(message, &err, &debug);
    cerr << "Error from " << message->src << ": " << err->message << '\n'
         << "\tdebug info: " << debug << '\n';
    g_error_free(err);
    g_free(debug);
    g_main_loop_quit(app->loop);
  }
    break;

  case GST_MESSAGE_EOS:
    cerr << "End of stream, quitting main loop\n";
    g_main_loop_quit(app->loop);
    break;

  default:
//    g_print("got message %s\n",
//            gst_message_type_get_name (GST_MESSAGE_TYPE (message)));
    break;
  }

  return TRUE;
}

static const EGLint configAttribs[] = {
    EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
    EGL_BLUE_SIZE, 8,
    EGL_GREEN_SIZE, 8,
    EGL_RED_SIZE, 8,
    EGL_DEPTH_SIZE, 8,
    EGL_RENDERABLE_TYPE, EGL_OPENGL_BIT,
    EGL_NONE
};

static const EGLint pbufferAttribs[] = {
    EGL_WIDTH, WIDTH,
    EGL_HEIGHT, HEIGHT,
    EGL_NONE,
};


/*
 * Pipeline being constructed:
 *    appsrc !
 *    nvvidconv !
 *    nvv4l2h264enc !
 *    h264parse !
 *    video/x-h264, stream-format=byte-stream !
 *    rtph264pay name=pay0 pt=96 !
 *    udpsink port=5000 host=192.168.1.176
 */

int main()
{
  EGLDisplay egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);

  EGLint major, minor;

  eglInitialize(egl_display, &major, &minor);

  assert(eglGetError() == EGL_SUCCESS);

  cerr << "EGL version: " << major << "." << minor << '\n';

  EGLint numConfigs; EGLConfig eglCfg;

  eglChooseConfig(egl_display, configAttribs, &eglCfg,  1, &numConfigs);

  EGLSurface eglSurf = eglCreatePbufferSurface(egl_display, eglCfg, pbufferAttribs);

  assert(eglGetError() == EGL_SUCCESS);

  eglBindAPI(EGL_OPENGL_API);

  EGLContext eglCtx = eglCreateContext(egl_display, eglCfg, EGL_NO_CONTEXT, nullptr);

  assert(eglGetError() == EGL_SUCCESS);

  eglMakeCurrent(egl_display, eglSurf, eglSurf, eglCtx);

  assert(eglGetError() == EGL_SUCCESS);

  gst_app_t* app = new gst_app_t();
  GstBus *bus;
  GstStateChangeReturn state_ret;

  gst_init(nullptr, nullptr);

  app->src        = gst_element_factory_make("appsrc", "appsrc");
  app->convert    = gst_element_factory_make("nvvidconv", "nvvidconv");
  app->encode     = gst_element_factory_make("nvv4l2h264enc", "nvv4l2h264enc");
  app->capsfilter = gst_element_factory_make("capsfilter", "capsfilter");
  app->pay        = gst_element_factory_make("rtph264pay", "rtph264pay");
  app->udpsink    = gst_element_factory_make("udpsink", "udpsink");

  assert(app->src);
  assert(app->convert);
  assert(app->encode);
  assert(app->capsfilter);
  assert(app->pay);
  assert(app->udpsink);

  g_object_set (G_OBJECT (app->src),
                "caps", gst_caps_new_simple ("video/x-raw",
                                             "format", G_TYPE_STRING, "RGB",
                                             "width", G_TYPE_INT, WIDTH,
                                             "height", G_TYPE_INT, HEIGHT,
                                             "framerate", GST_TYPE_FRACTION, 0, 1,
                                             nullptr),
                "stream-type", 0, // GST_APP_STREAM_TYPE_STREAM
                "format", GST_FORMAT_TIME,
                "is-live", TRUE,
                nullptr);

  g_object_set(G_OBJECT(app->capsfilter),
               "caps", gst_caps_new_simple("video/x-h264",
                                           "stream-format", G_TYPE_STRING, "byte-stream",
                                           nullptr),
               nullptr);

  g_object_set(G_OBJECT(app->pay),
               "name", "pay0",
               "pt", 96,
               nullptr);

  g_object_set(G_OBJECT(app->udpsink),
               "port", 5000,
               "host", "192.168.1.176",
               nullptr);


  app->pipeline = (GstPipeline*) gst_pipeline_new("test");

  gst_bin_add_many(GST_BIN(app->pipeline),
                   app->src, app->convert, app->encode, app->capsfilter, app->pay, app->udpsink,  nullptr);

  if (not gst_element_link_many(app->src, app->convert, app->encode, app->capsfilter, app->pay, app->udpsink,  nullptr)) {
    gst_object_unref(app->pipeline);
    delete app;
    eglTerminate(egl_display);
    throw runtime_error("Linking pipeline failed");
  }

  bus = gst_pipeline_get_bus(app->pipeline);
  gst_bus_add_watch(bus, bus_callback, app);
  gst_object_unref(bus);

  g_signal_connect(app->src, "need-data", G_CALLBACK(start_feed), app);
  g_signal_connect(app->src, "enough-data", G_CALLBACK(stop_feed), app);

  // One vector element per pixel, so size by pixel count, not byte count.
  green.resize(WIDTH * HEIGHT);
  blue.resize(WIDTH * HEIGHT);
  for (int i = 0; i < WIDTH * HEIGHT; i++) {
    blue[i] = {0, 0, 0xff};
    green[i] = {0, 0xff, 0};
  }

  state_ret = gst_element_set_state((GstElement*)app->pipeline, GST_STATE_PLAYING);
  if (state_ret == GST_STATE_CHANGE_FAILURE) {
    gst_object_unref(app->pipeline);
    delete app;
    eglTerminate(egl_display);
    throw runtime_error("Could not change pipeline state to playing");
  }

  app->loop = g_main_loop_new(nullptr, FALSE);
  cerr << "Running main loop\n";
  g_main_loop_run(app->loop);

  state_ret = gst_element_set_state((GstElement*)app->pipeline, GST_STATE_NULL);

  gst_object_unref(app->pipeline);
  delete app;

  eglTerminate(egl_display);

  return 0;
}

When I run this on my normal machine (without the NVIDIA plugins), it predictably fails because nvvidconv cannot be created. However, when running on the Jetson, I get a segfault. There seems to be a problem with the EGL library. I am linking against EGL and OpenGL. When running under GDB, the segfault occurs at random lines, but always after I initialise EGL and before I finish checking that all the elements were in fact created.

What am I doing wrong?

Hi,
Please refer to the following samples:
What is maximum video encoding resolution in pixels? - #11 by DaneLLL
GStreamer freeze when using qtmux and NVIDIA-accelerated h264/h265 encoding - #7 by DaneLLL

There are also samples in the DeepStream SDK:

/opt/nvidia/deepstream/deepstream-5.1/sources/apps/sample_apps

Could you be more specific? eglGetDisplay keeps returning EGL_NO_DISPLAY. I don't see how the first example is helpful, and I've looked through the code samples in the DeepStream SDK without spotting what I'm doing differently.

Running:

#include <cassert>
#include <functional>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <glib.h>
#include <stdexcept>
#include <string>
#include <vector>
#include <iostream>
#include <EGL/egl.h>
#include <cuda_runtime_api.h>

using namespace std;
using namespace std::placeholders;

#define all(n) (n).begin(), (n).end()

int main()
{
  EGLDisplay egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);

  assert(egl_display != EGL_NO_DISPLAY);

  return 0;
}

Running the above when linked against OpenGL, EGL and CUDA results in:
int main(): Assertion `egl_display != EGL_NO_DISPLAY' failed.
Aborted (core dumped)

Hi,
The sample launches the GStreamer pipeline through gst_parse_launch(). Please check whether you can call it to launch:

gst-launch-1.0 -v ximagesrc ! nvvidconv ! nvv4l2h264enc ! h264parse ! video/x-h264, stream-format=byte-stream ! rtph264pay name=pay0 pt=96 ! udpsink port=5000 host=192.168.1.176
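
A minimal C++ sketch of launching that same pipeline string through gst_parse_launch() (the pipeline string and host address are copied from the command above; the rest is generic GStreamer boilerplate, not code from this thread) could look like:

#include <gst/gst.h>

int main(int argc, char *argv[])
{
  gst_init(&argc, &argv);

  // Same pipeline string as the gst-launch-1.0 command above.
  GError *error = nullptr;
  GstElement *pipeline = gst_parse_launch(
      "ximagesrc ! nvvidconv ! nvv4l2h264enc ! h264parse ! "
      "video/x-h264, stream-format=byte-stream ! "
      "rtph264pay name=pay0 pt=96 ! udpsink port=5000 host=192.168.1.176",
      &error);
  if (!pipeline) {
    g_printerr("Parse error: %s\n", error->message);
    g_clear_error(&error);
    return 1;
  }

  gst_element_set_state(pipeline, GST_STATE_PLAYING);

  // Block until an error or end-of-stream message is posted on the bus.
  GstBus *bus = gst_element_get_bus(pipeline);
  GstMessage *msg = gst_bus_timed_pop_filtered(
      bus, GST_CLOCK_TIME_NONE,
      (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
  if (msg)
    gst_message_unref(msg);
  gst_object_unref(bus);

  gst_element_set_state(pipeline, GST_STATE_NULL);
  gst_object_unref(pipeline);
  return 0;
}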

That will fail, as I'm now running and testing headless; I haven't started an X server for ximagesrc to capture from. The error message is:

nvbuf_utils: Could not get EGL display connection
Setting pipeline to PAUSED ...
Opening in BLOCKING MODE
Opening in BLOCKING MODE 
ERROR: Pipeline doesn't want to pause.
ERROR: from element /GstPipeline:pipeline0/GstXImageSrc:ximagesrc0: Could not open X display for reading
Additional debug info:
gstximagesrc.c(160): gst_ximage_src_open_display (): /GstPipeline:pipeline0/GstXImageSrc:ximagesrc0:
NULL returned from getting xcontext
Setting pipeline to NULL ...
Freeing pipeline ...

However, a modified pipeline runs and streams as expected. I swapped the ximagesrc for a videotestsrc:

gst-launch-1.0 -v videotestsrc ! nvvidconv ! nvv4l2h264enc ! h264parse ! video/x-h264, stream-format=byte-stream ! rtph264pay name=pay0 pt=96 ! udpsink port=5000 host=192.168.1.176

I have also run the following snippet, and it detects no devices:

#define EGL_EGLEXT_PROTOTYPES
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <iostream>

using namespace std;

#define all(n) (n).begin(), (n).end()

int main()
{
  static const int MAX_DEVICES = 1000;
  EGLDeviceEXT eglDevs[MAX_DEVICES];
  EGLint numDevices;

  // eglQueryDevicesEXT comes from EGL_EXT_device_enumeration; the pointer
  // returned by eglGetProcAddress may be null if the extension is missing.
  PFNEGLQUERYDEVICESEXTPROC eglQueryDevicesEXT =
    (PFNEGLQUERYDEVICESEXTPROC)
    eglGetProcAddress("eglQueryDevicesEXT");

  if (!eglQueryDevicesEXT) {
    cerr << "eglQueryDevicesEXT not available\n";
    return 1;
  }

  eglQueryDevicesEXT(MAX_DEVICES, eglDevs, &numDevices);

  cerr << "Detected " << numDevices << " devices\n";

  return 0;
}
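
For reference, once devices do enumerate, the usual headless route is EGL_EXT_platform_device: open a display directly on one of the enumerated devices with eglGetPlatformDisplayEXT instead of eglGetDisplay(EGL_DEFAULT_DISPLAY). A minimal sketch, assuming the relevant extensions are available:

#define EGL_EGLEXT_PROTOTYPES
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <iostream>

int main()
{
  PFNEGLQUERYDEVICESEXTPROC queryDevices =
    (PFNEGLQUERYDEVICESEXTPROC) eglGetProcAddress("eglQueryDevicesEXT");
  PFNEGLGETPLATFORMDISPLAYEXTPROC getPlatformDisplay =
    (PFNEGLGETPLATFORMDISPLAYEXTPROC) eglGetProcAddress("eglGetPlatformDisplayEXT");
  if (!queryDevices || !getPlatformDisplay) {
    std::cerr << "required EGL device extensions missing\n";
    return 1;
  }

  EGLDeviceEXT device;
  EGLint numDevices = 0;
  queryDevices(1, &device, &numDevices);
  if (numDevices == 0) {
    std::cerr << "no EGL devices found\n";
    return 1;
  }

  // Open a display on the device itself; no windowing system is involved.
  EGLDisplay display = getPlatformDisplay(EGL_PLATFORM_DEVICE_EXT, device, nullptr);
  if (display == EGL_NO_DISPLAY) {
    std::cerr << "eglGetPlatformDisplayEXT failed\n";
    return 1;
  }

  EGLint major = 0, minor = 0;
  if (!eglInitialize(display, &major, &minor)) {
    std::cerr << "eglInitialize failed\n";
    return 1;
  }
  std::cerr << "EGL " << major << "." << minor << " via device platform\n";
  eglTerminate(display);
  return 0;
}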

Hi,
Generally, the ximagesrc plugin is used for capturing the screen; it is a bit strange to run it in headless mode. Could you share your use case in more detail? For your use case, you should probably use a different source plugin, or use jetson_multimedia_api instead of GStreamer.

I want to take an OpenGL texture (the output of an OpenGL compute shader) and send it over H.264 to another device, which will use that texture as part of what it displays to the user (so the client will have to run a GStreamer pipeline which unpacks the output of the stream into a texture).

Initially I wanted to see if I could just get the stream working with ximagesrc, to see how it coped with a highly dynamic video stream. Once I was happy with that, I wanted to switch to appsrc (in C++ code) to make sure I could still get the stream going, but now loading my texture into the buffer passed to appsrc through a call to glGetTexImage (sketched below). The next stage will be using a plugin which allows me to skip the glGetTexImage call entirely and keep the data on the GPU. If you can help me with constructing that pipeline, as well as the one I'd run on the client, I'd greatly appreciate it.
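
A minimal sketch of that glGetTexImage step, assuming a desktop OpenGL context, the WIDTH/HEIGHT/CHANNELS defines from my code above, and an appsrc already configured with matching RGB caps (the helper name push_texture_frame is purely illustrative):

#include <GL/gl.h>
#include <gst/app/gstappsrc.h>

// Read an RGB texture back from the GPU and hand it to appsrc.
static void push_texture_frame(GstAppSrc *appsrc, GLuint texture,
                               GstClockTime pts, GstClockTime duration)
{
  const gsize size = WIDTH * HEIGHT * CHANNELS;
  GstBuffer *buffer = gst_buffer_new_allocate(nullptr, size, nullptr);

  GstMapInfo map;
  gst_buffer_map(buffer, &map, GST_MAP_WRITE);
  glBindTexture(GL_TEXTURE_2D, texture);
  // Copy the texture contents into the CPU-side GstBuffer.
  glGetTexImage(GL_TEXTURE_2D, 0, GL_RGB, GL_UNSIGNED_BYTE, map.data);
  gst_buffer_unmap(buffer, &map);

  GST_BUFFER_PTS(buffer) = pts;
  GST_BUFFER_DURATION(buffer) = duration;

  // appsrc takes ownership of the buffer.
  gst_app_src_push_buffer(appsrc, buffer);
}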

Since starting to ask questions, I've realised I can run a headless X server which is still hardware accelerated, so I'm currently doing that, but it doesn't seem to have sorted my issue. I still crash when I run app->src = gst_element_factory_make("appsrc", "appsrc");. Calling gst_element_factory_make on the other elements in the pipeline seems to be fine.
