In case it helps anyone, I used the appsrc.c example from GstRtspServer to write this code:
#include "gstreamer-1.0/gst/gst.h"
#include "gstreamer-1.0/gst/gstmessage.h"
#include "gstreamer-1.0/gst/rtsp-server/rtsp-server.h"
#include "glib-2.0/glib.h"
#include <gstreamer-1.0/gst/app/app.h>
#include <opencv2/opencv.hpp>
#include <iostream>
#include <string>
#define W 1920
#define H 1080
#define FPS 30
/* Per-media context handed to the appsrc "need-data" callback.
 * Holds pointers to the (currently global) capture state plus the
 * running PTS used to timestamp outgoing buffers. */
typedef struct
{
cv::VideoCapture *cap; /* camera capture pipeline (opened with a GStreamer launch string) */
cv::Mat *lastFrame; /* scratch frame the capture is decoded into on each need-data call */
int *numberFrames; /* frame counter — NOTE(review): never incremented in this file; confirm intent */
GstClockTime timestamp; /* running presentation timestamp, advanced by 1/FPS per pushed buffer */
} MyContext;
/* Global capture state — should be private data of a C++ class.
 * NOTE(review): these are non-trivially-constructed globals; `cap` opens the
 * camera during static initialization, before main()/gst_init() run — confirm
 * this ordering is acceptable on the target platform. */
int numberFrames = 0;
cv::Mat lastFrame;
/* nvarguscamerasrc capture pipeline (Jetson): NV12 from the camera, flipped by
 * nvvidconv, converted to BGR for OpenCV consumption via appsink. */
std::string launchString = "nvarguscamerasrc ! video/x-raw(memory:NVMM), width=(int)" +
std::to_string(W) + ", height=(int)" +
std::to_string(H) + ", format=(string)NV12, framerate=(fraction)" +
std::to_string(FPS) + "/1 ! nvvidconv flip-method=" +
std::to_string(0) + " ! video/x-raw, width=(int)" +
std::to_string(W) + ", height=(int)" +
std::to_string(H) + ", format=(string)BGRx"
" ! videoconvert ! video/x-raw, format=(string)BGR ! appsink sync=false";
cv::VideoCapture cap = cv::VideoCapture(launchString, cv::CAP_GSTREAMER);
/* called when we need to give data to appsrc */
/* Called when appsrc needs data: grab one frame from the capture, expand
 * BGR -> BGRx into a new GstBuffer, timestamp it, and push it to appsrc.
 *
 * Fixes vs. original:
 *  - raw pointer is guint8* (was gint8*): pixel values are 0..255, and
 *    storing values > 127 into a signed 8-bit type is implementation-defined.
 *  - gst_buffer_map() result is checked before writing through map.data.
 *  - the GstFlowReturn from "push-buffer" is no longer silently ignored.
 *  - the per-pixel offset is computed in gsize to avoid int overflow on
 *    large resolutions. */
static void
need_data (GstElement * appsrc, guint unused, MyContext * ctx)
{
  if (!ctx->cap->isOpened ())
    return;
  if (!ctx->cap->read (*(ctx->lastFrame)))
    return;

  const gsize size = (gsize) W * H * 4;       /* image size * depth of BGRx */
  GstBuffer *buffer = gst_buffer_new_allocate (NULL, size, NULL);

  GstMapInfo map;
  if (!gst_buffer_map (buffer, &map, GST_MAP_WRITE)) {
    g_printerr ("need_data: failed to map buffer for writing\n");
    gst_buffer_unref (buffer);
    return;
  }

  guint8 *raw = map.data;
  for (int i = 0; i < H; i++) {
    const cv::Vec3b *ptr = ctx->lastFrame->ptr<cv::Vec3b> (i);
    for (int j = 0; j < W; j++) {
      gsize offset = ((gsize) i * W + j) * 4;
      raw[offset] = ptr[j][0];                /* B */
      raw[offset + 1] = ptr[j][1];            /* G */
      raw[offset + 2] = ptr[j][2];            /* R */
      raw[offset + 3] = 127;                  /* 'x' padding byte, ignored by BGRx consumers */
    }
  }
  gst_buffer_unmap (buffer, &map);

  /* increment the timestamp by 1/FPS second per frame */
  GST_BUFFER_PTS (buffer) = ctx->timestamp;
  GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, FPS);
  ctx->timestamp += GST_BUFFER_DURATION (buffer);

  GstFlowReturn ret;
  g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
  gst_buffer_unref (buffer);
  if (ret != GST_FLOW_OK)
    g_printerr ("need_data: push-buffer returned %s\n", gst_flow_get_name (ret));
}
/* Called when a new media pipeline is constructed. We can query the
 * pipeline and configure our appsrc.
 *
 * Fix vs. original: gst_bin_get_by_name_recurse_up() returns NULL when no
 * element named "mysrc" exists; the original dereferenced it unconditionally
 * and would crash if the launch line ever changed. */
static void
media_configure (GstRTSPMediaFactory * factory, GstRTSPMedia * media, gpointer user_data)
{
  GstElement *element, *appsrc;
  MyContext *ctx;

  /* get the element used for providing the streams of the media */
  element = gst_rtsp_media_get_element (media);

  /* get our appsrc, we named it 'mysrc' with the name property */
  appsrc = gst_bin_get_by_name_recurse_up (GST_BIN (element), "mysrc");
  if (appsrc == NULL) {
    g_printerr ("media_configure: no element named 'mysrc' found in pipeline\n");
    gst_object_unref (element);
    return;
  }

  /* this instructs appsrc that we will be dealing with timed buffers */
  gst_util_set_object_arg (G_OBJECT (appsrc), "format", "time");

  /* configure the caps of the video */
  g_object_set (G_OBJECT (appsrc), "caps",
      gst_caps_new_simple ("video/x-raw",
          "format", G_TYPE_STRING, "BGRx",
          "width", G_TYPE_INT, W,
          "height", G_TYPE_INT, H,
          "framerate", GST_TYPE_FRACTION, FPS, 1, NULL), NULL);

  /* per-media context; timestamp starts at 0 and is advanced in need_data() */
  ctx = g_new0 (MyContext, 1);
  ctx->timestamp = 0;
  ctx->cap = &cap;
  ctx->lastFrame = &lastFrame;
  ctx->numberFrames = &numberFrames;

  /* make sure the data is freed when the media is gone */
  g_object_set_data_full (G_OBJECT (media), "my-extra-data", ctx, (GDestroyNotify) g_free);

  /* install the callback that will be called when a buffer is needed */
  g_signal_connect (appsrc, "need-data", (GCallback) need_data, ctx);
  gst_object_unref (appsrc);
  gst_object_unref (element);
}
int main (int argc, char *argv[])
{
GMainLoop *loop;
GstRTSPServer *server;
GstRTSPMountPoints *mounts;
GstRTSPMediaFactory *factory;
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* create a server instance */
server = gst_rtsp_server_new ();
/* get the mount points for this server, every server has a default object
* that be used to map uri mount points to media factories */
mounts = gst_rtsp_server_get_mount_points (server);
/* make a media factory for a test stream. The default media factory can use
* gst-launch syntax to create pipelines.
* any launch line works as long as it contains elements named pay%d. Each
* element with pay%d names will be a stream */
factory = gst_rtsp_media_factory_new ();
gst_rtsp_media_factory_set_launch (factory,
"( appsrc name=mysrc is-live=true ! videoconvert ! omxh265enc ! rtph265pay mtu=1400 name=pay0 pt=96 )");
gst_rtsp_media_factory_set_shared (factory, TRUE);
/* notify when our media is ready, This is called whenever someone asks for
* the media and a new pipeline with our appsrc is created */
g_signal_connect (factory, "media-configure", (GCallback) media_configure, NULL);
/* attach the test factory to the /test url */
gst_rtsp_mount_points_add_factory (mounts, "/test", factory);
/* don't need the ref to the mounts anymore */
g_object_unref (mounts);
/* attach the server to the default maincontext */
gst_rtsp_server_attach (server, NULL);
/* start serving */
g_print ("stream ready at rtsp://127.0.0.1:8554/test\n");
g_main_loop_run (loop);
return 0;
}
All of this code can be put in a C++ class, and the GMainLoop can run in a separate thread.
With this code, it is possible to send cv::Mat data to an RTSP stream, so any processing can be applied to the frames before they are streamed.
I think there is still a lot of room for optimization in all of this, but it is a working base.