OpenCV RTSP stream

I need to receive data from a MIPI-CSI camera, process it, and send it over Ethernet.
I tried this as a reference:

But I get low FPS and high CPU usage. What is the problem? Can I use V4L2 in VideoCapture instead of GStreamer, and will this lower CPU usage?
I slightly edited the code from the reference:

// g++ opencv-gst-rtsp.cpp $(pkg-config --cflags --libs glib-2.0) $(pkg-config --cflags --libs gstreamer-1.0 gstreamer-rtsp-server-1.0) -o opencv_gst_rtsp $(pkg-config --cflags --libs opencv4)

#include <gst/gst.h>
#include <gst/gstmessage.h>
#include <gst/rtsp-server/rtsp-server.h>

#include <glib.h>
#include <gst/app/app.h>

#include <opencv2/opencv.hpp>
#include <cstdint>
#include <string>

#define W 1920
#define H 1080
#define FPS 30

typedef struct
{
    cv::VideoCapture *cap;
    cv::Mat *lastFrame;
    int *numberFrames;
    GstClockTime timestamp;
} MyContext;

// should be private data of a C++ class
int numberFrames = 0;
cv::Mat lastFrame;

// std::string launchString = "nvarguscamerasrc ! video/x-raw(memory:NVMM), width=(int)" +
//     std::to_string(W) + ", height=(int)" +
//     std::to_string(H) + ", format=(string)NV12, framerate=(fraction)" +
//     std::to_string(FPS) + "/1 ! nvvidconv flip-method=" +
//     std::to_string(0) + " ! video/x-raw, width=(int)" +
//     std::to_string(W) + ", height=(int)" +
//     std::to_string(H) + ", format=(string)BGRx" +
//     " ! videoconvert ! video/x-raw, format=(string)BGR ! appsink sync=false";
// cv::VideoCapture cap = cv::VideoCapture(launchString, cv::CAP_GSTREAMER);

// open device 0 with the V4L2 backend (cv::CAP_V4L == 200)
cv::VideoCapture cap = cv::VideoCapture(0 + 200);

/* called when we need to give data to appsrc */
static void
need_data (GstElement * appsrc, guint unused, MyContext * ctx)
{
    // named property constants instead of magic numbers 3 / 4; setting
    // these on every need-data callback is redundant, once at startup is enough
    ctx->cap->set(cv::CAP_PROP_FRAME_WIDTH, W);
    ctx->cap->set(cv::CAP_PROP_FRAME_HEIGHT, H);
    if (ctx->cap->isOpened() && ctx->cap->read(*(ctx->lastFrame))) {

        GstBuffer *buffer;
        uint64_t size = W * H * 4; // image size * depth of BGRx
        GstFlowReturn ret;
        buffer = gst_buffer_new_allocate (NULL, size, NULL);
        GstMapInfo map;
        gint8 *raw;

        gst_buffer_map (buffer, &map, GST_MAP_WRITE); // make buffer writable
        raw = (gint8 *)map.data;

        for (int i = 0; i<H; i++) {
            cv::Vec3b* ptr = ctx->lastFrame->ptr<cv::Vec3b>(i);
            for (int j = 0; j<W; j++) {
                uint64_t offset = ((i*W)+j)*4;
                raw[offset] = ptr[j][0];
                raw[offset+1] = ptr[j][1];
                raw[offset+2] = ptr[j][2];
                raw[offset+3] = 127;
            }
        }
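        /* A likely cheaper alternative to the per-pixel loop above (a sketch,
         * assuming lastFrame is continuous 8-bit BGR): let OpenCV's vectorized
         * cvtColor produce the fourth channel, then copy the whole plane in
         * one memcpy:
         *
         *     cv::Mat bgra;
         *     cv::cvtColor(*(ctx->lastFrame), bgra, cv::COLOR_BGR2BGRA);
         *     memcpy(raw, bgra.data, (size_t)W * H * 4);
         */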
        gst_buffer_unmap (buffer, &map);

        /* increment the timestamp every 1/FPS second */
        GST_BUFFER_PTS (buffer) = ctx->timestamp;
        GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, FPS);
        ctx->timestamp += GST_BUFFER_DURATION (buffer);

        g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
        gst_buffer_unref (buffer);
    }
}

/* called when a new media pipeline is constructed. We can query the
 * pipeline and configure our appsrc */
static void
media_configure (GstRTSPMediaFactory * factory, GstRTSPMedia * media, gpointer user_data)
{
    GstElement *element, *appsrc;
    MyContext *ctx;

    /* get the element used for providing the streams of the media */
    element = gst_rtsp_media_get_element (media);

    /* get our appsrc, we named it 'mysrc' with the name property */
    appsrc = gst_bin_get_by_name_recurse_up (GST_BIN (element), "mysrc");

    /* this instructs appsrc that we will be dealing with timed buffers */
    gst_util_set_object_arg (G_OBJECT (appsrc), "format", "time");

    /* configure the caps of the video */
    g_object_set (G_OBJECT (appsrc), "caps",
        gst_caps_new_simple ("video/x-raw",
            "format", G_TYPE_STRING, "BGRx",
            "width", G_TYPE_INT, W,
            "height", G_TYPE_INT, H,
            "framerate", GST_TYPE_FRACTION, FPS, 1, NULL), NULL);

    ctx = g_new0 (MyContext, 1);
    ctx->timestamp = 0;
    ctx->cap = &cap;
    ctx->lastFrame = &lastFrame;
    /* should be incremented once on each frame for timestamping */
    ctx->numberFrames = &numberFrames;

    /* make sure the data is freed when the media is gone */
    g_object_set_data_full (G_OBJECT (media), "my-extra-data", ctx, (GDestroyNotify) g_free);

    /* install the callback that will be called when a buffer is needed */
    g_signal_connect (appsrc, "need-data", (GCallback) need_data, ctx);
    gst_object_unref (appsrc);
    gst_object_unref (element);
}

int main (int argc, char *argv[])
{
    GMainLoop *loop;
    GstRTSPServer *server;
    GstRTSPMountPoints *mounts;
    GstRTSPMediaFactory *factory;

    gst_init (&argc, &argv);

    loop = g_main_loop_new (NULL, FALSE);

    /* create a server instance */
    server = gst_rtsp_server_new ();

    /* get the mount points for this server, every server has a default object
     * that is used to map uri mount points to media factories */
    mounts = gst_rtsp_server_get_mount_points (server);

    /* make a media factory for a test stream. The default media factory can use
     * gst-launch syntax to create pipelines.
     * any launch line works as long as it contains elements named pay%d. Each
     * element with pay%d names will be a stream */
    factory = gst_rtsp_media_factory_new ();
    gst_rtsp_media_factory_set_launch (factory,
        "( appsrc name=mysrc is-live=true ! videoconvert ! omxh264enc ! rtph264pay mtu=1400 name=pay0 pt=96 )");
    gst_rtsp_media_factory_set_shared (factory, TRUE);

    /* notify when our media is ready. This is called whenever someone asks for
     * the media and a new pipeline with our appsrc is created */
    g_signal_connect (factory, "media-configure", (GCallback) media_configure, NULL);

    /* attach the test factory to the /test url */
    gst_rtsp_mount_points_add_factory (mounts, "/test", factory);

    /* don't need the ref to the mounts anymore */
    g_object_unref (mounts);

    /* attach the server to the default maincontext */
    gst_rtsp_server_attach (server, NULL);

    /* start serving */
    g_print ("stream ready at rtsp://127.0.0.1:8554/test\n");
    g_main_loop_run (loop);

    return 0;
}

But I get this:

stream ready at rtsp://127.0.0.1:8554/test
Framerate set to : 30 at NvxVideoEncoderSetParameter
NvMMLiteOpen : Block : BlockType = 4
===== NVMEDIA: NVENC =====
NvMMLiteBlockCreate : Block : BlockType = 4
H264: Profile = 66, Level = 40
[ WARN:0] global /home/niyaz/opencv/modules/videoio/src/cap_v4l.cpp (1001) tryIoctl VIDEOIO(V4L2:/dev/video0): select() timeout.
[ WARN:0] global /home/niyaz/opencv/modules/videoio/src/cap_v4l.cpp (1001) tryIoctl VIDEOIO(V4L2:/dev/video0): select() timeout.

Is it this code that doesn't work?:
cv::VideoCapture cap = cv::VideoCapture(0 + 200);

Hi,
On Jetson Nano we would suggest running a GStreamer command and handling/processing the NVMM buffer from source to sink. Please refer to the steps for launching RTSP through test-launch in the Jetson Nano FAQ.
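For reference, a typical test-launch invocation would look roughly like this (a sketch based on the FAQ pattern, not verified on your setup; the encoder element, e.g. nvv4l2h264enc vs. omxh264enc, depends on your L4T release):

    ./test-launch "nvarguscamerasrc ! video/x-raw(memory:NVMM),width=1920,height=1080,framerate=30/1 ! nvv4l2h264enc ! h264parse ! rtph264pay name=pay0 pt=96"

This keeps frames in NVMM memory from the sensor to the encoder, so no CPU-side pixel copies or videoconvert are involved.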

Using OpenCV functions takes a certain amount of CPU. For optimal performance, please execute sudo nvpmodel -m 0 and sudo jetson_clocks to run the CPU cores at their maximum clock.
