Hi,
It still doesn’t work because of the same problem:
nvbufsurface: mapping of memory type (0) not supported
Is this an environment problem? I’m working on a P4 with Ubuntu 18.04, and I think my code should be fine.
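Should I be forcing the buffer memory type somewhere? Below is a minimal sketch of what I understand for dGPU, i.e. requesting CUDA unified memory so that NvBufSurfaceMap can map the surface for CPU access. The property name and enum value are taken from the standard nvstreammux/nvvideoconvert elements and are my assumption; I haven’t verified this on my setup:
/* Sketch only (assumed, not verified): ask the DeepStream elements for
* CUDA unified memory (3 = NVBUF_MEM_CUDA_UNIFIED) so that surfaces can be
* mapped for CPU access on dGPU. */
g_object_set (G_OBJECT (streammux), "nvbuf-memory-type", 3, NULL);
g_object_set (G_OBJECT (nvvidconv), "nvbuf-memory-type", 3, NULL);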
This is my code:
#include <gst/gst.h>
#include <gst/rtsp/gstrtsptransport.h>
#include <glib.h>
#include <stdio.h>
#include "gstnvdsmeta.h"
#include <opencv2/core.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/imgproc.hpp>
#include <zmq.hpp>
#include <stdlib.h>
#include "nvbufsurface.h"
/* The muxer output resolution must be set if the input streams will be of
* different resolution. The muxer will scale all the input frames to this
* resolution. */
#define MUXER_OUTPUT_WIDTH 1920
#define MUXER_OUTPUT_HEIGHT 1080
/* Muxer batch formation timeout in microseconds (4000000 = 4 s here). Should
* ideally be set based on the fastest source's framerate. */
#define MUXER_BATCH_TIMEOUT_USEC 4000000
#define MEMORY_FEATURES "memory:NVMM"
gint frame_number = 0;
gint frame_rate = 3;
gint frame_interval = 25 / frame_rate;
gint jpeg_quality = 95;
//zmq::context_t context (1);
//zmq::socket_t sender(context, ZMQ_PUB);
void *context = zmq_ctx_new();
void *publisher = zmq_socket(context, ZMQ_PUB);
static GstPadProbeReturn sink_sink_pad_buffer_probe (GstPad * pad, GstPadProbeInfo * info, gpointer u_data){
//if(++frame_number % 30 != 0) return GST_PAD_PROBE_OK;
if(++frame_number % frame_interval != 0) return GST_PAD_PROBE_OK;
g_print("get a frame: %d\n", frame_number);
GstBuffer *buf = (GstBuffer *) info->data;
GstMapInfo in_map_info;
NvBufSurface *surface = NULL;
NvDsBatchMeta *batch_meta = NULL;
NvDsMetaList *l_frame = NULL;
NvDsFrameMeta *frame_meta = NULL;
memset (&in_map_info, 0, sizeof (in_map_info));
if (gst_buffer_map (buf, &in_map_info, GST_MAP_READWRITE)){
surface = (NvBufSurface *) in_map_info.data;
NvBufSurfaceMap(surface, -1, -1, NVBUF_MAP_READ_WRITE);
NvBufSurfaceSyncForCpu(surface, -1, -1);
batch_meta = gst_buffer_get_nvds_batch_meta(buf);
for (l_frame = batch_meta->frame_meta_list; l_frame != NULL; l_frame = l_frame->next){
/*frame_meta = (NvDsFrameMeta *)(l_frame->data);
gint frame_width = (gint)surface->surfaceList[frame_meta->batch_id].width;
gint frame_height = (gint)surface->surfaceList[frame_meta->batch_id].height;
void *frame_data = surface->surfaceList[frame_meta->batch_id].mappedAddr.addr[0];
size_t frame_step = surface->surfaceList[frame_meta->batch_id].pitch;
cv::Mat src = cv::Mat(frame_height*1.5, frame_width, CV_8UC1, frame_data, frame_step);
cv::Mat out = cv::Mat(cv::Size(frame_width, frame_height), CV_8UC3);
cv::cvtColor(src, out, CV_YUV2RGB_NV21);
/*char filename[20];
sprintf(filename, "imgs/frame%d.jpg", frame_number);
cv::imwrite(filename, out);*/
/*std::vector<unsigned char> jpeg_buff_vec;
std::vector<int> param = std::vector<int>(2);
param[0] = CV_IMWRITE_JPEG_QUALITY;
//param[1] = 95; // default(95) 0-100
param[1] = jpeg_quality;
cv::imencode(".jpg", out, jpeg_buff_vec, param);
//zmq::message_t message(jpeg_buff_vec.capacity()*sizeof(unsigned char));
//memcpy(message.data(), &jpeg_buff_vec[0], jpeg_buff_vec.capacity()*sizeof(unsigned char));
//sender.send(message);
//s_sendmore(sender, "frame");
//s_send(sender, message);
zmq_send(publisher, &jpeg_buff_vec[0], jpeg_buff_vec.capacity()*sizeof(unsigned char) + 1, 0);
src.release();
out.release();*/
}
NvBufSurfaceUnMap(surface, -1, -1);
gst_buffer_unmap (buf, &in_map_info);
}
return GST_PAD_PROBE_OK;
}
static void cb_new_rtspsrc_pad(GstElement *element, GstPad *pad, gpointer data){
gchar *name;
GstCaps * p_caps;
gchar * description;
GstElement *p_rtph264depay;
name = gst_pad_get_name(pad);
g_print("A new pad %s was created\n", name);
// set up a link for the newly created pad:
// find the rtph264depay element and link the new pad to its sink pad
p_caps = gst_pad_get_pad_template_caps (pad);
description = gst_caps_to_string(p_caps);
//printf("%s\n", p_caps, ", ", description,"\n");
//printf("______\n");
g_free(description);
p_rtph264depay = GST_ELEMENT(data);
// try to link the pads
if (!gst_element_link_pads(element, name, p_rtph264depay, "sink"))
{
g_printerr("Failed to link rtspsrc pad %s to rtph264depay sink pad\n", name);
}
g_free(name);
}
static gboolean bus_call (GstBus * bus, GstMessage * msg, gpointer data){
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR:{
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_printerr ("ERROR from element %s: %s\n",
GST_OBJECT_NAME (msg->src), error->message);
if (debug)
g_printerr ("Error details: %s\n", debug);
g_free (debug);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
int main (int argc, char *argv[]){
GMainLoop *loop = NULL;
GstElement *pipeline = NULL, *source = NULL, *h264parser = NULL, *nvvidconv = NULL, *caps_filter = NULL,
*decoder = NULL, *streammux = NULL, *sink = NULL, *rtppay = NULL;
#ifdef PLATFORM_TEGRA
GstElement *transform = NULL;
#endif
GstBus *bus = NULL;
guint bus_watch_id;
GstPad *sink_sink_pad = NULL;
/* Check input arguments */
if (argc != 5) {
g_printerr ("Usage: %s <rtsp_addr> <zmq_addr> <frame_rate> <jpeg_quality> \n", argv[0]);
return -1;
}else{
frame_rate = atoi(argv[3]);
frame_interval = 25 / frame_rate;
jpeg_quality = atoi(argv[4]);
g_print("frame_rate: %d, frame_interval: %d, jpeg_quality: %d\n", frame_rate, frame_interval, jpeg_quality);
}
/* Standard GStreamer initialization */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* Create gstreamer elements */
/* Create Pipeline element that will form a connection of other elements */
pipeline = gst_pipeline_new ("single-rtsp-pipeline");
/* Source element for reading from the RTSP stream */
source = gst_element_factory_make ("rtspsrc", "rtsp-source");
rtppay = gst_element_factory_make ( "rtph264depay", "depayl");
/* The RTSP stream carries H.264, so we depayload it with rtph264depay
* and parse it with h264parse */
h264parser = gst_element_factory_make ("h264parse", "h264-parser");
/* Use nvv4l2decoder for hardware accelerated decode on the GPU */
decoder = gst_element_factory_make ("nvv4l2decoder", "nvv4l2-decoder");
/* Create nvstreammux instance to form batches from one or more sources. */
streammux = gst_element_factory_make ("nvstreammux", "stream-muxer");
nvvidconv = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter");
caps_filter = gst_element_factory_make ("capsfilter", NULL);
if (!caps_filter) {
g_printerr ("Failed to create capsfilter element. Exiting.\n");
return -1;
}
GstCaps *caps = gst_caps_new_simple ("video/x-raw", "format", G_TYPE_STRING, "RGBA", NULL);
GstCapsFeatures *feature = gst_caps_features_new (MEMORY_FEATURES, NULL);
gst_caps_set_features (caps, 0, feature);
g_object_set (G_OBJECT (caps_filter), "caps", caps, NULL);
if (!pipeline || !streammux) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
/* No on-screen rendering here; the pipeline terminates in a fakesink
* (nvegltransform is only added on Tegra) */
#ifdef PLATFORM_TEGRA
transform = gst_element_factory_make ("nvegltransform", "nvegl-transform");
#endif
sink = gst_element_factory_make ("fakesink", "fake-sink");
if (!source || !h264parser || !decoder || !sink || !rtppay) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
#ifdef PLATFORM_TEGRA
if(!transform) {
g_printerr ("One tegra element could not be created. Exiting.\n");
return -1;
}
#endif
/* we set the RTSP location on the source element */
g_object_set (G_OBJECT (source), "location", argv[1], "protocols", GST_RTSP_LOWER_TRANS_TCP, NULL);
g_object_set (G_OBJECT (streammux), "width", MUXER_OUTPUT_WIDTH, "height",
MUXER_OUTPUT_HEIGHT, "batch-size", 1,
"batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/* Set up the pipeline */
/* we add all elements into the pipeline */
#ifdef PLATFORM_TEGRA
gst_bin_add_many (GST_BIN (pipeline),
source, rtppay, h264parser, decoder, streammux, nvvidconv, caps_filter, transform, sink, NULL);
#else
gst_bin_add_many (GST_BIN (pipeline),
source, rtppay, h264parser, decoder, streammux, nvvidconv, caps_filter, sink, NULL);
#endif
g_signal_connect(source, "pad-added", G_CALLBACK(cb_new_rtspsrc_pad), rtppay);
GstPad *sinkpad, *srcpad;
gchar pad_name_sink[16] = "sink_0";
gchar pad_name_src[16] = "src";
sinkpad = gst_element_get_request_pad (streammux, pad_name_sink);
if (!sinkpad) {
g_printerr ("Streammux request sink pad failed. Exiting.\n");
return -1;
}
srcpad = gst_element_get_static_pad (decoder, pad_name_src);
if (!srcpad) {
g_printerr ("Decoder request src pad failed. Exiting.\n");
return -1;
}
if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
g_printerr ("Failed to link decoder to stream muxer. Exiting.\n");
return -1;
}
gst_object_unref (sinkpad);
gst_object_unref (srcpad);
/* we link the elements together:
* rtspsrc -> rtph264depay -> h264parse -> nvv4l2decoder ->
* nvstreammux -> nvvideoconvert -> capsfilter -> fakesink */
if (!gst_element_link_many (rtppay, h264parser, decoder, NULL)) {
g_printerr ("Elements could not be linked: 1. Exiting.\n");
return -1;
}
#ifdef PLATFORM_TEGRA
if (!gst_element_link_many (streammux, nvvidconv, caps_filter, transform, sink, NULL)) {
g_printerr ("Elements could not be linked: 2. Exiting.\n");
return -1;
}
#else
if (!gst_element_link_many (streammux, nvvidconv, caps_filter, sink, NULL)) {
g_printerr ("Elements could not be linked: 2. Exiting.\n");
return -1;
}
#endif
/* Add a probe on the sink pad of the fakesink so that every decoded,
* converted frame (and its attached metadata) can be inspected there. */
sink_sink_pad = gst_element_get_static_pad (sink, "sink");
if (!sink_sink_pad)
g_print ("Unable to get sink pad\n");
else
gst_pad_add_probe (sink_sink_pad, GST_PAD_PROBE_TYPE_BUFFER,
sink_sink_pad_buffer_probe, NULL, NULL);
/* Set the pipeline to "playing" state */
g_print ("Now playing: %s\n", argv[1]);
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* ZMQ: bind the publisher socket */
//sender.bind("tcp://*:3000");
//zmq_bind(publisher, "tcp://*:3000");
zmq_bind(publisher, argv[2]);
/* Wait till pipeline encounters an error or EOS */
g_print ("Running...\n");
g_main_loop_run (loop);
/* ZMQ */
zmq_close (publisher);
zmq_ctx_destroy (context);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref (loop);
return 0;
}
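For context, since the capsfilter asks nvvideoconvert for RGBA in NVMM memory, my understanding is that the mapped frame should be wrapped as a 4-channel Mat rather than going through the NV21 path in the commented-out block. This is roughly what I intend to do once the map call succeeds (untested, since NvBufSurfaceMap is exactly what fails; the field names are the same ones already used above from nvbufsurface.h):
/* Sketch only (untested): wrap the mapped RGBA surface of one frame in a cv::Mat */
NvBufSurfaceParams *params = &surface->surfaceList[frame_meta->batch_id];
cv::Mat rgba (params->height, params->width, CV_8UC4,
params->mappedAddr.addr[0], params->pitch);
cv::Mat bgr;
cv::cvtColor (rgba, bgr, cv::COLOR_RGBA2BGR);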