Jetson_multimedia_api NvBufferMem Transform to opencv mat

I use a V4L2 input signal and want to use OpenCV in jetson_multimedia_api/samples/12_camera_v4l2_cuda.
How can I convert a YUV420 NvBuffer to an OpenCV cv::Mat?
This is my code, but it does not work.

/**
 * Main capture loop: dequeues V4L2 camera buffers, converts them to a
 * YUV420 NvBuffer via VIC (NvBufferTransform), optionally decodes MJPEG,
 * then previews the frame through OpenCV (or the EGL renderer).
 *
 * @param ctx  camera context (fds, buffers, renderer, formats)
 * @return true on clean shutdown; ERROR_RETURN aborts on failure.
 */
static bool
start_capture(context_t * ctx)
{
    struct sigaction sig_action;
    struct pollfd fds[1];
    NvBufferTransformParams transParams;

    /* Register a shutdown handler to ensure
       a clean shutdown if user types <ctrl+c> */
    sig_action.sa_handler = signal_handle;
    sigemptyset(&sig_action.sa_mask);
    sig_action.sa_flags = 0;
    sigaction(SIGINT, &sig_action, NULL);

    if (ctx->cam_pixfmt == V4L2_PIX_FMT_MJPEG)
        ctx->jpegdec = NvJPEGDecoder::createJPEGDecoder("jpegdec");

    /* Init the NvBufferTransformParams */
    memset(&transParams, 0, sizeof(transParams));
    transParams.transform_flag = NVBUFFER_TRANSFORM_FILTER | NVBUFFER_TRANSFORM_FLIP;
    transParams.transform_filter = NvBufferTransform_Filter_Nicest;     // NvBufferTransform_Filter_Smart
    transParams.transform_flip = NvBufferTransform_None;                // NvBufferTransform_Rotate90

    /* Enable render profiling information */
    ctx->renderer->enableProfiling();

    fds[0].fd = ctx->cam_fd;
    fds[0].events = POLLIN;
    /* Wait for camera event with timeout = 5000 ms */
    while (poll(fds, 1, 5000) > 0 && !quit)
    {
        if (fds[0].revents & POLLIN) {
            struct v4l2_buffer v4l2_buf;

            /* Dequeue a camera buff */
            memset(&v4l2_buf, 0, sizeof(v4l2_buf));
            v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            if (ctx->capture_dmabuf)
                v4l2_buf.memory = V4L2_MEMORY_DMABUF;
            else
                v4l2_buf.memory = V4L2_MEMORY_MMAP;
            if (ioctl(ctx->cam_fd, VIDIOC_DQBUF, &v4l2_buf) < 0)
                ERROR_RETURN("Failed to dequeue camera buff: %s (%d)",
                        strerror(errno), errno);

            ctx->frame++;

            /* Save the n-th frame to file */
            if (ctx->frame == ctx->save_n_frame)
                save_frame_to_file(ctx, &v4l2_buf);

            if (ctx->cam_pixfmt == V4L2_PIX_FMT_MJPEG) {
                int fd = 0;
                uint32_t width, height, pixfmt;
                unsigned int i = 0;
                unsigned int eos_search_size = MJPEG_EOS_SEARCH_SIZE;
                unsigned int bytesused = v4l2_buf.bytesused;
                uint8_t *p;

                /* v4l2_buf.bytesused may have padding bytes for alignment.
                   Scan backwards for the JPEG EOI marker (0xFF 0xD9) to
                   recover the exact compressed size. */
                if (eos_search_size > bytesused)
                    eos_search_size = bytesused;
                for (i = 0; i < eos_search_size; i++) {
                    p = (uint8_t *)(ctx->g_buff[v4l2_buf.index].start + bytesused);
                    if ((*(p-2) == 0xff) && (*(p-1) == 0xd9)) {
                        break;
                    }
                    bytesused--;
                }

                /* Decoding MJPEG frame */
                if (ctx->jpegdec->decodeToFd(fd, ctx->g_buff[v4l2_buf.index].start,
                    bytesused, pixfmt, width, height) < 0)
                    ERROR_RETURN("Cannot decode MJPEG");

                /* Convert the decoded buffer to YUV420P */
                if (-1 == NvBufferTransform(fd, ctx->render_dmabuf_fd,
                        &transParams))
                    ERROR_RETURN("Failed to convert the buffer");
            } else {
                if (ctx->capture_dmabuf) {
                    /* Cache sync for VIC operation since the data is from CPU */
                    NvBufferMemSyncForDevice(ctx->g_buff[v4l2_buf.index].dmabuff_fd, 0,
                            (void**)&ctx->g_buff[v4l2_buf.index].start);
                } else {
                    /* Copies raw buffer plane contents to an NvBuffer plane */
                    Raw2NvBuffer(ctx->g_buff[v4l2_buf.index].start, 0,
                             ctx->cam_w, ctx->cam_h, ctx->g_buff[v4l2_buf.index].dmabuff_fd);
                }

                /*  Convert the camera buffer from YUV422 to YUV420P */
                if (-1 == NvBufferTransform(ctx->g_buff[v4l2_buf.index].dmabuff_fd, ctx->render_dmabuf_fd,
                            &transParams))
                    ERROR_RETURN("Failed to convert the buffer");

                if (ctx->cam_pixfmt == V4L2_PIX_FMT_GREY) {
                    if (!nvbuff_do_clearchroma(ctx->render_dmabuf_fd))
                        ERROR_RETURN("Failed to clear chroma");
                }
            }
            cuda_postprocess(ctx, ctx->render_dmabuf_fd);

            /* Preview */
#if 1
            /* Map the transformed buffer for CPU access and wrap it in a
               cv::Mat.
               NOTE(review): this assumes plane 0 of render_dmabuf_fd is a
               contiguous, un-pitched I420 surface (Y then U then V).
               NvBuffer planes are normally pitch-aligned and stored as
               separate planes — confirm with NvBufferGetParams, or (as
               recommended in the forum patch) transform to an RGBA
               NvBuffer and map that as CV_8UC4 instead. */
            cv::Mat display_img;
            void *pdata = NULL;
            NvBufferMemMap(ctx->render_dmabuf_fd, 0, NvBufferMem_Read, &pdata);
            NvBufferMemSyncForCpu(ctx->render_dmabuf_fd, 0, &pdata);
            cv::Mat imgbuf = cv::Mat(ctx->cam_h + (ctx->cam_h >> 1),
                                     ctx->cam_w,
                                     CV_8UC1, pdata);
            /* BUG FIX: the buffer was converted to YUV420 (I420) above.
               cv::COLOR_YCrCb2BGR expects a packed 3-channel input and
               fails on this single-channel (h*3/2 x w) Mat; the correct
               conversion code for planar I420 is COLOR_YUV2BGR_I420. */
            cv::cvtColor(imgbuf, display_img, cv::COLOR_YUV2BGR_I420);
            NvBufferMemUnMap(ctx->render_dmabuf_fd, 0, &pdata);
            cv::imshow("img", display_img);
            /* BUG FIX: imshow() only queues the frame; waitKey() pumps the
               HighGUI event loop so the window actually refreshes. */
            cv::waitKey(1);
#else
            ctx->renderer->render(ctx->render_dmabuf_fd);
#endif

            /* Enqueue camera buffer back to driver */
            if (ioctl(ctx->cam_fd, VIDIOC_QBUF, &v4l2_buf))
                ERROR_RETURN("Failed to queue camera buffers: %s (%d)",
                        strerror(errno), errno);
        }
    }

    /* Print profiling information when streaming stops */
    ctx->renderer->printProfilingStats();

    if (ctx->cam_pixfmt == V4L2_PIX_FMT_MJPEG)
        delete ctx->jpegdec;

    return true;
}

Hi,
Please convert the buffer to RGBA and map to cv::Mat. A patch for reference:
NVBuffer (FD) to opencv Mat - #6 by DaneLLL

./camera_v4l2_cuda -d /dev/video2 -s 1920x1080 -f YUYV -n 0
It reports this message:
Gtk-Message: 17:57:46.493: Failed to load module “canberra-gtk-module”

This is my whole code

camera_v4l2_cuda.cpp (26.1 KB)

Hi,
The code looks OK. But you may need to add the line:

cv::imshow("img", picBGR);
+int key = cv::waitKey(1);
1 Like

Thanks. It works.