Hi,
Please check whether you can apply this patch and run it successfully:
diff --git a/multimedia_api/ll_samples/samples/12_camera_v4l2_cuda/camera_v4l2_cuda.cpp b/multimedia_api/ll_samples/samples/12_camera_v4l2_cuda/camera_v4l2_cuda.cpp
index 0ec23a4..c63acca 100644
--- a/multimedia_api/ll_samples/samples/12_camera_v4l2_cuda/camera_v4l2_cuda.cpp
+++ b/multimedia_api/ll_samples/samples/12_camera_v4l2_cuda/camera_v4l2_cuda.cpp
@@ -46,6 +46,8 @@
#define MJPEG_EOS_SEARCH_SIZE 4096
+#define OUTPLANE_BUFNUM 6 /* number of encoder output-plane DMABUF buffers */
+static int outplane_fd[OUTPLANE_BUFNUM]; /* dmabuf fds created in start_capture(); 0 = slot never allocated */
static bool quit = false;
using namespace std;
@@ -330,6 +332,48 @@ display_initialize(context_t * ctx)
return true;
}
+
+/* Create the H.264 hardware encoder on ctx->enc and configure both V4L2 planes. */
+static bool
+encoder_initialize(context_t * ctx)
+{
+ ctx->enc = NvVideoEncoder::createVideoEncoder("enc0");
+ if (ctx->enc == NULL)
+ ERROR_RETURN("Failed to create video encoder");
+
+ if (ctx->enc->setCapturePlaneFormat(V4L2_PIX_FMT_H264, ctx->cam_w,
+ ctx->cam_h, 2 * 1024 * 1024) < 0) /* 2 MB per compressed-bitstream buffer */
+ ERROR_RETURN("Failed to set up ENC capture plane format");
+
+ if (ctx->enc->setOutputPlaneFormat(V4L2_PIX_FMT_YUV420M, ctx->cam_w,
+ ctx->cam_h) < 0)
+ ERROR_RETURN("Failed to set up ENC output plane format");
+
+ if (ctx->enc->setBitrate(4<<20) < 0) /* 4 Mbit/s */
+ ERROR_RETURN("Failed to set up ENC bitrate");
+
+ if (ctx->enc->setProfile(V4L2_MPEG_VIDEO_H264_PROFILE_HIGH) < 0)
+ ERROR_RETURN("Failed to set up ENC profile");
+
+ if (ctx->enc->setLevel(V4L2_MPEG_VIDEO_H264_LEVEL_5_0) < 0)
+ ERROR_RETURN("Failed to set up ENC level");
+
+ if (ctx->enc->setRateControlMode(V4L2_MPEG_VIDEO_BITRATE_MODE_VBR) < 0)
+ ERROR_RETURN("Failed to set up ENC rate control mode");
+
+ if (ctx->enc->output_plane.reqbufs(V4L2_MEMORY_DMABUF, OUTPLANE_BUFNUM)) /* fed via NvBufferTransform dmabufs */
+ ERROR_RETURN("Failed to set up ENC output plane");
+
+ if (ctx->enc->capture_plane.setupPlane(V4L2_MEMORY_MMAP, 6, true, false) < 0)
+ ERROR_RETURN("Failed to set up ENC capture plane");
+
+ if (ctx->enc->subscribeEvent(V4L2_EVENT_EOS, 0, 0) < 0) /* was unchecked; EOS drives clean shutdown */
+ ERROR_RETURN("Failed to subscribe to ENC EOS event");
+ return true;
+}
+
+
+
static bool
init_components(context_t * ctx)
{
@@ -339,6 +383,9 @@ init_components(context_t * ctx)
if (!display_initialize(ctx))
ERROR_RETURN("Failed to initialize display");
+ if (!encoder_initialize(ctx))
+ ERROR_RETURN("Failed to initialize encoder");
+
INFO("Initialize v4l2 components successfully");
return true;
}
@@ -539,12 +586,57 @@ start_stream(context_t * ctx)
ERROR_RETURN("Failed to start streaming: %s (%d)",
strerror(errno), errno);
+ // Start ENC output plane
+ if (ctx->enc->output_plane.setStreamStatus(true) < 0)
+ ERROR_RETURN("Failed to start ENC output plane streaming");
+
+ // Start ENC capture plane
+ if (ctx->enc->capture_plane.setStreamStatus(true) < 0)
+ ERROR_RETURN("Failed to start ENC capture plane streaming");
+
usleep(200);
INFO("Camera video streaming on ...");
return true;
}
+static bool
+enc_capture_dqbuf_thread_callback(struct v4l2_buffer *v4l2_buf,
+ NvBuffer * buffer, NvBuffer * shared_buffer,
+ void *arg)
+{
+ context_t *ctx = (context_t *) arg; /* runs on the encoder capture-plane DQ thread */
+ if (v4l2_buf == NULL)
+ {
+ printf("Got nullptr \n");
+ return false; /* returning false stops the DQ thread */
+ }
+
+ if(v4l2_buf->flags & V4L2_BUF_FLAG_LAST) /* last buffer after V4L2_ENC_CMD_STOP */
+ {
+ struct v4l2_event ev;
+ int ret = 0;
+ memset(&ev,0,sizeof(struct v4l2_event));
+ ret = ctx->enc->dqEvent(ev,1000);
+ if (ret < 0)
+ printf("Error in dqEvent \n"); /* was "...;;" — stray second semicolon removed */
+ if(ev.type == V4L2_EVENT_EOS)
+ {
+ printf("Got EOS, exiting...\n");
+ return false;
+ }
+ }
+ /* Report the encoded frame size, then return the buffer to the encoder. */
+ printf("encoded frame size %u \n", buffer->planes[0].bytesused); /* %u: bytesused is unsigned 32-bit */
+ if (ctx->enc->capture_plane.qBuffer(*v4l2_buf, NULL) < 0)
+ {
+ ctx->enc->abort();
+ ERROR_RETURN("Failed to queue buffer on ENC capture plane");
+ }
+
+ return true;
+}
+
static void
signal_handle(int signum)
{
@@ -592,6 +684,25 @@ start_capture(context_t * ctx)
if (ctx->cam_pixfmt == V4L2_PIX_FMT_MJPEG)
ctx->jpegdec = NvJPEGDecoder::createJPEGDecoder("jpegdec");
+ ctx->enc->capture_plane.setDQThreadCallback(enc_capture_dqbuf_thread_callback);
+ ctx->enc->capture_plane.startDQThread(ctx); /* ctx is forwarded as the callback's arg */
+ // Enqueue all the empty capture plane buffers
+ for (uint32_t i = 0; i < ctx->enc->capture_plane.getNumBuffers(); i++)
+ {
+ struct v4l2_buffer v4l2_buf;
+ struct v4l2_plane planes[MAX_PLANES];
+
+ memset(&v4l2_buf, 0, sizeof(v4l2_buf));
+ memset(planes, 0, MAX_PLANES * sizeof(struct v4l2_plane));
+
+ v4l2_buf.index = i;
+ v4l2_buf.m.planes = planes;
+
+ if (ctx->enc->capture_plane.qBuffer(v4l2_buf, NULL) < 0) /* encoder fills these with bitstream */
+ ERROR_RETURN("Failed to queue buffer on ENC capture plane");
+ }
+
+
/* Init the NvBufferTransformParams */
memset(&transParams, 0, sizeof(transParams));
transParams.transform_flag = NVBUFFER_TRANSFORM_FILTER;
@@ -600,6 +711,15 @@ start_capture(context_t * ctx)
/* Enable render profiling information */
ctx->renderer->enableProfiling();
+ int bufferIndex = 0;
+ NvBufferCreateParams input_params = {0};
+ input_params.payloadType = NvBufferPayload_SurfArray;
+ input_params.width = ctx->cam_w;
+ input_params.height = ctx->cam_h;
+ input_params.layout = NvBufferLayout_Pitch;
+ input_params.colorFormat = get_nvbuff_color_fmt(V4L2_PIX_FMT_YUV420M);
+ input_params.nvbuf_tag = NvBufferTag_VIDEO_ENC;
+
fds[0].fd = ctx->cam_fd;
fds[0].events = POLLIN;
/* Wait for camera event with timeout = 5000 ms */
@@ -625,6 +745,59 @@ start_capture(context_t * ctx)
if (ctx->frame == ctx->save_n_frame)
save_frame_to_file(ctx, &v4l2_buf);
+
+#if 1
+ {
+ // Cache sync for VIC operation
+ NvBufferMemSyncForDevice(ctx->g_buff[v4l2_buf.index].dmabuff_fd, 0,
+ (void**)&ctx->g_buff[v4l2_buf.index].start);
+
+ NvBuffer *buffer;
+ int fd = -1;
+
+ struct v4l2_buffer enc_buf;
+ struct v4l2_plane planes[MAX_PLANES];
+
+ memset(&enc_buf, 0, sizeof(enc_buf));
+ memset(planes, 0, MAX_PLANES * sizeof(struct v4l2_plane));
+
+ enc_buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+ enc_buf.memory = V4L2_MEMORY_DMABUF;
+
+ enc_buf.m.planes = planes;
+
+ // Check if we need dqBuffer first
+ if (bufferIndex < OUTPLANE_BUFNUM &&
+ ctx->enc->output_plane.getNumQueuedBuffers() <
+ ctx->enc->output_plane.getNumBuffers())
+ {
+ // The queue is not full, no need to dqBuffer
+ // Prepare buffer index for the following qBuffer
+ enc_buf.index = bufferIndex;
+ // Create Render buffer
+ if (-1 == NvBufferCreateEx(&fd, &input_params))
+ ERROR_RETURN("Failed to create NvBuffer");
+ outplane_fd[bufferIndex] = fd; /* remembered so cleanup can NvBufferDestroy() it */
+ bufferIndex++; /* was "bufferIndex++;;" — stray second semicolon removed */
+ }
+ else
+ {
+ // Output plane full or max outstanding number reached; recycle a queued buffer
+ if (ctx->enc->output_plane.dqBuffer(enc_buf, &buffer, NULL, 10) < 0) ERROR_RETURN("Failed to dequeue ENC output plane buffer");
+ fd = enc_buf.m.planes[0].m.fd;
+ }
+
+ if (-1 == NvBufferTransform(ctx->g_buff[v4l2_buf.index].dmabuff_fd, fd,
+ &transParams))
+ ERROR_RETURN("Failed to convert the buffer");
+ // Push the frame into V4L2.
+ enc_buf.m.planes[0].m.fd = fd;
+ enc_buf.m.planes[0].bytesused = 1; // byteused must be non-zero
+ if (ctx->enc->output_plane.qBuffer(enc_buf, NULL) < 0) ERROR_RETURN("Failed to queue ENC output plane buffer");
+
+ ctx->renderer->render(fd);
+ }
+#else
if (ctx->cam_pixfmt == V4L2_PIX_FMT_MJPEG) {
int fd = 0;
uint32_t width, height, pixfmt;
@@ -679,6 +852,7 @@ start_capture(context_t * ctx)
/* Preview */
ctx->renderer->render(ctx->render_dmabuf_fd);
+#endif
/* Enqueue camera buffer back to driver */
if (ioctl(ctx->cam_fd, VIDIOC_QBUF, &v4l2_buf))
@@ -707,6 +881,9 @@ stop_stream(context_t * ctx)
ERROR_RETURN("Failed to stop streaming: %s (%d)",
strerror(errno), errno);
+ ctx->enc->setEncoderCommand(V4L2_ENC_CMD_STOP, 1); /* encoder emits a V4L2_BUF_FLAG_LAST buffer and an EOS event */
+ ctx->enc->capture_plane.waitForDQThread(2000); /* wait up to 2 s for the DQ thread to drain and exit */
+
INFO("Camera video streaming off ...");
return true;
}
@@ -715,7 +892,7 @@ int
main(int argc, char *argv[])
{
context_t ctx;
- int error = 0;
+ int error = 0, i;
set_defaults(&ctx);
@@ -767,6 +944,12 @@ cleanup:
NvBufferDestroy(ctx.render_dmabuf_fd);
+ for (i = 0; i < OUTPLANE_BUFNUM ; i++)
+ if (outplane_fd[i] > 0) NvBufferDestroy(outplane_fd[i]); /* static array is zero-init; skip slots never allocated */
+
+ if (ctx.enc != NULL)
+ delete ctx.enc;
+
if (error)
printf("App run failed\n");
else
diff --git a/multimedia_api/ll_samples/samples/12_camera_v4l2_cuda/camera_v4l2_cuda.h b/multimedia_api/ll_samples/samples/12_camera_v4l2_cuda/camera_v4l2_cuda.h
index 4155cd2..d7a5047 100644
--- a/multimedia_api/ll_samples/samples/12_camera_v4l2_cuda/camera_v4l2_cuda.h
+++ b/multimedia_api/ll_samples/samples/12_camera_v4l2_cuda/camera_v4l2_cuda.h
@@ -28,6 +28,7 @@
#include <queue>
#include "NvJpegDecoder.h"
+#include "NvVideoEncoder.h"
#define V4L2_BUFFERS_NUM 4
@@ -89,6 +90,7 @@ typedef struct
/* MJPEG decoding */
NvJPEGDecoder *jpegdec;
+ NvVideoEncoder *enc; /* H.264 encoder; created in encoder_initialize(), deleted in main() cleanup */
/* Verbose option */
bool enable_verbose;
diff --git a/multimedia_api/ll_samples/samples/unittest_samples/camera_unit_sample/camera_unit_sample.cpp b/multimedia_api/ll_samples/samples/unittest_samples/camera_unit_sample/camera_unit_sample.cpp
index c45c524..1a95091 100644
--- a/multimedia_api/ll_samples/samples/unittest_samples/camera_unit_sample/camera_unit_sample.cpp
+++ b/multimedia_api/ll_samples/samples/unittest_samples/camera_unit_sample/camera_unit_sample.cpp
@@ -991,6 +991,7 @@ int capture_proc (context_t &ctx, int argc, char const *argv[])
struct sigaction sig_action;
char camera_device[16];
char renderer[16];
+ int fd;
/* Register a shutdown handler to ensure
** a clean exit if <ctrl+c> is detected.
@@ -1068,6 +1069,11 @@ int capture_proc (context_t &ctx, int argc, char const *argv[])
ctx.fd = v4l2_open(camera_device, flags | O_RDWR);
CHECK_ERROR(ctx.fd == -1, "Error in opening camera device", cleanup);
+ fd = v4l2_open("/dev/video1", flags | O_RDWR); /* probe a second camera node, then release it */
+ CHECK_ERROR(fd == -1, "Error in opening second camera device /dev/video1", cleanup);
+ cout << "open the second camera!!!" << endl;
+ v4l2_close(fd);
+
/* The Querycap Ioctl call queries the video capabilities
** of the opened node and checks for
** V4L2_CAP_VIDEO_CAPTURE_MPLANE capability on the device.
We verified this patch on r32.5.1/r32.6.1 and it runs well.