I capture 4K video on MIPI ports 0-3 in RGB format and send it to the DRM display.
I find that the colors come out wrong (the color channels appear swapped or shifted).
Here is my test code
/*
 * Open /dev/video0 and configure its capture format to width x height RGB32.
 *
 * Uses file-scope globals: cam_fd (set here), width, height.
 * Returns true on success; on failure ERROR_RETURN logs and returns false.
 */
bool init_camera()
{
struct v4l2_format fmt;
/* Open camera device */
cam_fd = open("/dev/video0", O_RDWR);
if (cam_fd == -1)
ERROR_RETURN("Failed to open camera device: %s (%d)", strerror(errno), errno);
/* Set camera output format */
memset(&fmt, 0, sizeof(fmt));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = width;
fmt.fmt.pix.height = height;
/* NOTE(review): the byte order of V4L2_PIX_FMT_RGB32 must match the
 * NvBufferColorFormat_ARGB32 surface created in start_camera(); a channel
 * order mismatch here is the classic cause of "colors look wrong".
 * Confirm the driver's actual layout (v4l2-ctl --list-formats-ext). */
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB32;
fmt.fmt.pix.field = V4L2_FIELD_ANY;
if (ioctl(cam_fd, VIDIOC_S_FMT, &fmt) < 0)
ERROR_RETURN("Failed to set camera output format: %s (%d)", strerror(errno), errno);
/* Get the real format in case the desired is not supported */
memset(&fmt, 0, sizeof fmt);
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(cam_fd, VIDIOC_G_FMT, &fmt) < 0)
ERROR_RETURN("Failed to get camera output format: %s (%d)", strerror(errno), errno);
/* Fix: the original fetched the real format but never checked it, silently
 * continuing even if the driver substituted a different one. */
if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_RGB32 ||
    fmt.fmt.pix.width != (unsigned int)width ||
    fmt.fmt.pix.height != (unsigned int)height)
ERROR_RETURN("Driver replaced requested format: %ux%u fourcc 0x%x",
             fmt.fmt.pix.width, fmt.fmt.pix.height, fmt.fmt.pix.pixelformat);
struct v4l2_streamparm streamparm;
memset(&streamparm, 0x00, sizeof(struct v4l2_streamparm));
streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
/* Fix: don't silently ignore the ioctl result (kept non-fatal: some
 * drivers do not implement VIDIOC_G_PARM). */
if (ioctl(cam_fd, VIDIOC_G_PARM, &streamparm) < 0)
printf("VIDIOC_G_PARM not supported: %s (%d)\n", strerror(errno), errno);
return true;
}
/*
 * Request @buffers_num DMABUF-backed capture buffers from the camera driver
 * and enqueue one external DMA fd (cam_capture_dma_fd[]) into each slot.
 *
 * Uses file-scope globals: cam_fd, cam_capture_dma_fd.
 * Returns true on success; on failure ERROR_RETURN logs and returns false.
 */
static bool request_camera_buff(int buffers_num)
{
struct v4l2_requestbuffers rb;
memset(&rb, 0, sizeof(rb));
rb.count = buffers_num;
rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
rb.memory = V4L2_MEMORY_DMABUF;
if (ioctl(cam_fd, VIDIOC_REQBUFS, &rb) < 0)
ERROR_RETURN("Failed to request v4l2 buffers: %s (%d)", strerror(errno), errno);
/* Fix: rb.count is unsigned — cast to avoid a signed/unsigned comparison. */
if (rb.count != (unsigned int)buffers_num)
ERROR_RETURN("V4l2 buffer number is not as desired");
/* Fix: loop index matches the signed parameter type (was unsigned vs int). */
for (int index = 0; index < buffers_num; index++)
{
struct v4l2_buffer buf;
memset(&buf, 0, sizeof buf);
buf.index = index;
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_DMABUF;
if (ioctl(cam_fd, VIDIOC_QUERYBUF, &buf) < 0)
ERROR_RETURN("Failed to query buff: %s (%d)", strerror(errno), errno);
/* TODO: add support for multi-planar formats.
   Enqueue an empty v4l2 buff into the camera capture plane; with
   V4L2_MEMORY_DMABUF the driver imports our externally allocated fd. */
buf.m.fd = cam_capture_dma_fd[index];
if (ioctl(cam_fd, VIDIOC_QBUF, &buf) < 0)
ERROR_RETURN("Failed to enqueue buffers: %s (%d)\n", strerror(errno), errno);
}
return true;
}
/*
 * Turn on V4L2 streaming for the camera capture queue.
 * Returns true on success; on failure ERROR_RETURN logs and returns false.
 */
static bool start_stream()
{
/* VIDIOC_STREAMON takes a pointer to the buffer type, not a struct. */
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(cam_fd, VIDIOC_STREAMON, &type) < 0)
ERROR_RETURN("Failed to start streaming: %s (%d)", strerror(errno), errno);
/* Short settle delay (200 microseconds), as in the original code. */
usleep(200);
printf("Camera video streaming on ...\n");
return true;
}
bool start_camera()
{
NvBufferCreateParams input_params = {0};
input_params.payloadType = NvBufferPayload_SurfArray;
input_params.width = width;
input_params.height = height;
input_params.layout = NvBufferLayout_Pitch;
input_params.colorFormat = NvBufferColorFormat_ARGB32;
input_params.nvbuf_tag = NvBufferTag_CAMERA;
/* Create buffer and provide it with camera */
for (unsigned int index = 0; index < CAMERA_BUFFERS_NUM; index++)
{
int fd;
NvBufferParams params = {0};
if (-1 == NvBufferCreateEx(&fd, &input_params))
ERROR_RETURN("Failed to create NvBuffer");
cam_capture_dma_fd[index] = fd;
if (-1 == NvBufferGetParams(fd, ¶ms))
ERROR_RETURN("Failed to get NvBuffer parameters");
}
if (!request_camera_buff(CAMERA_BUFFERS_NUM))
ERROR_RETURN("Failed to set up camera buff");
start_stream();
return true;
}
bool init_drm()
{
int ret = 0;
int error = 0;
uint32_t i;
bool eos = false;
struct drm_tegra_hdr_metadata_smpte_2086 metadata;
drm_renderer = NvDrmRenderer::createDrmRenderer("renderer0",
width, height, 0, 0, 0, 0, metadata, false);
drm_renderer->setFPS(FRAMERATE);
NvBufferCreateParams input_params = {0};
input_params.payloadType = NvBufferPayload_SurfArray;
input_params.width = width;
input_params.height = height;
input_params.layout = NvBufferLayout_Pitch;
input_params.colorFormat = NvBufferColorFormat_NV12;
input_params.nvbuf_tag = NvBufferTag_VIDEO_DEC;
for (size_t i = 0; i < DRM_BUF_SIZE; i++)
{
NvBufferCreateEx(&drm_buf_fd[i], &input_params);
}
return true;
}
int main(int argc, char const *argv[])
{
int ret = -1;
if (!init_drm())
{
LOG( "Could not init decoder");
exit(-1);
}
if (!init_camera())
{
LOG( "Could not init camera");
exit(-1);
}
if (!start_camera())
{
LOG( "Could not start camera");
exit(-1);
}
NvBufferTransformParams transParams = {0};
transParams.transform_flag = NVBUFFER_TRANSFORM_FILTER | NVBUFFER_TRANSFORM_FLIP;
transParams.transform_filter = NvBufferTransform_Filter_Smart;
static int capture_cnt = 0;
/* Decoder capture loop */
while (1)
{
fd_set fds;
struct timeval tv;
FD_ZERO (&fds);
FD_SET(cam_fd, &fds);
tv.tv_sec = 1;
tv.tv_usec = 0;
ret = select(cam_fd + 1, &fds, nullptr, nullptr, &tv);
if (ret < 0)
{
LOG("%m");
break;
}
else if (ret==0)
{
LOG("time out");
continue;
}
{
struct v4l2_buffer v4l2_buf_cam;
/* Dequeue a camera buff */
memset(&v4l2_buf_cam, 0, sizeof(v4l2_buf_cam));
v4l2_buf_cam.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
v4l2_buf_cam.memory = V4L2_MEMORY_DMABUF;
if (ioctl(cam_fd, VIDIOC_DQBUF, &v4l2_buf_cam) < 0)
ERROR_RETURN("Failed to dequeue camera buff: %s (%d)", strerror(errno), errno);
struct v4l2_buffer v4l2_buf;
struct v4l2_plane planes[MAX_PLANES];
NvBufferRect src_rect, dest_rect;
src_rect.top = 0;
src_rect.left = 0;
src_rect.width = 3840;
src_rect.height = 2160;
dest_rect.top = 0;
dest_rect.left = 0;
dest_rect.width = 3840;
dest_rect.height = 2160;
NvBufferTransformParams transform_params;
memset(&transform_params, 0, sizeof(transform_params));
/* Indicates which of the transform parameters are valid. */
transform_params.transform_flag = NVBUFFER_TRANSFORM_FILTER;
transform_params.transform_flip = NvBufferTransform_None;
transform_params.transform_filter = NvBufferTransform_Filter_Nearest;
transform_params.src_rect = src_rect;
transform_params.dst_rect = dest_rect;
int fd = -1;
if (capture_cnt < DRM_BUF_SIZE)
{
fd = drm_buf_fd[capture_cnt];
}
else
{
fd = drm_renderer->dequeBuffer();
}
ret = NvBufferTransform(cam_capture_dma_fd[capture_cnt % CAMERA_BUFFERS_NUM], fd, &transform_params);
drm_renderer->enqueBuffer(fd);
if (capture_cnt%60==0)
{
LOG("%d %d\n",capture_cnt);
}
if (ioctl(cam_fd, VIDIOC_QBUF, &v4l2_buf_cam))
ERROR_RETURN("Failed to queue camera buffers: %s (%d)", strerror(errno), errno);
capture_cnt++;
}
}
return 0;
}
I capture a picture from the command v4l2-ctl -d /dev/video0 --set-fmt-video=width=3840,height=2160,pixelformat=RGB32 --set-ctrl bypass_mode=0 --stream-mmap --stream-count=1 --stream-to=test.rgb
The picture is fine.
test.rgb (31.6 MB)
You can play it; it is best to play it without any scaling, i.e. at its native 3840x2160 resolution:
./ffplay -video_size 3840x2160 -pixel_format rgb32 test.rgb