I’m trying to use a Jetson Nano, Video4Linux2 (V4L2), OpenCV, and an IMX219 camera sensor to capture a single image and display it to the user with cv::imshow() (my rough plan for the OpenCV side is sketched after the full listing below).
I used ChatGPT to generate the code so I’d have some sort of skeleton to start from, since I have no idea where to begin with the actual V4L2 documentation.
I’m just trying to capture a 1280x720 image in the YUYV pixel format, as shown in the code below, but for some reason the program always hangs here:
if (ioctl(fd, VIDIOC_DQBUF, &buffer) < 0)
{
    perror("Failed to dequeue buffer");
    munmap(buffer_start, buffer.length);
    close(fd);
    return 1;
}
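To at least turn that indefinite hang into a visible error while I debug, my plan is to wait on the device with select() and a timeout before dequeuing. This is just a rough, untested sketch (it needs #include <sys/select.h> and reuses the fd and buffer variables from the full listing below):
// Wait up to two seconds for the driver to produce a frame before dequeuing,
// so a missing frame shows up as a timeout message instead of a permanent hang.
fd_set fds;
FD_ZERO(&fds);
FD_SET(fd, &fds);

struct timeval tv;
tv.tv_sec = 2;   // timeout in seconds
tv.tv_usec = 0;

int r = select(fd + 1, &fds, NULL, NULL, &tv);
if (r == 0)
{
    fprintf(stderr, "Timed out waiting for a frame\n");
}
else if (r < 0)
{
    perror("select() failed");
}
else if (ioctl(fd, VIDIOC_DQBUF, &buffer) < 0)
{
    perror("Failed to dequeue buffer");
}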
Here’s the generated code in its entirety:
#include <iostream>
#include <string>
#include <cstdio>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>
#include <errno.h>
#include <sys/mman.h>
#include <fstream>
#include <cstring>

// g++ -o v4l2_test v4l2_test.cpp

int main()
{
    // Open the video device
    int fd = open("/dev/video0", O_RDWR);
    if (fd < 0)
    {
        perror("Failed to open device");
        return 1;
    }

    // Query the device capabilities
    struct v4l2_capability cap;
    if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0)
    {
        perror("Failed to get device capabilities");
        close(fd);
        return 1;
    }

    // Set the format (zero the struct first so no fields hold garbage)
    struct v4l2_format format;
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    // format.fmt.pix.pixelformat = V4L2_PIX_FMT_SRGGB10; // Replace with the actual FourCC code
    format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    format.fmt.pix.width = 1280;  // Adjust to your desired resolution
    format.fmt.pix.height = 720;  // Adjust to your desired resolution
    if (ioctl(fd, VIDIOC_S_FMT, &format) < 0)
    {
        perror("Failed to set format");
        close(fd);
        return 1;
    }
    else
        printf("Format set\r\n");

    // Request a single memory-mapped buffer
    struct v4l2_requestbuffers req;
    memset(&req, 0, sizeof(req));
    req.count = 1;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    if (ioctl(fd, VIDIOC_REQBUFS, &req) < 0)
    {
        perror("Failed to request buffers");
        close(fd);
        return 1;
    }
    else
        printf("Buffers requested\r\n");

    // Query the buffer
    struct v4l2_buffer buffer;
    memset(&buffer, 0, sizeof(buffer));
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buffer.memory = V4L2_MEMORY_MMAP;
    buffer.index = 0;
    if (ioctl(fd, VIDIOC_QUERYBUF, &buffer) < 0)
    {
        perror("Failed to query buffer");
        close(fd);
        return 1;
    }
    else
        printf("Buffer queried\r\n");

    // Map the buffer into this process's address space
    void *buffer_start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buffer.m.offset);
    if (buffer_start == MAP_FAILED)
    {
        perror("Failed to map buffer");
        close(fd);
        return 1;
    }
    else
        printf("Buffer mapped\r\n");

    // Queue the buffer
    if (ioctl(fd, VIDIOC_QBUF, &buffer) < 0)
    {
        perror("Failed to queue buffer");
        munmap(buffer_start, buffer.length);
        close(fd);
        return 1;
    }
    else
        printf("Buffer queued\r\n");

    // Start streaming
    int type = buffer.type;
    if (ioctl(fd, VIDIOC_STREAMON, &type) < 0)
    {
        perror("Failed to start streaming");
        munmap(buffer_start, buffer.length);
        close(fd);
        return 1;
    }
    else
        printf("Stream started\r\n");

    // Dequeue the buffer (this is where the program hangs)
    if (ioctl(fd, VIDIOC_DQBUF, &buffer) < 0)
    {
        perror("Failed to dequeue buffer");
        munmap(buffer_start, buffer.length);
        close(fd);
        return 1;
    }
    else
        printf("Buffer dequeued\r\n");

    // Save the raw image data
    std::ofstream outFile("raw_frame.data", std::ios::binary);
    outFile.write((char *)buffer_start, buffer.bytesused);
    outFile.close();

    // Stop streaming
    if (ioctl(fd, VIDIOC_STREAMOFF, &type) < 0)
    {
        perror("Failed to stop streaming");
    }

    // Unmap the buffer and close the device
    munmap(buffer_start, buffer.length);
    close(fd);

    std::cout << "Raw frame saved as raw_frame.data" << std::endl;
    return 0;
}
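Once a frame actually comes out, this is roughly how I plan to handle the OpenCV side: read raw_frame.data back in, convert the packed YUYV to BGR, and show it with cv::imshow(). This is an untested sketch; it assumes the dump really is a 1280x720 YUYV (YUY2) frame and that pkg-config knows about OpenCV 4 on the Nano:
// show_frame.cpp -- untested sketch for displaying the dumped frame
// g++ -o show_frame show_frame.cpp $(pkg-config --cflags --libs opencv4)
#include <opencv2/opencv.hpp>
#include <fstream>
#include <iostream>
#include <iterator>
#include <vector>

int main()
{
    const int width = 1280, height = 720;

    // Read the raw dump back into memory
    std::ifstream in("raw_frame.data", std::ios::binary);
    std::vector<unsigned char> data((std::istreambuf_iterator<char>(in)),
                                    std::istreambuf_iterator<char>());
    if (data.size() < static_cast<size_t>(width) * height * 2)
    {
        std::cerr << "Unexpected file size: " << data.size() << std::endl;
        return 1;
    }

    // Wrap the buffer as a 2-channel YUYV image and convert to BGR for display
    cv::Mat yuyv(height, width, CV_8UC2, data.data());
    cv::Mat bgr;
    cv::cvtColor(yuyv, bgr, cv::COLOR_YUV2BGR_YUY2);

    cv::imshow("Captured frame", bgr);
    cv::waitKey(0);
    return 0;
}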
For what it’s worth, none of the perror() calls ever fire, and I’m not sure where to start troubleshooting.
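The only concrete idea I’ve had so far is to print what the driver actually accepted right after VIDIOC_S_FMT, since from what I’ve read the driver can silently adjust the requested size and pixel format to whatever the sensor really offers (the commented-out SRGGB10 line makes me suspect the IMX219 might only expose a raw Bayer format here). Untested sketch:
// Read back what the driver actually negotiated and list the formats it
// supports, in case YUYV was silently replaced with something else.
struct v4l2_format cur;
memset(&cur, 0, sizeof(cur));
cur.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(fd, VIDIOC_G_FMT, &cur) == 0)
{
    char fourcc[5] = {0};
    memcpy(fourcc, &cur.fmt.pix.pixelformat, 4);
    printf("Negotiated %ux%u, pixel format %s\n", cur.fmt.pix.width, cur.fmt.pix.height, fourcc);
}

struct v4l2_fmtdesc desc;
memset(&desc, 0, sizeof(desc));
desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
while (ioctl(fd, VIDIOC_ENUM_FMT, &desc) == 0)
{
    printf("Supported format: %s\n", (const char *)desc.description);
    desc.index++;
}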