I want to read synchronized images (stereo) from two CSI-Cameras connected to my Jetson Nano B01 and then process them in OpenCV. It kind of works, but I do not get correct images but something like this:
I think there could be a problem with the image format (YUV420 instead of BGR), the mmap-operation or something else, but I can’t figure it out.
My code to get the images and send them to OpenCV (only for left image for clarity reasons):
bool StereoCameraInterface::getSyncedImages(cv::Mat *left, cv::Mat *right)
{
const uint64_t FIVE_SECONDS_IN_NANOSECONDS = 5000000000;
// Initialize EGL.
PROPAGATE_ERROR(g_display.initialize());
// Initialize the Argus camera provider.
UniqueObj<CameraProvider> cameraProvider(CameraProvider::create());
// Get the ICameraProvider interface from the global CameraProvider.
ICameraProvider *iCameraProvider = interface_cast<ICameraProvider>(cameraProvider);
// Get the camera devices.
std::vector<CameraDevice*> cameraDevices;
iCameraProvider->getCameraDevices(&cameraDevices);
if (cameraDevices.size() < 2)
ORIGINATE_ERROR("Must have at least 2 sensors available");
std::vector <CameraDevice*> lrCameras;
lrCameras.push_back(cameraDevices[0]); // Left Camera (the 1st camera will be used for AC)
lrCameras.push_back(cameraDevices[1]); // Right Camera
// Create the capture session, AutoControl will be based on what the 1st device sees.
UniqueObj<CaptureSession> captureSession(iCameraProvider->createCaptureSession(lrCameras));
ICaptureSession *iCaptureSession = interface_cast<ICaptureSession>(captureSession);
// Create stream settings object and set settings common to both streams.
UniqueObj<OutputStreamSettings> streamSettings(
iCaptureSession->createOutputStreamSettings(STREAM_TYPE_EGL));
IOutputStreamSettings *iStreamSettings = interface_cast<IOutputStreamSettings>(streamSettings);
IEGLOutputStreamSettings *iEGLStreamSettings =
interface_cast<IEGLOutputStreamSettings>(streamSettings);
iEGLStreamSettings->setPixelFormat(PIXEL_FMT_YCbCr_420_888);
iEGLStreamSettings->setResolution(STREAM_SIZE);
iEGLStreamSettings->setEGLDisplay(g_display.get());
// Create egl streams
PRODUCER_PRINT("Creating left stream.\n");
iStreamSettings->setCameraDevice(lrCameras[0]);
UniqueObj<OutputStream> streamLeft(iCaptureSession->createOutputStream(streamSettings.get()));
IEGLOutputStream *iStreamLeft = interface_cast<IEGLOutputStream>(streamLeft);
// Create consumer for left frame
Argus::UniqueObj<EGLStream::FrameConsumer> consumerLeft(
EGLStream::FrameConsumer::create(streamLeft.get()));
EGLStream::IFrameConsumer *iFrameConsumerLeft =
Argus::interface_cast<EGLStream::IFrameConsumer>(consumerLeft);
// Create a request
UniqueObj<Request> request(iCaptureSession->createRequest());
IRequest *iRequest = interface_cast<IRequest>(request);
Argus::Status statusLeft;
// Enable both streams in the request.
statusLeft = iRequest->enableOutputStream(streamLeft.get());
uint32_t requestId = iCaptureSession->capture(request.get());
EXIT_IF_NULL(requestId, "Failed to submit capture request");
/*
* Acquire a frame generated by the capture request, get the image from the frame
* and create OpenCV Mat
*/
// LEFT IMAGE:
Argus::UniqueObj<EGLStream::Frame> frameLeft(
iFrameConsumerLeft->acquireFrame(FIVE_SECONDS_IN_NANOSECONDS, &statusLeft));
EXIT_IF_NULL(frameLeft, "Failed to acquire left frame");
EGLStream::IFrame *iFrameLeft = Argus::interface_cast<EGLStream::IFrame>(frameLeft);
EXIT_IF_NULL(iFrameLeft, "Failed to get IFrame interface left");
EGLStream::Image *imageLeft = iFrameLeft->getImage();
EXIT_IF_NULL(imageLeft, "Failed to get Image from iFrame->getImage()");
// Shut down Argus.
cameraProvider.reset();
// create OpenCV Images
EGLStream::NV::IImageNativeBuffer *iImageNativeBufferLeft
= interface_cast<EGLStream::NV::IImageNativeBuffer>(imageLeft);
int fd_left = iImageNativeBufferLeft->createNvBuffer(STREAM_SIZE,
NvBufferColorFormat_YUV420, NvBufferLayout_Pitch, EGLStream::NV::Rotation::ROTATION_0, &statusLeft);
NvBufferParams params;
NvBufferGetParams(fd_left, ¶ms);
int fsizeLeft = params.height[0] * params.pitch[0];
char* data_mem_left;
data_mem_left = (char *)mmap(NULL, 1.5*fsizeLeft, PROT_READ | PROT_WRITE, MAP_SHARED, fd_left, params.offset[0]);
printf("%dx%d, pitch=%d offset=%d data_mem_left=%p\n", params.width[0], params.height[0],
params.pitch[0], params.offset[0], data_mem_left);
NvBufferDestroy(fd_left);
*left = cv::Mat(480, 640, CV_8UC3, *data_mem_left);
std::cout << "*left finished" << "\n" ;
}
And to display the image:
StereoCameraInterface interface;
interface.getSyncedImages(&left, &right);
cv::Mat left_bgr;
cv::cvtColor(left, left_bgr, cv::COLOR_YUV420p2RGBA );
cv::namedWindow("CSI Camera", cv::WINDOW_AUTOSIZE);
cv::imshow("CSI Camera", left_bgr);
cv::waitKey(0);
If I don’t do the cvtColor, the whole image is just blue.
Any ideas what could be the problem?