Hello,
In short:
- I have two CSI cameras and a Jetson TX1;
- I have tried the syncSensor example - it works fine;
- I have made an OpenCVConsumer, using some ideas other people provided on the forum. It works for the first 400 frames.
Code example:
while (true)
{
    Argus::Status status;

    /* Acquire the frames generated by the capture requests */
    UniqueObj<EGLStream::Frame> frame_l(m_FrameConsumer_l->acquireFrame(1000000000, &status));
    if (status != STATUS_OK) CONSUMER_PRINT("Failed to acquire left frame");
    UniqueObj<EGLStream::Frame> frame_r(m_FrameConsumer_r->acquireFrame(1000000000, &status));
    if (status != STATUS_OK) CONSUMER_PRINT("Failed to acquire right frame");

    EGLStream::IFrame *iFrame_l = interface_cast<EGLStream::IFrame>(frame_l);
    EGLStream::IFrame *iFrame_r = interface_cast<EGLStream::IFrame>(frame_r);
    if (!iFrame_l || !iFrame_r) break;

    CONSUMER_PRINT("Acquired Left Frame: %llu, time %llu\n",
                   static_cast<unsigned long long>(iFrame_l->getNumber()),
                   static_cast<unsigned long long>(iFrame_l->getTime()));
    CONSUMER_PRINT("Acquired Right Frame: %llu, time %llu\n",
                   static_cast<unsigned long long>(iFrame_r->getNumber()),
                   static_cast<unsigned long long>(iFrame_r->getTime()));

    /* Get the images from the frames */
    EGLStream::Image *image_l = iFrame_l->getImage();
    EGLStream::Image *image_r = iFrame_r->getImage();
    if (!image_l || !image_r) CONSUMER_PRINT("Failed to get the images");

    /* Create NvBuffers (dmabuf fds) from the images */
    EGLStream::NV::IImageNativeBuffer *iImageNativeBuffer_l =
        interface_cast<EGLStream::NV::IImageNativeBuffer>(image_l);
    if (!iImageNativeBuffer_l) CONSUMER_PRINT("Failed to get an IImageNativeBuffer");
    EGLStream::NV::IImageNativeBuffer *iImageNativeBuffer_r =
        interface_cast<EGLStream::NV::IImageNativeBuffer>(image_r);
    if (!iImageNativeBuffer_r) CONSUMER_PRINT("Failed to get an IImageNativeBuffer");

    int dmabuf_fd_l = iImageNativeBuffer_l->createNvBuffer(STREAM_SIZE,
        NvBufferColorFormat_YUV420, NvBufferLayout_Pitch, &status);
    if (status != STATUS_OK) CONSUMER_PRINT("Failed to create a native buffer");
    int dmabuf_fd_r = iImageNativeBuffer_r->createNvBuffer(STREAM_SIZE,
        NvBufferColorFormat_YUV420, NvBufferLayout_Pitch, &status);
    if (status != STATUS_OK) CONSUMER_PRINT("Failed to create a native buffer");

    NvBufferParams params_l, params_r;
    int ret = NvBufferGetParams(dmabuf_fd_l, &params_l);
    if (ret < 0) CONSUMER_PRINT("Failed to obtain params");
    ret = NvBufferGetParams(dmabuf_fd_r, &params_r);
    if (ret < 0) CONSUMER_PRINT("Failed to obtain params");

    /* Map plane 0 (the Y plane) of each buffer into CPU memory */
    int i = 0;
    uint8_t *data_mem_l = static_cast<uint8_t *>(mmap(NULL, params_l.pitch[i] * params_l.height[i],
        PROT_READ, MAP_SHARED, dmabuf_fd_l, params_l.offset[i]));
    if (data_mem_l == MAP_FAILED) printf("mmap failed : %s\n", strerror(errno));
    uint8_t *data_mem_r = static_cast<uint8_t *>(mmap(NULL, params_r.pitch[i] * params_r.height[i],
        PROT_READ, MAP_SHARED, dmabuf_fd_r, params_r.offset[i]));
    if (data_mem_r == MAP_FAILED) printf("mmap failed : %s\n", strerror(errno));

    /* Wrap the mapped Y planes in cv::Mat headers (no copy) and display them */
    const cv::Mat left(params_l.height[i], params_l.width[i], CV_8UC1, data_mem_l, params_l.pitch[i]);
    const cv::Mat right(params_r.height[i], params_r.width[i], CV_8UC1, data_mem_r, params_r.pitch[i]);
    cv::imshow("left", left);
    cv::imshow("right", right);
    cv::waitKey(1);

    /* Destroy the NvBuffers */
    ret = NvBufferDestroy(dmabuf_fd_l);
    if (ret < 0) CONSUMER_PRINT("Failed to destroy buffer");
    ret = NvBufferDestroy(dmabuf_fd_r);
    if (ret < 0) CONSUMER_PRINT("Failed to destroy buffer");
}
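For context, the two frame consumers used above are created from the Argus output streams essentially as in the syncSensor sample. A rough sketch (outputStream_l/outputStream_r and the exact variable names are mine, and the FrameConsumer objects are kept alive for the lifetime of the loop):

/* Sketch of the consumer setup, following the syncSensor sample */
UniqueObj<EGLStream::FrameConsumer> consumer_l(EGLStream::FrameConsumer::create(outputStream_l));
UniqueObj<EGLStream::FrameConsumer> consumer_r(EGLStream::FrameConsumer::create(outputStream_r));
m_FrameConsumer_l = interface_cast<EGLStream::IFrameConsumer>(consumer_l);
m_FrameConsumer_r = interface_cast<EGLStream::IFrameConsumer>(consumer_r);
if (!m_FrameConsumer_l || !m_FrameConsumer_r)
    CONSUMER_PRINT("Failed to initialize the FrameConsumers");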
I have tried various resolutions and pixel formats - it works fine for the first 400 frames, then I get the following error:
NvRmChannelSubmit: NvError_IoctlFailed with error code 22
NvRmPrivFlush: NvRmChannelSubmit failed (err = 196623, SyncPointIdx = 56, SynchPointValue = 0)
I have noticed that this only happens if the mmap calls are actually executed (not commented out).
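One detail I am not sure about: the mapped planes are never munmap'ed before NvBufferDestroy, so the mappings may simply accumulate across iterations. A minimal sketch of the cleanup I would expect at the end of each iteration (my assumption, not something I have verified as the cause; the sizes match the mmap calls above):

/* Assumption: unmap each Y plane before destroying its dmabuf fd */
if (data_mem_l != MAP_FAILED) munmap(data_mem_l, params_l.pitch[i] * params_l.height[i]);
if (data_mem_r != MAP_FAILED) munmap(data_mem_r, params_r.pitch[i] * params_r.height[i]);
ret = NvBufferDestroy(dmabuf_fd_l);
ret = NvBufferDestroy(dmabuf_fd_r);

Is this the right way to release the buffers, or is something else required after mmap on a dmabuf fd?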