Hi ShaneLLL,
I am following the 11_camera_object_identification example and am looking to get a pointer to the frame buffer.
In the code below, is there a way to go directly from the “fd” to a pointer to the frame buffer?
// Acquire a frame.
UniqueObj<Frame> frame(iFrameConsumer->acquireFrame());
IFrame *iFrame = interface_cast<IFrame>(frame);
if (!iFrame)
    break;

// Get the IImageNativeBuffer extension interface and create the fd.
NV::IImageNativeBuffer *iNativeBuffer =
    interface_cast<NV::IImageNativeBuffer>(iFrame->getImage());
if (!iNativeBuffer)
    ORIGINATE_ERROR("IImageNativeBuffer not supported by Image.");
fd = iNativeBuffer->createNvBuffer(Size(ctx.width, ctx.height),
                                   NvBufferColorFormat_YUV420,
                                   NvBufferLayout_BlockLinear);
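For reference, this is roughly what I am hoping is possible. Below is a minimal sketch assuming the NvBufferMemMap()/NvBufferMemSyncForCpu() helpers from nvbuf_utils.h can be used on the fd (I am not sure this is the intended API, so please correct me):

#include "nvbuf_utils.h"

void *vaddr = NULL;
// Map plane 0 (the Y plane of YUV420) of the dmabuf fd into CPU address space.
if (NvBufferMemMap(fd, 0, NvBufferMem_Read, &vaddr) == 0)
{
    // Sync caches so the CPU sees the latest frame contents.
    NvBufferMemSyncForCpu(fd, 0, &vaddr);

    // ... read the Y plane through vaddr here ...

    NvBufferMemUnMap(fd, 0, &vaddr);
}

(I also realize the buffer above is created with NvBufferLayout_BlockLinear; I assume I would need NvBufferLayout_Pitch for the mapped data to be directly readable by the CPU.)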
DaneLLL gave me a patch (for camera reading) based on 11_camera_object_identification (https://devtalk.nvidia.com/default/topic/1010111/jetson-tx1/nvmm-memory/3). I tried it and it works.
I am trying to understand the following; please help me with your explanation.
- The cameras are set to produce YUV_420_888:
// Create the OutputStream.
PRODUCER_PRINT("Creating output stream\n");
UniqueObj<OutputStreamSettings> streamSettings(iCaptureSession->createOutputStreamSettings());
IOutputStreamSettings *iStreamSettings = interface_cast<IOutputStreamSettings>(streamSettings);
if (iStreamSettings)
{
    iStreamSettings->setPixelFormat(PIXEL_FMT_YCbCr_420_888);
    iStreamSettings->setResolution(Size(p_ctx->width, p_ctx->height));
}
- The processing function below reads the camera frame and converts it from RGBA to BGR. How is the camera giving out RGBA when (in the code above) the output pixel format is set to YUV420? (My guess at what might be happening is sketched after the function.)
extern "C" void
opencv_img_processing(void *opencv_handler, uint8_t *pdata, int32_t width, int32_t height)
{
struct opencv_sample_handler *handler =
(struct opencv_sample_handler *) opencv_handler;
cv::Mat imgbuf = cv::Mat(height, width, CV_8UC4, pdata);
cv::Mat display_img;
if (handler->detector_busy == 0) {
//handler->detecting_mat = imgbuf.clone();
imgbuf.copyTo(handler->detecting_mat);
handler->detector_busy = 1;
}
struct timeval tp;
gettimeofday(&tp, NULL);
long start = tp.tv_sec * 1000 + tp.tv_usec / 1000;
cvtColor(imgbuf, display_img, CV_RGBA2BGR);
gettimeofday(&tp, NULL);
long end = tp.tv_sec * 1000 + tp.tv_usec / 1000;
handler->milliseconds = end - start;
if (1/*handler->result_update_flag*/)
{
ostringstream ss;
ss.str("");
ss << "FPS = " << std::fixed << std::setprecision(0)
<< 1000. / handler->milliseconds;
matPrint(display_img, 0, CV_RGB(255,0,0), ss.str());
#if 0
ss.str("");
ss << std::fixed << std::setprecision(2) << handler->first_prob << " - " << handler->first_result;
matPrint(display_img, 1, CV_RGB(255,0,0), ss.str());
ss.str("");
ss << std::fixed << std::setprecision(2) << handler->second_prob << " - " << handler->second_result;
matPrint(display_img, 2, CV_RGB(255,0,0), ss.str());
#endif
}
gettimeofday(&tp, NULL);
start = tp.tv_sec * 1000 + tp.tv_usec / 1000;
cv::imshow("img", display_img);
waitKey(1);
gettimeofday(&tp, NULL);
end = tp.tv_sec * 1000 + tp.tv_usec / 1000;
cout << "cvtColor took " << handler->milliseconds << "ms, ";
cout << "imshow took " << end - start << "ms" << endl;
return;
}
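For what it is worth, my current guess is that the RGBA does not come from the camera at all, but from the NvVideoConverter in the patch: its output plane would take the camera's block-linear YUV420 buffer, and its capture plane would produce a pitch-linear RGBA buffer for OpenCV. A sketch of what I imagine that setup looks like (paraphrased from memory, so the exact calls and formats may differ from the actual patch):

#include "NvVideoConverter.h"

// My assumption of the converter setup: the output plane is what the
// converter reads (camera YUV420, block linear); the capture plane is
// what it produces (RGBA, pitch linear, handed to OpenCV as CV_8UC4).
NvVideoConverter *conv = NvVideoConverter::createVideoConverter("conv");
if (conv->setOutputPlaneFormat(V4L2_PIX_FMT_YUV420M, ctx.width, ctx.height,
                               V4L2_NV_BUFFER_LAYOUT_BLOCKLINEAR) < 0)
    ORIGINATE_ERROR("Could not set converter output plane format.");
if (conv->setCapturePlaneFormat(V4L2_PIX_FMT_ABGR32, ctx.width, ctx.height,
                                V4L2_NV_BUFFER_LAYOUT_PITCH) < 0)
    ORIGINATE_ERROR("Could not set converter capture plane format.");

If that is what is happening, the CV_8UC4 / CV_RGBA2BGR handling in opencv_img_processing() makes sense, since OpenCV never sees YUV data. Is that correct?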