Hi Guys,
I am building an application that uses two cameras (Sony IMX274); each camera is handled by a separate thread that fetches frames from its camera using the ‘acquireFrame’ call. I have two TX1 boards: a development board and a carrier board. The development board is flashed with JetPack 3.0 (R24.2.1) and the carrier board with JetPack 3.1 (R28.1).
My application runs fine on the development board, but it does not work correctly on the carrier board: acquireFrame gets stuck (does not return) after a couple of calls. The Argus samples, such as multiSensor, run fine on the carrier board. Below is the code that starts both camera threads, followed by the code that executes the acquireFrame call in each thread.
Code that instantiates the two cameras, creates a thread for each camera, and runs it:
static uint32_t CAPTURE_TIME = 10; // In seconds.
static const uint32_t NUMBER_SESSIONS = 2;
static const int DEFAULT_FPS = 30;
static const int width = 1920;
static const int height = 1080;
static const int lastFrameCount = CAPTURE_TIME * DEFAULT_FPS;
bool execute()
{
using namespace Argus;
// Initialize the preview window and EGL display.
Window &window = Window::getInstance();
PROPAGATE_ERROR(g_display.initialize(window.getEGLNativeDisplay()));
// Initialize the Argus camera provider.
UniqueObj<CameraProvider> cameraProvider(CameraProvider::create());
// Get the ICameraProvider interface from the global CameraProvider.
ICameraProvider *iCameraProvider = interface_cast<ICameraProvider>(cameraProvider);
if (!iCameraProvider)
ORIGINATE_ERROR("Failed to get ICameraProvider interface");
// Get the camera devices.
std::vector<CameraDevice*> cameraDevices;
iCameraProvider->getCameraDevices(&cameraDevices);
if (cameraDevices.size() < 2)
ORIGINATE_ERROR("At least two camera devices are required");
CameraDevice *cameraDevice = cameraDevices[0];
ICameraProperties *iCameraProperties = interface_cast<ICameraProperties>(cameraDevice);
if (!iCameraProperties)
ORIGINATE_ERROR("Failed to get ICameraProperties interface");
std::vector <CameraDevice*> lrCameras;
lrCameras.push_back(cameraDevices[0]); // Left Camera (the 1st camera will be used for AC)
// Create the capture session, AutoControl will be based on what the 1st device sees.
UniqueObj<CaptureSession> captureSessionCamera0(iCameraProvider->createCaptureSession(lrCameras[0]));
ICaptureSession *iCaptureSessionCamera0 = interface_cast<ICaptureSession>(captureSessionCamera0);
if (!iCaptureSessionCamera0)
ORIGINATE_ERROR("Failed to get capture session interface");
// Create stream settings object and set settings common to both streams.
UniqueObj<OutputStreamSettings> streamSettingsCamera0(iCaptureSessionCamera0->createOutputStreamSettings());
IOutputStreamSettings *iStreamSettingsCamera0 = interface_cast<IOutputStreamSettings>(streamSettingsCamera0);
if (!iStreamSettingsCamera0)
ORIGINATE_ERROR("Failed to create OutputStreamSettings");
iStreamSettingsCamera0->setPixelFormat(PIXEL_FMT_YCbCr_420_888);
iStreamSettingsCamera0->setEGLDisplay(g_display.get());
iStreamSettingsCamera0->setResolution(ARGUSSIZE(width, height));
// Create egl streams
iStreamSettingsCamera0->setCameraDevice(lrCameras[0]);
UniqueObj<OutputStream> streamLeft(iCaptureSessionCamera0->createOutputStream(streamSettingsCamera0.get()));
IStream *iStreamLeft = interface_cast<IStream>(streamLeft);
if (!iStreamLeft)
ORIGINATE_ERROR("Failed to create left stream");
UniqueObj<OutputStream> videoStreamCamera0(iCaptureSessionCamera0->createOutputStream(streamSettingsCamera0.get()));
IStream *iVideoStreamCamera0 = interface_cast<IStream>(videoStreamCamera0);
if (!iVideoStreamCamera0)
ORIGINATE_ERROR("Failed to create video stream");
aaCamCaptureThread aaCamCaptureLeft(streamLeft.get(), NULL,0, NULL,lastFrameCount,NULL);
PROPAGATE_ERROR(aaCamCaptureLeft.initialize());
PROPAGATE_ERROR(aaCamCaptureLeft.waitRunning());
UniqueObj<Request> requestCamera0(iCaptureSessionCamera0->createRequest());
IRequest *iRequestCamera0 = interface_cast<IRequest>(requestCamera0);
if (!iRequestCamera0)
ORIGINATE_ERROR("Failed to create Request");
iRequestCamera0->enableOutputStream(streamLeft.get());
ISourceSettings *iSourceSettingsCamera0 = interface_cast<ISourceSettings>(iRequestCamera0->getSourceSettings());
if (!iSourceSettingsCamera0)
ORIGINATE_ERROR("Failed to get ISourceSettings interface");
iSourceSettingsCamera0->setFrameDurationRange(Argus::Range<uint64_t>(1e9/DEFAULT_FPS));
std::vector <CameraDevice*> lrCameras1;
lrCameras1.push_back(cameraDevices[1]); // Right Camera
// Create the capture session, AutoControl will be based on what the 1st device sees.
UniqueObj<CaptureSession> captureSessionCamera1(iCameraProvider->createCaptureSession(lrCameras1[0]));
ICaptureSession *iCaptureSessionCamera1 = interface_cast<ICaptureSession>(captureSessionCamera1);
if (!iCaptureSessionCamera1)
ORIGINATE_ERROR("Failed to get capture session interface");
// Create stream settings object and set settings common to both streams.
UniqueObj<OutputStreamSettings> streamSettingsCamera1(iCaptureSessionCamera1->createOutputStreamSettings());
IOutputStreamSettings *iStreamSettingsCamera1 = interface_cast<IOutputStreamSettings>(streamSettingsCamera1);
if (!iStreamSettingsCamera1)
ORIGINATE_ERROR("Failed to create OutputStreamSettings");
iStreamSettingsCamera1->setPixelFormat(PIXEL_FMT_YCbCr_420_888);
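// NOTE: unlike camera 0, setEGLDisplay() is not called on these stream settings.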
iStreamSettingsCamera1->setResolution(ARGUSSIZE(width, height));
// Create egl streams
iStreamSettingsCamera1->setCameraDevice(lrCameras1[0]);
UniqueObj<OutputStream> streamRight(iCaptureSessionCamera1->createOutputStream(streamSettingsCamera1.get()));
IStream *iStreamRight = interface_cast<IStream>(streamRight);
if (!iStreamRight)
ORIGINATE_ERROR("Failed to create left stream");
UniqueObj<OutputStream> videoStreamCamera1(iCaptureSessionCamera1->createOutputStream(streamSettingsCamera1.get()));
IStream *iVideoStreamCamera1 = interface_cast<IStream>(videoStreamCamera1);
if (!iVideoStreamCamera1)
ORIGINATE_ERROR("Failed to create video stream");
Queue<aaFrameBuffer> inputFrameQ1;
Queue<int > inputFrameFdQ1;
Queue<int > camCapture2NewOCVConsumerMsgQ1;
Queue<aaFrameBuffer> calibFrameQ1;
Queue<int > calibFrameFdQ1;
aaCamCaptureThread aaCamCaptureRight(streamRight.get(), NULL,1, NULL,lastFrameCount,NULL);
PROPAGATE_ERROR(aaCamCaptureRight.initialize());
PROPAGATE_ERROR(aaCamCaptureRight.waitRunning());
UniqueObj<Request> requestCamera1(iCaptureSessionCamera1->createRequest());
IRequest *iRequestCamera1 = interface_cast<IRequest>(requestCamera1);
if (!iRequestCamera1)
ORIGINATE_ERROR("Failed to create Request");
iRequestCamera1->enableOutputStream(streamRight.get());
ISourceSettings *iSourceSettingsCamera1 = interface_cast<ISourceSettings>(iRequestCamera1->getSourceSettings());
if (!iSourceSettingsCamera1)
ORIGINATE_ERROR("Failed to get ISourceSettings interface");
iSourceSettingsCamera1->setFrameDurationRange(Argus::Range<uint64_t>(1e9/DEFAULT_FPS));
if (iCaptureSessionCamera0->repeat(requestCamera0.get()) != STATUS_OK)
ORIGINATE_ERROR("Failed to start repeat capture request for preview");
if (iCaptureSessionCamera1->repeat(requestCamera1.get()) != STATUS_OK)
ORIGINATE_ERROR("Failed to start repeat capture request for preview");
iCaptureSessionCamera0->stopRepeat();
iCaptureSessionCamera0->waitForIdle();
iStreamLeft->disconnect();
// Stop the capture requests and wait until they are complete.
iCaptureSessionCamera1->stopRepeat();
iCaptureSessionCamera1->waitForIdle();
iStreamRight->disconnect();
PROPAGATE_ERROR(aaCamCaptureLeft.shutdown());
PROPAGATE_ERROR(aaCamCaptureRight.shutdown());
cameraProvider.reset();
// Cleanup the EGL display
PROPAGATE_ERROR(g_display.cleanup());
return true;
}
The code running in each camera thread is given below:
bool aaCamCaptureThread::threadExecute()
{
IStream *iStream = interface_cast<IStream>(m_stream);
IFrameConsumer *iFrameConsumer = interface_cast<IFrameConsumer>(m_consumer);
Argus::Status status;
int ret;
// Wait until the producer has connected to the stream.
printf("Waiting until producer is connected...%x\n", m_stream);
if (iStream->waitUntilConnected() != STATUS_OK)
ORIGINATE_ERROR("Stream failed to connect.");
printf("Producer has connected; continuing. %x\n", iFrameConsumer);
while (m_currentFrame < (m_lastFrameCount-2))
{
// Acquire a Frame.
printf("Acquiring Frame %d Camera id : %d\n",m_currentFrame, m_camId);
UniqueObj<Frame> frame(iFrameConsumer->acquireFrame());
IFrame *iFrame = interface_cast<IFrame>(frame);
if (!iFrame)
{
printf("Returning due to break condition\n");
break;
}
// Get the Frame's Image.
Image *image = iFrame->getImage();
EGLStream::NV::IImageNativeBuffer *iImageNativeBuffer
= interface_cast<EGLStream::NV::IImageNativeBuffer>(image);
TEST_ERROR_RETURN(!iImageNativeBuffer, "Failed to create an IImageNativeBuffer");
int fd = iImageNativeBuffer->createNvBuffer(ARGUSSIZE {m_pCamInfo->liveParams.inputVideoInfo.width, m_pCamInfo->liveParams.inputVideoInfo.height},
NvBufferColorFormat_YUV420, NvBufferLayout_Pitch, &status);
TEST_ERROR_RETURN(status != STATUS_OK, "Failed to create a native buffer");
m_currentFrame++;
}
PROPAGATE_ERROR(requestShutdown());
return true;
}
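For reference, m_consumer above is the EGLStream::FrameConsumer attached to the stream passed to the thread; it is created before threadExecute() runs, following the usual Argus sample pattern, roughly as below (a sketch rather than my exact code; the threadInitialize() placement is an assumption):
bool aaCamCaptureThread::threadInitialize()
{
// Create the FrameConsumer for the output stream handed to this thread (sketch).
m_consumer = UniqueObj<EGLStream::FrameConsumer>(EGLStream::FrameConsumer::create(m_stream));
if (!m_consumer)
ORIGINATE_ERROR("Failed to create FrameConsumer");
return true;
}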
On the carrier board, the first camera thread runs successfully. However, the second camera thread executes only a couple of iterations and then gets stuck in the ‘acquireFrame’ call; the last print statement from the second camera thread is:
“Acquiring Frame 2 Camera id : 1”
NOTE: The carrier board has 3 cameras connected because its driver supports 3 cameras, although the application uses only two of them at a time; the development board has only 2 cameras connected.
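As a diagnostic, I am also considering calling acquireFrame with a finite timeout instead of the default infinite wait, roughly as below (a sketch, not my current code; I believe the timeout argument is in nanoseconds and the second argument returns the Status):
// Sketch: acquire with a 1-second timeout so the thread reports a stall instead of blocking forever.
Argus::Status acquireStatus = STATUS_OK;
const uint64_t oneSecondNs = 1000000000ULL; // timeout in nanoseconds (assumption)
UniqueObj<Frame> frame(iFrameConsumer->acquireFrame(oneSecondNs, &acquireStatus));
IFrame *iFrame = interface_cast<IFrame>(frame);
if (!iFrame)
{
printf("acquireFrame timed out or failed on camera %d (status %d)\n", m_camId, (int)acquireStatus);
break;
}
At least that way the thread would log the stall and exit cleanly instead of hanging.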
Kindly help me out.
Thanks.