Use Case
- Synchronization of four cameras
- 4 JPEG consumers or 4 OpenCV consumers: image capture from four cameras based on a trigger.
Detail: all four cameras should stay open the whole time, and a single image should be captured from each camera based on a trigger.
I was following the thread https://devtalk.nvidia.com/default/topic/1015136/two-camera-with-libargus-encode-opencvconsumer-each-camera-/ for the aforementioned task. I was able to extend it to four cameras, but I was not able to achieve synchronization or to acquire images based on a trigger. Please go through the code and suggest some ideas for this use case.
#include "Error.h"
#include "EGLGlobal.h"
#include "GLContext.h"
#include "JPEGConsumer.h"
#include "PreviewConsumer.h"
#include "Window.h"
#include "Thread.h"
#include <Argus/Argus.h>
#include <unistd.h>
#include <stdlib.h>
#include <sstream>
#include <iomanip>
#include <stdio.h>
#include <string.h>
#include <errno.h>
#include <fcntl.h>
#include <poll.h>
#include "jetsonGPIO.h"
using namespace Argus;
namespace ArgusSamples
{
// Constants.
static const uint32_t CAPTURE_TIME = 10; // In seconds.
static const uint32_t NUMBER_SESSIONS = 4;
// Debug print macros.
#define PRODUCER_PRINT(...) printf("PRODUCER: " __VA_ARGS__)
static bool execute()
{
Window &window = Window::getInstance();
// Initialize the Argus camera provider.
UniqueObj<CameraProvider> cameraProvider(CameraProvider::create());
// Get the ICameraProvider interface from the camera provider.
ICameraProvider *iCameraProvider = interface_cast<ICameraProvider>(cameraProvider);
if (!iCameraProvider)
ORIGINATE_ERROR("Failed to create CameraProvider");
// Get the camera devices and make sure there are enough for four sessions.
std::vector<CameraDevice*> cameraDevices;
iCameraProvider->getCameraDevices(&cameraDevices);
if (cameraDevices.size() < NUMBER_SESSIONS)
ORIGINATE_ERROR("Insufficient cameras: at least %u are required", NUMBER_SESSIONS);
// Use the second camera's properties; its first sensor mode sets the size for all storage streams.
ICameraProperties *iCameraDevice = interface_cast<ICameraProperties>(cameraDevices[1]);
std::vector<Argus::SensorMode*> sensorModes;
iCameraDevice->getBasicSensorModes(&sensorModes);
// Create the capture sessions, one per camera.
UniqueObj<CaptureSession> captureSessions[NUMBER_SESSIONS];
for (uint32_t i = 0; i < NUMBER_SESSIONS; i++)
{
captureSessions[i] = UniqueObj<CaptureSession>(iCameraProvider->createCaptureSession(cameraDevices[i]));
}
ICaptureSession *iStorageCaptureSession1 = interface_cast<ICaptureSession>(captureSessions[0]);
ICaptureSession *iStorageCaptureSession2 = interface_cast<ICaptureSession>(captureSessions[1]);
ICaptureSession *iStorageCaptureSession3 = interface_cast<ICaptureSession>(captureSessions[2]);
ICaptureSession *iStorageCaptureSession4 = interface_cast<ICaptureSession>(captureSessions[3]);
// Use the 1st sensor mode as the size we want to store.
ISensorMode *iMode = interface_cast<ISensorMode>(sensorModes[0]);
// Create streams.
PRODUCER_PRINT("Creating the storage stream.\n");
UniqueObj<OutputStreamSettings> storageSettings1(iStorageCaptureSession1->createOutputStreamSettings(STREAM_TYPE_EGL));
UniqueObj<OutputStreamSettings> storageSettings2(iStorageCaptureSession2->createOutputStreamSettings(STREAM_TYPE_EGL));
UniqueObj<OutputStreamSettings> storageSettings3(iStorageCaptureSession3->createOutputStreamSettings(STREAM_TYPE_EGL));
UniqueObj<OutputStreamSettings> storageSettings4(iStorageCaptureSession4->createOutputStreamSettings(STREAM_TYPE_EGL));
IEGLOutputStreamSettings *iStorageSettings1 = interface_cast<IEGLOutputStreamSettings>(storageSettings1);
IEGLOutputStreamSettings *iStorageSettings2 = interface_cast<IEGLOutputStreamSettings>(storageSettings2);
IEGLOutputStreamSettings *iStorageSettings3 = interface_cast<IEGLOutputStreamSettings>(storageSettings3);
IEGLOutputStreamSettings *iStorageSettings4 = interface_cast<IEGLOutputStreamSettings>(storageSettings4);
if (iStorageSettings1)
{
iStorageSettings1->setPixelFormat(PIXEL_FMT_YCbCr_420_888);
iStorageSettings1->setResolution(iMode->getResolution());
}
if (iStorageSettings2)
{
iStorageSettings2->setPixelFormat(PIXEL_FMT_YCbCr_420_888);
iStorageSettings2->setResolution(iMode->getResolution());
}
if (iStorageSettings3)
{
iStorageSettings3->setPixelFormat(PIXEL_FMT_YCbCr_420_888);
iStorageSettings3->setResolution(iMode->getResolution());
}
if (iStorageSettings4)
{
iStorageSettings4->setPixelFormat(PIXEL_FMT_YCbCr_420_888);
iStorageSettings4->setResolution(iMode->getResolution());
}
UniqueObj<OutputStream> storageStream1(iStorageCaptureSession1->createOutputStream(storageSettings1.get()));
UniqueObj<OutputStream> storageStream2(iStorageCaptureSession2->createOutputStream(storageSettings2.get()));
UniqueObj<OutputStream> storageStream3(iStorageCaptureSession3->createOutputStream(storageSettings3.get()));
UniqueObj<OutputStream> storageStream4(iStorageCaptureSession4->createOutputStream(storageSettings4.get()));
JPEGConsumerThread jpegConsumer1(storageStream1.get());
JPEGConsumerThread jpegConsumer2(storageStream2.get());
JPEGConsumerThread jpegConsumer3(storageStream3.get());
JPEGConsumerThread jpegConsumer4(storageStream4.get());
PROPAGATE_ERROR(jpegConsumer1.initialize());
PROPAGATE_ERROR(jpegConsumer2.initialize());
PROPAGATE_ERROR(jpegConsumer3.initialize());
PROPAGATE_ERROR(jpegConsumer4.initialize());
PROPAGATE_ERROR(jpegConsumer1.waitRunning());
PROPAGATE_ERROR(jpegConsumer2.waitRunning());
PROPAGATE_ERROR(jpegConsumer3.waitRunning());
PROPAGATE_ERROR(jpegConsumer4.waitRunning());
// Create the four requests, one per session.
UniqueObj<Request> storageRequest1(iStorageCaptureSession1->createRequest());
UniqueObj<Request> storageRequest2(iStorageCaptureSession2->createRequest());
UniqueObj<Request> storageRequest3(iStorageCaptureSession3->createRequest());
UniqueObj<Request> storageRequest4(iStorageCaptureSession4->createRequest());
IRequest *iStorageRequest1 = interface_cast<IRequest>(storageRequest1);
IRequest *iStorageRequest2 = interface_cast<IRequest>(storageRequest2);
IRequest *iStorageRequest3 = interface_cast<IRequest>(storageRequest3);
IRequest *iStorageRequest4 = interface_cast<IRequest>(storageRequest4);
iStorageRequest1->enableOutputStream(storageStream1.get());
iStorageRequest2->enableOutputStream(storageStream2.get());
iStorageRequest3->enableOutputStream(storageStream3.get());
iStorageRequest4->enableOutputStream(storageStream4.get());
unsigned int value = 0;
unsigned int val = 1;
// Will be replaced with GPIO Trigger
//jetsonXavierGPIONumber trig = gpio351 ;
//gpioExport(trig) ;
//gpioSetDirection(trig,inputPin) ;
// Start the repeating capture requests once. The loop and the val flag are
// placeholders for the GPIO trigger; the break exits as soon as the repeats start.
for (uint32_t i = 0; i < CAPTURE_TIME; i++)
{
//gpioGetValue(trig, &value) ;
//PRODUCER_PRINT("%d",value);
if(val == 1)
{
if (iStorageCaptureSession1->repeat(storageRequest1.get()) != STATUS_OK)
ORIGINATE_ERROR("Failed to start repeat capture request for jpg");
if (iStorageCaptureSession2->repeat(storageRequest2.get()) != STATUS_OK)
ORIGINATE_ERROR("Failed to start repeat capture request for jpg");
if (iStorageCaptureSession3->repeat(storageRequest3.get()) != STATUS_OK)
ORIGINATE_ERROR("Failed to start repeat capture request for jpg");
if (iStorageCaptureSession4->repeat(storageRequest4.get()) != STATUS_OK)
ORIGINATE_ERROR("Failed to start repeat capture request for jpg");
val = 0;
break;
}
}
// All done, shut down.
iStorageCaptureSession1->stopRepeat();
iStorageCaptureSession2->stopRepeat();
iStorageCaptureSession3->stopRepeat();
iStorageCaptureSession4->stopRepeat();
iStorageCaptureSession1->waitForIdle();
iStorageCaptureSession2->waitForIdle();
iStorageCaptureSession3->waitForIdle();
iStorageCaptureSession4->waitForIdle();
storageStream1.reset();
storageStream2.reset();
storageStream3.reset();
storageStream4.reset();
// Wait for the consumer threads to complete.
PROPAGATE_ERROR(jpegConsumer1.shutdown());
PROPAGATE_ERROR(jpegConsumer2.shutdown());
PROPAGATE_ERROR(jpegConsumer3.shutdown());
PROPAGATE_ERROR(jpegConsumer4.shutdown());
// Shut down Argus.
cameraProvider.reset();
PRODUCER_PRINT("Done -- exiting.\n");
return true;
}
}; // namespace ArgusSamples
int main(int argc, const char *argv[])
{
if (!ArgusSamples::execute())
return EXIT_FAILURE;
return EXIT_SUCCESS;
}
The code runs successfully, capturing images from all the cameras. But one camera captures more images than the others. I tried replacing repeat with the capture command and modifying the code accordingly, but still no success. Please tell me if I’m heading in the right direction.
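For reference, this is roughly what the single-shot variant could look like (a sketch, assuming the sessions and requests from the listing above). Unlike repeat(), which keeps each session capturing until stopRepeat() is called, so per-camera frame counts drift apart, capture() enqueues exactly one frame per session:

// Sketch: one capture() per session per trigger; capture() returns 0 on failure.
if (iStorageCaptureSession1->capture(storageRequest1.get()) == 0)
    ORIGINATE_ERROR("Failed to submit capture request for camera 1");
if (iStorageCaptureSession2->capture(storageRequest2.get()) == 0)
    ORIGINATE_ERROR("Failed to submit capture request for camera 2");
if (iStorageCaptureSession3->capture(storageRequest3.get()) == 0)
    ORIGINATE_ERROR("Failed to submit capture request for camera 3");
if (iStorageCaptureSession4->capture(storageRequest4.get()) == 0)
    ORIGINATE_ERROR("Failed to submit capture request for camera 4");
// Block until all four captures have completed before re-arming the trigger.
iStorageCaptureSession1->waitForIdle();
iStorageCaptureSession2->waitForIdle();
iStorageCaptureSession3->waitForIdle();
iStorageCaptureSession4->waitForIdle();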
Are your cameras synchronized with an external trigger? In other words, are the cameras physically wired to support synchronization?
No, the cameras don’t have an external trigger to support synchronization. But I want to generate a trigger to grab images either synchronously or asynchronously.
All cameras on -> trigger -> grab a single image from each camera -> OpenCV/JPEG -> wait for the next trigger (cameras stay on).
Do you have timing requirements for the triggering? How close in time do the frames need to be?
I don’t have any timing requirements for the triggering. The trigger comes from a sensor whenever it detects a corrugated board. There should be a single grab per rising edge.
The frames need to be close in time because I have to stitch the images acquired from all the cameras; synchronization matters because the object is moving.
But say the object is not moving: then synchronization is not important, as I can stitch perfectly even if the frames are further apart in time.
So I want to solve both cases. Is it possible to keep all the cameras open throughout the process and grab images only on an event (asynchronously or synchronously)?
Hi Sathish,
Can you share what type of cameras you are using? Is it a CSI or USB camera?
Regards,
Jason
Hi,
It is a CSI camera.
I made an Argus program that grabs images from all the cameras based on a GPIO trigger, but asynchronously. The code is attached below:
#include "Error.h"
#include "EGLGlobal.h"
#include "GLContext.h"
#include "JPEGConsumer.h"
#include "PreviewConsumer.h"
#include "Window.h"
#include "Thread.h"
#include <chrono>
#include <Argus/Argus.h>
#include <EGLStream/EGLStream.h> // FrameConsumer, Frame, Image, IImageJPEG
#include <unistd.h>
#include <stdlib.h>
#include <sstream>
#include <iomanip>
#include <stdio.h>
#include <string.h>
#include <errno.h>
#include <fcntl.h>
#include <poll.h>
#include "jetsonGPIO.h"
using namespace std::chrono;
using namespace Argus;
using namespace EGLStream;
namespace ArgusSamples
{
// Constants.
const uint64_t FIVE_SECONDS_IN_NANOSECONDS = 5000000000;
static const uint32_t NUMBER_SESSIONS = 4;
// Debug print macros.
#define PRODUCER_PRINT(...) printf("PRODUCER: " __VA_ARGS__)
static bool execute()
{
Window &window = Window::getInstance();
UniqueObj<CameraProvider> cameraProvider(CameraProvider::create());
ICameraProvider *iCameraProvider = interface_cast<ICameraProvider>(cameraProvider);
if (!iCameraProvider)
ORIGINATE_ERROR("Failed to create CameraProvider");
std::vector<CameraDevice*> cameraDevices;
iCameraProvider->getCameraDevices(&cameraDevices);
if (cameraDevices.size() < NUMBER_SESSIONS)
ORIGINATE_ERROR("Insufficient cameras: at least %u are required", NUMBER_SESSIONS);
ICameraProperties *iCameraDevice = interface_cast<ICameraProperties>(cameraDevices[0]);
std::vector<Argus::SensorMode*> sensorModes;
iCameraDevice->getAllSensorModes(&sensorModes);
// Note: sensor modes 0-3 of camera 0 are applied to the four streams below,
// which assumes all four cameras expose the same (at least four) modes.
ISensorMode *iMode1 = interface_cast<ISensorMode>(sensorModes[0]);
ISensorMode *iMode2 = interface_cast<ISensorMode>(sensorModes[1]);
ISensorMode *iMode3 = interface_cast<ISensorMode>(sensorModes[2]);
ISensorMode *iMode4 = interface_cast<ISensorMode>(sensorModes[3]);
UniqueObj<CaptureSession> captureSessions[NUMBER_SESSIONS];
for (uint32_t i = 0; i < NUMBER_SESSIONS; i++)
{
captureSessions[i] =
UniqueObj<CaptureSession>(iCameraProvider->createCaptureSession(cameraDevices[i]));
}
ICaptureSession *iCaptureSession1 = interface_cast<ICaptureSession>(captureSessions[0]);
ICaptureSession *iCaptureSession2 = interface_cast<ICaptureSession>(captureSessions[1]);
ICaptureSession *iCaptureSession3 = interface_cast<ICaptureSession>(captureSessions[2]);
ICaptureSession *iCaptureSession4 = interface_cast<ICaptureSession>(captureSessions[3]);
UniqueObj<OutputStreamSettings> streamSettings1(iCaptureSession1->createOutputStreamSettings(STREAM_TYPE_EGL));
IEGLOutputStreamSettings *iEGLStreamSettings1 = interface_cast<IEGLOutputStreamSettings>(streamSettings1);
iEGLStreamSettings1->setPixelFormat(PIXEL_FMT_YCbCr_420_888);
iEGLStreamSettings1->setResolution(iMode1->getResolution());
iEGLStreamSettings1->setMetadataEnable(true);
UniqueObj<OutputStreamSettings> streamSettings2(iCaptureSession2->createOutputStreamSettings(STREAM_TYPE_EGL));
IEGLOutputStreamSettings *iEGLStreamSettings2 = interface_cast<IEGLOutputStreamSettings>(streamSettings2);
iEGLStreamSettings2->setPixelFormat(PIXEL_FMT_YCbCr_420_888);
iEGLStreamSettings2->setResolution(iMode2->getResolution());
iEGLStreamSettings2->setMetadataEnable(true);
UniqueObj<OutputStreamSettings> streamSettings3(iCaptureSession3->createOutputStreamSettings(STREAM_TYPE_EGL));
IEGLOutputStreamSettings *iEGLStreamSettings3 = interface_cast<IEGLOutputStreamSettings>(streamSettings3);
iEGLStreamSettings3->setPixelFormat(PIXEL_FMT_YCbCr_420_888);
iEGLStreamSettings3->setResolution(iMode3->getResolution());
iEGLStreamSettings3->setMetadataEnable(true);
UniqueObj<OutputStreamSettings> streamSettings4(iCaptureSession4->createOutputStreamSettings(STREAM_TYPE_EGL));
IEGLOutputStreamSettings *iEGLStreamSettings4 = interface_cast<IEGLOutputStreamSettings>(streamSettings4);
iEGLStreamSettings4->setPixelFormat(PIXEL_FMT_YCbCr_420_888);
iEGLStreamSettings4->setResolution(iMode4->getResolution());
iEGLStreamSettings4->setMetadataEnable(true);
UniqueObj<OutputStream> m_outputStream1;
UniqueObj<OutputStream> m_outputStream2;
UniqueObj<OutputStream> m_outputStream3;
UniqueObj<OutputStream> m_outputStream4;
m_outputStream1.reset(iCaptureSession1->createOutputStream(streamSettings1.get()));
m_outputStream2.reset(iCaptureSession2->createOutputStream(streamSettings2.get()));
m_outputStream3.reset(iCaptureSession3->createOutputStream(streamSettings3.get()));
m_outputStream4.reset(iCaptureSession4->createOutputStream(streamSettings4.get()));
UniqueObj<Request> m_request1;
UniqueObj<Request> m_request2;
UniqueObj<Request> m_request3;
UniqueObj<Request> m_request4;
m_request1.reset(iCaptureSession1->createRequest());
m_request2.reset(iCaptureSession2->createRequest());
m_request3.reset(iCaptureSession3->createRequest());
m_request4.reset(iCaptureSession4->createRequest());
IRequest *iRequest1 = interface_cast<IRequest>(m_request1);
IRequest *iRequest2 = interface_cast<IRequest>(m_request2);
IRequest *iRequest3 = interface_cast<IRequest>(m_request3);
IRequest *iRequest4 = interface_cast<IRequest>(m_request4);
iRequest1->enableOutputStream(m_outputStream1.get());
iRequest2->enableOutputStream(m_outputStream2.get());
iRequest3->enableOutputStream(m_outputStream3.get());
iRequest4->enableOutputStream(m_outputStream4.get());
UniqueObj<FrameConsumer> consumer1(FrameConsumer::create(m_outputStream1.get()));
UniqueObj<FrameConsumer> consumer2(FrameConsumer::create(m_outputStream2.get()));
UniqueObj<FrameConsumer> consumer3(FrameConsumer::create(m_outputStream3.get()));
UniqueObj<FrameConsumer> consumer4(FrameConsumer::create(m_outputStream4.get()));
IFrameConsumer *iFrameConsumer1 = interface_cast<IFrameConsumer>(consumer1);
IFrameConsumer *iFrameConsumer2 = interface_cast<IFrameConsumer>(consumer2);
IFrameConsumer *iFrameConsumer3 = interface_cast<IFrameConsumer>(consumer3);
IFrameConsumer *iFrameConsumer4 = interface_cast<IFrameConsumer>(consumer4);
unsigned int value = 0;
// Configure the trigger pin (the sensor output is wired to this GPIO).
jetsonXavierGPIONumber trig = gpio351;
gpioExport(trig);
gpioSetDirection(trig, inputPin);
while (true)
{
gpioGetValue(trig, &value);
if (value == 1)
{
// Start a wall-clock timer for this grab.
steady_clock::time_point grabStart = steady_clock::now();
uint32_t requestId1 = iCaptureSession1->capture(m_request1.get());
uint32_t requestId2 = iCaptureSession2->capture(m_request2.get());
uint32_t requestId3 = iCaptureSession3->capture(m_request3.get());
uint32_t requestId4 = iCaptureSession4->capture(m_request4.get());
UniqueObj<Frame> frame1(iFrameConsumer1->acquireFrame(FIVE_SECONDS_IN_NANOSECONDS));
IFrame *iFrame1 = interface_cast< IFrame>(frame1);
Image *image1 = iFrame1->getImage();
IImageJPEG *iImageJPEG1 = interface_cast<IImageJPEG>(image1);
iImageJPEG1->writeJPEG("1.jpg");
UniqueObj<Frame> frame2(iFrameConsumer2->acquireFrame(FIVE_SECONDS_IN_NANOSECONDS));
IFrame *iFrame2 = interface_cast< IFrame>(frame2);
Image *image2 = iFrame2->getImage();
IImageJPEG *iImageJPEG2 = interface_cast<IImageJPEG>(image2);
iImageJPEG2->writeJPEG("2.jpg");
UniqueObj<Frame> frame3(iFrameConsumer3->acquireFrame(FIVE_SECONDS_IN_NANOSECONDS));
IFrame *iFrame3 = interface_cast< IFrame>(frame3);
Image *image3 = iFrame3->getImage();
IImageJPEG *iImageJPEG3 = interface_cast<IImageJPEG>(image3);
iImageJPEG3->writeJPEG("3.jpg");
UniqueObj<Frame> frame4(iFrameConsumer4->acquireFrame(FIVE_SECONDS_IN_NANOSECONDS));
IFrame *iFrame4 = interface_cast< IFrame>(frame4);
Image *image4 = iFrame4->getImage();
IImageJPEG *iImageJPEG4 = interface_cast<IImageJPEG>(image4);
iImageJPEG4->writeJPEG("4.jpg");
// Report the wall-clock time spent grabbing and saving the four frames.
steady_clock::time_point grabEnd = steady_clock::now();
double time_spent = duration_cast<duration<double>>(grabEnd - grabStart).count();
printf("Time taken for grabbing: %f s\n", time_spent);
}
usleep(1000); // poll the GPIO at ~1 kHz instead of busy-waiting
}
return true;
}
}; // namespace ArgusSamples
int main(int argc, const char *argv[])
{
if (!ArgusSamples::execute())
return EXIT_FAILURE;
return EXIT_SUCCESS;
}
The code runs successfully, grabbing images based on the trigger. But I highly doubt the synchronicity if the object is moving; I don’t know how close in time the frames will be.
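It should be possible to measure this rather than guess: the streams already call setMetadataEnable(true), so each acquired frame exposes the Argus capture metadata, including the sensor timestamp in nanoseconds. A sketch (getSensorTimestampNs is a hypothetical helper; it assumes the frame objects from the listing above):

#include <EGLStream/ArgusCaptureMetadata.h>

// Hypothetical helper: return the sensor timestamp (in ns) of an acquired
// frame, or 0 if the capture metadata is unavailable.
static uint64_t getSensorTimestampNs(EGLStream::Frame *frame)
{
    EGLStream::IArgusCaptureMetadata *iArgusMeta =
        Argus::interface_cast<EGLStream::IArgusCaptureMetadata>(frame);
    if (!iArgusMeta)
        return 0;
    Argus::ICaptureMetadata *iMeta =
        Argus::interface_cast<Argus::ICaptureMetadata>(iArgusMeta->getMetadata());
    return iMeta ? iMeta->getSensorTimestamp() : 0;
}

// Usage after the four acquireFrame() calls: the spread between the largest
// and smallest timestamps is the inter-camera skew for that trigger.
//   uint64_t t1 = getSensorTimestampNs(frame1.get());
//   uint64_t t2 = getSensorTimestampNs(frame2.get());
//   printf("cam1 -> cam2 skew: %lld us\n", (long long)((t2 - t1) / 1000));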
It’s hard to say how close the frames will be. Are you setting the GPIO from Linux userspace? Is it a single GPIO that is tied together at the hardware level, or 4 independent GPIO signals?
When we want to do precise triggering, we use an FPGA on our board to guarantee timing and send the trigger signals to the cameras at the hardware level.
It is a single GPIO signal that triggers all the cameras. Right now, I have a sensor that produces a high signal on detecting a corrugated board. The sensor's output signal is connected as an input to a GPIO pin on the Jetson. I read the signal in the code and start the capture requests. So it is more or less a software trigger.
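One caveat about the level-polling loop in the listing: the requirement was a single grab per rising edge, but checking value == 1 each iteration will fire repeatedly while the sensor output stays high. A minimal sketch of edge detection on top of the jetsonGPIO calls already used (the body is where the four capture() and acquireFrame() calls would go):

// Trigger only on the 0 -> 1 transition of the GPIO line, so a long
// sensor pulse produces exactly one grab.
unsigned int value = 0;
unsigned int lastValue = 0;
while (true)
{
    gpioGetValue(trig, &value);
    if (value == 1 && lastValue == 0)
    {
        // Rising edge: issue one capture per session, then acquire and
        // save the four frames as in the listing above.
    }
    lastValue = value;
    usleep(1000); // ~1 kHz polling bounds CPU use and crudely debounces
}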
When we want to do precise triggering, we use an FPGA on our board to guarantee timing and send the trigger signals to the cameras at the hardware level.
Can you send me links and more information about these FPGA boards for precise triggering? I’m more interested in precise triggering, as normal industrial cameras come with a trigger unit attached and there is no latency problem.