Hallo,
I have a small program using the Argus library to test the queueing of capture requests. I have four different Requests which only differ in the used AnalogGain. These Requests are submitted three times in the same order. The gain values reported in the CaptureMetadata are the same as in the submitted requests. But the generated images show a different behaviour. Here it seems like the first capture request was dropped and the last was duplicated.
Why do I get the wrong Metadata for the generated image and why is the first configuration seemingly dropped?
My Setup:
Camera: IMX226
Platform: Jetson XavierNX Development Board
L4T: 32.7.2
Jetpack: 4.6.2
Here is my code:
I use the nlohmann json library to export the capture metadata: GitHub - nlohmann/json: JSON for Modern C++
#include <iostream>
#include <fstream>
#include <cinttypes>
#include <string>
#include <Argus/Argus.h>
#include <EGLStream/EGLStream.h>
#include <EGLStream/NV/ImageNativeBuffer.h>
#include <NvJpegEncoder.h>
#include "json.hpp"
#define REPEATS 3
using namespace Argus;
#define NAME_OF( v ) #v
#define CHECK_ARGUS(status, msg) if(status != STATUS_OK){\
std::cout << msg << " failed: " << getArgusStatusMessage(status) << std::endl;\
return -1;\
}else{\
std::cout << msg << " success." << std::endl;\
}
#define CHECK_INTERFACE(interface) if(!interface){ \
std::cout << "Interface cast for " << NAME_OF(interface) << " failed." << std::endl;\
return -1;\
}else{\
std::cout << "Interface cast for " << NAME_OF(interface) << " success." << std::endl;\
}
const char* getArgusStatusMessage(Argus::Status& status){
switch (status) {
case STATUS_OK:
return "OK";
break;
case STATUS_INVALID_PARAMS:
return "Invalid Params";
break;
case STATUS_INVALID_SETTINGS:
return "Invalid Setting";
break;
case STATUS_UNAVAILABLE:
return "Unavailable";
break;
case STATUS_OUT_OF_MEMORY:
return "Out of memory";
break;
case STATUS_UNIMPLEMENTED:
return "Unimplemented";
break;
case STATUS_TIMEOUT:
return "Timeout";
break;
case STATUS_CANCELLED:
return "Cancelled";
break;
case STATUS_DISCONNECTED:
return "Disconnected";
break;
case STATUS_END_OF_STREAM:
return "End of stream";
break;
default:
return "Unknown";
break;
}
}
int main(int argc, char *argv[])
{
setenv("enableCamInfiniteTimeout","1", 1);
std::cout << "Set enableCamInfiniteTimeout to 1." << std::endl;
Argus::Status a_status;
UniqueObj<CameraProvider> unqCameraProvider(CameraProvider::create(&a_status));
CHECK_ARGUS(a_status, "Create CameraProvider");
ICameraProvider* iCameraProvider = interface_cast<ICameraProvider>(unqCameraProvider);
CHECK_INTERFACE(iCameraProvider);
std::vector<CameraDevice*> cameraDevices;
iCameraProvider->getCameraDevices(&cameraDevices);
if(cameraDevices.size() <= 0){
std::cout << "No Camera found." << std::endl;
return -1;
}
size_t camCount = cameraDevices.size();
UniqueObj<CaptureSession> unqCaptureSession(iCameraProvider->createCaptureSession(cameraDevices.at(0), &a_status));
CHECK_ARGUS(a_status ,"Create capture session");
ICaptureSession* iCaptureSession = interface_cast<ICaptureSession>(unqCaptureSession);
CHECK_INTERFACE(iCaptureSession);
//---------- Create Streams ----------//
UniqueObj<OutputStream> unqStream;
UniqueObj<EGLStream::FrameConsumer> unqFrameConsumer;
UniqueObj<OutputStreamSettings> unqOutputStreamSettings(iCaptureSession->createOutputStreamSettings(STREAM_TYPE_EGL, &a_status));
CHECK_ARGUS(a_status, "Create OutputStreamSettings")
IOutputStreamSettings* iOutputStreamSettings = interface_cast<IOutputStreamSettings>(unqOutputStreamSettings);
CHECK_INTERFACE(iOutputStreamSettings)
IEGLOutputStreamSettings* iEGLOutputStreamSettings(interface_cast<IEGLOutputStreamSettings>(unqOutputStreamSettings));
CHECK_INTERFACE(iEGLOutputStreamSettings)
iOutputStreamSettings->setCameraDevice(cameraDevices.at(0));
Size2D<uint32_t> resolution(3904, 3000);
iEGLOutputStreamSettings->setMode(EGL_STREAM_MODE_FIFO);
iEGLOutputStreamSettings->setFifoLength(20);
iEGLOutputStreamSettings->setPixelFormat(PIXEL_FMT_YCbCr_420_888);
iEGLOutputStreamSettings->setResolution(resolution);
iEGLOutputStreamSettings->setMetadataEnable(true);
unqStream.reset(iCaptureSession->createOutputStream(unqOutputStreamSettings.get(), &a_status));
CHECK_ARGUS(a_status, "Create OutputStream")
unqFrameConsumer.reset(EGLStream::FrameConsumer::create(unqStream.get(), &a_status));
CHECK_ARGUS(a_status, "Create FrameConsumer")
//---------- Create Capture Request ----------//
std::vector<UniqueObj<Request>*> unqRequests;
for (int id = 0; id < 4; id++){
UniqueObj<Request>* unqRequest = new UniqueObj<Request>(iCaptureSession->createRequest(CAPTURE_INTENT_MANUAL, &a_status));
CHECK_ARGUS(a_status, "Create Request")
unqRequests.push_back(unqRequest);
IRequest* iRequest(interface_cast<IRequest>(*unqRequest));
CHECK_INTERFACE(iRequest)
IDenoiseSettings* iDenoise(interface_cast<IDenoiseSettings>(*unqRequest));
CHECK_INTERFACE(iDenoise)
IEdgeEnhanceSettings* iEdge(interface_cast<IEdgeEnhanceSettings>(*unqRequest));
CHECK_INTERFACE(iEdge)
IAutoControlSettings* iAuto(interface_cast<IAutoControlSettings>(iRequest->getAutoControlSettings()));
CHECK_INTERFACE(iAuto)
ISourceSettings* iSource(interface_cast<ISourceSettings>(iRequest->getSourceSettings()));
CHECK_INTERFACE(iSource)
iDenoise->setDenoiseMode(DENOISE_MODE_OFF);
iEdge->setEdgeEnhanceMode(EDGE_ENHANCE_MODE_OFF);
iAuto->setAwbMode(AWB_MODE_AUTO);
iAuto->setIspDigitalGainRange({1.0f, 1.0f});
iAuto->setColorSaturationEnable(false);
iAuto->setColorCorrectionMatrixEnable(false);
iAuto->setToneMapCurveEnable(false);
iAuto->setAeAntibandingMode(AE_ANTIBANDING_MODE_OFF);
iAuto->setAeLock(True);
iAuto->setAwbLock(True);
iAuto->setExposureCompensation(0.0f);
iAuto->setToneMapCurveEnable(false),
iSource->setExposureTimeRange({5000000, 5000000});
iSource->setOpticalBlackEnable(false);
switch (id)
{
case 0: iSource->setGainRange({0.0f, 0.0f}); break;
case 1: iSource->setGainRange({7.0f, 7.0f}); break;
case 2: iSource->setGainRange({14.0f, 14.0f}); break;
case 3: iSource->setGainRange({27.0f, 27.0f}); break;
default:
break;
}
a_status = iRequest->enableOutputStream(unqStream.get());
CHECK_ARGUS(a_status, "Enable Stream")
}
//---------- Queue Request ----------//
for(int i = 0; i < REPEATS; i++){
for(int r = 0; r < unqRequests.size(); r++){
iCaptureSession->capture(unqRequests.at(r)->get(), 1000, &a_status);
CHECK_ARGUS(a_status, "Submit capture")
}
}
a_status = iCaptureSession->waitForIdle();
CHECK_ARGUS(a_status, "Capture wait for idle")
//---------- Consume Frames ----------//
NvJPEGEncoder* encoder = NvJPEGEncoder::createJPEGEncoder("Encoder");
CHECK_INTERFACE(encoder)
constexpr size_t buffer_size = 3904*3000;
static unsigned char encoderOutputBuffer[buffer_size];
IEGLOutputStream* iEGLOutputStream = interface_cast<IEGLOutputStream>(unqStream.get());
CHECK_INTERFACE(iEGLOutputStream);
a_status = iEGLOutputStream->waitUntilConnected();
CHECK_ARGUS(a_status, "Connect EGLStream");
EGLStream::IFrameConsumer* iFrameConsumer = interface_cast<EGLStream::IFrameConsumer>(unqFrameConsumer);
CHECK_INTERFACE(iFrameConsumer);
int fd = -1;
for(int i = 0; i < REPEATS; i++){
for(int id = 0; id < 4; id++){
UniqueObj<EGLStream::Frame> frame(iFrameConsumer->acquireFrame(1000, &a_status));
CHECK_ARGUS(a_status, "Acquire frame")
EGLStream::IFrame* iFrame = interface_cast<EGLStream::IFrame>(frame);
CHECK_INTERFACE(iFrame)
EGLStream::NV::IImageNativeBuffer *iNativeBuffer = interface_cast<EGLStream::NV::IImageNativeBuffer>(iFrame->getImage());
CHECK_INTERFACE(iNativeBuffer);
uint64_t frameNr = iFrame->getNumber();
EGLStream::IArgusCaptureMetadata* iArgusCaptureMetadata = interface_cast<EGLStream::IArgusCaptureMetadata>(frame);
CHECK_INTERFACE(iArgusCaptureMetadata);
CaptureMetadata *captureMetadata = iArgusCaptureMetadata->getMetadata();
CHECK_INTERFACE(captureMetadata);
ICaptureMetadata* iCaptureMetadata = interface_cast<ICaptureMetadata>(captureMetadata);
CHECK_INTERFACE(iCaptureMetadata)
if (fd == -1)
{
fd = iNativeBuffer->createNvBuffer(resolution,
NvBufferColorFormat_NV12,
NvBufferLayout_BlockLinear,
EGLStream::NV::ROTATION_0);
if (fd == -1){
std::cout << "\tFailed to create NvBuffer" << std::endl;
return -1;
}
}
else
{
a_status = iNativeBuffer->copyToNvBuffer(fd, EGLStream::NV::ROTATION_0);
CHECK_ARGUS(a_status, "Copy to NvBuffer" )
}
std::string filename = std::to_string(i) + "-id_" + std::to_string(id) + "-FrameNr_" + std::to_string(frameNr) + ".jpg";
std::cout << "Open Image: " << filename << std::endl;
std::ofstream *outputFile = new std::ofstream(filename);
if (outputFile)
{
unsigned long size = buffer_size;
unsigned char *buffer = encoderOutputBuffer;
if(encoder->encodeFromFd(fd, JCS_YCbCr, &buffer, size, 75) == 0){
outputFile->write((char *)buffer, size);
outputFile->close();
delete outputFile;
std::cout << "Write Image success."<< std::endl;
}else{
std::cout << "Write Image failed."<< std::endl;
return -1;
}
}else{
std::cout << "Can not open output file." << std::endl;
return -1;
}
std::cout << "Write Metadata: " << filename << std::endl;
nlohmann::json metadata;
metadata["FrameDuration"] = iCaptureMetadata->getFrameDuration();
metadata["ISPGain"] = iCaptureMetadata->getIspDigitalGain();
metadata["SceneLux"] = iCaptureMetadata->getSceneLux();
metadata["AWBGains"] = nullptr;
auto awbGains = iCaptureMetadata->getAwbGains();
metadata["AWBGains"]["R"] = awbGains.r();
metadata["AWBGains"]["G_Even"] = awbGains.gEven();
metadata["AWBGains"]["G_Odd"] = awbGains.gOdd();
metadata["AWBGains"]["B"] = awbGains.b();
metadata["Sensor"] = nullptr;
metadata["Sensor"]["AnalogeGain"] = iCaptureMetadata->getSensorAnalogGain();
metadata["Sensor"]["ExposureTime"] = iCaptureMetadata->getSensorExposureTime();
metadata["Sensor"]["ExposureTime"] = iCaptureMetadata->getSensorExposureTime();
metadata["Sensor"]["Sensitivity"] = iCaptureMetadata->getSensorSensitivity();
metadata["Sensor"]["TimeStamp"] = iCaptureMetadata->getSensorTimestamp();
std::string filenameJson = "meta_" + filename + ".json";
outputFile = new std::ofstream(filenameJson);
if (outputFile)
{
*outputFile << metadata.dump(4);
outputFile->close();
delete outputFile;
std::cout << "Write Metadata success." << std::endl;
}else{
std::cout << "Can not open json output file." << std::endl;
return -1;
}
frame.reset();
}
}
if(fd){
NvBufferDestroy(fd);
}
//---------- Clean Up ----------//
delete encoder;
for(auto unq:unqRequests){
unq->reset();
}
unqFrameConsumer.reset();
unqStream.reset();
unqCaptureSession.reset();
unqCameraProvider.reset();
return 0;
}
The generated Images:
First batch of the requests:
Second batch of the requests:
Third batch of the requests: