Here is a binary and the code to reproduce the described behavior with my camera:
CCMTest (435.3 KB)
#include <stdio.h>
#include <stdlib.h>
#include <fstream>
#include <iostream>
#include <string>
#include <vector>

#include <Argus/Argus.h>
#include <EGLStream/EGLStream.h>
#include <EGLStream/NV/ImageNativeBuffer.h>
#include <NvJpegEncoder.h>
#include <nvbuf_utils.h> // NvBufferColorFormat / NvBufferDestroy

using namespace Argus;
using namespace EGLStream;
// --------------- Config: ---------------
BayerTuple<float> AWBGains( 2.13667f, 1.0f, 1.00099f, 1.59856f);
BayerTuple<float> OpticalBlack( 0.054814453125f, 0.059814453125f, 0.058837890625f, 0.053837890625f);
uint64_t ExposureTime = 13672861;
Range<float> GainRange(0.0f, 1.0f);
// ---------------------------------------
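// The manual settings above (white-balance gains, optical-black levels, exposure time,
// gain range) are applied identically to every capture request below.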
#define LOG_ERROR(_file, _func, _line, _str, ...) \
    do { \
        fprintf(stderr, "Error generated. %s, %s:%d ", _file, _func, _line); \
        fprintf(stderr, _str, ##__VA_ARGS__); \
        fprintf(stderr, "\n"); \
    } while (0)

#define ORIGINATE_ERROR(_str, ...) \
    do { \
        LOG_ERROR(__FILE__, __FUNCTION__, __LINE__, (_str), ##__VA_ARGS__); \
        return false; \
    } while (0)
const char* getArgusStatusMessage(Argus::Status status)
{
    switch (status) {
        case STATUS_OK:               return "OK";
        case STATUS_INVALID_PARAMS:   return "Invalid Params";
        case STATUS_INVALID_SETTINGS: return "Invalid Settings";
        case STATUS_UNAVAILABLE:      return "Unavailable";
        case STATUS_OUT_OF_MEMORY:    return "Out of memory";
        case STATUS_UNIMPLEMENTED:    return "Unimplemented";
        case STATUS_TIMEOUT:          return "Timeout";
        case STATUS_CANCELLED:        return "Cancelled";
        case STATUS_DISCONNECTED:     return "Disconnected";
        case STATUS_END_OF_STREAM:    return "End of stream";
        default:                      return "Unknown";
    }
}
bool captureFrame(ICameraProvider* iCameraProvider, CameraDevice* cam, std::string filename, std::vector<float>& ccm)
{
    Argus::Status a_status;
    UniqueObj<CaptureSession> captureSession;
    ICaptureSession* iCaptureSession;
    UniqueObj<Request> request;
    IRequest* iRequest;

    //---------- Create Capture Session ----------//
    std::cout << "Create Capture Session ..." << std::endl;
    captureSession.reset(iCameraProvider->createCaptureSession(cam, &a_status));
    if (a_status != STATUS_OK) ORIGINATE_ERROR("Can not create capture session: %s", getArgusStatusMessage(a_status));
    iCaptureSession = interface_cast<ICaptureSession>(captureSession);

    //---------- Create Capture Request ----------//
    std::cout << "Create Capture Request ..." << std::endl;
    request.reset(iCaptureSession->createRequest(CAPTURE_INTENT_MANUAL, &a_status));
    if (a_status != STATUS_OK) ORIGINATE_ERROR("Can not create capture request: %s", getArgusStatusMessage(a_status));
    iRequest = interface_cast<IRequest>(request);

    ISourceSettings* iSourceSettings(interface_cast<ISourceSettings>(iRequest->getSourceSettings()));
    ISensorMode* iSensorMode(interface_cast<ISensorMode>(iSourceSettings->getSensorMode()));
    Size2D<uint32_t> cameraResolution = iSensorMode->getResolution();
std::cout << "Create Stream Settings ..." << std::endl;
UniqueObj<OutputStreamSettings> streamSettings;
streamSettings.reset(iCaptureSession->createOutputStreamSettings(STREAM_TYPE_EGL, &a_status));
if(a_status != STATUS_OK){
ORIGINATE_ERROR("Can not create egl output stream settings: %s", getArgusStatusMessage(a_status));
}
IOutputStreamSettings* iStreamSettings(interface_cast<IOutputStreamSettings>(streamSettings));
iStreamSettings->setCameraDevice(cam);
IEGLOutputStreamSettings* iEGLOutputStreamSettings(interface_cast<IEGLOutputStreamSettings>(streamSettings));
iEGLOutputStreamSettings->setEGLDisplay(eglGetDisplay(EGL_DEFAULT_DISPLAY));
iEGLOutputStreamSettings->setMode(EGL_STREAM_MODE_MAILBOX);
iEGLOutputStreamSettings->setPixelFormat(PIXEL_FMT_YCbCr_420_888);
iEGLOutputStreamSettings->setResolution(cameraResolution);
UniqueObj<OutputStream> outputstream;
std::cout << "Create Output Stream ..." << std::endl;
outputstream.reset(iCaptureSession->createOutputStream(streamSettings.get(), &a_status));
if(a_status != STATUS_OK){
ORIGINATE_ERROR("Can not create buffer output stream: %s", getArgusStatusMessage(a_status));
}
    //---------- Apply Settings ----------//
    IAutoControlSettings* iAutoControlSettings(interface_cast<IAutoControlSettings>(iRequest->getAutoControlSettings()));
    iAutoControlSettings->setAwbMode(AWB_MODE_MANUAL);
    iAutoControlSettings->setWbGains(AWBGains);
    iAutoControlSettings->setIspDigitalGainRange(Range<float>(1.0f, 1.0f));
    iAutoControlSettings->setColorCorrectionMatrixEnable(true);
    iAutoControlSettings->setColorCorrectionMatrix(ccm);
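    // The CCM is passed as a flat vector of 9 floats forming a 3x3 matrix
    // (row-major ordering is assumed here).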
    iSourceSettings->setOpticalBlackEnable(true);
    iSourceSettings->setOpticalBlack(OpticalBlack);
    iSourceSettings->setExposureTimeRange(Range<uint64_t>(ExposureTime, ExposureTime));
    iSourceSettings->setGainRange(GainRange);
    //---------- Enable Output Stream ----------//
    std::cout << "Enable Output Stream ..." << std::endl;
    a_status = iRequest->enableOutputStream(outputstream.get());
    if (a_status != STATUS_OK) {
        ORIGINATE_ERROR("Failed to enable output stream: %s", getArgusStatusMessage(a_status));
    }

    //---------- Create JPEG Encoder ----------//
    std::cout << "Create JPEG Encoder ..." << std::endl;
    uint32_t buffer_size = cameraResolution.area();
    unsigned char* m_OutputBuffer(new unsigned char[buffer_size]);
    if (!m_OutputBuffer) ORIGINATE_ERROR("Failed to create output buffer for JPEG encoder.");
    NvJPEGEncoder* m_JpegEncoder(NvJPEGEncoder::createJPEGEncoder("jpegenc0"));
    if (!m_JpegEncoder) ORIGINATE_ERROR("Failed to create JPEGEncoder.");
    //---------- Capture one frame ----------//
    int dma_fd = -1;
    std::cout << "Create Frame Consumer ..." << std::endl;
    UniqueObj<FrameConsumer> frameConsumer(FrameConsumer::create(outputstream.get()));
    IFrameConsumer* iFrameConsumer(interface_cast<IFrameConsumer>(frameConsumer.get()));

    std::cout << "Start Capture ..." << std::endl;
    iCaptureSession->capture(request.get(), TIMEOUT_INFINITE);

    std::cout << "Acquire Frame ..." << std::endl;
    UniqueObj<Frame> frame(iFrameConsumer->acquireFrame(TIMEOUT_INFINITE, &a_status));
    IFrame* iFrame(interface_cast<IFrame>(frame));
    if (a_status == STATUS_TIMEOUT) {
        ORIGINATE_ERROR("Timeout while acquiring frame");
    }
    if (a_status != STATUS_OK) {
        ORIGINATE_ERROR("Error while acquiring frame, Status: %s", getArgusStatusMessage(a_status));
    }

    NV::IImageNativeBuffer* iNativeBuffer = interface_cast<NV::IImageNativeBuffer>(iFrame->getImage());
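    // Only a single frame is captured per session, so dma_fd is still -1 and a new
    // NvBuffer is created; the copyToNvBuffer() branch would only run for subsequent frames.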
    if (dma_fd == -1)
    {
        dma_fd = iNativeBuffer->createNvBuffer(cameraResolution,
                                               NvBufferColorFormat_NV12,
                                               NvBufferLayout_BlockLinear,
                                               NV::ROTATION_0);
        if (dma_fd == -1) {
            ORIGINATE_ERROR("Failed to create NvBuffer");
        }
    }
    else if (iNativeBuffer->copyToNvBuffer(dma_fd, NV::ROTATION_0) != STATUS_OK)
    {
        ORIGINATE_ERROR("Failed to copy frame to NvBuffer.");
    }
std::cout << "Save JPEG ..." << std::endl;
std::ofstream *outputFile = new std::ofstream(filename);
if (outputFile)
{
unsigned long size = buffer_size;
unsigned char *buffer = m_OutputBuffer;
m_JpegEncoder->encodeFromFd(dma_fd, JCS_YCbCr, &buffer, size, 75);
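        // `size` is passed by reference and updated to the encoded JPEG length;
        // the encoder may reallocate `buffer` if the pre-allocated output buffer is too small.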
        outputFile.write((char*)buffer, size);
        std::cout << "Saved Capture to: " << filename << std::endl;
    }
    else
    {
        LOG_ERROR(__FILE__, __FUNCTION__, __LINE__, "Failed to open output file %s", filename.c_str());
    }

    std::cout << "Cleanup ..." << std::endl;
    delete m_JpegEncoder;
    delete[] m_OutputBuffer;
    if (dma_fd != -1)
        NvBufferDestroy(dma_fd);
    frameConsumer.reset();
    outputstream.reset();
    streamSettings.reset();
    request.reset();
    captureSession.reset();

    return true;
}
int main(int argc, char* argv[])
{
    Argus::Status a_status;

    std::cout << "Create Camera Provider ..." << std::endl;
    UniqueObj<CameraProvider> cameraProvider;
    cameraProvider.reset(CameraProvider::create(&a_status));
    if (a_status != STATUS_OK) {
        LOG_ERROR(__FILE__, __FUNCTION__, __LINE__, "Can not create camera provider: %s", getArgusStatusMessage(a_status));
        return EXIT_FAILURE;
    }

    std::vector<CameraDevice*> cameraDevices;
    ICameraProvider* iCameraProvider(interface_cast<ICameraProvider>(cameraProvider));
    iCameraProvider->getCameraDevices(&cameraDevices);
    if (cameraDevices.empty()) {
        LOG_ERROR(__FILE__, __FUNCTION__, __LINE__, "No camera found");
        return EXIT_FAILURE;
    }
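    // Capture a series of frames: an identity CCM first, then matrices with the first
    // row zeroed and the centre (green) coefficient varied, to compare the resulting images.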
    std::vector<float> ccm1({1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0});
    captureFrame(iCameraProvider, cameraDevices[0], "test_100_010_001.jpg", ccm1);

    std::vector<float> ccm2({0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0});
    captureFrame(iCameraProvider, cameraDevices[0], "test_000_010_001.jpg", ccm2);

    std::vector<float> ccm3({0.0, 0.0, 0.0, 0.0, 0.7, 0.0, 0.0, 0.0, 1.0});
    captureFrame(iCameraProvider, cameraDevices[0], "test_000_0070_001.jpg", ccm3);

    std::vector<float> ccm4({0.0, 0.0, 0.0, 0.0, 0.6, 0.0, 0.0, 0.0, 1.0});
    captureFrame(iCameraProvider, cameraDevices[0], "test_000_0060_001.jpg", ccm4);

    std::vector<float> ccm5({0.0, 0.0, 0.0, 0.0, 0.65809, 0.0, 0.0, 0.0, 1.0});
    captureFrame(iCameraProvider, cameraDevices[0], "test_000_00658090_001.jpg", ccm5);

    cameraProvider.reset();
    return EXIT_SUCCESS;
}