/*M///////////////////////////////////////////////////////////////////////////////////////
//
//  @author  Viktor V. Smirnov
//  @version 1.0 01/03/20
//  @e-mail  niht@mail.com
//
//M*/

#include "../ifc/Camera.hpp"

namespace hc {

// Destructor. EGL is never initialized in this build (the eglGetDisplay()/
// eglInitialize() calls in startCapture() are commented out), so there is
// nothing to tear down here.
Camera::~Camera()
{
    // eglTerminate( m_eglDisplay );
}

// Brings up the whole Argus capture pipeline:
//   1. restarts the nvargus daemon to get a clean Argus state,
//   2. creates the CameraProvider and enumerates camera devices,
//   3. creates either one shared CaptureSession over all cameras
//      (m_singleSessionMode) or one session per camera,
//   4. registers event queues (CAPTURE_COMPLETE / ERROR / CAPTURE_STARTED),
//   5. creates one EGL output stream + FrameConsumer per camera,
//   6. builds the capture request(s) and applies sensor settings
//      via configureSensor(),
//   7. unless m_triggerMode is set, starts a repeating capture and spawns
//      the captureExecute() worker thread.
//
// @return true on success. Failure paths go through the REPORT_ERROR /
//         ORIGINATE_ERROR macros (NOTE(review): assumed to log and, for
//         ORIGINATE_ERROR, return false as in the Argus samples — confirm
//         against their definitions in the project headers).
bool Camera::startCapture()
{
    // if( m_debug ) utils::deviceQueryDrv();
    std::cout << "Start camera..." << std::endl;
    // Restart the daemon so each run starts from a known Argus state.
    if( system( "sudo systemctl restart nvargus-daemon.service" ) != 0 )
    {
        std::cout << "Restart argus failed!" << std::endl;
    }
    sleep( 2 ); // give the restarted daemon time to come up

    // m_eglDisplay = eglGetDisplay( EGL_DEFAULT_DISPLAY );
    // if( m_eglDisplay == EGL_NO_DISPLAY ) REPORT_ERROR( "Cannot get EGL display!" );
    // eglInitialize( m_eglDisplay, nullptr, nullptr );

    // Initialize the Argus camera provider.
    Argus::Status status;
    m_cameraProvider = Argus::UniqueObj< Argus::CameraProvider >( Argus::CameraProvider::create( & status ) );
    std::cout << "CameraProvider create status: " << status << std::endl;
    if( status != 0 ) REPORT_ERROR( "Failed to create CameraProvider!" );
    Argus::ICameraProvider * iCameraProvider{ Argus::interface_cast< Argus::ICameraProvider >( m_cameraProvider ) };
    if( not iCameraProvider ) REPORT_ERROR( "Failed to create iCameraProvider!" );
    std::cout << "Argus Version: " << iCameraProvider->getVersion() << std::endl;

    // Get the camera devices.
    iCameraProvider->getCameraDevices( & m_cameraDevices );
    if( m_cameraDevices.size() < m_camerasNum ) REPORT_ERROR( std::string{ "Not enough cameras! [" + std::to_string( m_cameraDevices.size() ) + "]" }.c_str() );
    std::cout << "Cameras: " << m_cameraDevices.size() << " " << m_camerasNum << std::endl;
    // for( size_t cameraId{}; cameraId < m_camerasNum; ++ cameraId )
    //     m_cameraDevices.emplace_back( ArgusSamples::ArgusHelpers::getCameraDevice( m_cameraProvider.get(), cameraId ) );
    if( m_debug ) ArgusSamples::ArgusHelpers::printCameraDeviceInfo( m_cameraDevices.at( 0 ), nullptr );

    // Sensor mode 0 of camera 0 defines the resolution used for every stream.
    m_sensorMode = ArgusSamples::ArgusHelpers::getSensorMode( m_cameraDevices.at( 0 ), 0 );
    m_iSensorMode = Argus::interface_cast< Argus::ISensorMode >( m_sensorMode );
    if( not m_iSensorMode ) REPORT_ERROR( "Failed to get sensor mode interface!" );
    if( m_debug ) std::cout << "Resolution: " << m_iSensorMode->getResolution().width() << "x" << m_iSensorMode->getResolution().height() << std::endl;
    if( m_debug ) ArgusSamples::ArgusHelpers::printSensorModeInfo(m_sensorMode, " ");

    // Collect the devices that will take part in the session(s).
    std::vector < Argus::CameraDevice * > cameras;
    for( size_t cameraId{}; cameraId < m_camerasNum; ++ cameraId ) cameras.push_back( m_cameraDevices.at( cameraId ) ); // (the 1st camera will be used for AC)

    // Create either one multi-device session or one session per camera.
    if( m_singleSessionMode )
    {
        m_captureSession = Argus::UniqueObj< Argus::CaptureSession >{ iCameraProvider->createCaptureSession( cameras, & status ) }; // cameras
        std::cout << "CameraProvider create CaptureSession status: " << status << std::endl;
        if( status != 0 ) REPORT_ERROR( "Failed to create CaptureSession!" );
        m_iCaptureSession = Argus::interface_cast< Argus::ICaptureSession >( m_captureSession );
        if( not m_iCaptureSession ) REPORT_ERROR( "Failed to get ICaptureSession interface!" );
    }
    else
    {
        for( size_t cameraId{}; cameraId < m_camerasNum; ++ cameraId )
        {
            m_captureSessions.emplace_back( Argus::UniqueObj< Argus::CaptureSession >{ iCameraProvider->createCaptureSession( cameras.at( cameraId ), & status ) } ); // cameras
            std::cout << "CameraProvider create CaptureSession status: " << status << std::endl;
            if( status != 0 ) REPORT_ERROR( "Failed to create CaptureSession!" );
            m_iCaptureSessions.emplace_back( Argus::interface_cast< Argus::ICaptureSession >( m_captureSessions.at( cameraId ) ) );
            if( not m_iCaptureSessions.at( cameraId ) ) REPORT_ERROR( "Failed to get ICaptureSession interface!" );
        }
    }

    // events
    m_eventTypes.emplace_back( Argus::EVENT_TYPE_CAPTURE_COMPLETE );
    m_eventTypes.emplace_back( Argus::EVENT_TYPE_ERROR );
    /* Seems there is bug in Argus, which drops EVENT_TYPE_ERROR if all 3 events are not set. Set it for now */
    m_eventTypes.emplace_back( Argus::EVENT_TYPE_CAPTURE_STARTED );
    if( m_singleSessionMode )
    {
        m_iEventProvider = Argus::interface_cast< Argus::IEventProvider >( m_captureSession );
        if( not m_iEventProvider ) REPORT_ERROR( "iEventProvider is NULL!" );
        m_queue = Argus::UniqueObj< Argus::EventQueue >{ m_iEventProvider->createEventQueue( m_eventTypes ) };
        m_iQueue = Argus::interface_cast< Argus::IEventQueue >( m_queue );
        if( not m_iQueue ) REPORT_ERROR( "Event IQueue interface is NULL!" );
    }
    else
    {
        for( size_t cameraId{}; cameraId < m_camerasNum; ++ cameraId )
        {
            // One event provider/queue per session.
            m_iEventProviders.emplace_back( Argus::interface_cast< Argus::IEventProvider >( m_captureSessions.at( cameraId ) ) );
            if( not m_iEventProviders.at( cameraId ) ) REPORT_ERROR( "iEventProvider is NULL!" );
            m_queues.emplace_back( Argus::UniqueObj< Argus::EventQueue >{ m_iEventProviders.at( cameraId )->createEventQueue( m_eventTypes ) } );
            m_iQueues.emplace_back( Argus::interface_cast< Argus::IEventQueue >( m_queues.at( cameraId ) ) );
            if( not m_iQueues.at( cameraId ) ) REPORT_ERROR( "Event IQueue interface is NULL!" );
        }
    }

    // Create the EGL output streams. In trigger mode a MAILBOX stream keeps
    // only the newest frame; otherwise a deep FIFO (100) buffers frames for
    // the consumer thread.
    if( m_singleSessionMode )
    {
        Argus::UniqueObj< Argus::OutputStreamSettings > outputStreamSettings{ m_iCaptureSession->createOutputStreamSettings( Argus::STREAM_TYPE_EGL ) };
        Argus::IOutputStreamSettings * iOutputStreamSettings{ Argus::interface_cast< Argus::IOutputStreamSettings >( outputStreamSettings ) };
        if( not iOutputStreamSettings ) std::cout << "Failed to create iOutputStreamSettings!" << std::endl;
        // Create the IEGLOutputStreamSettings settings.
        Argus::IEGLOutputStreamSettings * iEGLOutputStreamSettings{ Argus::interface_cast< Argus::IEGLOutputStreamSettings >( outputStreamSettings ) };
        if( not iEGLOutputStreamSettings ) std::cout << "Failed to create iEGLOutputStreamSettings!" << std::endl;
        iEGLOutputStreamSettings->setPixelFormat( Argus::PIXEL_FMT_YCbCr_420_888 );
        // iEGLOutputStreamSettings->setEGLDisplay( m_eglDisplay );
        iEGLOutputStreamSettings->setResolution( m_iSensorMode->getResolution() );
        iEGLOutputStreamSettings->setMetadataEnable( true );
        if( m_triggerMode ) iEGLOutputStreamSettings->setMode( Argus::EGL_STREAM_MODE_MAILBOX );
        else
        {
            iEGLOutputStreamSettings->setMode( Argus::EGL_STREAM_MODE_FIFO );
            iEGLOutputStreamSettings->setFifoLength( 100 );
        }
        // Create egl streams (settings object reused, only the device changes).
        for( size_t cameraId{}; cameraId < m_camerasNum; ++ cameraId )
        {
            iOutputStreamSettings->setCameraDevice( cameras.at( cameraId ) ); // cameras
            m_streams.emplace_back( m_iCaptureSession->createOutputStream( outputStreamSettings.get() ) );
        }
    }
    else
    {
        for( size_t cameraId{}; cameraId < m_camerasNum; ++ cameraId )
        {
            Argus::UniqueObj< Argus::OutputStreamSettings > outputStreamSettings{ m_iCaptureSessions.at( cameraId )->createOutputStreamSettings( Argus::STREAM_TYPE_EGL ) };
            Argus::IOutputStreamSettings * iOutputStreamSettings{ Argus::interface_cast< Argus::IOutputStreamSettings >( outputStreamSettings ) };
            if( not iOutputStreamSettings ) std::cout << "Failed to create iOutputStreamSettings!" << std::endl;
            // Create the IEGLOutputStreamSettings settings.
            Argus::IEGLOutputStreamSettings * iEGLOutputStreamSettings{ Argus::interface_cast< Argus::IEGLOutputStreamSettings >( outputStreamSettings ) };
            if( not iEGLOutputStreamSettings ) std::cout << "Failed to create iEGLOutputStreamSettings!" << std::endl;
            iEGLOutputStreamSettings->setPixelFormat( Argus::PIXEL_FMT_YCbCr_420_888 );
            // iEGLOutputStreamSettings->setEGLDisplay( m_eglDisplay );
            iEGLOutputStreamSettings->setResolution( m_iSensorMode->getResolution() );
            iEGLOutputStreamSettings->setMetadataEnable( true );
            if( m_triggerMode ) iEGLOutputStreamSettings->setMode( Argus::EGL_STREAM_MODE_MAILBOX );
            else
            {
                iEGLOutputStreamSettings->setMode( Argus::EGL_STREAM_MODE_FIFO );
                iEGLOutputStreamSettings->setFifoLength( 100 );
            }
            // Create egl streams
            iOutputStreamSettings->setCameraDevice( cameras.at( cameraId ) ); // cameras
            m_streams.emplace_back( m_iCaptureSessions.at( cameraId )->createOutputStream( outputStreamSettings.get() ) );
        }
    }

    // One FrameConsumer per stream; frames are pulled from these in
    // captureExecute() / pause().
    for( size_t cameraId{}; cameraId < m_camerasNum; ++ cameraId )
    {
        // Create the FrameConsumer.
        m_consumers.emplace_back( Argus::UniqueObj< EGLStream::FrameConsumer >( EGLStream::FrameConsumer::create( m_streams.at( cameraId ).get() ) ) );
        if( not m_consumers.at( cameraId ) ) ORIGINATE_ERROR( "Failed to create FrameConsumer!\n" );
        m_iFrameConsumers.emplace_back( Argus::interface_cast< EGLStream::IFrameConsumer >( m_consumers.at( cameraId ) ) );
    }

    // Build the capture request(s) and attach the streams.
    if( m_singleSessionMode )
    {
        m_request = Argus::UniqueObj< Argus::Request >{ m_iCaptureSession->createRequest( Argus::CAPTURE_INTENT_STILL_CAPTURE ) };
        m_iRequest = Argus::interface_cast< Argus::IRequest >( m_request );
        if( not m_iRequest ) ORIGINATE_ERROR( "Failed to create Request!" );
        m_iSourceSettings = Argus::interface_cast< Argus::ISourceSettings >( m_iRequest->getSourceSettings() );
        if( not m_iSourceSettings ) ORIGINATE_ERROR( "Failed to get source settings request interface!" );
        m_iAutoControlSettings = Argus::interface_cast< Argus::IAutoControlSettings >( m_iRequest->getAutoControlSettings() );
        if( not m_iAutoControlSettings ) ORIGINATE_ERROR( "Failed to get IAutoControlSettings interface!\n" );
        for( size_t cameraId{}; cameraId < m_camerasNum; ++ cameraId )
        {
            if( m_iRequest->enableOutputStream( m_streams.at( cameraId ).get() ) != Argus::STATUS_OK ) ORIGINATE_ERROR( "Failed to enable stream in Request!" );
            Argus::IStreamSettings * iStreamSettings{ Argus::interface_cast< Argus::IStreamSettings >( m_iRequest->getStreamSettings( m_streams.at( cameraId ).get() ) ) };
            if( not iStreamSettings ) ORIGINATE_ERROR( "Cannot get StreamSettings Interface!" );
            // iStreamSettings->setSourceClipRect( { 0.f, 0.45f, 1.f, 0.55f } );
            // iStreamSettings->setPostProcessingEnable( true );
        }
        for( size_t cameraId{}; cameraId < m_camerasNum; ++ cameraId )
        {
            m_iStreams.emplace_back( Argus::interface_cast< Argus::IEGLOutputStream >( m_streams.at( cameraId ) ) );
            if ( not m_iStreams.at( cameraId ) ) ORIGINATE_ERROR( "Failed to create right stream!" );
        }
        configureSensor( m_iRequest );
    }
    else
    {
        for( size_t cameraId{}; cameraId < m_camerasNum; ++ cameraId )
        {
            m_requests.emplace_back( Argus::UniqueObj< Argus::Request >{ m_iCaptureSessions.at( cameraId )->createRequest( Argus::CAPTURE_INTENT_STILL_CAPTURE ) } );
            m_iRequests.emplace_back( Argus::interface_cast< Argus::IRequest >( m_requests.at( cameraId ) ) );
            if( not m_iRequests.at( cameraId ) ) ORIGINATE_ERROR( "Failed to create Request!" );
            // NOTE(review): m_iSourceSettings / m_iAutoControlSettings are
            // single members overwritten on every iteration, so after this
            // loop they refer to the LAST camera's request only — later
            // setSensorsParams() calls will therefore affect only that
            // camera in multi-session mode. Confirm this is intended.
            m_iSourceSettings = Argus::interface_cast< Argus::ISourceSettings >( m_iRequests.at( cameraId )->getSourceSettings() );
            if( not m_iSourceSettings ) ORIGINATE_ERROR( "Failed to get source settings request interface!" );
            m_iAutoControlSettings = Argus::interface_cast< Argus::IAutoControlSettings >( m_iRequests.at( cameraId )->getAutoControlSettings() );
            if( not m_iAutoControlSettings ) ORIGINATE_ERROR( "Failed to get IAutoControlSettings interface!\n" );
            if( m_iRequests.at( cameraId )->enableOutputStream( m_streams.at( cameraId ).get() ) != Argus::STATUS_OK ) ORIGINATE_ERROR( "Failed to enable stream in Request!" );
            Argus::IStreamSettings * iStreamSettings{ Argus::interface_cast< Argus::IStreamSettings >( m_iRequests.at( cameraId )->getStreamSettings( m_streams.at( cameraId ).get() ) ) };
            if( not iStreamSettings ) ORIGINATE_ERROR( "Cannot get StreamSettings Interface!" );
            m_iStreams.emplace_back( Argus::interface_cast< Argus::IEGLOutputStream >( m_streams.at( cameraId ) ) );
            if ( not m_iStreams.at( cameraId ) ) ORIGINATE_ERROR( "Failed to create right stream!" );
            configureSensor( m_iRequests.at( cameraId ) );
        }
    }

    // Timestamp offset used to convert TSC sensor timestamps in
    // captureExecute() (see hc::utils::getOffsetNs()).
    m_offsetNs = hc::utils::getOffsetNs();

    // In free-running mode start the repeating capture and the worker thread;
    // in trigger mode captures are issued one-by-one from pause().
    if( not m_triggerMode )
    {
        if( m_singleSessionMode )
        {
            if( m_iCaptureSession->repeat( m_request.get() ) != Argus::STATUS_OK ) ORIGINATE_ERROR( "Failed to start repeat burst capture request!" );
            std::cout << "Camera started." << std::endl;
        }
        else
        {
            for( size_t cameraId{}; cameraId < m_camerasNum; ++ cameraId )
            {
                if( m_iCaptureSessions.at( cameraId )->repeat( m_requests.at( cameraId ).get() ) != Argus::STATUS_OK ) ORIGINATE_ERROR( "Failed to start repeat burst capture request!" );
                std::cout << "Camera started: " << cameraId << std::endl;
            }
        }
        m_threadExecute = std::thread( & hc::Camera::captureExecute, this );
    }
    m_framesCounter = 0;
    return true;
}

// Stops capturing. In free-running mode this joins the worker thread and
// restarts the nvargus daemon; in trigger mode it stops the repeating
// request(s), waits for the session(s) to go idle and releases the
// CameraProvider.
// @return always true (daemon-restart failure is only logged).
bool Camera::stopCapture()
{
    std::cout << "Stop capture..." << std::endl;
    m_doCapture = false;
    m_abortCapture = true; // makes captureExecute() leave its loop
    if( not m_triggerMode )
    {
        m_threadExecute.join();
        if( system( "sudo systemctl restart nvargus-daemon.service" ) != 0 )
        {
            std::cout << "Restart argus failed!" << std::endl;
        }
    }
    else
    {
        if( m_singleSessionMode )
        {
            m_iCaptureSession->stopRepeat();
            m_iCaptureSession->waitForIdle();
        }
        else
        {
            for( size_t cameraId{}; cameraId < m_camerasNum; ++ cameraId )
            {
                m_iCaptureSessions.at( cameraId )->stopRepeat();
                m_iCaptureSessions.at( cameraId )->waitForIdle();
            }
        }
        m_cameraProvider.reset();
    }
    std::cout << "Capture stopped." << std::endl;
    return true;
}

// Arms recording: sets the output directory and the number of frames to
// record. Recording actually begins only in free-running mode
// (m_doCapture stays false in trigger mode — there pause() drives captures).
// @param recordPath  directory that must contain one sub-directory per camera id
// @param number      number of frames to record (stored in m_framesNumber)
// @return always true
bool Camera::capture( const std::string & recordPath, const int32_t number )
{
    m_recordPath = recordPath;
    m_framesNumber = number;
    m_doCapture = not m_triggerMode;
    return true;
}

// In trigger mode: ignores the argument and performs ONE capture per camera,
// writing each acquired frame as <recordPath>/<cameraId>/<m_framesCounter>.jpg.
// In free-running mode: simply pauses/resumes recording.
// @param pause  true to pause recording (free-running mode only)
// @return true on success
bool Camera::pause( bool pause )
{
    if( m_triggerMode )
    {
        if( m_singleSessionMode )
        {
            // capture — one request produces a frame on every enabled stream.
            const auto result{ m_iCaptureSession->capture( m_request.get() ) };
            // NOTE(review): capture() is assumed to return 0 on failure
            // (capture id otherwise) — confirm against the Argus API docs.
            if( result == 0 )
            {
                ORIGINATE_ERROR( "Capture failed!" );
                return {};
            }
            size_t cameraId{};
            #pragma omp parallel for schedule(static) num_threads(6) /// !!!!!!!!!!!!!!!!!!!!!!!! change to cameras number
            for( cameraId = 0; cameraId < m_camerasNum; ++ cameraId )
            {
                Argus::UniqueObj< EGLStream::Frame > frame{ m_iFrameConsumers.at( cameraId )->acquireFrame() };
                EGLStream::IFrame * iFrame{ Argus::interface_cast< EGLStream::IFrame >( frame ) };
                if( not iFrame )
                {
                    std::cout << "Failed to acquire frame! " << cameraId << std::endl;
                }
                else
                {
                    // std::cout << cameraId << " " << iFrame->getNumber() << " " << iFrame->getTime() << std::endl;
                    // record
                    {
                        EGLStream::IImageJPEG * iJPEG{ Argus::interface_cast< EGLStream::IImageJPEG >( iFrame->getImage() ) };
                        if( iJPEG )
                        {
                            const auto file{ m_recordPath + "/" + std::to_string( cameraId ) + "/" + std::to_string( m_framesCounter ) + ".jpg" };
                            if( iJPEG->writeJPEG( file.c_str() ) != Argus::STATUS_OK )
                            {
                                std::cout << "Failed to write JPEG: " << file << std::endl;
                            }
                        }
                    }
                }
            } // pragma
        }
        else
        {
            size_t cameraId{};
            #pragma omp parallel for schedule(static) num_threads(6) /// !!!!!!!!!!!!!!!!!!!!!!!! change to cameras number
            for( cameraId = 0; cameraId < m_camerasNum; ++ cameraId )
            {
                // Each camera has its own session/request here.
                const auto result{ m_iCaptureSessions.at( cameraId )->capture( m_requests.at( cameraId ).get() ) };
                if( result == 0 ) REPORT_ERROR( "Capture failed!" );
                Argus::UniqueObj< EGLStream::Frame > frame{ m_iFrameConsumers.at( cameraId )->acquireFrame() };
                EGLStream::IFrame * iFrame{ Argus::interface_cast< EGLStream::IFrame >( frame ) };
                if( not iFrame )
                {
                    std::cout << "Failed to acquire frame! " << cameraId << std::endl;
                }
                else
                {
                    // std::cout << cameraId << " " << iFrame->getNumber() << " " << iFrame->getTime() << std::endl;
                    // record
                    {
                        EGLStream::IImageJPEG * iJPEG{ Argus::interface_cast< EGLStream::IImageJPEG >( iFrame->getImage() ) };
                        if( iJPEG )
                        {
                            const auto file{ m_recordPath + "/" + std::to_string( cameraId ) + "/" + std::to_string( m_framesCounter ) + ".jpg" };
                            if( iJPEG->writeJPEG( file.c_str() ) != Argus::STATUS_OK )
                            {
                                std::cout << "Failed to write JPEG: " << file << std::endl;
                            }
                        }
                        m_doCapture = false;
                    }
                }
            } // pragma
        }
        m_doCapture = false;
        m_framesCounter ++;
    }
    else m_doCapture = not pause;
    return true;
}

// @return true while recording is active (m_doCapture flag).
bool Camera::capturing()
{
    return m_doCapture;
}

// Worker-thread body for free-running mode (started by startCapture()).
// Loops until m_abortCapture: acquires one frame per camera (optionally
// writing JPEGs once m_doCapture is set and m_skipFrames frames have
// passed), then drains the Argus event queue(s) and logs per-capture
// timestamps ("<captureId> <ts> <tsNow>") to <recordPath>/<offsetNs>.txt.
void Camera::captureExecute()
{
    // Streams must be connected before frames can be acquired.
    for( size_t cameraId{}; cameraId < m_camerasNum; ++ cameraId )
    {
        m_iStreams.at( cameraId )->waitUntilConnected();
    }
    std::map< uint64_t, uint32_t > tsMap{}; // currently unused (see commented tsMap[] below)
    // auto nano{ hc::utils::nano() };
    std::vector< uint64_t > framesNum( m_camerasNum );
    const auto dataFilePath{ m_recordPath + "/" + std::to_string( m_offsetNs ) + ".txt" };
    std::ofstream dataFile{ dataFilePath };
    if( not dataFile.is_open() )
    {
        std::cout << "Failed to open data file! " << dataFilePath << std::endl;
        return;
    }

    // ---- single-session variant: one shared event provider/queue ----
    if( m_singleSessionMode ) while( true )
    {
        // capture
        size_t cameraId{};
        #pragma omp parallel for schedule(static) num_threads(6) /// !!!!!!!!!!!!!!!!!!!!!!!! change to cameras number
        for( cameraId = 0; cameraId < m_camerasNum; ++ cameraId )
        {
            Argus::UniqueObj< EGLStream::Frame > frame{ m_iFrameConsumers.at( cameraId )->acquireFrame() };
            EGLStream::IFrame * iFrame{ Argus::interface_cast< EGLStream::IFrame >( frame ) };
            if( not iFrame )
            {
                std::cout << "Failed to acquire frame! " << cameraId << std::endl;
            }
            else
            {
                // std::cout << cameraId << " " << iFrame->getNumber() << " " << iFrame->getTime() << std::endl;
                framesNum.at( cameraId ) = iFrame->getNumber();
                // record — skip the first m_skipFrames frames (warm-up)
                if( m_doCapture and framesNum.at( cameraId ) > m_skipFrames )
                {
                    EGLStream::IImageJPEG * iJPEG{ Argus::interface_cast< EGLStream::IImageJPEG >( iFrame->getImage() ) };
                    if( iJPEG )
                    {
                        const auto file{ m_recordPath + "/" + std::to_string( cameraId ) + "/" + std::to_string( framesNum.at( cameraId ) ) + ".jpg" };
                        if( iJPEG->writeJPEG( file.c_str() ) != Argus::STATUS_OK )
                        {
                            std::cout << "Failed to write JPEG: " << file << std::endl;
                        }
                    }
                }
            }
        } // pragma
        // std::cout << "Capture time: " << hc::utils::nano() - nano << std::endl;
        // nano = hc::utils::nano();

        // Drain the event queue (5 s timeout) and log capture timestamps.
        uint64_t ts{};
        uint32_t cid{};
        m_iEventProvider->waitForEvents( m_queue.get(), 5e9 );
        while( m_iQueue->getSize() > 0 )
        {
            const Argus::Event * event{ m_iQueue->getNextEvent() };
            const Argus::IEvent * iEvent{ Argus::interface_cast< const Argus::IEvent >( event ) };
            if( not iEvent ) std::cout << "Error : Failed to get IEvent interface!" << std::endl;
            else
            {
                if( iEvent->getEventType() == Argus::EVENT_TYPE_CAPTURE_COMPLETE )
                {
                    // std::cout << "COMPLETE: " << iEvent->getTime() << " " << iEvent->getCaptureId() << std::endl;
                    const Argus::IEventCaptureComplete * iEventCaptureComplete{ Argus::interface_cast< const Argus::IEventCaptureComplete >( event ) };
                    if( not iEventCaptureComplete ) std::cout << "Failed to get EventCaptureComplete Interface!" << std::endl;
                    const Argus::CaptureMetadata * metaData = iEventCaptureComplete->getMetadata();
                    // const Argus::ICaptureMetadata * iMetadata = Argus::interface_cast< const Argus::ICaptureMetadata >( metaData );
                    // if( not iMetadata ) std::cout << "Failed to get CaptureMetadata Interface!" << std::endl;
                    const Argus::Ext::ISensorTimestampTsc * iSensorTimestampTsc{ Argus::interface_cast< const Argus::Ext::ISensorTimestampTsc>( metaData ) };
                    if( not iSensorTimestampTsc ) std::cout << "Failed to get iSensorTimestampTsc Interface!" << std::endl;
                    cid = iEvent->getCaptureId();//iMetadata->getCaptureId();
                    // ts = iMetadata->getSensorTimestamp(); // legacy
                    ts = iSensorTimestampTsc->getSensorSofTimestampTsc() - m_offsetNs; // CLOCK_MONOTONIC_RAW
                    // Convert the monotonic-raw sensor timestamp to wall-clock.
                    const auto monotonicRawNow{ hc::utils::getMonotonicRawNow() };
                    const auto nanoNow{ hc::utils::nano() };
                    const auto tsNow{ nanoNow - ( monotonicRawNow - ts ) };
                    dataFile << cid << " " << ts << " " << tsNow << std::endl;
                    // std::cout << ts << " " << tsNow << " " << cid << std::endl;
                    // std::cout << ts << " " << iSensorTimestampTsc->getSensorEofTimestampTsc() - m_offsetNs << " " << iMetadata->getSensorTimestamp() << " " << now << " " << m_offsetNs << " " << cid << std::endl;
                    // tsMap[ cid ] = ts;
                }
                else if ( iEvent->getEventType() == Argus::EVENT_TYPE_CAPTURE_STARTED )
                {
                    // std::cout << "STARTED: " << iEvent->getTime() << " " << iEvent->getCaptureId() << std::endl;
                    // ToDo: Remove the empty after the bug is fixed
                    continue;
                }
                else if ( iEvent->getEventType() == Argus::EVENT_TYPE_ERROR )
                {
                    const Argus::IEventError * iEventError{ Argus::interface_cast< const Argus::IEventError >( event ) };
                    std::cout << "Event error: " << iEventError->getStatus() << std::endl;
                }
                else
                {
                    std::cout << "WARNING: Unknown event. Continue." << std::endl;
                }
            } // if event
        } // events > 0
        dataFile.flush();
        if( m_abortCapture )
        {
            std::cout << "Capture aborted." << std::endl;
            break;
        }
    } // while

    // ---- multi-session variant: per-camera event providers/queues ----
    if( not m_singleSessionMode ) while( true )
    {
        // capture
        size_t cameraId{};
        #pragma omp parallel for schedule(static) num_threads(6) /// !!!!!!!!!!!!!!!!!!!!!!!! change to cameras number
        for( cameraId = 0; cameraId < m_camerasNum; ++ cameraId )
        {
            Argus::UniqueObj< EGLStream::Frame > frame{ m_iFrameConsumers.at( cameraId )->acquireFrame() };
            EGLStream::IFrame * iFrame{ Argus::interface_cast< EGLStream::IFrame >( frame ) };
            if( not iFrame )
            {
                std::cout << "Failed to acquire frame! " << cameraId << std::endl;
            }
            else
            {
                // std::cout << cameraId << " " << iFrame->getNumber() << " " << iFrame->getTime() << std::endl;
                framesNum.at( cameraId ) = iFrame->getNumber();
                // record
                if( m_doCapture and framesNum.at( cameraId ) > m_skipFrames )
                {
                    EGLStream::IImageJPEG * iJPEG{ Argus::interface_cast< EGLStream::IImageJPEG >( iFrame->getImage() ) };
                    if( iJPEG )
                    {
                        const auto file{ m_recordPath + "/" + std::to_string( cameraId ) + "/" + std::to_string( framesNum.at( cameraId ) ) + ".jpg" };
                        if( iJPEG->writeJPEG( file.c_str() ) != Argus::STATUS_OK )
                        {
                            std::cout << "Failed to write JPEG: " << file << std::endl;
                        }
                    }
                }
            }
        } // pragma
        // std::cout << "Capture time: " << hc::utils::nano() - nano << std::endl;
        // nano = hc::utils::nano();
        uint64_t ts{};
        uint32_t cid{};
        for( cameraId = 0; cameraId < m_camerasNum; ++ cameraId )
        {
            m_iEventProviders.at( cameraId )->waitForEvents( m_queues.at( cameraId ).get(), 5e9 );
            while( m_iQueues.at( cameraId )->getSize() > 0 )
            {
                const Argus::Event * event{ m_iQueues.at( cameraId )->getNextEvent() };
                const Argus::IEvent * iEvent{ Argus::interface_cast< const Argus::IEvent >( event ) };
                if( not iEvent ) std::cout << "Error : Failed to get IEvent interface!" << std::endl;
                else
                {
                    if( iEvent->getEventType() == Argus::EVENT_TYPE_CAPTURE_COMPLETE )
                    {
                        // std::cout << "COMPLETE: " << iEvent->getTime() << " " << iEvent->getCaptureId() << std::endl;
                        const Argus::IEventCaptureComplete * iEventCaptureComplete{ Argus::interface_cast< const Argus::IEventCaptureComplete >( event ) };
                        if( not iEventCaptureComplete ) std::cout << "Failed to get EventCaptureComplete Interface!" << std::endl;
                        const Argus::CaptureMetadata * metaData = iEventCaptureComplete->getMetadata();
                        // const Argus::ICaptureMetadata * iMetadata = Argus::interface_cast< const Argus::ICaptureMetadata >( metaData );
                        // if( not iMetadata ) std::cout << "Failed to get CaptureMetadata Interface!" << std::endl;
                        const Argus::Ext::ISensorTimestampTsc * iSensorTimestampTsc{ Argus::interface_cast< const Argus::Ext::ISensorTimestampTsc>( metaData ) };
                        if( not iSensorTimestampTsc ) std::cout << "Failed to get iSensorTimestampTsc Interface!" << std::endl;
                        cid = iEvent->getCaptureId();//iMetadata->getCaptureId();
                        // ts = iMetadata->getSensorTimestamp(); // legacy
                        ts = iSensorTimestampTsc->getSensorSofTimestampTsc() - m_offsetNs; // CLOCK_MONOTONIC_RAW
                        const auto monotonicRawNow{ hc::utils::getMonotonicRawNow() };
                        const auto nanoNow{ hc::utils::nano() };
                        const auto tsNow{ nanoNow - ( monotonicRawNow - ts ) };
                        // NOTE(review): camera 0's timestamps are NOT written
                        // here (unlike the single-session branch) — confirm
                        // skipping cameraId == 0 is intentional.
                        if( cameraId ) dataFile << cid << " " << ts << " " << tsNow << std::endl;
                        // std::cout << cameraId << " " << ts << " " << tsNow << " " << cid << std::endl;
                        // std::cout << ts << " " << iSensorTimestampTsc->getSensorEofTimestampTsc() - m_offsetNs << " " << iMetadata->getSensorTimestamp() << " " << m_offsetNs << " " << cid << std::endl;
                        // tsMap[ cid ] = ts;
                    }
                    else if ( iEvent->getEventType() == Argus::EVENT_TYPE_CAPTURE_STARTED )
                    {
                        // std::cout << "STARTED: " << iEvent->getTime() << " " << iEvent->getCaptureId() << std::endl;
                        // ToDo: Remove the empty after the bug is fixed
                        continue;
                    }
                    else if ( iEvent->getEventType() == Argus::EVENT_TYPE_ERROR )
                    {
                        const Argus::IEventError * iEventError{ Argus::interface_cast< const Argus::IEventError >( event ) };
                        std::cout << "Event error: " << iEventError->getStatus() << std::endl;
                    }
                    else
                    {
                        std::cout << "WARNING: Unknown event. Continue." << std::endl;
                    }
                } // if event
            } // events > 0
        } // for cameras
        dataFile.flush();
        if( m_abortCapture )
        {
            std::cout << "Capture aborted." << std::endl;
            break;
        }
    } // while
}

// Signals captureExecute() to leave its loop after the current iteration.
void Camera::abortCapture()
{
    m_abortCapture = true;
}

// Applies runtime sensor parameters to the cached source/auto-control
// settings (those are set in startCapture(); in multi-session mode they
// point at the last camera's request — see the note there).
// Ranges are collapsed to a single value (min == max) to pin each parameter.
// @param sensorParams  fps, analog/digital gain, white-balance gains, exposure time
// @return always true
bool Camera::setSensorsParams( const SensorParams & sensorParams )
{
    Argus::BayerTuple< float > wb;
    wb[ 0 ] = sensorParams.wbGains.at( 0 );
    wb[ 1 ] = sensorParams.wbGains.at( 1 );
    wb[ 2 ] = sensorParams.wbGains.at( 2 );
    wb[ 3 ] = sensorParams.wbGains.at( 3 );
    m_iSourceSettings->setFrameDurationRange( { uint64_t( 1e9 / sensorParams.fps ) } ); // fps -> frame duration [ns]
    m_iSourceSettings->setGainRange( { float( sensorParams.analogGain ) } );
    m_iAutoControlSettings->setIspDigitalGainRange( { float( sensorParams.digitalGain ) } );
    m_iAutoControlSettings->setWbGains( wb );
    m_iSourceSettings->setExposureTimeRange( { uint64_t( sensorParams.exposureTime ) } );
    return true;
}

// Reads the effective sensor parameters back from a captured frame's
// Argus metadata (fps derived from frame duration, exposure, gains,
// scene lux, AWB gains).
// @param frame  a frame acquired from one of the FrameConsumers
// @return populated SensorParams
SensorParams Camera::getSensorParams( const Argus::UniqueObj< EGLStream::Frame > & frame )
{
    EGLStream::IArgusCaptureMetadata * iArgusCaptureMetadata{ Argus::interface_cast< EGLStream::IArgusCaptureMetadata >( frame ) };
    Argus::CaptureMetadata * metadata{ iArgusCaptureMetadata->getMetadata() };
    Argus::ICaptureMetadata * iMetadata{ Argus::interface_cast< Argus::ICaptureMetadata >( metadata ) };
    SensorParams sensorParams {
        double( 1e9 / iMetadata->getFrameDuration() ),
        double( iMetadata->getSensorExposureTime() ),
        iMetadata->getSensorAnalogGain(),
        iMetadata->getIspDigitalGain(),
        iMetadata->getSceneLux(),
        { iMetadata->getAwbGains()[ 0 ], iMetadata->getAwbGains()[ 1 ], iMetadata->getAwbGains()[ 2 ], iMetadata->getAwbGains()[ 3 ] } };
    return sensorParams;
}

// Configures a capture request: disables defog/edge-enhance/antibanding,
// sets fast denoise, pins frame duration (m_fpsTime), optionally locks
// exposure (m_expoAuto) and white balance (m_awbAuto), applies exposure
// compensation, digital gain and an optional color-correction matrix
// (m_useCCM / m_ccm). Dumps the resulting settings when m_debug is set.
// @param iRequest  request interface to configure (per camera in multi-session mode)
// @return true on success; ORIGINATE_ERROR on missing interfaces
bool Camera::configureSensor( Argus::IRequest * iRequest )
{
    // Get frame rate limits implied by the sensor mode's frame-duration range.
    uint64_t maxFramerate{ uint64_t( 1e9 / ( m_iSensorMode->getFrameDurationRange().min() - 1 ) ) };
    uint64_t minFramerate{ uint64_t( 1e9 / m_iSensorMode->getFrameDurationRange().max() ) + 1 };

    // ISourceSettings
    Argus::ISourceSettings * iSourceSettings{ Argus::interface_cast< Argus::ISourceSettings >( iRequest->getSourceSettings() ) };
    if( not iSourceSettings ) ORIGINATE_ERROR( "Failed to get source settings request interface!\n" );
    // iSourceSettings->setSensorMode( m_sensorMode );
    // Argus::IStreamSettings * streamSettings{ Argus::interface_cast< Argus::IStreamSettings>( iRequest->getStreamSettings( m_streams.at( 0 ).get() ) ) };
    // streamSettings->setPostProcessingEnable( false );
    // streamSettings->setSourceClipRect( { 0.25f, 0.25f, 0.75f, 0.75f } );

    // IDeFogSettings;
    Argus::Ext::IDeFogSettings * iDeFogSettings{ Argus::interface_cast< Argus::Ext::IDeFogSettings >( iRequest->getSourceSettings() ) } ;
    if( not iDeFogSettings ) ORIGINATE_ERROR( "Failed to get IDeFogSettings interface!\n" );
    iDeFogSettings->setDeFogEnable( false );
    iDeFogSettings->setDeFogAmount( 1.f );
    iDeFogSettings->setDeFogQuality( 1.f );

    // IDenoiseSettings
    Argus::IDenoiseSettings * iDenoiseSettings{ Argus::interface_cast< Argus::IDenoiseSettings >( iRequest->getSourceSettings() ) };
    if( not iDenoiseSettings ) ORIGINATE_ERROR( "Failed to get IDenoiseSettings interface!\n" );
    iDenoiseSettings->setDenoiseStrength( - 1.f );
    iDenoiseSettings->setDenoiseMode( Argus::DENOISE_MODE_FAST );

    // IEdgeEnhanceSettings
    Argus::IEdgeEnhanceSettings * iEdgeEnhanceSettings{ Argus::interface_cast< Argus::IEdgeEnhanceSettings >( iRequest->getSourceSettings() ) };
    if( not iEdgeEnhanceSettings ) ORIGINATE_ERROR( "Failed to get IEdgeEnhanceSettings interface!\n" );
    iEdgeEnhanceSettings->setEdgeEnhanceStrength( 1.f );
    iEdgeEnhanceSettings->setEdgeEnhanceMode( Argus::EDGE_ENHANCE_MODE_OFF );

    // IAutoControlSettings
    Argus::IAutoControlSettings * iAutoControlSettings{ Argus::interface_cast< Argus::IAutoControlSettings >( iRequest->getAutoControlSettings() ) };
    if( not iAutoControlSettings ) ORIGINATE_ERROR( "Failed to get IAutoControlSettings interface!\n" );
    iAutoControlSettings->setAeAntibandingMode( Argus::AE_ANTIBANDING_MODE_OFF );
    iSourceSettings->setFrameDurationRange( { m_fpsTime } );
    // iSourceSettings->setGainRange( { m_analogGain } );
    // iSourceSettings->setOpticalBlackEnable( true );
    if( not m_expoAuto )
    {
        // Fixed exposure: pin the time and lock auto-exposure.
        iSourceSettings->setExposureTimeRange( { m_exposureTime } );
        iAutoControlSettings->setAeLock( true );
    }
    // iAutoControlSettings->setAwbMode( Argus::AWB_MODE_OFF );
    iAutoControlSettings->setAwbMode( m_awbAuto ? Argus::AWB_MODE_AUTO : Argus::AWB_MODE_MANUAL );
    Argus::BayerTuple< float > wb;
    wb[ 0 ] = float( m_wbGains.at( 0 ) );
    wb[ 1 ] = float( m_wbGains.at( 1 ) );
    wb[ 2 ] = float( m_wbGains.at( 2 ) );
    wb[ 3 ] = float( m_wbGains.at( 3 ) );
    if( not m_awbAuto )
    {
        iAutoControlSettings->setWbGains( wb );
    }
    iAutoControlSettings->setAwbLock( not m_awbAuto );
    // iAutoControlSettings->setColorCorrectionMatrixEnable( false );
    // iAutoControlSettings->setColorSaturation( 1 );
    // iAutoControlSettings->setColorSaturationBias( 1 );
    // iAutoControlSettings->setColorSaturationEnable( false );
    iAutoControlSettings->setExposureCompensation( m_expoCompensation ); // - 2
    // iAutoControlSettings->setToneMapCurveEnable( false );
    iAutoControlSettings->setIspDigitalGainRange( { m_digitalGain } );
    // std::vector< Argus::AcRegion > aeRegions{ { uint32_t( m_frameWidth / 4 ), uint32_t( m_frameHeight / 4 ), uint32_t( 3 * m_frameWidth / 4 ), uint32_t( 3 * m_frameHeight / 4 ), 1.0 } };
    // iAutoControlSettings->setAeRegions( aeRegions );
    // iAutoControlSettings->setAwbRegions( aeRegions );
    // std::vector< float > ccm{ 1.10366594, -0.17505009, -0.1957128, 0.08551296, 1.23022819, -0.15721738, -0.00611935, 0.24519491, 1.19627378 };
    // std::vector< float > ccm{ 1.67700000f, - 0.31488000f, 0.05227000f, - 0.59869000f, 1.56105000f, - 0.72969000f, - 0.07831000f, - 0.24617000f, 1.67742000f };
    iAutoControlSettings->setColorCorrectionMatrixEnable( m_useCCM );
    iAutoControlSettings->setColorCorrectionMatrix( m_ccm );

    if( m_debug )
    {
        std::cout << "Fps min: " << minFramerate << ", max: " << maxFramerate << std::endl;
        Argus::Range< uint64_t > expoTimeRange{ iSourceSettings->getExposureTimeRange() };
        std::cout << "getExposureTimeRange " << expoTimeRange.min() << ", " << expoTimeRange.max() << std::endl;
        // std::cout << "getFocusPosition " << iSourceSettings->getFocusPosition() << std::endl;
        Argus::Range< uint64_t > frameDurationRange{ iSourceSettings->getFrameDurationRange() };
        std::cout << "getFrameDurationRange " << frameDurationRange.min() << ", " << frameDurationRange.max() << std::endl;
        Argus::Range< float > gainRange{ iSourceSettings->getGainRange() };
        std::cout << "getGainRange " << gainRange.min() << ", " << gainRange.max() << std::endl;
        // // std::cout << "getOpticalBlack " << iSourceSettings->getOpticalBlack() << std::endl;
        // std::cout << "getOpticalBlackEnable " << iSourceSettings->getOpticalBlackEnable() << std::endl;
        std::cout << "getDeFogEnable " << iDeFogSettings->getDeFogEnable() << std::endl;
        Argus::Range< float > ispGainRange{ iAutoControlSettings->getIspDigitalGainRange() };
        std::cout << "getIspDigitalGainRange " << ispGainRange.min() << ", " << ispGainRange.max() << std::endl;
        //// std::cout << "getAeAntibandingMode " << iAutoControlSettings->getAeAntibandingMode() << std::endl;
        // std::cout << "getAeLock " << iAutoControlSettings->getAeLock() << std::endl;
        //// std::cout << "getAeRegions " << iAutoControlSettings->getAeRegions() << std::endl;
        //// std::cout << "getAwbLock " << iAutoControlSettings->getAwbLock() << std::endl;
        //// std::cout << "getAwbMode " << iAutoControlSettings->getAwbMode() << std::endl;
        //// std::cout << "getAwbRegions " << iAutoControlSettings->getAwbRegions() << std::endl;
        // ccm — read back the matrix actually in effect.
        iAutoControlSettings->getColorCorrectionMatrix( & m_ccm );
        // NOTE(review): width and height are printed with no separator
        // between them (e.g. "33" for 3x3) — likely a missing "x".
        std::cout << "getColorCorrectionMatrix: size = " << iAutoControlSettings->getColorCorrectionMatrixSize().width() << iAutoControlSettings->getColorCorrectionMatrixSize().height() << std::endl;
        for( const auto & v : m_ccm ) std::cout << v << ", ";
        std::cout << std::endl;
        // wb
        wb = { iAutoControlSettings->getWbGains() };
        std::cout << "getWbGains: " << wb[ 0 ] << ", " << wb[ 1 ] << ", " << wb[ 2 ] << ", " << wb[ 3 ] << std::endl;
        // std::cout << "getColorSaturation " << iAutoControlSettings->getColorSaturation() << std::endl;
        // std::cout << "getColorSaturationBias " << iAutoControlSettings->getColorSaturationBias() << std::endl;
        // std::cout << "getColorSaturationEnable " << iAutoControlSettings->getColorSaturationEnable() << std::endl;
        std::cout << "getExposureCompensation " << iAutoControlSettings->getExposureCompensation() << std::endl;
        //// std::cout << "getToneMapCurve " << iAutoControlSettings->getToneMapCurve() << std::endl;
        // std::cout << "getToneMapCurveEnable " << iAutoControlSettings->getToneMapCurveEnable() << std::endl;
        //// std::cout << "getToneMapCurveSize " << iAutoControlSettings->getToneMapCurveSize() << std::endl;
    }
    return true;
}

} // namespace hc