Getting stereo images by using libargus

I want to get stereo images from two image sensors which are mounted on my customized carrier board (CSI-2).
I already checked that the sensors work properly by storing the streams as separate videos.
Following commands are what I tested,

gst-launch-1.0 nvcamerasrc sensor-id=0 num-buffers=3000 ! omxh264enc ! qtmux ! filesink location=test0.mp4

gst-launch-1.0 nvcamerasrc sensor-id=1 num-buffers=3000 ! omxh264enc ! qtmux ! filesink location=test1.mp4

And I modified the sample code to get stereo images and convert them to OpenCV matrices.
When running the modified code in separate processes, the function acquireFrame() works successfully.

./samples/syncSensor/argus_syncsensor 0 ← get left image from sensor 0

./samples/syncSensor/argus_syncsensor 1 ← get right image from sensor 1

But I got some error message when trying to get two images at one process.

./samples/syncSensor/argus_syncsensor ← get stereo images from sensor 0 and 1


(Argus) Error InvalidState: Max frames acquired (in src/eglstream/FrameConsumerImpl.cpp, function acquireFrame(), line 266)
… ( The error message occurs whenever executing acquireFrame() )

And, is the following code correct for converting iImage to an OpenCV matrix?
// Get image data from mapbuffer
frames[i].img = cv::Mat(cv::Size(IMG_W, IMG_H), CV_8UC3, (void*)iImage->mapBuffer(), (size_t)iImage2D->getStride());

The whole code is as follows,

class SyncGrabber {
public:
	// One captured image plus its metadata, acquired from a single sensor.
	struct Frame {
		uint64_t id;  // frame number reported by EGLStream::IFrame::getNumber()
		double ts;    // timestamp in seconds 
		cv::Mat img;  // image data; left empty when acquisition fails
	};
	/** @brief  Constructor. Creates one Argus capture session, output stream,
	*          frame consumer, and repeating capture request per camera.
	* @param[in] numCams  Number of cameras to open (used when cidx < 0).
	* @param[in] w,h  Width and height of image. 
	* @param[in] cidx  Open only this sensor index; -1 (default) opens all numCams sensors.
	*/
	explicit SyncGrabber(const int numCams, const uint32_t w, const uint32_t h, const int cidx = -1);
	~SyncGrabber();
	// Main grab/display loop; runs until bExit_ is set.
	void run();

private:
	const int NUM_CAMS;       // number of cameras actually opened (1 when cidx >= 0)
	const uint32_t IMG_W;     // image width in pixels
	const uint32_t IMG_H;     // image height in pixels
	const int captureIndex_;  // selected sensor index, or -1 for all sensors
	bool bExit_;              // exit flag polled by run()
	UniqueObj<CameraProvider> cameraProvider_;
	ICameraProvider *iCameraProvider_;
	// Per-camera Argus objects; parallel vectors indexed 0..NUM_CAMS-1.
	std::vector<UniqueObj<CaptureSession> > captureSessions_;
	std::vector<ICaptureSession*> iCaptureSessions_;
	std::vector<UniqueObj<OutputStream> > outputStreams_;
	std::vector<IStream*> iOutputStreams_;
	std::vector<UniqueObj<EGLStream::FrameConsumer> > frameConsumers_;
	std::vector<EGLStream::IFrameConsumer*> iFrameConsumers_; 
	std::vector<UniqueObj<Request> > requests_;

	uint64_t stat_gfrm_; // grab-frame timing accumulator, written by v::chrono::toc() in run() (displayed as ms after /1000 -- presumably usec; confirm)
	std::vector<uint64_t> stat_cnts_; // number of captured frames per camera since last statistics display
	uint64_t stat_disp_; // display timing accumulator, written by v::chrono::toc() in run()

	SyncGrabber();  // default construction disallowed

	// Logs fps / timing statistics roughly every 10 s and resets the counters.
	void showStatistics();
};

SyncGrabber::SyncGrabber(const int numCams, const uint32_t w, const uint32_t h, const int cidx)
	: NUM_CAMS(cidx < 0 ? numCams : 1), IMG_W(w), IMG_H(h), captureIndex_(cidx), bExit_(false)
	, /* Initialize the Argus camera provider */ cameraProvider_(CameraProvider::create())
	, /* Get the ICameraProvider interface from the global CameraProvider. */ iCameraProvider_(interface_cast<ICameraProvider>(cameraProvider_))
{
	CHECK(NUM_CAMS > 0);
	LOG_IF(FATAL, !iCameraProvider_) << "Failed to get ICameraProvider interface";
	LOG(INFO) << "Version of libargus: " << iCameraProvider_->getVersion();

	// Get the camera devices.
	std::vector<CameraDevice*> cameraDevices;
	iCameraProvider_->getCameraDevices(&cameraDevices);
	// Cast avoids a signed/unsigned comparison warning (NUM_CAMS is int).
	LOG_IF(FATAL, cameraDevices.size() < static_cast<size_t>(NUM_CAMS))
		<< "Must have at least " << NUM_CAMS << " image sensors available.";
	LOG(INFO) << cameraDevices.size() << " camera devices are detected and "
		<< NUM_CAMS << " camera device will be accessed (camera " << cidx << ").";

	// Create the capture session(s); AutoControl will be based on what the 1st device sees.
	for (size_t i = 0; i < cameraDevices.size(); i++) {
		if (captureIndex_ >= 0 && i != static_cast<size_t>(captureIndex_)) continue;
		captureSessions_.push_back(UniqueObj<CaptureSession>(iCameraProvider_->createCaptureSession(cameraDevices[i])));
		LOG_IF(FATAL, !captureSessions_.back()) << "Failed to create capture session " << i;
		iCaptureSessions_.push_back(interface_cast<ICaptureSession>(captureSessions_.back()));
		LOG_IF(FATAL, !iCaptureSessions_.back()) << "Failed to get capture session interface " << i;
	}

	// Create stream settings objects and set settings common to each stream.
	// std::vector instead of `streamSettings[NUM_CAMS]`: a runtime-sized array
	// of class type is a non-standard VLA compiler extension.
	std::vector<UniqueObj<OutputStreamSettings> > streamSettings(NUM_CAMS);
	for (int i = 0; i < NUM_CAMS; i++) {
		streamSettings[i].reset(iCaptureSessions_[i]->createOutputStreamSettings());
		IOutputStreamSettings *iStreamSettings = interface_cast<IOutputStreamSettings>(streamSettings[i]);
		LOG_IF(FATAL, !iStreamSettings) << "Failed to create OutputStreamSettings " << i;
		iStreamSettings->setPixelFormat(PIXEL_FMT_YCbCr_420_888);
		iStreamSettings->setResolution(Size2D<uint32_t>(w, h));
	}

	// Create EGL output streams (one per capture session).
	LOG(INFO) << "Creating left and right output streams.";
	outputStreams_.resize(NUM_CAMS);
	iOutputStreams_.resize(NUM_CAMS);
	for (int i = 0; i < NUM_CAMS; i++) {
		outputStreams_[i].reset(iCaptureSessions_[i]->createOutputStream(streamSettings[i].get()));
		iOutputStreams_[i] = interface_cast<IStream>(outputStreams_[i]);
		LOG_IF(FATAL, !iOutputStreams_[i]) << "Failed to get OutputStream Interface " << i;
	}

	// Create frame consumers. maxFrames = 1: at most one unreleased frame may
	// be held per consumer; acquiring a second frame before releasing the
	// first fails with "Max frames acquired".
	frameConsumers_.resize(NUM_CAMS);
	iFrameConsumers_.resize(NUM_CAMS);
	for (int i = 0; i < NUM_CAMS; i++) {
		frameConsumers_[i].reset(EGLStream::FrameConsumer::create(outputStreams_[i].get(), 1));
		iFrameConsumers_[i] = interface_cast<EGLStream::IFrameConsumer>(frameConsumers_[i]);
		LOG_IF(FATAL, !iFrameConsumers_[i]) << "Failed to initialize Consumer " << i;
	}

	// Create one capture request per session and enable its output stream.
	requests_.resize(NUM_CAMS);
	for (int i = 0; i < NUM_CAMS; i++) {
		requests_[i].reset(iCaptureSessions_[i]->createRequest(CAPTURE_INTENT_PREVIEW)); // alternative: CAPTURE_INTENT_STILL_CAPTURE
		IRequest *iRequest = interface_cast<IRequest>(requests_[i]);
		LOG_IF(FATAL, !iRequest) << "Failed to create Request " << i;
		iRequest->enableOutputStream(outputStreams_[i].get());
		// Set source settings such as framerate (frame duration ~1/60 s, in ns).
		ISourceSettings *iSourceSettings = interface_cast<ISourceSettings>(iRequest->getSourceSettings());
		LOG_IF(FATAL, !iSourceSettings) << "Failed to get ISourceSettings interface " << i;
		iSourceSettings->setFrameDurationRange(Range<uint64_t>(static_cast<uint64_t>(1e9 / 60)));
	}

	// Submit repeating capture requests.
	LOG(INFO) << "Starting repeat capture requests.";
	for (int i = 0; i < NUM_CAMS; i++) {
		const int status = iCaptureSessions_[i]->repeat(requests_[i].get());
		LOG_IF(FATAL, status != STATUS_OK) << "Failed to start repeat capture request for still image " << i;
	}

	// Reset timing/frame-count statistics.
	stat_gfrm_ = stat_disp_ = 0;
	stat_cnts_.resize(NUM_CAMS, 0);
}

SyncGrabber::~SyncGrabber() {
	// Tear down in producer-to-consumer order: halt the repeating capture
	// requests, drain each session, then sever the EGLStream connections
	// (which also unblocks any consumer stuck in acquireFrame()).
	LOG(INFO) << "Try to exit SyncGrabber.";
	for (int cam = 0; cam < NUM_CAMS; ++cam) {
		ICaptureSession *session = iCaptureSessions_[cam];
		session->stopRepeat();
		session->waitForIdle();
		iOutputStreams_[cam]->disconnect();
	}
	LOG(INFO) << "Stop the capture requests, wait until they are complete and disconnect producers.";
	// Releasing the provider shuts down Argus.
	cameraProvider_.reset();
	LOG(INFO) << "Shut down argus provider";
}

void SyncGrabber::showStatistics() {
	// Emit a throughput/timing summary roughly every 10 seconds; timestamps
	// from v::chrono are compared against 10*1e6 (presumably microseconds).
	static uint64_t t_lastDisp_ = v::chrono::tic();
	const uint64_t elapsed = v::chrono::toc(t_lastDisp_);
	if (elapsed <= 10 * 1e6) return;  // not yet time to report

	std::stringstream msg;
	msg << std::setprecision(2) << std::fixed;
	msg << "Last " << 10 << " secs," << std::endl;
	msg << " Grab frame : " << stat_gfrm_ / 1000. << " ms, ";
	const double windowSecs = elapsed / 1e6;
	for (size_t cam = 0; cam < stat_cnts_.size(); ++cam) {
		msg << "[" << cam << "]: " << (double)stat_cnts_[cam] / windowSecs << " fps ";
	}
	msg << std::endl;
	msg << " Display    : frame  : " << stat_disp_ / 1000. << " ms";
	LOG(INFO) << msg.str();

	// Start a fresh measurement window.
	std::fill(stat_cnts_.begin(), stat_cnts_.end(), 0);
	t_lastDisp_ = v::chrono::tic();
}

void SyncGrabber::run()
{
  LOG(INFO) << "Run SyncGrabber loop";
  uint64_t t0;
	while(!bExit_) {
		struct Frame frames[NUM_CAMS];
		t0 = v::chrono::tic();
		for (int i = 0; i < NUM_CAMS; i++) 
		{
			Argus::Status status;
		  UniqueObj<EGLStream::Frame> frame(iFrameConsumers_[i]->acquireFrame(5000000000/*TIMEOUT_INFINITE*/, &status));
		  EGLStream::IFrame *iFrame = Argus::interface_cast<EGLStream::IFrame>(frame);
			if (!iFrame) {
				LOG(WARNING) << "Failed to get IFrame interface " << i << ", status: " << status;
				continue;
			}			

		  EGLStream::Image *image = iFrame->getImage();
			LOG_IF(WARNING, !image) << "Failed to get Image from iFrame->getImage()";
			EGLStream::IImage *iImage = interface_cast<EGLStream::IImage>(image);
			LOG_IF(WARNING, !iImage) << "Failed to cast IImage from image " << i;
			EGLStream::IImage2D *iImage2D = interface_cast<EGLStream::IImage2D>(image);
			LOG_IF(WARNING, !iImage2D) << "Failed to get IImage2D from image " << i;
			if (!image || !iImage || !iImage2D) continue;

			frames[i].id = iFrame->getNumber();
			frames[i].ts = (double)iFrame->getTime() / 1e6; // nanoseconds to secs

                        // Get image data from mapbuffer
			frames[i].img = cv::Mat(cv::Size(IMG_W, IMG_H), CV_8UC3, (void*)iImage->mapBuffer(), (size_t)iImage2D->getStride());


		}
		v::chrono::toc(t0, 0, &stat_gfrm_);
		
		// Display streamed images. 
		t0 = v::chrono::tic();
		{
			std::stringstream ss;
			bool bEmptyAll = true;
			for (int i = 0; i < NUM_CAMS; i++) {
				if (!frames[i].img.empty()) {
					stat_cnts_[i]++;


					cv::imshow("SyncGrabber Viewer (left) - Copyright 2018 Polaris3D", frames[i].img); // SEGFAULT ERROR!!


					bEmptyAll &= false;
				}
			}
		}

		cv::waitKey(5);
		v::chrono::toc(t0, 0, &stat_disp_);
		
		// Display statistics
		showStatistics();
	}

  LOG(INFO) << "Done -- exiting.";
}


int main(int argc, const char *argv[])
{
	// Route glog output to stderr as well as to log files in the cwd.
	FLAGS_alsologtostderr = true;
	google::SetLogDestination(google::GLOG_INFO, "./");
	google::InitGoogleLogging(argv[0]);

	LOG(INFO) << "Executing Argus Sample: " << basename(argv[0]);

	// Sensor configuration: number of cameras and their capture resolution.
	constexpr int kNumCams = 2;
	constexpr uint32_t kImgWidth = 1948;
	constexpr uint32_t kImgHeight = 1096;

	// Optional argv[1]: capture only this sensor index; -1 (default) captures
	// from all kNumCams sensors in one process.
	int captureIndex = -1;
	if (argc > 1) {
		captureIndex = atoi(argv[1]);
	}

	SyncGrabber syncGrabber(kNumCams, kImgWidth, kImgHeight, captureIndex);
	syncGrabber.run();

	return 0;
}

Hi klauski,
Are you on r28.1 or r28.2?
Please share a patch on samples/syncSensor/ so that we can reproduce the issue. It looks independent of OpenCV, so please do not include OpenCV code in the patch.

Hi klauski,

Have you clarified the cause and resolved the problem?
Any result can be shared?

Thanks

I found a temporary solution.
I waited 1 second and opened the second camera after the first camera was opened.
I don't know why I have to wait that long.
I just suspect that there is a deadlock when trying to connect two cameras simultaneously.