Hello Everyone, I’ve been trying to use the VPI stereo disparity algorithm with a luxonis oak-d lite. I’m using the camera’s sdk and opencv to access the stereo output.
When executing the code, the disparity, left frame, right frame is shown for 3-5 seconds and the jetson(Xavier NX with a custom carrier board) completely shuts down. Although the example on VPI’s site works just fine.
This is my first time using VPI so any help regarding this is appreciated :)
I’ve attached the code below:
#include <opencv2/core/version.hpp>
#include <depthai/depthai.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <vpi/OpenCVInterop.hpp>
#include <vpi/Image.h>
#include <vpi/Status.h>
#include <vpi/Stream.h>
#include <vpi/algo/ConvertImageFormat.h>
#include <vpi/algo/Rescale.h>
#include <vpi/algo/StereoDisparity.h>
#include <cstring>
#include <iostream>
#include <sstream>
//Error Handling
#define CHECK_STATUS(STMT) \
do \
{ \
VPIStatus status = (STMT); \
if (status != VPI_SUCCESS) \
{ \
char buffer[VPI_MAX_STATUS_MESSAGE_LENGTH]; \
vpiGetLastStatusMessage(buffer, sizeof(buffer)); \
std::ostringstream ss; \
ss << vpiStatusGetName(status) << ": " << buffer; \
throw std::runtime_error(ss.str()); \
} \
} while (0);
int main(int, char **)
{
cv::Mat frameLeft, frameRight;
dai::Pipeline pipeline;
VPIImage inFrameLeft = NULL;
VPIImage inFrameRight = NULL;
VPIImage tmpLeft = NULL;
VPIImage tmpRight = NULL;
VPIImage stereoLeft = NULL;
VPIImage stereoRight = NULL;
VPIImage disparity = NULL;
VPIImage confidenceMap = NULL;
VPIStream stream = NULL;
VPIPayload stereo = NULL;
//configure depthai
auto monoLeft = pipeline.create<dai::node::MonoCamera>();
auto monoRight = pipeline.create<dai::node::MonoCamera>();
auto xLinkOutLeft = pipeline.create<dai::node::XLinkOut>();
auto xLinkOutRight = pipeline.create<dai::node::XLinkOut>();
xLinkOutLeft->setStreamName("left");
xLinkOutRight->setStreamName("right");
monoLeft->setResolution(dai::MonoCameraProperties::SensorResolution::THE_480_P);
monoLeft->setBoardSocket(dai::CameraBoardSocket::LEFT);
monoRight->setResolution(dai::MonoCameraProperties::SensorResolution::THE_480_P);
monoRight->setBoardSocket(dai::CameraBoardSocket::RIGHT);
monoLeft->out.link(xLinkOutLeft->input);
monoRight->out.link(xLinkOutRight->input);
dai::Device device(pipeline);
auto leftQueue = device.getOutputQueue("left", 8, false);
auto rightQueue = device.getOutputQueue("right", 8, false);
//proc backend
uint64_t backend = VPI_BACKEND_CUDA;
//input frame dimensions
int32_t inputWidth = 640;
int32_t inputHeight = 480;
//init vpi stream
CHECK_STATUS(vpiStreamCreate(0, &stream));
//conversion params
VPIConvertImageFormatParams convParams;
CHECK_STATUS(vpiInitConvertImageFormatParams(&convParams));
//disparity params
VPIStereoDisparityEstimatorCreationParams stereoParams;
CHECK_STATUS(vpiInitStereoDisparityEstimatorCreationParams(&stereoParams));
stereoParams.maxDisparity = 64;
//in-out format
VPIImageFormat stereoFormat = VPI_IMAGE_FORMAT_Y16_ER;
VPIImageFormat disparityFormat = VPI_IMAGE_FORMAT_U16;
int stereoWidth = inputWidth;
int stereoHeight = inputHeight;
int outputWidth = inputWidth;
int outputHeight = inputHeight;
CHECK_STATUS(vpiCreateStereoDisparityEstimator(VPI_BACKEND_CUDA, stereoWidth, stereoHeight, stereoFormat, &stereoParams,
&stereo));
CHECK_STATUS(vpiImageCreate(outputWidth, outputHeight, disparityFormat, 0, &disparity));
CHECK_STATUS(vpiImageCreate(inputWidth, inputHeight, VPI_IMAGE_FORMAT_U16, 0, &confidenceMap));
while (true)
{
//read left cam frames
auto inLeft = leftQueue->get<dai::ImgFrame>();
frameLeft = inLeft->getFrame();
cv::imshow("Left", frameLeft);
//read right cam frames
auto inRight = rightQueue->get<dai::ImgFrame>();
frameRight = inRight->getFrame();
cv::imshow("Right", frameRight);
//vpi frame wrap
CHECK_STATUS(vpiImageCreateOpenCVMatWrapper(frameLeft, 0, &inFrameLeft));
CHECK_STATUS(vpiImageCreateOpenCVMatWrapper(frameRight, 0, &inFrameRight));
//input stereo images
CHECK_STATUS(vpiImageCreate(stereoWidth, stereoHeight, stereoFormat, 0, &stereoLeft));
CHECK_STATUS(vpiImageCreate(stereoWidth, stereoHeight, stereoFormat, 0, &stereoRight));
CHECK_STATUS(vpiSubmitConvertImageFormat(stream, VPI_BACKEND_CUDA, inFrameLeft, stereoLeft, &convParams));
CHECK_STATUS(vpiSubmitConvertImageFormat(stream, VPI_BACKEND_CUDA, inFrameRight, stereoRight, &convParams));
//submit input and output
CHECK_STATUS(vpiSubmitStereoDisparityEstimator(stream, VPI_BACKEND_CUDA, stereo, stereoLeft, stereoRight, disparity,
confidenceMap, NULL));
// Wait until the algorithm finishes processing
CHECK_STATUS(vpiStreamSync(stream));
VPIImageData data;
CHECK_STATUS(vpiImageLock(disparity, VPI_LOCK_READ, &data));
//opencv matrix
cv::Mat cvDisparity;
CHECK_STATUS(vpiImageDataExportOpenCVMat(data, &cvDisparity));
//scale result from 0-255.
cvDisparity.convertTo(cvDisparity, CV_8UC1, 255.0 / (32 * stereoParams.maxDisparity), 0);
//colormap
cv::Mat cvDisparityColor;
applyColorMap(cvDisparity, cvDisparityColor, cv::COLORMAP_JET);
CHECK_STATUS(vpiImageUnlock(disparity));
cv::imshow("Disparity", cvDisparityColor);
int key = cv::waitKey(1);
if (key == 'q' || key == 'Q')
{
return 0;
}
}
vpiStreamDestroy(stream);
vpiImageDestroy(inFrameLeft);
vpiImageDestroy(inFrameRight);
vpiImageDestroy(tmpLeft);
vpiImageDestroy(tmpRight);
vpiImageDestroy(stereoLeft);
vpiImageDestroy(stereoRight);
vpiImageDestroy(confidenceMap);
vpiImageDestroy(disparity);
vpiPayloadDestroy(stereo);
return 0;
}