Jetpack:jp4.6.1: IImageNativeBuffer::createNvBuffer: Argus::Status* status =2

hello:
I modified the sample oneShot. On its own, the modified sample (oneShot) works, but when I add it to my company's program, it doesn't work.
here is the modified sample(oneShot)
head file

#pragma once

// C system / POSIX
#include <unistd.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/mman.h>
#include <sys/time.h>

// C++ standard library
#include <iostream>
#include <vector>

// Argus / EGLStream (Jetson Multimedia API)
#include <Argus/Argus.h>
#include <EGLStream/EGLStream.h>
#include <EGLStream/NV/ImageNativeBuffer.h>

// NvBuffer helpers (NvBufferGetParams / NvBufferDestroy / NvBufferParams)
#include "nvbuf_utils.h"
// Log helper for the producer side of the EGL stream; prefixes every
// message so producer output can be told apart in mixed logs.
#define PRODUCER_PRINT(...) printf("PRODUCER: " __VA_ARGS__)

// NOTE(review): both macros below `return EXIT_FAILURE` (the int 1). When
// expanded inside a function returning bool — as they are in this file —
// 1 converts to `true`, so an error path reports SUCCESS to the caller.
// Confirm whether callers should instead receive `false` on failure.
#define EXIT_IF_NULL(val,msg)   \
        {if (!val) {printf("%s\n",msg); return EXIT_FAILURE;}}

#define EXIT_IF_NOT_OK(val,msg) \
        {if (val!=Argus::STATUS_OK) {printf("%s\n",msg); return EXIT_FAILURE;}}


// Timeout passed to IFrameConsumer::acquireFrame(), in nanoseconds.
const uint64_t FIVE_SECONDS_IN_NANOSECONDS = 5000000000;
using namespace Argus;
using namespace EGLStream;
// Thin RAII-ish wrapper around a single Argus still-capture pipeline:
// provider -> capture session -> EGL output stream -> frame consumer.
// initCamera() builds the pipeline once; AcquireData() grabs one frame
// and copies its pixel data into a caller-supplied buffer.
class ArGusCamera
{
private:
  bool m_isInit;                      // true once initCamera() has completed
  // Non-owning interface pointers; they remain valid only as long as the
  // UniqueObj members below (which own the underlying Argus objects) live.
  ICameraProvider *iCameraProvider;
  Argus::ICameraProperties *iCameraProperties;
  Argus::ISensorMode *iSensorMode;    // currently the first reported mode
  Argus::ICaptureSession *iCaptureSession;
  Argus::ICaptureSession *iSession;   // NOTE(review): duplicates iCaptureSession — confirm both are needed
  Argus::IEGLOutputStreamSettings *iEGLStreamSettings;
  IEGLOutputStream *iStream;
  EGLStream::IFrameConsumer *iFrameConsumer;
  Argus::IRequest *iRequest;
  OutputStream *m_stream;             // raw view of `stream` below (non-owning)
  Argus::ISourceSettings *iSourceSettings;
  // Owning handles; destroyed automatically in ~ArGusCamera().
  UniqueObj<FrameConsumer> consumer;
  UniqueObj<CameraProvider>cameraProvider;
  UniqueObj<CaptureSession> captureSession;
  Argus::UniqueObj<Argus::OutputStreamSettings> streamSettings;
  Argus::UniqueObj<Argus::OutputStream> stream;
  Argus::UniqueObj<Argus::Request> request;
private:
  std::vector<Argus::SensorMode*> sensorModes;    // all modes of device 0
  std::vector<Argus::CameraDevice*> cameraDevices; // devices found by provider
public:
  ArGusCamera();
  ~ArGusCamera();
  // Build the capture pipeline. NOTE(review): the int argument is currently
  // unused in the implementation — camera 0 is always opened.
  bool initCamera(int);
  // Capture one frame and copy packed YUV into the caller's buffer.
  bool AcquireData(uint8_t *,int);
};

cpp file:

#include "ArgusCamera.hpp"

// Default-construct the wrapper: no Argus objects exist yet, so every cached
// interface pointer starts out null and the object stays marked
// uninitialized until initCamera() succeeds.
ArGusCamera::ArGusCamera()
    : m_isInit(false)
    , iCameraProvider(NULL)
    , iCameraProperties(NULL)
    , iSensorMode(NULL)
    , iCaptureSession(NULL)
    , iSession(NULL)
    , iEGLStreamSettings(NULL)
    , iStream(NULL)
    , iFrameConsumer(NULL)
    , iRequest(NULL)
    , m_stream(NULL)
    , iSourceSettings(NULL)
{
}



// Destructor: intentionally empty. All owned Argus handles live in
// Argus::UniqueObj members, whose destructors release the request, stream,
// consumer, session and provider automatically (reverse declaration order).
ArGusCamera::~ArGusCamera
()
{
}
bool ArGusCamera::initCamera(int device)
{
  if(m_isInit == false)
  {
    Argus::Status CameraProviderstatus;
    cameraProvider = UniqueObj<CameraProvider>(CameraProvider::create(&CameraProviderstatus));
    printf("CameraProviderstatus = %d\n",CameraProviderstatus);
    
    iCameraProvider = Argus::interface_cast<Argus::ICameraProvider>(cameraProvider);
    iCameraProvider->getCameraDevices(&cameraDevices);
    if (cameraDevices.size() == 0)
    {
      printf("No cameras available\n");
    }
    iCameraProperties = Argus::interface_cast<Argus::ICameraProperties>(cameraDevices[0]);
    if (!iCameraProperties)
    {
      printf("Failed to get ICameraProperties interface\n");
    }
    iCameraProperties->getAllSensorModes(&sensorModes);
    if (sensorModes.size() == 0)
    {
      printf("Failed to get sensor modes\n");
    }
    PRODUCER_PRINT("Available Sensor modes :\n");
    for (uint32_t i = 0; i < sensorModes.size(); i++) {
        iSensorMode = Argus::interface_cast<Argus::ISensorMode>(sensorModes[i]);
        Argus::Size2D<uint32_t> resolution = iSensorMode->getResolution();
        PRODUCER_PRINT("[%u] W=%u H=%u\n", i, resolution.width(), resolution.height());
    }
    iSensorMode = Argus::interface_cast<Argus::ISensorMode>(sensorModes[0]);
    Argus::Status status;
    captureSession = UniqueObj<Argus::CaptureSession> (iCameraProvider->createCaptureSession(cameraDevices[0], &status));
    EXIT_IF_NOT_OK(status, "Failed to create capture session");
    
    iCaptureSession = Argus::interface_cast<Argus::ICaptureSession>(captureSession);
    if (!iCaptureSession)
    {
      printf("\nFailed to get ICaptureSession interface \n");
    }
    iSession = Argus::interface_cast<Argus::ICaptureSession>(captureSession);
    EXIT_IF_NULL(iSession, "Cannot get Capture Session Interface");
    
    streamSettings = UniqueObj<Argus::OutputStreamSettings>(iSession->createOutputStreamSettings(Argus::STREAM_TYPE_EGL));
    iEGLStreamSettings =Argus::interface_cast<Argus::IEGLOutputStreamSettings>(streamSettings);
    EXIT_IF_NULL(iEGLStreamSettings, "Cannot get IEGLOutputStreamSettings Interface\n");
    iEGLStreamSettings->setPixelFormat(Argus::PIXEL_FMT_YCbCr_420_888);
    iEGLStreamSettings->setResolution(iSensorMode->getResolution());
    iEGLStreamSettings->setMetadataEnable(true);

    stream = Argus::UniqueObj<Argus::OutputStream>(iSession->createOutputStream(streamSettings.get()));
    m_stream = stream.get();
    EXIT_IF_NULL(stream, "Failed to create EGLOutputStream");

    iStream = interface_cast<IEGLOutputStream>(stream.get());

    consumer = UniqueObj<FrameConsumer>(FrameConsumer::create(stream.get()));
    iFrameConsumer = Argus::interface_cast<EGLStream::IFrameConsumer>(consumer);
    EXIT_IF_NULL(iFrameConsumer, "Failed to initialize Consumer");

    request = Argus::UniqueObj<Argus::Request> (iSession->createRequest(Argus::CAPTURE_INTENT_STILL_CAPTURE));
    iRequest = Argus::interface_cast<Argus::IRequest>(request);
    EXIT_IF_NULL(iRequest, "Failed to get capture request interface");
    Argus::Status status1 = iRequest->enableOutputStream(m_stream);
    EXIT_IF_NOT_OK(status1, "Failed to enable stream in capture request");

    iSourceSettings = Argus::interface_cast<Argus::ISourceSettings>(request);
    EXIT_IF_NULL(iSourceSettings, "Failed to get source settings request interface");
    iSourceSettings->setSensorMode(sensorModes[0]);


    m_isInit = true;
    printf("init sucess\n");
    return true;
  }
  printf("has inited\n");
  return true;
}

bool ArGusCamera::AcquireData(uint8_t *DstData,int device)
{
    printf("start acquire data\n");
    uint8_t *rawdata = (uint8_t*)malloc(3840*2160*3/2);

    printf("Starting repeat capture requests.\n");
    if(!iCaptureSession->isRepeating())
    {
      if (iCaptureSession->repeat(request.get()) != STATUS_OK)
        printf("Failed to start repeat capture request");
    }

    uint32_t requestId = iSession->capture(request.get());
    EXIT_IF_NULL(requestId, "Failed to submit capture request");

//////////////////////////////////////////////////////////////////


    Argus::Status m_status;
    Argus::UniqueObj<EGLStream::Frame> frame(iFrameConsumer->acquireFrame(FIVE_SECONDS_IN_NANOSECONDS,&m_status));
    printf("acquireFrame status=%d\n",m_status);
    // if (iStream->waitUntilConnected() != STATUS_OK)
    //   printf("Stream failed to connect.");
    EGLStream::IFrame *iFrame = Argus::interface_cast<EGLStream::IFrame>(frame);
    EXIT_IF_NULL(iFrame, "Failed to get IFrame interface\n");
    printf("Acquired Frame: %llu, time %llu\n",
                    static_cast<unsigned long long>(iFrame->getNumber()),
                    static_cast<unsigned long long>(iFrame->getTime()));

    // Print out some capture metadata from the frame.
    IArgusCaptureMetadata *iArgusCaptureMetadata = interface_cast<IArgusCaptureMetadata>(frame);
    if (!iArgusCaptureMetadata)
        printf("Failed to get IArgusCaptureMetadata interface.\n");
    CaptureMetadata *metadata = iArgusCaptureMetadata->getMetadata();
    ICaptureMetadata *iMetadata = interface_cast<ICaptureMetadata>(metadata);
    if (!iMetadata)
    {
        printf("Failed to get ICaptureMetadata interface.\n");
    }
    printf("\tSensor Timestamp: %llu, LUX: %f\n",static_cast<unsigned long long>(iMetadata->getSensorTimestamp()),iMetadata->getSceneLux());

    // Print out image details, and map the buffers to read out some data.
    Image *image = iFrame->getImage();
    EGLStream::IImage2D *iImageYUV = Argus::interface_cast<EGLStream::IImage2D>(image);
    EXIT_IF_NULL(iImageYUV, "Failed to get ImageJPEG Interface");
    

    int v_dmabuf = -1;
    
    NV::IImageNativeBuffer *iNativeBuffer =interface_cast<NV::IImageNativeBuffer>(iFrame->getImage());
    if (!iNativeBuffer)
        printf("IImageNativeBuffer not supported by Image.");
    printf("stream size=========================%d\n",iStream->getResolution());

    v_dmabuf = iNativeBuffer->createNvBuffer(iStream->getResolution(),NvBufferColorFormat_NV12,NvBufferLayout_Pitch);

    NvBufferParams params;

    int ret = NvBufferGetParams(v_dmabuf, &params);
    size_t total_size = 0;
    for(int i = 0 ; i<params.num_planes ; i++)
    {
        int32_t width = params.width[i];
        int32_t height = params.height[i];
        int32_t pitch = params.pitch[i];
        printf("width:%d--height:%d--pitch:%d\n", width, height, pitch);
        
        size_t fsize = pitch*height;

        uint8_t* data_mem = (uint8_t*)mmap(0, fsize, PROT_READ | PROT_WRITE, MAP_SHARED, v_dmabuf, params.offset[i]);
        uint8_t* vptr = rawdata;
        memcpy(vptr,data_mem,fsize);
        vptr += fsize;
        total_size += fsize;
        printf("fsize= %d paramsoffset = %d\n", fsize, params.offset[i]);
    }
    printf("total size = %d\n",total_size);
    memcpy(DstData,rawdata,total_size);
    return true;
}

Then I added it to my company's program; it didn't work. The error is:

[2022-11-25 14:44:30.480][libcamera][ERROR]../src/camera_imx585yuv.cpp(862) initCamera cameraDevice[0] info=imx585_bottomleft_liimx585
PRODUCER: Available Sensor modes :
PRODUCER: [0] W=3840 H=2160
PRODUCER: [1] W=1928 H=1090
[2022-11-25 14:44:32.154][libcamera][ERROR]../src/camera_imx585yuv.cpp(923) initCamera init sucess
[2022-11-25 14:44:32.154][libcamera][ERROR]../src/camera_imx585yuv.cpp(933) AcquireData AcquireData--------------------------
[2022-11-25 14:44:32.154][libcamera][ERROR]../src/camera_imx585yuv.cpp(936) AcquireData FFFFFFFFFFFFFFFFFFFFFFFFFFFF--------------------------
[2022-11-25 14:44:32.621][libcamera][ERROR]../src/camera_imx585yuv.cpp(971) AcquireData 44444444444444444444444444444--------------------------
(Argus) Error NotSupported:  (propagating from src/eglstream/ImageImpl.cpp, function copyToNvBuffer(), line 470)
(Argus) Error InvalidState: Failed to copy to NvBuffer (in src/eglstream/ImageImpl.cpp, function createNvBuffer(), line 374)
[2022-11-25 14:44:32.624][libcamera][ERROR]../src/camera_imx585yuv.cpp(990) AcquireData iNativeBuffer->createNvBuffer() failed,status = 2,size=3840

   int v_dmabuf = -1;
    NV::IImageNativeBuffer *iNativeBuffer =interface_cast<NV::IImageNativeBuffer>(iFrame->getImage());
    if (!iNativeBuffer)
        LIBCAMERA_ERROR("IImageNativeBuffer not supported by Image.");

    v_dmabuf = iNativeBuffer->createNvBuffer(iStream->getResolution(),NvBufferColorFormat_NV12,NvBufferLayout_Pitch,EGLStream::NV::ROTATION_0,&status);
	if(v_dmabuf == -1)
	{
		LIBCAMERA_ERROR("iNativeBuffer->createNvBuffer() failed,status = %d,size=%d\n",status,iStream->getResolution());
		return false;
	}

createNvBuffer() always throws this error: (Argus) Error NotSupported: (propagating from src/eglstream/ImageImpl.cpp, function copyToNvBuffer(), line 470) (Argus) Error InvalidState: Failed to copy to NvBuffer (in src/eglstream/ImageImpl.cpp, function createNvBuffer(), line 374), and Argus::Status = 2. I don't know why I can't create the NvBuffer.

Hope to get your answer as soon as possible!
Thanks!

Try NvBufferLayout_BlockLinear instead of NvBufferLayout_Pitch

Thanks — when I use NvBufferLayout_BlockLinear it succeeds. I'd also like to know: can I save the YUV data this way?

Hi, ShaneCCC:
I use the following code to save a YUV file, but the image looks strange. I also used dump_dmabuf to save a YUV file, and the result was the same.
` v_dmabuf = iNativeBuffer->createNvBuffer(iStream->getResolution(),NvBufferColorFormat_NV12,NvBufferLayout_Pitch);

NvBufferParams params;

int ret = NvBufferGetParams(v_dmabuf, &params);
size_t total_size = 0;
for(int i = 0 ; i<params.num_planes ; i++)
{
    int32_t width = params.width[i];
    int32_t height = params.height[i];
    int32_t pitch = params.pitch[i];
    printf("width:%d--height:%d--pitch:%d\n", width, height, pitch);
    
    size_t fsize = pitch*height;

    uint8_t* data_mem = (uint8_t*)mmap(0, fsize, PROT_READ | PROT_WRITE, MAP_SHARED, v_dmabuf, params.offset[i]);
    memcpy(rawdata+total_size,data_mem,fsize);
   munmap(data_mem,fsize);
    total_size += fsize;
    printf("fsize= %d paramsoffset = %d\n", fsize, params.offset[i]);
}
printf("total size = %d\n",total_size);
memcpy(DstData,rawdata,total_size);
return true;`

3EC75DBB-E9F9-4dfc-ABFD-9076007251BD
i don’t know why it like that?

i use the sample yuvJpeg to save jpg,and it also strage.

Hi,
Block-linear format is not public, so please convert to pitch-linear and dump it.

You can also try this sample:

/usr/src/jetson_multimedia_api/samples/09_camera_jpeg_capture/

Hi,DaneLLL:
i use NvBufferLayout_Pitch to dump ,but it also strange.

/usr/src/jetson_multimedia_api/argus/samples/yuvJpeg

i use this sample to save jpeg,the image is also strange,but i use argus_camera to preview video,it normal.

Hi,
i use NvBufferLayout_Pitch to dump ,but it also strange.

/usr/src/jetson_multimedia_api/argus/samples/yuvJpeg

i use this sample to save jpeg,the image is also strange,but i use argus_camera to preview video,it normal.

Hi,
You may try 09 sample and check if saved JPEGs are good.

Hi,i hava try 09 sample and the saved JPEGS are strange

What’s the output resolution? Maybe it could be the alignment cause the problem.

output resolution is 3840x2160

hi,
I changed the resolution and the JPG is normal, but how do I dump YUV? The 09 sample uses NvJPEGEncoder to convert to JPG, but I want to dump YUV, so that approach doesn't help.

hi,
i see the way dump_dmabuf to dump yuv ,but i also need save the yuv-data to a char array,so that i can send this to other way.

Hi,
There are multiple places that call dump_dmabuf(). You can search for it in the samples, or refer to 00_video_decode.

I know there are multiple places that call dump_dmabuf(), but that doesn't help me: I need to save the YUV dump into a char array, like `char *image = new char[3840*2160*3/2]`, and put the YUV data into `image`.

Hi,
The reference samples may not be exactly fitting you use-case and you would need to do customization. If you dump the frame data through dump_dmabuf(), do you see valid data? If yes, it should work by replacing writing to a file with copying to a buffer in dump_dmabuf(). For getting data from NvBuffer, data alignment has to be considered. For certain resolutions, pitch is not equal to width and the data has to be copied line by line.

Hi,
I dump data through mmap(),and the image is strange.


Also, when I dump data through dump_dmabuf(), the result is the same.

Hi,
Please call NvBufferMemMap() instead of mmap().

Hi,
i use mmap in 09_sample,i can save the normal yuv image ,but i use mmap in yuvJpeg sample ,it was strange,what’s the differences of 09sample and yuvJpeg?