how to get mipi YCbCr_420_888 frame data to a buffer?

I am using this code

m_dmabuf = iNativeBuffer->createNvBuffer(iStream->getResolution(),
                                                     NvBufferColorFormat_YUV420,
                                                     NvBufferLayout_Pitch);
      and dump_dmabuf() in 00_video_decode

I can get the yuv data in ofstream,but how can I save the data to a buffer or array?

Please download the multimedia API sample code to check if any help.

https://developer.nvidia.com/embedded/dlc/multimedia-api-r2821

Hi,I am using the argus,I added time print and found the Argus::UniqueObjEGLStream::Frame frame(
iFrameConsumer->acquireFrame()); run once needs 300ms,but Why is this happening?

my code:

int main(int argc, char** argv)
{
    printf("Executing Argus Sample: %s\n", basename(argv[0]));

    ArgusSamples::Value<uint32_t> cameraIndex(DEFAULT_CAMERA_INDEX);
    std::vector<Argus::CameraDevice*> cameraDevices;

    /*
     * Set up Argus API Framework, identify available camera devices, and create
     * a capture session for the first available device
     */

    Argus::UniqueObj<Argus::CameraProvider> cameraProvider(Argus::CameraProvider::create());

    Argus::ICameraProvider *iCameraProvider =
        Argus::interface_cast<Argus::ICameraProvider>(cameraProvider);
    EXIT_IF_NULL(iCameraProvider, "Cannot get core camera provider interface");
    printf("Argus Version: %s\n", iCameraProvider->getVersion().c_str());

    Argus::Status status = iCameraProvider->getCameraDevices(&cameraDevices);
    EXIT_IF_NOT_OK(status, "Failed to get camera devices");
    EXIT_IF_NULL(cameraDevices.size(), "No camera devices available");
    if (cameraDevices.size() <= cameraIndex.get())
    {
        printf("Camera device specifed on command line is not available\n");
        return EXIT_FAILURE;
    }

    Argus::UniqueObj<Argus::CaptureSession> captureSession(
        iCameraProvider->createCaptureSession(cameraDevices[cameraIndex.get()], &status));

    Argus::ICaptureSession *iSession =
        Argus::interface_cast<Argus::ICaptureSession>(captureSession);
    EXIT_IF_NULL(iSession, "Cannot get Capture Session Interface");

    /*
     * Creates the stream between the Argus camera image capturing
     * sub-system (producer) and the image acquisition code (consumer).  A consumer object is
     * created from the stream to be used to request the image frame.  A successfully submitted
     * capture request activates the stream's functionality to eventually make a frame available
     * for acquisition.
     */

    Argus::UniqueObj<Argus::OutputStreamSettings> streamSettings(
        iSession->createOutputStreamSettings());

    Argus::IOutputStreamSettings *iStreamSettings =
        Argus::interface_cast<Argus::IOutputStreamSettings>(streamSettings);
    EXIT_IF_NULL(iStreamSettings, "Cannot get OutputStreamSettings Interface");
    iStreamSettings->setPixelFormat(Argus::PIXEL_FMT_YCbCr_420_888);

    iStreamSettings->setResolution(Argus::Size2D<uint32_t>(640, 480));
    //iStreamSettings->setResolution(Argus::Size2D<uint32_t>(1920, 1080));
    iStreamSettings->setMetadataEnable(true);



    unsigned int frameNumber = 0;
    while(1)
    {
		Argus::UniqueObj<Argus::OutputStream> stream(
		    iSession->createOutputStream(streamSettings.get()));

		Argus::IStream *iStream = Argus::interface_cast<Argus::IStream>(stream);
		EXIT_IF_NULL(iStream, "Cannot get OutputStream Interface");

		Argus::UniqueObj<EGLStream::FrameConsumer> consumer(
		EGLStream::FrameConsumer::create(stream.get()));

		EGLStream::IFrameConsumer *iFrameConsumer =
			Argus::interface_cast<EGLStream::IFrameConsumer>(consumer);
		EXIT_IF_NULL(iFrameConsumer, "Failed to initialize Consumer");

		Argus::UniqueObj<Argus::Request> request(
		iSession->createRequest(Argus::CAPTURE_INTENT_STILL_CAPTURE));

		Argus::IRequest *iRequest = Argus::interface_cast<Argus::IRequest>(request);
		EXIT_IF_NULL(iRequest, "Failed to get capture request interface");

		status = iRequest->enableOutputStream(stream.get());
		EXIT_IF_NOT_OK(status, "Failed to enable stream in capture request");

		uint32_t requestId = iSession->capture(request.get());
		EXIT_IF_NULL(requestId, "Failed to submit capture request");



		Argus::UniqueObj<EGLStream::Frame> frame(
		    iFrameConsumer->acquireFrame());

			EGLStream::IFrame *iFrame = Argus::interface_cast<EGLStream::IFrame>(frame);   
			EXIT_IF_NULL(iFrame, "Failed to get IFrame interface");


			// Get the IImageNativeBuffer extension interface.
			NV::IImageNativeBuffer *iNativeBuffer =
			Argus::interface_cast<NV::IImageNativeBuffer>(iFrame->getImage());
			if (!iNativeBuffer)
			printf("IImageNativeBuffer not supported by Image.");

			int m_dmabuf = -1;

			m_dmabuf = iNativeBuffer->createNvBuffer(iStream->getResolution(),NvBufferColorFormat_YUV420,NvBufferLayout_Pitch);
			printf("dmabuf=%d,number=%d\n",m_dmabuf,iFrame->getNumber());


		char filename[256];
	
	    sprintf(filename,"output000.yuv",(unsigned)frameNumber);

	    std::ofstream *outputFile = new std::ofstream(filename);
	    dump_dmabuf(m_dmabuf,0,outputFile);
        dump_dmabuf(m_dmabuf,1,outputFile);
        dump_dmabuf(m_dmabuf,2,outputFile);

	    delete outputFile;


		
	   
    }



    return EXIT_SUCCESS;
}

Does every acquireFrame need 300ms ?
JetPack 4.2 just release, Could you please have a try r32.1