Simple argus capture loop slow

Hi, when I run the following code I get ~7FPS. Why is this?

#include <stdio.h>
#include <stdlib.h>

#include <Argus/Argus.h>
#include <EGLStream/EGLStream.h>
#include <EGLStream/NV/ImageNativeBuffer.h>

#include <NvEglRenderer.h>

#include "Error.h"

using namespace Argus;
using namespace EGLStream;

NvEglRenderer *g_renderer = NULL;
Size2D<uint32_t> STREAM_SIZE(3840, 2160);

int main() {
    g_renderer = NvEglRenderer::createEglRenderer("renderer", 0, 0, 0, 0); 
    if (!g_renderer)
        ORIGINATE_ERROR("failed to create renderer");
    g_renderer->enableProfiling();

    UniqueObj<CameraProvider> cameraProvider(CameraProvider::create());
    ICameraProvider *iCameraProvider = interface_cast<ICameraProvider>(cameraProvider);
    if (!iCameraProvider)
        ORIGINATE_ERROR("failed to create camera provider");

    std::vector<CameraDevice*> devices;
    iCameraProvider->getCameraDevices(&devices);
    printf("found %d camera(s)", devices.size());
    if (devices.size() == 0)
        ORIGINATE_ERROR("not enough cameras");

    UniqueObj<CaptureSession> captureSession(iCameraProvider->createCaptureSession(devices[0]));
    ICaptureSession *iSession = interface_cast<ICaptureSession>(captureSession);
    if (!iSession)
        ORIGINATE_ERROR("failed to create capture session");

    UniqueObj<OutputStreamSettings> streamSettings(iSession->createOutputStreamSettings(STREAM_TYPE_EGL));
    IEGLOutputStreamSettings *iEGLStreamSettings = interface_cast<IEGLOutputStreamSettings>(streamSettings);
    if (!iEGLStreamSettings)
        ORIGINATE_ERROR("failed to create output stream settings");
    iEGLStreamSettings->setPixelFormat(PIXEL_FMT_YCbCr_420_888);
    iEGLStreamSettings->setEGLDisplay(g_renderer->getEGLDisplay());
    iEGLStreamSettings->setResolution(STREAM_SIZE);

    UniqueObj<OutputStream> stream(iSession->createOutputStream(streamSettings.get()));
    if (!stream)
        ORIGINATE_ERROR("failed to create stream");

    UniqueObj<Request> request(iSession->createRequest());
    IRequest *iRequest = interface_cast<IRequest>(request);
    if (!iRequest) 
        ORIGINATE_ERROR("failed to create request");
    iRequest->enableOutputStream(stream.get());

    ISourceSettings *iSourceSettings = interface_cast<ISourceSettings>(request);
    if (!iSourceSettings)
        ORIGINATE_ERROR("failed to get source settings");
    iSourceSettings->setFrameDurationRange(Range<uint64_t>(1e9/30)); // request 30fps (frame duration in ns)

    UniqueObj<FrameConsumer> consumer(FrameConsumer::create(stream.get()));
    IFrameConsumer *iFrameConsumer = interface_cast<IFrameConsumer>(consumer);
    if (!iFrameConsumer)
        ORIGINATE_ERROR("failed to create consumer");

    int dmabuf = -1; // fd of the NvBuffer used for rendering
    for (int i = 0; i < 100; i++) {
        iSession->capture(request.get());

        UniqueObj<Frame> frame(iFrameConsumer->acquireFrame());
        IFrame *iFrame = interface_cast<IFrame>(frame);
        if (!iFrame)
            ORIGINATE_ERROR("failed to get frame");

        NV::IImageNativeBuffer *iNativeBuffer = interface_cast<NV::IImageNativeBuffer>(iFrame->getImage());
        if (!iNativeBuffer)
            ORIGINATE_ERROR("failed to get native buffer");

        if (dmabuf == -1) {
            // create the NvBuffer once on the first frame, reuse it afterwards
            dmabuf = iNativeBuffer->createNvBuffer(iEGLStreamSettings->getResolution(),
                                                   NvBufferColorFormat_YUV420,
                                                   NvBufferLayout_BlockLinear);
            if (dmabuf == -1)
                ORIGINATE_ERROR("failed to create dmabuf");
        } else if (iNativeBuffer->copyToNvBuffer(dmabuf) != STATUS_OK) {
            ORIGINATE_ERROR("failed to copy to nvbuf");
        }

        g_renderer->render(dmabuf);

        printf("rendered frame %d\n", i);
    }

    g_renderer->printProfilingStats();

    cameraProvider.reset();
    delete g_renderer;
}

Hi,
Looks like you are missing

renderer->setFPS((float)CAPTURE_FPS);
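
In the posted code that call would go right after the renderer is created, something like this (CAPTURE_FPS is not defined in that snippet, so 30 fps is assumed here):

g_renderer = NvEglRenderer::createEglRenderer("renderer", 0, 0, 0, 0);
if (!g_renderer)
    ORIGINATE_ERROR("failed to create renderer");
g_renderer->setFPS(30.0f); // set the renderer's target rendering rate
g_renderer->enableProfiling();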

For comparison, you may run 09_camera_jpeg_capture and check the framerate.
Or run this gstreamer pipeline:

$ gst-launch-1.0 nvarguscamerasrc ! 'video/x-raw(memory:NVMM),width=3840,height=2160' ! fpsdisplaysink text-overlay=0 video-sink=nvoverlaysink -v

I added renderer->setFPS and it had no effect. Does the FrameConsumer have to be in another thread?

Hi,
You may run 09_camera_jpeg_capture with -s

  -s            Enable profiling

We can reach 30 fps with the default camera board (ov5693). See if your sensor can output 30 fps steadily.
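
For example (assuming the default binary name built in samples/09_camera_jpeg_capture):

$ ./camera_jpeg_capture -s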

I can get 60fps in the samples, I am using an IMX274.

Hi,
Since 09_camera_jpeg_capture works fine, you may refer to it when developing your use case. If you don't need JPEG encoding, please check the code guarded by --disable-jpg and remove it.
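
One structural difference to look at: the sample queues repeating capture requests up front and only acquires frames inside its loop, instead of issuing a blocking capture() before every acquireFrame(). A minimal sketch of that pattern, using the names from the code above (not the exact sample code):

    // queue repeating capture requests once, instead of one capture() per iteration
    if (iSession->repeat(request.get()) != STATUS_OK)
        ORIGINATE_ERROR("failed to start repeat capture");

    for (int i = 0; i < 100; i++) {
        UniqueObj<Frame> frame(iFrameConsumer->acquireFrame());
        IFrame *iFrame = interface_cast<IFrame>(frame);
        if (!iFrame)
            ORIGINATE_ERROR("failed to get frame");
        // ... copy to the NvBuffer and render exactly as before ...
    }

    iSession->stopRepeat();
    iSession->waitForIdle();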

Just to follow up on this, if you compile the code above with

g++ main.cpp /usr/src/jetson_multimedia_api/samples/common/classes/NvElement.o /usr/src/jetson_multimedia_api/samples/common/classes/NvElementProfiler.o /usr/src/jetson_multimedia_api/samples/common/classes/NvLogging.o /usr/src/jetson_multimedia_api/samples/common/classes/NvEglRenderer.o -I/usr/src/jetson_multimedia_api/include -I/usr/src/jetson_multimedia_api/argus/samples/utils/ -L/usr/lib/aarch64-linux-gnu/tegra -lnvargus -lEGL -lnvbuf_utils -lpthread -lGL -lX11

do you get 30 fps?