vpiImageWrapEglImage doesn't work with OpenGL ES textures?

Is this expected? How should I process images rendered in GLES using VPI?

The code below produces the output shown at the bottom. It was run on a Xavier with JetPack 4.4.
Copy the code to yourFileName.cpp, chmod +x the file, and run ./yourFileName.cpp; the first line compiles it with g++ and runs the resulting a.out.

It’s odd that the output says “NVMEDIA_VPI : 156, Version 2.3”, but the headers in /opt/nvidia/include/vpi report version 3.7.0, and so does the library.

//usr/bin/g++ $0 -lEGL -lGLESv2 -lnvvpi && ./a.out; exit
#include <EGL/egl.h>
#include <EGL/eglext.h>   // EGLImageKHR, EGL_GL_TEXTURE_2D_KHR
#include <GLES3/gl32.h>
#include <GLES2/gl2ext.h>
#include <vpi/VPI.h>
#include <vpi/EGL.h>
#include <stdio.h>
#include <vector>
int main() {
    // Initialize EGL and create a surfaceless OpenGL ES 3.2 context.
    EGLDisplay eglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    eglInitialize(eglDisplay, nullptr, nullptr);
    EGLContext eglContext;
    {
        EGLint configAttributes[] = {
            EGL_RENDERABLE_TYPE, EGL_OPENGL_ES3_BIT,
            EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
            EGL_RED_SIZE, 8,
            EGL_GREEN_SIZE, 8,
            EGL_BLUE_SIZE, 8,
            EGL_ALPHA_SIZE, 8,
            EGL_NONE
        };
        EGLConfig config;
        EGLint numConfigs;
        eglChooseConfig(eglDisplay, configAttributes, &config, 1, &numConfigs);
        EGLint contextAttributes[] = {
            EGL_CONTEXT_MAJOR_VERSION, 3,
            EGL_CONTEXT_MINOR_VERSION, 2,
            EGL_NONE
        };
        eglContext = eglCreateContext(eglDisplay, config, EGL_NO_CONTEXT, contextAttributes);
    }
    eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, eglContext);
    // Create a 256x256 RGBA8 texture filled with opaque white.
    GLuint texture;
    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    int width = 256, height = 256;
    std::vector<uint32_t> pixels(width * height, 0xFFFFFFFFu);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels.data());
    GLenum oglError = glGetError();
    if (oglError != GL_NO_ERROR) fprintf(stderr, "OGL Error %x\n", oglError);
    // Create an EGLImage backed by the GL texture.
    EGLAttrib eglImgAttrs[] = { EGL_IMAGE_PRESERVED, EGL_FALSE, EGL_NONE, EGL_NONE };
    EGLImageKHR eglImage = eglCreateImage(eglDisplay, eglContext, EGL_GL_TEXTURE_2D_KHR, (EGLClientBuffer)(intptr_t)texture, eglImgAttrs);
    EGLint eglErrorCode = eglGetError();
    if (eglErrorCode != EGL_SUCCESS) fprintf(stderr, "EGL Error %x\n", eglErrorCode);
    // Wrap the EGLImage in a VPIImage; this is the call that fails.
    VPIImage img = nullptr;
    VPIStatus vpiErrorCode = vpiImageWrapEglImage(eglImage, 0, &img);
    if (vpiErrorCode != VPI_SUCCESS) fprintf(stderr, "VPI Error %x\n", vpiErrorCode);
    return 0;
}
/*
NVMEDIA_ARRAY:   53,  Version 2.1
NVMEDIA_VPI :  156,  Version 2.3
[ERROR] 2020-08-22 17:45:33 VPI_ERROR_INVALID_IMAGE_TYPE: Image type isn't accepted
[WARN ] 2020-08-22 17:45:33 src/private/mem/img/Mapping_EGLImage_NvMediaImage.cpp:157 (EGLExport:NvError_Timeout)
VPI Error 3
*/

Hi,

We can reproduce this issue in our environment.
We are checking with our internal team and will share more information later.

Thanks.

Thanks! This could make one of our super high-frequency video pipelines more efficient. Appreciate the investigation.

Hi,

The root cause of this issue is the memory layout.
The GL texture is stored in block-linear format, which VPI doesn’t support yet.

Support for this is added in our next release.
We have tested your sample with the internal release and it works without error.

We will let you know once the package is publicly available.

Thanks.


Good to hear.

Are there other ways to set up a GL texture that will work with VPI? I’m primarily interested in processing render-to-texture targets.
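For example, would something along the lines below be closer to what VPI expects? This is an untested sketch: it assumes the Jetson Multimedia API’s nvbuf_utils (NvBufferCreateEx / NvEGLImageFromFd) can supply a pitch-linear RGBA buffer that GL renders into through an FBO, and that the same EGLImage can then be wrapped with vpiImageWrapEglImage. I haven’t verified that VPI accepts an image imported this way.

//usr/bin/g++ $0 -lEGL -lGLESv2 -lnvbuf_utils -lnvvpi && ./a.out; exit
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES3/gl32.h>
#include <GLES2/gl2ext.h>
#include <nvbuf_utils.h>   // assumption: Jetson Multimedia API (NvBufferCreateEx, NvEGLImageFromFd)
#include <vpi/VPI.h>
#include <vpi/EGL.h>
#include <stdio.h>
int main() {
    // Same surfaceless GLES 3.2 context setup as in the original post (error checks omitted).
    EGLDisplay eglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    eglInitialize(eglDisplay, nullptr, nullptr);
    EGLint configAttributes[] = {
        EGL_RENDERABLE_TYPE, EGL_OPENGL_ES3_BIT,
        EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
        EGL_NONE
    };
    EGLConfig config;
    EGLint numConfigs;
    eglChooseConfig(eglDisplay, configAttributes, &config, 1, &numConfigs);
    EGLint contextAttributes[] = { EGL_CONTEXT_MAJOR_VERSION, 3, EGL_CONTEXT_MINOR_VERSION, 2, EGL_NONE };
    EGLContext eglContext = eglCreateContext(eglDisplay, config, EGL_NO_CONTEXT, contextAttributes);
    eglMakeCurrent(eglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, eglContext);

    // Allocate a pitch-linear RGBA buffer with nvbuf_utils instead of letting GL
    // allocate (block-linear) texture storage.
    int width = 256, height = 256, dmabufFd = -1;
    NvBufferCreateParams bufParams = {};
    bufParams.width = width;
    bufParams.height = height;
    bufParams.layout = NvBufferLayout_Pitch;
    bufParams.colorFormat = NvBufferColorFormat_ABGR32;
    bufParams.payloadType = NvBufferPayload_SurfArray;
    bufParams.nvbuf_tag = NvBufferTag_NONE;
    if (NvBufferCreateEx(&dmabufFd, &bufParams) != 0) { fprintf(stderr, "NvBufferCreateEx failed\n"); return 1; }

    // Import the buffer as an EGLImage and bind it as the storage of a GL texture.
    EGLImageKHR eglImage = NvEGLImageFromFd(eglDisplay, dmabufFd);
    PFNGLEGLIMAGETARGETTEXTURE2DOESPROC glEGLImageTargetTexture2DOES =
        (PFNGLEGLIMAGETARGETTEXTURE2DOESPROC)eglGetProcAddress("glEGLImageTargetTexture2DOES");
    GLuint texture;
    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, (GLeglImageOES)eglImage);

    // Render into the texture through an FBO (just a clear here).
    GLuint fbo;
    glGenFramebuffers(1, &fbo);
    glBindFramebuffer(GL_FRAMEBUFFER, fbo);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texture, 0);
    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) fprintf(stderr, "FBO incomplete\n");
    glClearColor(1.f, 1.f, 1.f, 1.f);
    glClear(GL_COLOR_BUFFER_BIT);
    glFinish();

    // Wrap the same EGLImage for VPI.
    VPIImage img = nullptr;
    VPIStatus vpiErrorCode = vpiImageWrapEglImage(eglImage, 0, &img);
    if (vpiErrorCode != VPI_SUCCESS) fprintf(stderr, "VPI Error %x\n", vpiErrorCode);
    return 0;
}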