It’s a bit outdated, but here goes my 2cents contribution:
I was also looking for a code snippet to create an EGL context on desktop NVidia.
I was aware that on Windows, NVidia exposes EGL through extensions WGL_EXT_create_context_es_profile and WGL_EXT_create_context_es2_profile, and on Linux NVidia allows EGL through GLX_EXT_create_context_es_profile and GLX_EXT_create_context_es2_profile extensions.
Of course, you also have the aforementioned OpenGL-ES drivers/emulators from PowerVR / Adreno / Mali / Angle, but I was looking for a raw implementation, i.e., my own homegrown libGLESv2.dll and libEGL.dll
After some struggle, I’ve got something like below.
The basic thing is to get the required extensions, by hand or using GLEW, create a dummy context, and then create your GL-ES context. Error handling is omitted for clarity.
You also need to define and load all core and extension proc functions, and make them public for your libs.
/**
 * Create an OpenGL ES 2.0 rendering context on a desktop GPU through the
 * WGL_EXT_create_context_es2_profile extension (exposed e.g. by NVidia).
 *
 * Adapted from
 * https://www.opengl.org/wiki/Tutorial:_OpenGL_3.1_The_First_Triangle_%28C%2B%2B/Win%29
 *
 * @param pDC  MFC device context of the target window; its HDC receives the
 *             pixel format and the final rendering context.
 * @return true when m_hrc holds a valid ES-profile context and it has been
 *         made current on pDC; false on any failure (m_hrc is NULL then).
 */
bool CGLRenderer::CreateGLContext(CDC* pDC)
{
    PIXELFORMATDESCRIPTOR pfd =
    {
        sizeof(PIXELFORMATDESCRIPTOR),
        1,
        PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER, // Flags
        PFD_TYPE_RGBA, // The kind of framebuffer. RGBA or palette.
        32,            // Colordepth of the framebuffer.
        0, 0, 0, 0, 0, 0,
        0,
        0,
        0,
        0, 0, 0, 0,
        24,            // Number of bits for the depthbuffer
        8,             // Number of bits for the stencilbuffer
        0,             // Number of Aux buffers in the framebuffer.
        PFD_MAIN_PLANE,
        0,
        0, 0, 0
    };

    int nPixelFormat = ChoosePixelFormat(pDC->m_hDC, &pfd);
    if (nPixelFormat == 0) return false;

    BOOL bResult = SetPixelFormat(pDC->m_hDC, nPixelFormat, &pfd);
    if (!bResult) return false;

    // A dummy legacy context is required first: wglGetProcAddress (and hence
    // GLEW) only returns usable pointers once *some* context is current.
    HGLRC tempContext = wglCreateContext(pDC->m_hDC);
    if (!tempContext) return false;
    wglMakeCurrent(pDC->m_hDC, tempContext);

    // GLEW must be initialized *after* the dummy context is current.
    // BUGFIX: the original only showed a message box and carried on with
    // uninitialized extension pointers; bail out and clean up instead.
    GLenum err = glewInit();
    if (GLEW_OK != err) {
        AfxMessageBox(_T("GLEW is not initialized!"));
        wglMakeCurrent(NULL, NULL);
        wglDeleteContext(tempContext);
        m_hrc = NULL;
        return false;
    }

    // Request an OpenGL ES 2.0 profile. The driver may return a compatible
    // but higher GL-ES version, e.g. 3.0 or 3.1.
    int attribList[] = {
        WGL_CONTEXT_MAJOR_VERSION_ARB, 2,
        WGL_CONTEXT_MINOR_VERSION_ARB, 0,
        WGL_CONTEXT_PROFILE_MASK_ARB, WGL_CONTEXT_ES2_PROFILE_BIT_EXT,
        0,
    };

    // The ES2 profile bit is only legal when the ES2-profile extension is
    // exposed, so check it alongside WGL_ARB_create_context.
    if (wglewIsSupported("WGL_ARB_create_context") == 1 &&
        wglewIsSupported("WGL_EXT_create_context_es2_profile") == 1)
    {
        // BUGFIX: original passed 'attribs', but the array is 'attribList'.
        m_hrc = wglCreateContextAttribsARB(pDC->m_hDC, 0, attribList);
        wglMakeCurrent(NULL, NULL);
        wglDeleteContext(tempContext); // dummy context no longer needed
        if (!m_hrc) return false;
        wglMakeCurrent(pDC->m_hDC, m_hrc);
    }
    else // Failed to create a GL-ES context: required extensions missing.
    {
        // BUGFIX: original leaked tempContext and left it current here.
        wglMakeCurrent(NULL, NULL);
        wglDeleteContext(tempContext);
        m_hrc = NULL;
        return false;
    }

    // Debug info - print out the GL version actually obtained.
    const GLubyte *glVersionString = glGetString(GL_VERSION);
    const char *vendorChar = (const char*)glGetString(GL_VENDOR);
    const char *rendererChar = (const char*)glGetString(GL_RENDERER);
    int glVersion[2] = {-1, -1};
    glGetIntegerv(GL_MAJOR_VERSION, &glVersion[0]);
    glGetIntegerv(GL_MINOR_VERSION, &glVersion[1]);
    // BUGFIX: original had a stray duplicated '<<' before endl (syntax error).
    cout << "GL version string: " << glVersionString << endl;
    cout << "OpenGL version: " << glVersion[0] << "." << glVersion[1] << endl;
    cout << "GPU: " << vendorChar << " - " << rendererChar << endl;

    return m_hrc != NULL;
} // end of CreateGLContext
This is available at
http://stackoverflow.com/questions/31971373/how-to-create-egl-context-on-nvidia-desktop