Dear Folks @ NVIDIA,
I experienced a crash in ParaView (http://paraview.org).
I was able to write a simple OpenGL program that reproduces the issue.
Here is the snippet:
int i, n = 98454384;
float x = 0.5f, y = -0.5f, z = 0, delta = 100 / (float)n;
fprintf(stderr, "start render\n");
glClear( GL_COLOR_BUFFER_BIT );
GLuint list = glGenLists(1);
glNewList(list, GL_COMPILE);
glBegin( GL_TRIANGLE_STRIP );
/* add the first two points */
glNormal3f( 1.0f, 1.0f, 1.0f );
glVertex3f( x, y, 0.0f );
glNormal3f( 1.0f, 1.0f, 1.0f );
glVertex3f( -x, y, 0.0f );
i = 2;
while (i < n) {
    glNormal3f( 1.0f, 1.0f, 1.0f );
    glVertex3f( x, y, 0.0f );
    y += delta; i++;
    glNormal3f( 1.0f, 1.0f, 1.0f );
    glVertex3f( -x, y, 0.0f );
    y += delta; i++;
}
glEnd();
glEndList();
glCallList(list);
glDeleteLists(list, 1);
fprintf(stderr, "stop render\n");
The very last call to glVertex3f crashes.
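As a back-of-the-envelope calculation (pure speculation on my side, nothing I verified in the driver): each vertex carries a normal and a position, i.e. 6 floats = 24 bytes, so the whole strip amounts to roughly 98,454,384 * 24 bytes, about 2.4 GB of data in a single glBegin/glEnd block of one display list. My guess is that some internal buffer size or 32-bit counter overflows once the list grows past the 2 GiB mark.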
Here is the gdb stack trace:
Program received signal SIGSEGV, Segmentation fault.
0x0000003b80089c57 in memcpy () from /lib64/libc.so.6
Missing separate debuginfos, use: debuginfo-install SDL-1.2.14-3.el6.x86_64 glibc-2.12-1.107.el6_4.5.x86_64 libX11-1.5.0-4.el6.x86_64 libXau-1.0.6-4.el6.x86_64 libXcursor-1.1.13-2.el6.x86_64 libXext-1.3.1-2.el6.x86_64 libXfixes-5.0-3.el6.x86_64 libXrandr-1.4.0-1.el6.x86_64 libXrender-0.9.7-2.el6.x86_64 libgcc-4.4.7-4.el6.x86_64 libstdc++-4.4.7-4.el6.x86_64 libxcb-1.8.1-1.el6.x86_64 mesa-libGLU-9.0-0.8.el6_4.3.x86_64
(gdb) bt
#0 0x0000003b80089c57 in memcpy () from /lib64/libc.so.6
#1 0x00007ffff68cfaf6 in ?? () from /usr/lib64/libnvidia-glcore.so.334.16
#2 0x00007ffff68cfbae in ?? () from /usr/lib64/libnvidia-glcore.so.334.16
#3 0x0000000000400fe3 in render () at ubug.cpp:59
#4 0x0000000000401076 in main (argc=1, argv=0x7fffffffe098) at ubug.cpp:75
When running the application under the DDT debugger with memory debugging enabled, I get an error message stating that memcpy is trying to write past the end of an allocated memory area.
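I have not tried it, but I would expect valgrind's memcheck to flag the same invalid write (SDL2 being the binary name from the compile line below):
valgrind ./SDL2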
So far, I have been able to reproduce the bug on a RHEL6-like distribution, on x86_64, with an NVIDIA Quadro K5000 and the latest available drivers: 331.49 and 334.16 (beta).
The program runs without any issue under TigerVNC with the Mesa implementation of OpenGL, but it crashes as soon as it runs on the NVIDIA card.
The full source code is below. To compile:
g++ -g -o SDL2 -DNOTIMER ubug.cpp -lSDL -lGLU -lGL
#include <unistd.h>
#include <SDL/SDL.h>
#include <SDL/SDL_opengl.h>

const int SCREEN_WIDTH = 640;
const int SCREEN_HEIGHT = 480;
const int SCREEN_BPP = 32;

bool initGL()
{
    glMatrixMode( GL_PROJECTION );
    glLoadIdentity();
    glMatrixMode( GL_MODELVIEW );
    glLoadIdentity();
    glClearColor( 0.f, 0.f, 0.f, 1.f );
    GLenum error = glGetError();
    if( error != GL_NO_ERROR )
    {
        printf( "Error initializing OpenGL! %s\n", gluErrorString( error ) );
        return false;
    }
    return true;
}

bool init()
{
    if( SDL_Init( SDL_INIT_VIDEO ) < 0 )
        return false;
    if( SDL_SetVideoMode( SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_BPP, SDL_OPENGL ) == NULL )
        return false;
    SDL_EnableUNICODE( SDL_TRUE );
    if( initGL() == false )
        return false;
    SDL_WM_SetCaption( "OpenGL Test", NULL );
    return true;
}

void render()
{
    int i, n = 98454384;
    float x = 0.5f, y = -0.5f, z = 0, delta = 100 / (float)n;
    fprintf(stderr, "start render\n");
    glClear( GL_COLOR_BUFFER_BIT );
    GLuint list = glGenLists(1);
    glNewList(list, GL_COMPILE);
    glBegin( GL_TRIANGLE_STRIP );
    /* add the first two points */
    glNormal3f( 1.0f, 1.0f, 1.0f );
    glVertex3f( x, y, 0.0f );
    glNormal3f( 1.0f, 1.0f, 1.0f );
    glVertex3f( -x, y, 0.0f );
    i = 2;
    while (i < n) {
        glNormal3f( 1.0f, 1.0f, 1.0f );
        glVertex3f( x, y, 0.0f );
        y += delta; i++;
        glNormal3f( 1.0f, 1.0f, 1.0f );
        glVertex3f( -x, y, 0.0f );
        y += delta; i++;
    }
    glEnd();
    glEndList();
    glCallList(list);
    glDeleteLists(list, 1);
    fprintf(stderr, "stop render\n");
    SDL_GL_SwapBuffers();
}

int main( int argc, char *argv[] )
{
    if( init() == false )
        return 1;
    for ( ; ; ) {
        render();
        sleep(1);
    }
    return 0;
}
I was able to get correct output by rewriting the render primitive.
Basically, I split the geometry into several smaller glBegin/glEnd blocks:
void render()
{
    int i, n = 98454384;
    float x = 0.5f, y = -0.5f, z = 0, delta = 100 / (float)n;
    fprintf(stderr, "start render\n");
    glClear( GL_COLOR_BUFFER_BIT );
    GLuint list = glGenLists(1);
    glNewList(list, GL_COMPILE);
    glBegin( GL_TRIANGLE_STRIP );
    /* add the first two points */
    glNormal3f( 1.0f, 1.0f, 1.0f );
    glVertex3f( x, y, 0.0f );
    glNormal3f( 1.0f, 1.0f, 1.0f );
    glVertex3f( -x, y, 0.0f );
    i = 2;
    while (i < n) {
        if (i % 499998 == 0) {
            /* close the current strip and start a new one,
               re-emitting the last two vertices so the mesh
               stays connected */
            i -= 2;
            y -= 2 * delta;
            glEnd();
            glBegin( GL_TRIANGLE_STRIP );
            glNormal3f( 1.0f, 1.0f, 1.0f );
            glVertex3f( x, y, 0.0f );
            y += delta; i++;
            glNormal3f( 1.0f, 1.0f, 1.0f );
            glVertex3f( -x, y, 0.0f );
            y += delta; i++;
        }
        glNormal3f( 1.0f, 1.0f, 1.0f );
        glVertex3f( x, y, 0.0f );
        y += delta; i++;
        glNormal3f( 1.0f, 1.0f, 1.0f );
        glVertex3f( -x, y, 0.0f );
        y += delta; i++;
    }
    glEnd();
    glEndList();
    glCallList(list);
    glDeleteLists(list, 1);
    fprintf(stderr, "stop render\n");
    SDL_GL_SwapBuffers();
}
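For the record, here is the same idea wrapped in a small helper, so that any long strip gets split automatically. This is only a sketch of the workaround; MAX_STRIP_VERTICES, stripVertexCount and stripVertex are names I made up for this example, not part of any API:

#define MAX_STRIP_VERTICES 499998

static int stripVertexCount = 0;
static float lastX[2], lastY[2];

/* emit one vertex of a GL_TRIANGLE_STRIP, restarting the strip
   before it grows past MAX_STRIP_VERTICES vertices */
void stripVertex(float vx, float vy)
{
    if (stripVertexCount >= MAX_STRIP_VERTICES) {
        /* close the strip and reopen it, repeating the last two
           vertices so the triangles stay connected */
        glEnd();
        glBegin( GL_TRIANGLE_STRIP );
        glNormal3f( 1.0f, 1.0f, 1.0f );
        glVertex3f( lastX[0], lastY[0], 0.0f );
        glNormal3f( 1.0f, 1.0f, 1.0f );
        glVertex3f( lastX[1], lastY[1], 0.0f );
        stripVertexCount = 2;
    }
    glNormal3f( 1.0f, 1.0f, 1.0f );
    glVertex3f( vx, vy, 0.0f );
    lastX[0] = lastX[1]; lastY[0] = lastY[1];
    lastX[1] = vx;       lastY[1] = vy;
    stripVertexCount++;
}

With this helper, the body of render() boils down to resetting stripVertexCount to 0, calling glBegin(GL_TRIANGLE_STRIP) once, and then calling stripVertex(x, y) / stripVertex(-x, y) in the loop.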
That being said, this kind of workaround is virtually impossible to implement correctly within ParaView,
so I hope this bug can be fixed soon in the driver.
Thanks and regards,
Gilles