
I am trying to set up a very basic program on Linux with the OpenGL 3.2 core profile and GLEW. I tried to do that with the help of this article.

This is my code:

#define GLEW_STATIC
#include <iostream>
#include <cstdio>
#include <string>
#include <GL/glew.h>
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include <GL/gl.h>
#include <GL/glx.h>

//#include <GL/glut.h>
#include "stb_image_write.h"
#include <cstdlib>
#include <GL/glfw.h>


static float _viewPortHeight = 30;
static float _viewPortWidth = 10;

typedef GLXContext (*glXCreateContextAttribsARBProc)(Display*, GLXFBConfig, GLXContext, Bool, const int*);
typedef Bool (*glXMakeContextCurrentARBProc)(Display*, GLXDrawable, GLXDrawable, GLXContext);
static glXCreateContextAttribsARBProc glXCreateContextAttribsARB = NULL;
static glXMakeContextCurrentARBProc   glXMakeContextCurrentARB   = NULL;

int main (int argc, char *argv[]){
    glXCreateContextAttribsARB = (glXCreateContextAttribsARBProc) glXGetProcAddressARB( (const GLubyte *) "glXCreateContextAttribsARB" );
    glXMakeContextCurrentARB   = (glXMakeContextCurrentARBProc)   glXGetProcAddressARB( (const GLubyte *) "glXMakeContextCurrent");
    Display *display = XOpenDisplay(NULL);
    if (display == NULL){
        std::cout  << "error getting the X display";
        return -1;
     }

    static int visualAttribs[] = {None};
    int numberOfFrameBufferConfigurations;
    GLXFBConfig *fbConfigs = glXChooseFBConfig(display, DefaultScreen(display), visualAttribs, &numberOfFrameBufferConfigurations);

     int context_attribs[] = {
          GLX_CONTEXT_MAJOR_VERSION_ARB ,3,
          GLX_CONTEXT_MINOR_VERSION_ARB, 2,
          GLX_CONTEXT_FLAGS_ARB, GLX_CONTEXT_DEBUG_BIT_ARB,
          GLX_CONTEXT_PROFILE_MASK_ARB, GLX_CONTEXT_CORE_PROFILE_BIT_ARB,None
     };

    std::cout << "initialising context...";
    GLXContext openGLContext = glXCreateContextAttribsARB(display, fbConfigs[0], 0, True, context_attribs);

    int pBufferAttribs[] = {
        GLX_PBUFFER_WIDTH, 32,
        GLX_PBUFFER_HEIGHT, 32,
        None
    };

    GLXPbuffer pbuffer = glXCreatePbuffer(display, fbConfigs[0], pBufferAttribs);
    XFree(fbConfigs);
    XSync(display, False);
    if(!glXMakeContextCurrent(display, pbuffer, pbuffer, openGLContext)){
        std::cout << "error with content creation";
        return -1;
    }
    glXMakeCurrent(display, pbuffer, openGLContext);

    GLenum error = glewInit();
    if (error != GLEW_OK){
       std::cout << "error with glew init()\n";
    }else{
        std::cout << "glew is ok\n\n";
    }
    GLuint test;
    GLuint framebuffer;
    glGenFramebuffers(1, &framebuffer);  // <-- this is the line that segfaults

    glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);

    std::string path("output.png");
    exportToPath(path);

    return 0;
 }

The output I get is this:

initialising context... glew is ok

Segmentation Fault (core dumped)

The line causing the problem is the glGenFramebuffers call, which is also the first call to one of the function pointers loaded by GLEW. Something is very wrong here, but I can't figure out what or why.
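
To narrow it down, I suppose a quick check like this (just a sketch) could go right before the crashing call, to see whether GLEW actually resolved that entry point:

if (glGenFramebuffers == NULL){
    std::cout << "glGenFramebuffers was not loaded by GLEW\n";
    return -1;
}
glGenFramebuffers(1, &framebuffer);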

Can anyone point me in the right direction?

csotiriou
2 Answers


glew.h already includes gl.h and, with GLX available, glx.h, and then uses some macro magic to redirect a number of symbols. I suggest you remove the lines

#include <GL/gl.h>
#include <GL/glx.h>

And just use

#include <GL/glew.h>

With the original includes in place after the GLEW include, some of that macro magic may get lost, and you end up with a wrongly associated symbol, resulting in the crash.
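
Applied to the code in the question, the top of the file would then look roughly like this (a sketch; depending on your GLEW setup you may also need <GL/glxew.h> for the GLX declarations used later on):

#define GLEW_STATIC
#include <GL/glew.h>      // keep GLEW first; it provides the GL declarations itself
#include <GL/glxew.h>     // assumption: for the glX* calls, instead of GL/glx.h

#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include "stb_image_write.h"

#include <iostream>
#include <cstdio>
#include <cstdlib>
#include <string>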


On a side note: why do you include the GLFW and GLUT headers? If your intention is to use naked GLX, then you don't need them and should not include them.

datenwolf

Found the problem: the solution seems to be setting glewExperimental = GL_TRUE before the call to glewInit().
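
In context that is just (sketch, using the same variables as in the question):

glewExperimental = GL_TRUE;   // let GLEW load core-profile entry points not advertised in the extension string
GLenum error = glewInit();
if (error != GLEW_OK){
    std::cout << "error with glew init()\n";
}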

csotiriou