I'm reading the OpenGL Red Book, and I'm pretty much stuck at the first tutorial. Everything works fine if I use FreeGLUT and GLEW, but I'd like to handle input and such myself. So I ditched FreeGLUT and GLEW and wrote my own code. I've looked at some other tutorials and finished the code, but nothing is displayed. It seems like FreeGLUT does some voodoo magic in the background, but I don't know what I'm missing. I've tried this:
/* NOTE(review): the enclosing function's signature is outside this excerpt;
   `context` (an HDC), `hwnd` and `rendercontext` (an HGLRC) appear to be
   globals or members declared elsewhere — confirm. */
int attributeListInt[19];                     /* pixel-format attribute/value pairs, 0-terminated */
int pixelFormat[1];                           /* receives the chosen pixel-format index */
unsigned int formatCount;                     /* number of matching formats found */
int result;
/* NOTE(review): this descriptor is passed to SetPixelFormat below but is
   never initialized (nSize/nVersion are garbage).  Fill it in, e.g. via
   DescribePixelFormat on the chosen format, before calling SetPixelFormat. */
PIXELFORMATDESCRIPTOR pixelFormatDescriptor;
int attributeList[5];                         /* context-creation attribute/value pairs, 0-terminated */
/* Obtain the window's device context. */
context = GetDC (hwnd);
if (!context)
return -1;
/* Request a hardware-accelerated, double-buffered RGBA format with
   24-bit color, 24-bit depth and 8-bit stencil. */
attributeListInt[0] = WGL_SUPPORT_OPENGL_ARB;
attributeListInt[1] = TRUE;
attributeListInt[2] = WGL_DRAW_TO_WINDOW_ARB;
attributeListInt[3] = TRUE;
attributeListInt[4] = WGL_ACCELERATION_ARB;
attributeListInt[5] = WGL_FULL_ACCELERATION_ARB;
attributeListInt[6] = WGL_COLOR_BITS_ARB;
attributeListInt[7] = 24;
attributeListInt[8] = WGL_DEPTH_BITS_ARB;
attributeListInt[9] = 24;
/* NOTE(review): double buffering is requested here, so every frame must
   end with SwapBuffers(hdc); glFlush() alone will not present anything. */
attributeListInt[10] = WGL_DOUBLE_BUFFER_ARB;
attributeListInt[11] = TRUE;
attributeListInt[12] = WGL_SWAP_METHOD_ARB;
attributeListInt[13] = WGL_SWAP_EXCHANGE_ARB;
attributeListInt[14] = WGL_PIXEL_TYPE_ARB;
attributeListInt[15] = WGL_TYPE_RGBA_ARB;
attributeListInt[16] = WGL_STENCIL_BITS_ARB;
attributeListInt[17] = 8;
attributeListInt[18] = 0;                     /* terminator */
/* NOTE(review): wglChoosePixelFormatARB and wglCreateContextAttribsARB are
   extension functions — their pointers can only be fetched with
   wglGetProcAddress while some (temporary) GL context is already current.
   That bootstrap step is not shown here; verify it exists, otherwise these
   calls crash or fail. */
result = wglChoosePixelFormatARB (context, attributeListInt, NULL, 1, pixelFormat, &formatCount);
if (result != 1)
return -1;
/* Commit the chosen format to the DC (allowed only once per window). */
result = SetPixelFormat (context, pixelFormat[0], &pixelFormatDescriptor);
if (result != 1)
return -1;
/* Ask for an OpenGL 4.2 context. */
attributeList[0] = WGL_CONTEXT_MAJOR_VERSION_ARB;
attributeList[1] = 4;
attributeList[2] = WGL_CONTEXT_MINOR_VERSION_ARB;
attributeList[3] = 2;
attributeList[4] = 0;                         /* terminator */
rendercontext = wglCreateContextAttribsARB (context, 0, attributeList);
if (rendercontext == NULL)
return -1;
/* Make the new context current on this thread. */
result = wglMakeCurrent (context, rendercontext);
if (result != 1)
return -1;
/* Fixed global GL state: depth clear value and back-face culling.
   NOTE(review): no glViewport call anywhere — the default viewport is set
   when the context is first bound, but it should be updated on resize. */
glClearDepth (1.0f);
glFrontFace (GL_CCW);
glEnable (GL_CULL_FACE);
glCullFace (GL_BACK);
return 0;
This sets up a graphics context, but is apparently not enough to make everything work. The tutorial didn't include anything about view or projection matrices, so I'm not sure whether I should add anything like that. But the window remains black.
This is the tutorial code, adjusted to my code:
/* Converts a byte offset into the void* form glVertexAttribPointer expects. */
#define BUFFER_OFFSET(offset) ((void *)(offset))
bool init ();      /* one-time GL resource setup; returns false on failure */
bool mainloop ();  /* per-frame render callback */
/* Index names for the VAO / VBO arrays below (last enumerator is the count). */
enum VAO_IDs { Triangles, NumVAOs };
enum Buffer_IDs { ArrayBuffer, NumBuffers };
enum Attrib_IDs { vPosition = 0 };  /* matches layout(location = 0) in triangles.vert */
GLuint VAOs[NumVAOs];
GLuint Buffers[NumBuffers];
const GLuint NumVertices = 6;  /* two triangles, three vertices each */
int main (int argc, char** argv)
{
Window w;
w.init (&mainloop);
if (!init ())
return 0;
w.run ();
w.shutdown ();
return 0;
}
/* One-time GL setup: create the VAO/VBO for the two triangles, compile
   the shader program, and wire vertex attribute 0 to the buffer.
   Requires a current GL context.  Returns false if the shader program
   could not be built. */
bool init ()
{
/* Create and bind the vertex-array object that records the attribute
   setup performed below. */
glGenVertexArrays (NumVAOs, VAOs);
glBindVertexArray (VAOs[Triangles]);
/* Two triangles covering most of the clip-space square (2D positions). */
GLfloat vertices[NumVertices][2] = {
{-0.90f, -0.90f}, // Triangle 1
{0.85f, -0.90f},
{-0.90f, 0.85f},
{0.90f, -0.85f}, // Triangle 2
{0.90f, 0.90f},
{-0.85f, 0.90f}
};
/* Upload the vertex data into a buffer object. */
glGenBuffers (NumBuffers, Buffers);
glBindBuffer (GL_ARRAY_BUFFER, Buffers[ArrayBuffer]);
glBufferData (GL_ARRAY_BUFFER, sizeof(vertices),
vertices, GL_STATIC_DRAW);
ShaderInfo shaders[] = {
{GL_VERTEX_SHADER, "triangles.vert"},
{GL_FRAGMENT_SHADER, "triangles.frag"},
{GL_NONE, NULL}
};
/* LoadShaders returns 0 when compilation or linking fails; the original
   code ignored that, silently rendering nothing.  Fail fast instead so
   the caller can abort cleanly. */
GLuint program = LoadShaders (shaders);
if (program == 0)
return false;
glUseProgram (program);
/* Attribute 0: 2 floats per vertex, tightly packed, offset 0 into the
   currently bound GL_ARRAY_BUFFER. */
glVertexAttribPointer (vPosition, 2, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET (0));
glEnableVertexAttribArray (vPosition);
return true;
}
/* Per-frame callback: clear the color buffer and draw the two triangles. */
bool mainloop ()
{
glClear (GL_COLOR_BUFFER_BIT);
glBindVertexArray (VAOs[Triangles]);
glDrawArrays (GL_TRIANGLES, 0, NumVertices);
/* NOTE(review): the pixel format was requested with
   WGL_DOUBLE_BUFFER_ARB = TRUE, but nothing here presents the back
   buffer.  glFlush() never swaps; unless Window::run() calls
   SwapBuffers(hdc) after this callback, all drawing goes to the back
   buffer and the window stays black.  This is the most likely cause of
   the blank window — confirm where SwapBuffers is (or should be) called. */
glFlush ();
return true;
}