1

I am trying to implement deferred shading with OpenGL 4.4 on a NVIDIA GTX 970 with latest drivers installed.
My code worked with rendering directly to screen. To add a second pass, I create a FBO to which I render my scene and a quad to which I render the final image. When I try to render nothing in the first pass and draw the quad after the second pass, the quad is visible. When I try to render a mesh (for example a cube) during the first pass, the quad disappears. Also I get the following error messages:
(screenshot of the OpenGL debug output listing the error messages — image not preserved)

The mesh was loaded with AssImp.
I use the following code to create VBO / VAO:

// Uploads the mesh's CPU-side data (positions, UVs, normals, indices) into
// static GPU buffer objects. Must run once before Mesh::render().
//
// NOTE(review): in a core-profile context a non-zero VAO must be bound
// before the attribute setup done in render() is legal; consider creating
// and binding a VAO here and capturing the attribute state in it.
void Mesh::genGPUBuffers()
{
    glGenBuffers(1, &vertexbuffer);
    glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
    // .data() instead of &v[0]: well-defined even when the vector is empty.
    glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(vec3), vertices.data(), GL_STATIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, 0);

    glGenBuffers(1, &uvbuffer);
    glBindBuffer(GL_ARRAY_BUFFER, uvbuffer);
    glBufferData(GL_ARRAY_BUFFER, uvs.size() * sizeof(vec2), uvs.data(), GL_STATIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, 0);

    glGenBuffers(1, &normalbuffer);
    glBindBuffer(GL_ARRAY_BUFFER, normalbuffer);
    glBufferData(GL_ARRAY_BUFFER, normals.size() * sizeof(vec3), normals.data(), GL_STATIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, 0);

    glGenBuffers(1, &indexbuffer);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indexbuffer);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(unsigned short), indices.data(), GL_STATIC_DRAW);
    // BUG FIX: the original unbound GL_ARRAY_BUFFER here (which was already
    // 0), leaving the element buffer bound. Unbind the binding point that
    // was actually used.
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
}


And I render my mesh like this:

// Draws this mesh: activates the shader, binds its textures, streams the
// three vertex attributes, and issues an indexed triangle draw.
void Mesh::render()
{
    if (shader != nullptr)
    {
        shader->use();
        shader->setModelMatrix(modelMatrix);
    }

    // Bind every assigned texture to its matching unit (slots 1..11).
    for (int unit = 1; unit <= 11; ++unit)
    {
        if (texture[unit] != nullptr)
        {
            glBindMultiTextureEXT(GL_TEXTURE0 + unit, GL_TEXTURE_2D, texture[unit]->getTextureID());
        }
    }

    // Enable an attribute slot and point it at a tightly packed float
    // buffer carrying `components` floats per vertex.
    const auto streamAttrib = [](auto slot, auto buffer, auto components)
    {
        glEnableVertexAttribArray(slot);
        glBindBuffer(GL_ARRAY_BUFFER, buffer);
        glVertexAttribPointer(slot, components, GL_FLOAT, GL_FALSE, 0, (void*)0);
    };

    streamAttrib(0, vertexbuffer, 3);   // positions
    streamAttrib(1, uvbuffer, 2);       // texture coordinates
    streamAttrib(2, normalbuffer, 3);   // normals

    // Index buffer, then the draw itself.
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indexbuffer);
    glDrawElements(GL_TRIANGLES, (GLsizei)indexCount, GL_UNSIGNED_SHORT, (void*)0);

    // Leave attribute arrays disabled for whoever renders next.
    glDisableVertexAttribArray(2);
    glDisableVertexAttribArray(1);
    glDisableVertexAttribArray(0);
}


With gDEBugger I found out that the error messages come from glVertexAttribPointer. But since gDEBugger does not support OpenGL 4, it throws a lot of errors itself and does not really work.

The FBO is generated like this:

GLuint depthBuf;
Texture
    posTex(this),
    normTex(this),
    colorTex(this)
;

// Create and bind the FBO
glGenFramebuffers(1, &fbo);
glBindFramebuffer(GL_FRAMEBUFFER, fbo);

// The depth buffer
glGenRenderbuffers(1, &depthBuf);
glBindRenderbuffer(GL_RENDERBUFFER, depthBuf);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT, static_cast<GLsizei>(resolution.x), static_cast<GLsizei>(resolution.y));

// Create the textures for position, normal and color
posTex.createGBuf(GL_TEXTURE0, GL_RGB32F, static_cast<int>(resolution.x), static_cast<int>(resolution.y));
normTex.createGBuf(GL_TEXTURE1, GL_RGB32F, static_cast<int>(resolution.x), static_cast<int>(resolution.y));
colorTex.createGBuf(GL_TEXTURE2, GL_RGB8, static_cast<int>(resolution.x), static_cast<int>(resolution.y));

// Attach the textures to the framebuffer
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthBuf);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, posTex.getTextureID(), 0);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT1, GL_TEXTURE_2D, normTex.getTextureID(), 0);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT2, GL_TEXTURE_2D, colorTex.getTextureID(), 0);

glBindFramebuffer(GL_FRAMEBUFFER, 0);


The createGBuf() function looks like this:

// Allocates a single-level immutable-storage 2D texture for use as a
// G-buffer attachment and leaves it bound on the requested texture unit.
void C0::Texture::createGBuf(GLenum texUnit, GLenum format, int width, int height)
{
    // Remember the allocation size on the object.
    this->width = width;
    this->height = height;

    glGenTextures(1, &textureID);
    glActiveTexture(texUnit);
    glBindTexture(GL_TEXTURE_2D, textureID);

    // One mip level; G-buffer targets are read back 1:1, so point-sample.
    glTexStorage2D(GL_TEXTURE_2D, 1, format, width, height);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
}
genpfault
  • 51,148
  • 11
  • 85
  • 139
Maru
  • 894
  • 1
  • 12
  • 29
  • gDEBugger works fine with GL4 by the way. You're probably using the old gRemedy version before AMD bought it. It supports modern GL Debug Output and you can use that to get pretty much the same errors you listed in your debug trace. I think your actual problem here is simply that you do not have a non-zero Vertex Array Object bound. – Andon M. Coleman Nov 19 '14 at 22:50
  • call glGetError a few time and find where the error is actually generated – ratchet freak Nov 19 '14 at 22:50

1 Answer

4

The error with your code is probably because you never generated a Vertex Array Object. If you don't have one, call glGenVertexArrays(1, &amp;yourVAO); — modern (core-profile) OpenGL flags vertex attribute calls made without a bound VAO as an error, which is probably what is happening to you. Generate a vertex array object, then bind it with glBindVertexArray(yourVAO);

Secondly, you should not be re-specifying your vertex attribute state every single render cycle; that is inefficient. You should create one VAO per mesh, record the buffer bindings and vertex attribute layout into it once, then unbind the VAO. When rendering the corresponding mesh, just rebind its VAO and call glDrawElements.

So an example would be this,

if you have a struct Vertex like so

// One interleaved vertex as laid out in the VBO; the attribute offsets in
// Mesh::initializeVAO must match this member order.
struct Vertex{
  vec3 position;  // attribute 0
  vec2 uv;        // attribute 1
  vec3 normal;    // attribute 2
};
// A mesh owning its CPU-side geometry and the VAO that encapsulates the
// GPU-side buffer/attribute state for it.
struct Mesh{
   std::vector<Vertex> vertices;       // interleaved vertex data
   std::vector<unsigned int> indices;  // triangle indices (GL_UNSIGNED_INT)

   GLuint VAO;  // vertex array object created by initializeVAO()

   void initializeVAO();
   void Render();
};

Then you initialize like so.

Mesh::initializeVAO(){
    GLuint vertexBuffer, indiceBuffer;
    glGenVertexArrays(1, &VAO);
    glBindVertexArray(VAO);

    glGenBuffers(1, &vertexBuffer);
    glGenBuffers(1, &indiceBuffer);

    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glBufferData(GL_ARRAY_BUFFER, vertices.size()*sizeof(Vertex), &vertices[0], GL_STATIC_DRAW);
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), 0);

    glEnableVertexAttribArray(1);
    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)sizeof(vec3));

    glEnableVertexAttribArray(2);
    glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)(sizeof(:vec3) + sizeof(glm::vec2)));

    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indiceBuffer);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(unsigned int), &indices[0], GL_STATIC_DRAW);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indiceBuffer);

    glBindVertexArray(0);
    glDeleteBuffers(1, &vertexBuffer);
    glDeleteBuffers(1, &indiceBuffer);
}

Then to render

Mesh::Render()
{
     glBindVertexArray(VAO);
     glDrawElements(GL_TRIANGLES, indices.size(), 0);
     glBindVertexArray(0);
}

If you have any questions, feel free to ask.

user3427457
  • 162
  • 1
  • 4
  • 1
    Thank you a lot! The errors indeed disappear. But the quad is also still not rendered when I render a cube in the first pass. I will try to do a little more debugging on the new code tomorrow, it's very late where I live ;) – Maru Nov 19 '14 at 23:55
  • Scene renders good without passes. With pass it does not work :/ Question: What is this Texture state usage warning? Might it be the cause of my trouble? – Maru Nov 20 '14 at 20:32
  • 2
    @MarcoAlka The warning means that you did not bind a valid texture before performing texture functions. Check that you have glBindTexture called on the texture you generated before using any glTextureFunctions. – user3427457 Nov 20 '14 at 23:49
  • I came here due to the same issue, upgrading some old code from GL 2.0 that didn't have explicit VAOs. Your answer helped a lot, except I think it's wrong to glDeleteBuffers() in your initialize when you're planning to still use those buffers to render. When I tried that, my program crashed; getting rid of the early delete fixed it. – D0SBoots Aug 11 '20 at 06:02