I've been using std::vector<glm::vec3> containers for storing vertex attributes, and everything was working just fine, rendering all kinds of different meshes. But after refactoring so that my vertex attributes are stored in a struct, I can't get even the simplest thing to render. Here is the struct (simplified):
struct Vertex {
    GLfloat x, y, z;    // Position
    GLfloat r, g, b, a; // Color
};
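As far as I can tell this struct should be tightly packed, so sizeof(struct Vertex) is 7 GLfloats (28 bytes) and the color data starts 12 bytes in; that is the stride/offset arithmetic I rely on later. A quick compile-time check of that assumption (not something I currently have in the code) would be:
// Compile-time sanity check (C++11): the stride/offset arithmetic used with
// glVertexAttribPointer below only works if the struct has no padding.
static_assert(sizeof(struct Vertex) == 7 * sizeof(GLfloat),
              "Vertex is expected to be 7 tightly packed GLfloats");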
I have two std::vectors, one for storing the vertex attributes and one for the indices:
std::vector<GLushort> indices;
std::vector<struct Vertex> vertices;
In the initialization function I fill these vectors with a simple green triangle:
struct Vertex vertex1;
vertex1.x=1.0;
vertex1.y=0.0;
vertex1.z=0.0;
vertex1.r=0.0;
vertex1.g=1.0;
vertex1.b=0.0;
vertex1.a=1.0;
vertices.push_back(vertex1);
struct Vertex vertex2;
vertex2.x=0.0;
vertex2.y=1.0;
vertex2.z=0.0;
vertex2.r=0.0;
vertex2.g=1.0;
vertex2.b=0.0;
vertex2.a=1.0;
vertices.push_back(vertex2);
struct Vertex vertex3;
vertex3.x=1.0;
vertex3.y=1.0;
vertex3.z=0.0;
vertex3.r=0.0;
vertex3.g=1.0;
vertex3.b=0.0;
vertex3.a=1.0;
vertices.push_back(vertex3);
indices.push_back(1);
indices.push_back(2);
indices.push_back(3);
Then I create the buffer objects and upload the data:
glGenBuffers(1, &ibo_elements);
glBindBuffer(GL_ARRAY_BUFFER, ibo_elements);
glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(struct Vertex), &vertices[0], GL_STATIC_DRAW);
glGenBuffers(1, &elementbuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, elementbuffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(GLushort), &indices[0], GL_STATIC_DRAW);
Then, after setting up the shader program and binding the attribute names, I use glutDisplayFunc to register this callback:
#define BUFFER_OFFSET(i) ((char *)NULL + (i))
void onDisplay()
{
    glClearColor(0.0, 0.0, 0.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    glUseProgram(program);

    glBindBuffer(GL_ARRAY_BUFFER, ibo_elements);
    glVertexAttribPointer(
        attribute_v_coord,
        3,
        GL_FLOAT,
        GL_FALSE,
        sizeof(struct Vertex),
        BUFFER_OFFSET(0)
    );
    glEnableVertexAttribArray(attribute_v_coord);

    glBindBuffer(GL_ARRAY_BUFFER, colorbuffer);
    glVertexAttribPointer(
        attribute_v_color,
        4,
        GL_FLOAT,
        GL_FALSE,
        sizeof(struct Vertex),
        BUFFER_OFFSET(sizeof(GLfloat) * 3)
    );
    glEnableVertexAttribArray(attribute_v_color);

    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, elementbuffer);
    int size;
    glGetBufferParameteriv(GL_ELEMENT_ARRAY_BUFFER, GL_BUFFER_SIZE, &size);
    glDrawElements(GL_TRIANGLES, size / sizeof(GLushort), GL_UNSIGNED_SHORT, 0);

    glDisableVertexAttribArray(attribute_v_coord);
    glDisableVertexAttribArray(attribute_v_color);
    glutSwapBuffers();
}
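The shader setup itself isn't shown here; what I mean by "binding attribute names" is essentially looking up the attribute locations after linking, roughly like this (simplified sketch; the shader variable names "v_coord" and "v_color" are placeholders for whatever the vertex shader actually declares):
// Attribute lookup after the program is linked (simplified sketch;
// attribute_v_coord and attribute_v_color are GLint globals, and the
// shader variable names below are placeholders):
attribute_v_coord = glGetAttribLocation(program, "v_coord");
attribute_v_color = glGetAttribLocation(program, "v_color");
if (attribute_v_coord == -1 || attribute_v_color == -1) {
    std::cerr << "Could not bind attribute" << std::endl;
}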
Everything is very similar to what I had working before, so I'm guessing it has something to do with the change in data structure. Valgrind shows this error:
==5382== Invalid read of size 4
==5382== at 0x404EF6A: ??? (in /tmp/glR69wrn (deleted))
==5382== by 0x870E8A9: ??? (in /usr/lib/libnvidia-glcore.so.325.15)
==5382== by 0x200000003: ???
==5382== by 0x404EEBF: ??? (in /tmp/glR69wrn (deleted))
==5382== by 0x2: ???
==5382== by 0xAFFC09F: ???
==5382== by 0x41314D3: ???
==5382== by 0x40E6FFF: ??? (in /dev/nvidia0)
==5382== by 0xFFFFFFFE: ???
==5382== Address 0x28 is not stack'd, malloc'd or (recently) free'd
Am I not defining the vertex attribute pointers correctly? It looks like OpenGL is trying to read a float that was never set properly.
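For reference, this is how I understand the interleaved layout is supposed to map onto the two attribute pointers, written with offsetof (which, as far as I can tell, is equivalent to the manual BUFFER_OFFSET/sizeof arithmetic above):
#include <cstddef> // for offsetof
// How I picture the layout, assuming the interleaved vertex buffer is the one
// bound as GL_ARRAY_BUFFER for both calls: stride = sizeof(struct Vertex)
// (28 bytes), position at offset 0, color at offset 12.
glVertexAttribPointer(attribute_v_coord, 3, GL_FLOAT, GL_FALSE,
                      sizeof(struct Vertex), (void *)offsetof(struct Vertex, x));
glVertexAttribPointer(attribute_v_color, 4, GL_FLOAT, GL_FALSE,
                      sizeof(struct Vertex), (void *)offsetof(struct Vertex, r));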