When I use a custom column-major matrix in my code and pass it to the vertex shader, the triangle is not drawn as expected, but when I use a row-major matrix, it draws the triangle in its correct position.
I googled it and found some answers related to this question, like this and this, but I could not understand what I'm doing wrong.
If I'm not mistaken, a row-major matrix is:
{  0,  1,  2,  3,
   4,  5,  6,  7,
   8,  9, 10, 11,
  Tx, Ty, Tz,  w }
So, using this row-major matrix, the multiplication order should be: v' = v*M.
And a column-major matrix is:
{  0,  4,  8, Tx,
   1,  5,  9, Ty,
   2,  6, 10, Tz,
   3,  7, 11,  w }
Using this column-major matrix, the multiplication order should be: v' = M*v.
Where Tx, Ty, and Tz hold the translation values for x, y and z, respectively.
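In other words, the two layouts above hold the same 16 values, just transposed relative to each other; a minimal sketch of what I mean (the helper is hypothetical, just for illustration):

// The two layouts above are transposes of each other: the element at
// flat index i in one layout sits at flat index (i % 4) * 4 + i / 4
// in the other (e.g. Tx moves between index 12 and index 3).
int transposed_index(int i) { return (i % 4) * 4 + i / 4; }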
Having said that, I will focus on what I think I'm having trouble with, to keep the question compact, but I will post example code at the end, using GLFW and GLAD (<glad/gl.h>).
This is my vertex shader:
#version 330 core
layout (location = 0) in vec3 aPos;
uniform mat4 transform;
void main()
{
    gl_Position = transform * vec4(aPos, 1.0);
}
These are my Mat4 struct and its functions:
typedef struct Mat4
{
    float data[16];
} Mat4;
// Return Mat4 identity matrix
Mat4 mat4_identity()
{
    Mat4 m = {0};
    m.data[0]  = 1.0f;
    m.data[5]  = 1.0f;
    m.data[10] = 1.0f;
    m.data[15] = 1.0f;
    return m;
}
// Translate Mat4 using row-major order
Mat4 mat4_row_translation(Mat4 a, float x, float y, float z)
{
    Mat4 m = mat4_identity(); // note: the input matrix 'a' is not used yet
    m.data[12] += x;
    m.data[13] += y;
    m.data[14] += z;
    return m;
}
// Translate Mat4 using column-major order
Mat4 mat4_column_translation(Mat4 a, float x, float y, float z)
{
    Mat4 m = mat4_identity(); // note: the input matrix 'a' is not used yet
    m.data[3]  += x;
    m.data[7]  += y;
    m.data[11] += z;
    return m;
}
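To sanity-check a translation on the CPU before involving OpenGL at all, I figure a matrix*vector product in the column-major convention (element (row, col) at flat index col * 4 + row) would look something like this sketch (mat4_mul_vec4 is my own name, not part of my code):

// v' = M * v for a column-major matrix: out[row] accumulates the dot
// product of matrix row 'row' with the input vector v.
void mat4_mul_vec4(const Mat4 *m, const float v[4], float out[4])
{
    for (int row = 0; row < 4; row++)
    {
        out[row] = 0.0f;
        for (int col = 0; col < 4; col++)
            out[row] += m->data[col * 4 + row] * v[col];
    }
}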
This is my update_triangle function where I translate the matrix:
Mat4 trans = mat4_identity();
trans = mat4_column_translation(trans, 0.5f, 0.5f, 0.0f);
unsigned int transformLoc = glGetUniformLocation(shader, "transform");
glUniformMatrix4fv(transformLoc, 1, GL_FALSE, trans.data);
Note that I'm passing GL_FALSE to glUniformMatrix4fv, which tells OpenGL that the matrix is already in column-major order.
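As I understand it, passing GL_TRUE instead would be equivalent to transposing the matrix myself before the upload, with something like this hypothetical helper (not in my current code):

// Swap element (row, col) with (col, row), i.e. flip between the two
// flat layouts shown earlier.
Mat4 mat4_transpose(Mat4 m)
{
    Mat4 t;
    for (int row = 0; row < 4; row++)
        for (int col = 0; col < 4; col++)
            t.data[col * 4 + row] = m.data[row * 4 + col];
    return t;
}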
However, when running the program, I do not get a triangle moved 0.5 up and 0.5 to the right; I get this: Weird triangle translation
But when I use a row-major matrix and change the multiplication order in the vertex shader (v' = v*M), I get the result I was expecting.
The vertex shader:
#version 330 core
layout (location = 0) in vec3 aPos;
uniform mat4 transform;
void main()
{
    gl_Position = vec4(aPos, 1.0) * transform;
}
The update_triangle function:
Mat4 trans = mat4_identity();
trans = mat4_row_translation(trans, 0.5f, 0.5f, 0.0f);
unsigned int transformLoc = glGetUniformLocation(shader, "transform");
glUniformMatrix4fv(transformLoc, 1, GL_TRUE, trans.data);
Note that I'm passing GL_TRUE to glUniformMatrix4fv, which tells OpenGL that the matrix is not in column-major order, so it should be transposed on upload.
The result: Triangle drawn as expected
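As far as I can tell, my two translation functions really do build transposes of each other, so I expected the two code paths to be interchangeable; a quick check along these lines (using the mat4_transpose sketch from above) prints identical pairs:

// Transposing the row-major translation should match the column-major
// one element for element.
Mat4 r  = mat4_row_translation(mat4_identity(), 0.5f, 0.5f, 0.0f);
Mat4 c  = mat4_column_translation(mat4_identity(), 0.5f, 0.5f, 0.0f);
Mat4 rt = mat4_transpose(r);
for (int i = 0; i < 16; i++)
    printf("%.2f %.2f\n", rt.data[i], c.data[i]);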
Here is the code in a single file; it needs to be compiled with GLFW and glad/gl.c.
Comment [0] and Comment [1] are just there to mark which lines to comment together. For example, if you comment out a line marked "// Comment [0]", you need to comment out the other "// Comment [0]" lines as well. But in the vertex shader, both matrices use the same line to be drawn correctly (which is why I don't understand).
If you are on Linux, you can compile it with: g++ -o ex example.cpp gl.c -lglfw && ./ex (you will need to download gl.c from the GLAD generator).
Code:
#include <glad/gl.h>
#include <GLFW/glfw3.h>
#include <stdio.h>
#include <stdlib.h>
#include <alloca.h> // for alloca() used in compile_shader (Linux/macOS)
// Mat4 structure
typedef struct Mat4
{
    float data[16];
} Mat4;
int c = 0; // one-shot flag for the debug print in update_triangle
// Return Mat4 identity matrix
Mat4 mat4_identity()
{
    Mat4 m = {0};
    m.data[0]  = 1.0f;
    m.data[5]  = 1.0f;
    m.data[10] = 1.0f;
    m.data[15] = 1.0f;
    return m;
}
// Translate Mat4 using row-major order
Mat4 mat4_row_translation(Mat4 a, float x, float y, float z)
{
    Mat4 m = mat4_identity(); // note: the input matrix 'a' is not used yet
    m.data[12] += x;
    m.data[13] += y;
    m.data[14] += z;
    return m;
}
// Translate Mat4 using column-major order
Mat4 mat4_column_translation(Mat4 a, float x, float y, float z)
{
    Mat4 m = mat4_identity(); // note: the input matrix 'a' is not used yet
    m.data[3]  += x;
    m.data[7]  += y;
    m.data[11] += z;
    return m;
}
GLFWwindow *glfw_window;
// Window functions
int init_glfw(const char *window_title, int x, int y, int width, int height);
void framebuffer_size_callback(GLFWwindow* window, int width, int height);
void processInput();
// Shader functions
static unsigned int compile_shader(unsigned int type, const char *source);
static unsigned int create_shader(const char *vertex_shader, const char *fragment_shader);
// Triangle functions
void init_triangle();
void draw_triangle();
void update_triangle();
unsigned int shader = -1;
unsigned int vao = -1;
unsigned int vbo = -1;
float vertices[] = {
    -0.5f, -0.5f, 0.0f, // left
     0.5f, -0.5f, 0.0f, // right
     0.0f,  0.5f, 0.0f  // top
};
const char *vshader = "#version 330 core\n"
    "layout (location = 0) in vec3 aPos;\n"
    "uniform mat4 transform;\n"
    "void main()\n"
    "{\n"
    // "    gl_Position = vec4(aPos, 1.0) * transform;\n" // Comment [0] -> Inverted for column-major
    "    gl_Position = transform * vec4(aPos, 1.0);\n" // Comment [1] -> Inverted for column-major
    "}\0";

const char *fshader = "#version 330 core\n"
    "out vec4 FragColor;\n"
    "void main()\n"
    "{\n"
    "    FragColor = vec4(1.0f, 0.5f, 0.2f, 1.0f);\n"
    "}\n\0";
int main()
{
    int result = init_glfw("LearnOpenGL", 0, 0, 800, 600);
    if(result != 0)
        return result;

    init_triangle();

    while (!glfwWindowShouldClose(glfw_window))
    {
        // input
        processInput();

        // Update triangle vertices
        update_triangle();

        glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
        glClear(GL_COLOR_BUFFER_BIT);

        // Draw triangle example
        draw_triangle();

        // glfw: swap buffers and poll IO events (keys pressed/released, mouse moved etc.)
        glfwSwapBuffers(glfw_window);
        glfwPollEvents();
    }

    // glfw: terminate, clearing all previously allocated GLFW resources.
    glfwTerminate();
    return 0;
}
// My confusion is here
void update_triangle()
{
    Mat4 trans = mat4_identity();
    trans = mat4_column_translation(trans, 0.5f, 0.5f, 0.0f); // Comment [0]
    // trans = mat4_row_translation(trans, 0.5f, 0.5f, 0.0f); // Comment [1]

    // Print Mat4
    if(c == 0)
    {
        // TODO: Remove this
        printf("==== Trans: ====\n");
        for(int i = 0; i < 16; i++)
        {
            printf("%.2f, ", trans.data[i]);
            if(i % 4 == 3)
                printf("\n");
        }
        c++;
    }

    unsigned int transformLoc = glGetUniformLocation(shader, "transform");
    glUniformMatrix4fv(transformLoc, 1, GL_FALSE, trans.data); // Comment [0]
    // glUniformMatrix4fv(transformLoc, 1, GL_TRUE, trans.data); // Comment [1]
}
// Window functions
int init_glfw(const char *window_title, int x, int y, int width, int height)
{
    // glfw: initialize and configure
    // ------------------------------
    glfwInit();
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

#ifdef __APPLE__
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
#endif

    // glfw window creation
    // --------------------
    glfw_window = glfwCreateWindow(width, height, window_title, NULL, NULL);
    if (glfw_window == NULL)
    {
        printf("Failed to create GLFW window\n");
        glfwTerminate();
        return -1;
    }
    glfwMakeContextCurrent(glfw_window);
    glfwSetFramebufferSizeCallback(glfw_window, framebuffer_size_callback);

    // glad: load all OpenGL function pointers
    // ---------------------------------------
    int version = gladLoadGL(glfwGetProcAddress);
    if (version == 0)
    {
        printf("Failed to initialize OpenGL context\n");
        return -1;
    }
    printf("Current GL loaded: %d.%d\n", GLAD_VERSION_MAJOR(version), GLAD_VERSION_MINOR(version));
    return 0;
}
void framebuffer_size_callback(GLFWwindow* window, int width, int height)
{
    glViewport(0, 0, width, height);
}

void processInput()
{
    if(glfwGetKey(glfw_window, GLFW_KEY_ESCAPE) == GLFW_PRESS)
        glfwSetWindowShouldClose(glfw_window, GLFW_TRUE);
}
/* Default Compilation for Shader */
static unsigned int compile_shader(unsigned int type, const char *source)
{
    unsigned int id = glCreateShader(type);
    glShaderSource(id, 1, &source, NULL);
    glCompileShader(id);

    int result;
    glGetShaderiv(id, GL_COMPILE_STATUS, &result);
    if(!result)
    {
        int length;
        glGetShaderiv(id, GL_INFO_LOG_LENGTH, &length);
        char* msg = (char*) alloca(length * sizeof(char));
        glGetShaderInfoLog(id, length, &length, msg);
        printf("Vertex / Fragment Shader Failed:\n %s", msg);
        glDeleteShader(id);
        return 0;
    }
    return id;
}
static unsigned int create_shader(const char *vertex_shader, const char *fragment_shader)
{
    unsigned int program = glCreateProgram();
    unsigned int vs = compile_shader(GL_VERTEX_SHADER, vertex_shader);
    unsigned int fs = compile_shader(GL_FRAGMENT_SHADER, fragment_shader);

    glAttachShader(program, vs);
    glAttachShader(program, fs);
    glLinkProgram(program);
    glValidateProgram(program);

    glDeleteShader(vs);
    glDeleteShader(fs);
    return program;
}
// Triangle functions
void init_triangle()
{
    shader = create_shader(vshader, fshader);
    printf("shader=%u\n", shader);
    glUseProgram(shader);

    glGenVertexArrays(1, &vao);
    printf("vao=%u\n", vao);
    glBindVertexArray(vao);

    glGenBuffers(1, &vbo);
    printf("vbo=%u\n", vbo);
    glBindBuffer(GL_ARRAY_BUFFER, vbo); // Using this vbo
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);

    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), NULL);
}

void draw_triangle()
{
    glUseProgram(shader);
    glBindVertexArray(vao);
    glDrawArrays(GL_TRIANGLES, 0, 3);
}
This is my first question on this forum, so please let me know if anything is missing.