I'm trying to learn OpenGL and C by following the learnopengl.com tutorial and the JDAH series of videos about OpenGL in C. I especially got really inspired by this project of his: Minecraft in one week.
I had a perfectly working code up until the lighting chapter of learnopengl.com but my code was messy and I wanted to refactor and clean up so I changed the Makefile to JDAH's one, split the code in several files. Nothing is working anymore.
I've spent the past 7 hours trying to debug this and I'm learning that debugging segfault in C is not that fun. Anyway, I think I pinpointed the problem, it seems that my second call to glEnableVertexAttribArray
dereferences a null pointer, which creates the segfault. I tried changing GLEW to glad. I reinstalled the libs and tried making changes to the Makefile, but nothing worked; I'm losing hope.
Below you can find my Makefile, a part of my main.c and a picture of the segfault as shown in lldb. I'm running on MacOS 13.2, and using OpenGL 4.1. I have Glew installed in my /usr/local/include but it's not those that are referenced by the makefile, I don't know if that can cause problems.
JDAH's code runs perfectly on my Mac with almost the same Makefile (I didn't include the noise library), but he uses OpenGL 3.3 and I use OpenGL 4.1, so I swapped in a glad folder generated for 4.1.
Makefile
UNAME_S = $(shell uname -s)

CC = clang
CFLAGS = -std=c11 -O3 -g -Wall -Wextra -Wpedantic -Wstrict-aliasing
CFLAGS += -Wno-pointer-arith -Wno-newline-eof -Wno-unused-parameter -Wno-gnu-statement-expression
CFLAGS += -Wno-gnu-compound-literal-initializer -Wno-gnu-zero-variadic-macro-arguments
CFLAGS += -Ilib/cglm/include -Ilib/glad/include -Ilib/glfw/include -Ilib/stb -fbracket-depth=1024
# Vendored static libraries; glad.o is produced by the `libs` target below.
LDFLAGS = lib/glad/src/glad.o lib/cglm/libcglm.a lib/glfw/src/libglfw3.a -lm

# GLFW required frameworks on OSX
ifeq ($(UNAME_S), Darwin)
	LDFLAGS += -framework OpenGL -framework IOKit -framework CoreVideo -framework Cocoa
endif
ifeq ($(UNAME_S), Linux)
	LDFLAGS += -ldl -lpthread
endif

# GNU make has no recursive `**`; each `**` matches exactly one directory
# level, so these wildcards cover src/ up to three levels deep.
SRC = $(wildcard src/**/*.c) $(wildcard src/*.c) $(wildcard src/**/**/*.c) $(wildcard src/**/**/**/*.c)
OBJ = $(SRC:.c=.o)
BIN = bin

# `dirs`, `libs`, and `run` produce no file with their own name; mark them
# .PHONY so a stray file called e.g. `libs` can't silently skip the recipe.
.PHONY: all clean dirs libs run

all: dirs libs game

# Build the vendored dependencies in place (cglm and glfw via cmake,
# glad as a single object file).
libs:
	cd lib/cglm && cmake . -DCGLM_STATIC=ON && make
	cd lib/glad && $(CC) -o src/glad.o -Iinclude -c src/glad.c
	cd lib/glfw && cmake . && make

dirs:
	mkdir -p ./$(BIN)

run: all
	$(BIN)/game

game: $(OBJ)
	$(CC) -o $(BIN)/game $^ $(LDFLAGS)

%.o: %.c
	$(CC) -o $@ -c $< $(CFLAGS)

clean:
	rm -rf $(BIN) $(OBJ)
main.c (there is more code but I didn't include all of it for the sake of readability. LLDB is crashing on the SECOND glEnableVertexAttribArray)
#include "glad/glad.h"
#define GLFW_INCLUDE_NONE
#include "GLFW/glfw3.h"
#include <stdio.h>
#include <math.h>
#include <string.h>
#include <cglm/cglm.h>
#include "gfx/shaders.h"
#define STB_IMAGE_IMPLEMENTATION
#include <stb_image.h>
int main() {
if (!glfwInit()) {
printf("Error initializing GLFW");
exit(EXIT_FAILURE);
}
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 1);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwSetErrorCallback(error_callback);
GLFWwindow* window = glfwCreateWindow(800, 600, "LearnOpenGL", NULL, NULL);
if (window == NULL) {
printf("Failed to create Window\n");
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
glfwSetInputMode(window, GLFW_CURSOR, GLFW_CURSOR_DISABLED);
glfwSetCursorPosCallback(window, mouse_callback);
int version = gladLoadGLLoader((GLADloadproc) glfwGetProcAddress);
if (version == 0) {
printf("Failed to initialize OpenGL context\n");
return -1;
}
// Successfully loaded OpenGL
printf("Loaded OpenGL %d.%d\n", GLVersion.major, GLVersion.minor);
printf("test");
printf("%s", glGetString(GL_VERSION));
printf("test");
//Enable Depth Buffer
glEnable(GL_DEPTH_TEST);
//Define viewport for the size of the window
glfwSetFramebufferSizeCallback(window, framebuffer_size_callback);
//Generate a VAO so we don't have to do this configuration for every vertices
unsigned int VAO, VBO;
glGenVertexArrays(1, &VAO);
//Get Vertex Buffer Object from OpenGL
glGenBuffers(1, &VBO);
//Push vertex data into the buffer
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glBindVertexArray(VAO);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(GL_FLOAT), (void*)0);
glEnableVertexAttribArray(0);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(GL_FLOAT), (void*)(3*sizeof(float)));
glEnableVertexAttribArray(1);
Output of LLDB
Since I'm new at C I might have forgotten something, feel free to ask for more information, I'll do my best to provide them.
(lldb) n
Process 28686 stopped
* thread #1, stop reason = step over
frame #0: 0x000000010000a990 game`main at main.c:238:5 [opt]
235 glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(GL_FLOAT), (void*)0);
236 glEnableVertexAttribArray(0);
237 glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(GL_FLOAT), (void*)(3*sizeof(float)));
-> 238 glEnableVertexAttribArray(1);
239
240
241 unsigned int vertexShader;
Target 0: (game) stopped.
(lldb) n
Process 28686 stopped
* thread #1, stop reason = EXC_BAD_ACCESS (code=1, address=0x0)
frame #0: 0x00007ffa20fc4f00
-> 0x7ffa20fc4f00: movl (%rax), %edi
0x7ffa20fc4f02: popq %rbp
0x7ffa20fc4f03: jmpq *%r8
0x7ffa20fc4f06: pushq %rbp
Target 0: (game) stopped.
EDIT 1 I tried to follow Haris input and catch error codes. The results are weird and I don't really understand everything. I'm logging the version of OpenGL after GLAD loading, this is the lines of code involved :
// Successfully loaded OpenGL
printf("Loaded OpenGL %d.%d\n", GLVersion.major, GLVersion.minor);
GLenum err;
err = glGetError();
printf("Error: %u", err);
printf("%s", glGetString(GL_VERSION));
err = glGetError();
printf("Error2: %u", err);
Since LLDB is going through those lines without complaining I assumed that they were not a problem, but in fact only the first printf appears in the console. This observation is true for every printf after that. I tried to call glGetError()
after every function call, but nothing appears in the console.
Console Output
Loaded OpenGL 4.1
[1] 31313 segmentation fault bin/game
Edit 2
I tried to change error handling so it looks like this as Haris pointed out.
// Successfully loaded OpenGL
printf("Loaded OpenGL %d.%d\n", GLVersion.major, GLVersion.minor);
if(!glGetString(GL_VERSION)) {
err = glGetError();
printf("Error %u", err);
return -1;
}
printf("TEST");
err = glGetError();
printf("Error2: %u", err);
As for Derhass's request, you can find below the rest of my main.c. It was working perfectly fine yesterday before I started changing the Makefile and libs.
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(GL_FLOAT), (void*)(3*sizeof(float)));
err = glGetError();
printf("%u", err);
glEnableVertexAttribArray(1);
err = glGetError();
printf("%u", err);
unsigned int vertexShader;
unsigned int fragmentShaderOrange = glCreateShader(GL_FRAGMENT_SHADER); // the first fragment shader that outputs the color orange
unsigned int shaderProgramOrange = glCreateProgram();
//const char* vertexShaderSourceFromFile = getShader(BASE_VERTEX_SHADER);
compile_shader(&vertexShader, GL_VERTEX_SHADER, BASE_VERTEX_SHADER);
compile_shader(&fragmentShaderOrange, GL_FRAGMENT_SHADER, BASE_FRAGMENT_SHADER);
// link the first program object
glAttachShader(shaderProgramOrange, vertexShader);
glAttachShader(shaderProgramOrange, fragmentShaderOrange);
glLinkProgram(shaderProgramOrange);
// then link the second program object using a different fragment shader (but same vertex shader)
// this is perfectly allowed since the inputs and outputs of both the vertex and fragment shaders are equally matched.
unsigned int texture1;
glGenTextures(1, &texture1);
glBindTexture(GL_TEXTURE_2D, texture1);
// set the texture wrapping/filtering options (on the currently bound texture object)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// load and generate the texture
int width, height, nrChannels;
stbi_set_flip_vertically_on_load(1);
unsigned char *data = stbi_load("./res/tibo.jpg", &width, &height, &nrChannels, 0);
if (data)
{
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, data);
glGenerateMipmap(GL_TEXTURE_2D);
}
else
{
printf("Failed to load texture.");
}
stbi_image_free(data);
unsigned int texture2;
glGenTextures(1, &texture2);
glBindTexture(GL_TEXTURE_2D, texture2);
// set the texture wrapping/filtering options (on the currently bound texture object)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
data = stbi_load("./res/awesomeface.png", &width, &height, &nrChannels, 0);
if (data)
{
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
glGenerateMipmap(GL_TEXTURE_2D);
}
stbi_image_free(data);
glUseProgram(shaderProgramOrange);
glUniform1i(glGetUniformLocation(shaderProgramOrange, "texture1"), 0);
glUniform1i(glGetUniformLocation(shaderProgramOrange, "texture2"), 1);
//#FREE RAM
glDeleteShader(vertexShader);
glDeleteShader(fragmentShaderOrange);
mat4 projection;
glm_perspective(glm_rad(45.0f), 800.0f / 600.0f, 0.1f, 100.0f, projection);
unsigned int projectionLoc = glGetUniformLocation(shaderProgramOrange, "projection");
glUniformMatrix4fv(projectionLoc, 1, GL_FALSE, projection[0]);
// uncomment this call to draw in wireframe polygons.
//glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);
while(!glfwWindowShouldClose(window)) {
processInput(window);
float currentFrame = glfwGetTime();
deltaTime = currentFrame - lastFrame;
lastFrame = currentFrame;
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(shaderProgramOrange);
glBindVertexArray(VAO);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texture1);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, texture2);
vec3 cameraTarget;
glm_vec3_add(cameraFront, cameraPos, cameraTarget);
mat4 view;
glm_lookat(
cameraPos,
cameraTarget,
cameraUp,
view);
unsigned int viewLoc = glGetUniformLocation(shaderProgramOrange, "view");
glUniformMatrix4fv(viewLoc, 1, GL_FALSE, view[0]);
for(unsigned int i = 0; i < 10; i++) {
mat4 model;
glm_mat4_identity(model);
glm_translate(model, cubePositions[i]);
glm_rotate(model, glm_rad(20.0f * i), (vec3){1.0f, 0.3f, 0.5f});
unsigned int modelLoc = glGetUniformLocation(shaderProgramOrange, "model");
glUniformMatrix4fv(modelLoc, 1, GL_FALSE, model[0]);
glDrawArrays(GL_TRIANGLES, 0, 36);
}
//glDrawArrays(GL_TRIANGLES, 0, 3);
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwTerminate();
return 0;
}
EDIT 3
I tried some more old school methods of debugging, aka commenting every line of code and decommenting them one by one until I see the crash. I got something new, a 38900 bus error bin/game
when calling the functions I wrote to compile shaders.
Those functions read shader source code from files and pass it as strings to OpenGL, which compiles it into shader objects and returns their handles.
You can see below the snippet of code that does that. It was working great before my refactoring and I made sure to update my paths when I moved the files
/*
 * Compiles a shader of the given type (GL_VERTEX_SHADER / GL_FRAGMENT_SHADER)
 * from the source identified by shaderFilePath (an integer ID understood by
 * getShader()). On success, *shaderId holds a valid shader object; on any
 * failure, *shaderId is set to 0 and a diagnostic is printed.
 *
 * NOTE(review): ownership of the buffer returned by getShader() is unclear —
 * if it heap-allocates, the string leaks here; confirm and free() if so.
 * Also make sure getShader() keeps the storage alive past its own return
 * (returning a stack buffer would be undefined behavior).
 */
void compile_shader(GLuint* shaderId, GLenum shaderType, int shaderFilePath)
{
    GLint isCompiled = 0;

    /* Calls the Function that loads the Shader source code from a file */
    const char* shaderSource = getShader(shaderFilePath);
    if (shaderSource == NULL) {
        /* Passing a NULL source string to glShaderSource makes the driver
         * dereference NULL — consistent with the observed bus error.
         * Bail out before touching GL. */
        printf("COULD NOT READ SHADER SOURCE: %d\n", shaderFilePath);
        *shaderId = 0;
        return;
    }

    *shaderId = glCreateShader(shaderType);
    if (*shaderId == 0) {
        /* Original code fell through here and kept using id 0; return
         * instead so callers see the failure. */
        printf("COULD NOT LOAD SHADER: %d!\n", shaderFilePath);
        return;
    }

    /* &shaderSource is already const char**; no cast needed. */
    glShaderSource(*shaderId, 1, &shaderSource, NULL);
    glCompileShader(*shaderId);

    glGetShaderiv(*shaderId, GL_COMPILE_STATUS, &isCompiled);
    if (isCompiled == GL_FALSE) {
        /* Surface the actual GLSL compiler log instead of only the file ID. */
        char infoLog[1024];
        glGetShaderInfoLog(*shaderId, sizeof infoLog, NULL, infoLog);
        printf("Shader Compiler Error (%d):\n%s\n", shaderFilePath, infoLog);
        glDeleteShader(*shaderId);
        *shaderId = 0;
        return;
    }
}