
The context is much the same as in this question: Apple OSX OpenGL offline rendering with CoreGL.

I wrote a very small C++/OpenGL program (it could hardly be simpler) that should draw a white triangle offscreen with the help of shaders... but it doesn't. Basically, I use a framebuffer/renderbuffer pair set up within a CoreGL context, inspired by http://renderingpipeline.com/2012/05/windowless-opengl-on-macos-x/. The rendered pixels are then read back into an allocated buffer and written out as a bitmap picture.

The result is odd: the picture shows the correct background color set by my glClearColor call, which makes me think the CoreGL context is successfully initialized... but unfortunately that's all I get (no triangle).

There are no shader compilation/link problems, and nothing is wrong with my framebuffer / VBO / VAO / vertex attribute bindings... yet it's as if my shaders were nonexistent to the CoreGL context.

Here is the function defining and compiling my shaders:

int loadShader()
{
  const GLchar* vertexSource = 
    "#version 150\n"
    "in vec3 position;"
    "void main(void)"
    "{ gl_Position = vec4(position, 1.); }";

  const GLchar* fragmentSource =
    "#version 150\n"
    "out vec4 fragColor;"
    "void main(void)"
    "{ fragColor = vec4(1.); }";

  vertexID = glCreateShader(GL_VERTEX_SHADER);
  fragmentID = glCreateShader(GL_FRAGMENT_SHADER);

  glShaderSource(vertexID, 1, &vertexSource, NULL);
  glShaderSource(fragmentID, 1, &fragmentSource, NULL);

  glCompileShader(vertexID);
  glCompileShader(fragmentID);

  if (!checkShaderCompilation(vertexID) || !checkShaderCompilation(fragmentID))
    throw std::runtime_error("bad shader compilation");

  programID = glCreateProgram();
  if (programID == 0)
    throw std::runtime_error("unable to create shader program");

  glAttachShader(programID, vertexID);
  glAttachShader(programID, fragmentID);
  glLinkProgram(programID);

  GLint programState = 0;
  glGetProgramiv(programID, GL_LINK_STATUS, &programState);
  if (programState != GL_TRUE)
    throw std::runtime_error("bad shader link");

  glUseProgram(programID);

  return 0;
}
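
checkShaderCompilation is not shown above; it is just a thin wrapper around glGetShaderiv / glGetShaderInfoLog. A minimal sketch of what it does (the exact implementation is not important here, and it needs <iostream>):

bool checkShaderCompilation(GLuint shaderID)
{
  // Query the compile status; on failure, print the driver's info log so the
  // offending GLSL line can be identified.
  GLint status = GL_FALSE;
  glGetShaderiv(shaderID, GL_COMPILE_STATUS, &status);
  if (status == GL_TRUE)
    return true;

  GLchar log[1024];
  glGetShaderInfoLog(shaderID, sizeof(log), NULL, log);
  std::cerr << "shader compilation failed: " << log << std::endl;
  return false;
}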

Here is the rendering function:

GLfloat triangleVertices[] =
  {
      0.0f , 0.5f, 0.0f,    
      0.5f, -0.5f, 0.0f,
     -0.5f, -0.5f, 0.0f
  };

void displayTriangle()
{
  glClearColor(0.3, 0.1, 0.1, 1.);
  glPointSize(40.f);

  glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

  loadShader();

  GLint fragmentGPUProcessing, vertexGPUProcessing;
  CGLGetParameter(CGLGetCurrentContext(), kCGLCPGPUFragmentProcessing, &fragmentGPUProcessing);
  CGLGetParameter(CGLGetCurrentContext(), kCGLCPGPUVertexProcessing, &vertexGPUProcessing);

  if (!fragmentGPUProcessing || !vertexGPUProcessing)
    throw std::runtime_error("vertex and fragment shaders seem not correctly bound!");

  GLuint vaoID, vboID;
  glGenVertexArrays(1, &vaoID);
  glBindVertexArray(vaoID);

  glGenBuffers(1, &vboID);
  glBindBuffer(GL_ARRAY_BUFFER, vboID);
  glBufferData(GL_ARRAY_BUFFER, sizeof(triangleVertices), triangleVertices, GL_STATIC_DRAW);

  positionID = glGetAttribLocation(programID, "position");
  if (positionID < 0)
    throw std::runtime_error("unable to find 'position' name in vertex shader!");

  glVertexAttribPointer(positionID, 3, GL_FLOAT, GL_TRUE, 0, NULL);
  glEnableVertexAttribArray(positionID);

  glDrawArrays(GL_TRIANGLES, 0, 3);

  glFinish();
}

And here is main(), which creates the CoreGL context and the framebuffer objects (note that the OpenGL 3.2 core profile is requested):

int main(int argc, char** argv)
{
  CGLContextObj context;

  CGLPixelFormatAttribute attributes[4] = { kCGLPFAOpenGLProfile, (CGLPixelFormatAttribute)kCGLOGLPVersion_3_2_Core,
                        kCGLPFAAccelerated,  // no software rendering
                        (CGLPixelFormatAttribute)0
  };

  CGLPixelFormatObj pix;
  GLint num;
  CGLError errorCode = CGLChoosePixelFormat(attributes, &pix, &num);
  if (errorCode != kCGLNoError)
    throw std::runtime_error("CGLChoosePixelFormat failure");

  errorCode = CGLCreateContext(pix, NULL, &context);
  if (errorCode != kCGLNoError)
    throw std::runtime_error("CGLCreateContext failure");

  CGLDestroyPixelFormat(pix);

  errorCode = CGLSetCurrentContext(context);
  if (errorCode != kCGLNoError)
    throw std::runtime_error("CGLSetCurrentContext failure");

  GLuint framebufferID, renderbufferID;
  glGenFramebuffers(1, &framebufferID);
  glBindFramebuffer(GL_FRAMEBUFFER, framebufferID);

  glGenRenderbuffers(1, &renderbufferID);
  glBindRenderbuffer(GL_RENDERBUFFER, renderbufferID);
  glRenderbufferStorage(GL_RENDERBUFFER, GL_RGB8, WIDTH, HEIGHT);

  glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, renderbufferID);

  if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
    throw std::runtime_error("framebuffer is not complete!");

  displayTriangle();

  GLubyte* buffer = new GLubyte[WIDTH * HEIGHT * 3 * sizeof(GLubyte)];
  glReadPixels(0, 0, WIDTH, HEIGHT, GL_RGB, GL_UNSIGNED_BYTE, buffer);

  writeBitmap("path/to/the/bitmap/file", buffer, WIDTH, HEIGHT);

  glDeleteRenderbuffers(1, &renderbufferID);
  glDeleteFramebuffers(1, &framebufferID);

  CGLSetCurrentContext(NULL);
  CGLDestroyContext(context);

  delete[] buffer;

  return 0;
}
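
writeBitmap is not shown either; it simply dumps the pixel buffer to an image file. As a stand-in, here is a minimal sketch that writes a binary PPM (the exact file format is irrelevant to the problem; it needs <fstream>):

void writeBitmap(const char* path, const GLubyte* pixels, int width, int height)
{
  // glReadPixels returns the image bottom-up, so rows are written in reverse
  // to get an upright picture.
  std::ofstream file(path, std::ios::binary);
  file << "P6\n" << width << " " << height << "\n255\n";
  for (int y = height - 1; y >= 0; --y)
    file.write(reinterpret_cast<const char*>(pixels + y * width * 3), width * 3);
}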

What's wrong?

  • Unrelated, but ... you should consider adding newlines to all of your GLSL lines in the shaders rather than just the pre-processor version directive. If you were to get a compiler log right now, it would be unable to give you line numbers and you'd have to guess which line is problematic. – Andon M. Coleman May 06 '16 at 22:10
  • For starters you could call glGetError and see what comes up. – Andreas May 06 '16 at 22:10
  • `GL_RGB8` is not a supported format for renderbuffers. Try `GL_RGBA8`. – Reto Koradi May 07 '16 at 01:35
  • Andon: thanks. Usually I create files for my shaders, but in this very simple example I do it this way to avoid making the subject too complex. I don't think the problem here comes from the shaders themselves. – aorasy May 09 '16 at 09:09
  • Andreas: Yes, I actually already called it after every line, but unfortunately caught no error from the OpenGL side. It's as if everything were going fine (see the sketch after these comments). – aorasy May 09 '16 at 09:12
  • Reto: Thanks. I tried `GL_RGBA8`, but unfortunately it did not help. As I said, the buffer **is** successfully cleared by the `glClear` call with the color given to `glClearColor`, so maybe I'm wrong, but I don't think the problem comes from the renderbuffer. It's quite odd. – aorasy May 09 '16 at 09:15
  • Update: I have tested the same code with a _GLFW_ context and it works: the triangle is displayed correctly. The problem must come from the _CoreGL_ library itself. I really would like to use _CoreGL_ for its native offscreen rendering support, instead of _GLFW_, whose "fake" offscreen mode consists of displaying an invisible window... – aorasy May 09 '16 at 10:19
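
For completeness, the glGetError check mentioned in the comments above was just a small helper sprinkled after each GL call. A minimal sketch (the helper name is mine, not part of the code above; needs <iostream>):

void checkGLError(const char* where)
{
  // Drain the OpenGL error queue and report anything pending.
  for (GLenum err = glGetError(); err != GL_NO_ERROR; err = glGetError())
    std::cerr << "GL error 0x" << std::hex << err << std::dec << " at " << where << std::endl;
}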

1 Answer


I finally managed to fix the problem. I actually forgot to set the viewport with this command:

glViewport(0, 0, WIDTH, HEIGHT);

Now, everything looks good and my shaders are correctly used.

This code sample, reduced here to its most basic form, is actually part of a cross-platform offscreen rendering project. I had already made it work on Linux with the help of the OSMesa library, which means that, unlike CoreGL, OSMesa does not require the viewport to be initialized to render correctly. Good to know!
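
For reference, the call can go in main() right after the framebuffer completeness check, before displayTriangle() is called; any point before the draw call, with the context current, works:

  if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
    throw std::runtime_error("framebuffer is not complete!");

  // Map normalized device coordinates to the full WIDTH x HEIGHT renderbuffer.
  // Without this, the windowless context has no sensible default viewport, so
  // the triangle is clipped away even though glClear (which ignores the
  // viewport) still fills the background.
  glViewport(0, 0, WIDTH, HEIGHT);

  displayTriangle();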
