2

Learning to display images using QOpenGLWidget. However, I've met some problems.

  1. How can I pass the GLuint texture variable (the actual texture loaded from the image) into the shader scripts? Like how to bind GLuint texture to uniform sampler2D texture? Maybe I am just not realising I already did that.
  2. What's the difference between attribute vec4 vertexColorIn and uniform sampler2D texture? I think the color comes from the texture.
  3. Can I use glTexCoord2f() and glVertex2f() instead of glVertexAttribPointer() and glVertexAttribPointer()? It's because they seem better to me.

I am still not clear on the concept about how openGL displays an image, although I've done many researches. I'm not quit sure what I'm doing wrong. The image is NOT showing up.

MyGLWiget.cpp

shader scipts:

#define STR(x) #x
#define VS_LOCATION 0
#define FS_LOCATION 1

const char* vertextShader = STR(
    attribute vec4 position;
    attribute vec4 vertexColorIn;
    varying vec4 vertexColorOut;
    void main(void)
    {
        gl_Position = position;
        vertexColorOut = vertexColorIn;
    }
);

const char* fragmentShader = STR(
    varying vec4 vertexColorOut;
    uniform sampler2D texture;
    void main(void)
    {
        ??? = texture2D(???, textureOut).r // no clue how to use it
        gl_FragColor = vertexColorOut;
    }   
);

loading an Image texture:

void MyGLWiget::loadTexture(const char* file_path)
{
    img_data = SOIL_load_image(file_path, &width, &height, &channels, SOIL_LOAD_RGB);

    glEnable(GL_TEXTURE_2D);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, img_data);

    SOIL_free_image_data(img_data);
}

initialization:

void MyGLWiget::initializeGL()
{
    initializeOpenGLFunctions();

    program.addShaderFromSourceCode(QGLShader::Vertex, vertextShader);
    program.bindAttributeLocation("position", VS_LOCATION);

    program.addShaderFromSourceCode(QGLShader::Fragment, fragmentShader);
    program.bindAttributeLocation("vertexColorIn", FS_LOCATION);

    program.link();
    program.bind();


    static const GLfloat ver[] = {
        -1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f
    };

    static const GLfloat  tex[] = {
        0.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f
    };

    glVertexAttribPointer(VS_LOCATION, 2, GL_FLOAT, 0, 0, ver);
    glEnableVertexAttribArray(VS_LOCATION);

    glVertexAttribPointer(FS_LOCATION, 2, GL_FLOAT, 0, 0, tex);
    glEnableVertexAttribArray(FS_LOCATION);

    program.setUniformValue("texture", texture); 
    //texture = program.uniformLocation("texture");
}

paintGL:

I'm really confused with this part. I have no idea what should I use to make it to draw an image.

void MyGLWiget::paintGL()
{
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_RGB, GL_UNSIGNED_BYTE, img_data);
    glUniform1i(texture, 0);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 1);
}
SpellTheif
  • 715
  • 2
  • 12
  • 26

2 Answers2

3

How can I pass the GLuint texture variable (the actual texture loaded from the image) into the shader scripts? Like how to bind GLuint texture to uniform sampler2D texture? Maybe I am just not realising I already did that.

This binds the texture to texture unit 0:

glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texture);

This is invalid because texture is not a uniform location, so remove this line:

glUniform1i(texture, 0); // <-- invalid

This is invalid too, because the uniform texture should be set to the number of the texture unit:

program.setUniformValue("texture", texture); // <-- invalid

So replace it with:

program.setUniformValue("texture", 0); // <-- sampler2D texture uses GL_TEXTURE0

Note: I'm assuming here that setUniformValue works correctly.


What's the difference between attribute vec4 vertexColorIn and uniform sampler2D texture? I think the color comes from the texture.

vertexColorIn comes from the VAO and is different for each vertex. texture is the sampler that samples from the texture that's bound to the texture unit that you set above.

In your code you don't need a vertex color, but you do need texture coordinates. So your shaders should look like:

const char* vertextShader = STR(
    attribute vec4 position;
    attribute vec4 texcoordIn;
    varying vec4 texcoordOut;
    void main(void)
    {
        gl_Position = position;
        texcoordOut = texcoordIn;
    }
);

const char* fragmentShader = STR(
    varying vec4 texcoordOut;
    uniform sampler2D texture;
    void main(void)
    {
        gl_FragColor = texture2D(texture, texcoordOut);
    }   
);

Can I use glTexCoord2f() and glVertex2f() instead of glVertexAttribPointer() and glVertexAttribPointer()? It's because they seem better to me.

glTexCoord2f and glVertex2f are legacy functions that were removed in OpenGL 3, and are available only in the compatibility profile. You shall not use them.


This lines are in the wrong place:

 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

They shall go after you bound the texture:

 glGenTextures(1, &texture);
 glBindTexture(GL_TEXTURE_2D, texture);
 glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, img_data);
 // sets the filtering for the bound texture:
 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

Since the question is tagged : you don't need to set any uniforms in this case. You can specify the locations and the bindings directly in the shaders:

const char* vertextShader =
    "#version 450 core\n" STR(
    layout(location = 0) in vec4 position;
    layout(location = 1) in vec4 texcoordIn;
    layout(location = 0) out vec4 texcoordOut;
    void main(void)
    {
        gl_Position = position;
        texcoordOut = texcoordIn;
    }
);

const char* fragmentShader =
    "#version 450 core\n" STR(
    layout(location = 0) in vec4 texcoord;
    layout(binding = 0) uniform sampler2D TEX;
    layout(location = 0) out vec4 OUT;
    void main(void)
    {
        OUT = texture(TEX, texcoord);
    }   
);
Yakov Galka
  • 70,775
  • 16
  • 139
  • 220
  • Thanks for your explaination! But it's still not showing the image. Do I actually need to use `glBindTexture()` twice, one in `loadTexture()` and the other in `paintGL()`? – SpellTheif Apr 16 '19 at 07:22
  • 1
    @Jinx: There's no harm in doing that. In fact it's better this way because some other code might change the binding between those two calls. However, you shouldn't call `glTexSubImage2D` every time you render the frame, only once you load the texture. – Yakov Galka Apr 16 '19 at 07:25
  • I know why it wasn't working. I changed `glDrawArrays(GL_TRIANGLE_STRIP, 0, 1)` to `glDrawArrays(GL_TRIANGLE_STRIP, 0, 4)`. Thank you so much, I learnt seomthing. And it won't work without your corretion. – SpellTheif Apr 16 '19 at 07:42
1

a few edits

   const char* vertextShader = STR(
    attribute vec4 position;
    attribute vec4 vertexColorIn;
    varying vec4 vertexColorOut;
    out vec2 TexCoord;//--->add
    void main(void)
    {
        gl_Position = position;
        vertexColorOut = vertexColorIn;
        TexCoord = vec2(aPos.x/2.0+0.5, 0.5-aPos.y/2.0);//a hack,ideally you need to pass the UV coordinates for proper texture mapping.UVs need to be passed in as a uniform or an attribute depending on preference.
    }
);

const char* fragmentShader = STR(
    varying vec4 vertexColorOut;
    uniform sampler2D texture;
    in vec2 TexCoord; //---->add
    void main(void)
    {
        gl_FragColor  = texture2D(texture,TexCoord) //( no clue how to use it) -->here is the change
        //gl_FragColor = vertexColorOut;
    }   
);