
Currently, I am trying to draw an image using OpenGL (the image updates very often, and thus must be redrawn frequently). Previously, I converted the image from YUV to RGB and then used the converted image to draw with OpenGL. That all worked fine, but the conversion was not particularly fast.

I am now attempting to change the code so that the conversion is handled in the OpenGL shaders. After looking around, I've found a couple of code snippets (particularly the shaders and the bulk of my renderImage function) that have helped me get a baseline, but I can't seem to get the image to draw properly - all I ever see is a black image.

It's quite likely that I'm missing something extremely simple and important - my experience with OpenGL is rather limited. If anyone can take a look and see if they recognize anything wrong, please let me know.

I should point out that I'm trying to support iOS 4.x, so CVOpenGLESTextureCacheCreateTextureFromImage shouldn't be usable (and I'm not really sure how to set it up even if I wanted to).

Any help would be appreciated. Code below -

My Vertex shader:

attribute vec4 position;
attribute vec4 inputTextureCoordinate;

varying vec2 textureCoordinate;

void main()
{
    gl_Position = position;
    textureCoordinate = inputTextureCoordinate.xy;
}

Fragment Shader:

#ifdef GL_ES
precision mediump float;
#endif

varying vec2 textureCoordinate;

uniform sampler2D videoFrame;
uniform sampler2D videoFrameUV;

const mat3 yuv2rgb = mat3(
                          1, 0, 1.2802,
                          1, -0.214821, -0.380589,
                          1, 2.127982, 0
                          );

void main() {
    vec3 yuv = vec3(
                    1.1643 * (texture2D(videoFrame, textureCoordinate).r - 0.0625),
                    texture2D(videoFrameUV, textureCoordinate).r - 0.5,
                    texture2D(videoFrameUV, textureCoordinate).a - 0.5
                    );
    vec3 rgb = yuv * yuv2rgb;

    gl_FragColor = vec4(rgb, 1.0);
}
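
For reference, the BT.601 conversion for video-range YCbCr - which is what the 1.1643 and 0.0625 terms above assume, with everything normalized to 0..1 - is usually given as

r = y + 1.596 * v
g = y - 0.391 * u - 0.813 * v
b = y + 2.018 * u

where y = 1.1643 * (Y - 0.0625), u = Cb - 0.5, and v = Cr - 0.5. One detail worth knowing when reading the shader: GLSL's mat3 constructor is column-major and yuv * yuv2rgb multiplies with yuv as a row vector, so the two conventions cancel out and each triple written in the constructor acts as the row of coefficients for one output channel (r uses 1, 0, 1.2802, and so on). Those coefficients differ from the BT.601 values above, which would shift the colors but would not, on its own, produce an all-black image.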

The renderImage function:

-(void)renderImage:(ImageBuffer *)image
{
    if (image)
    {        
        int bufferHeight = image->GetHeight();
        int bufferWidth = image->GetWidth();

        if(!imageTexture){
            // Dealing with the Y portion of the YCbCr
            glActiveTexture(GL_TEXTURE0);
            glGenTextures(1, &imageTexture);
            //Bind Y texture
            glBindTexture(GL_TEXTURE_2D, imageTexture);
            glUniform1i(uniforms[UNIFORM_VIDEOFRAME], 0);
            // For fitting
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            // This is necessary for non-power-of-two textures
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

            glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, bufferWidth, bufferHeight, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, image->GetPlanePointer(0));

            // Dealing with the CbCr portion of the YCbCr
            glActiveTexture(GL_TEXTURE1);
            glGenTextures(1, &imageTextureUV);
            //Bind CbCr texture
            glBindTexture(GL_TEXTURE_2D, imageTextureUV);
            glUniform1i(uniforms[UNIFORM_VIDEOFRAMEUV], 1);
            // For fitting
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            // This is necessary for non-power-of-two textures
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

            glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, 0, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, image->GetPlanePointer(1));

        }

        [self drawFrame];
    }
}
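
One thing to note about renderImage: the glTexImage2D uploads happen only inside the if(!imageTexture) block, so only the first frame's pixel data ever reaches the GPU. Since the image updates often, the plane data needs to be re-uploaded on every call. A minimal sketch of that update path, reusing the names above and assuming the buffer dimensions stay constant between frames (glTexSubImage2D reuses the storage allocated by the initial glTexImage2D calls):

// Refresh the Y plane every frame
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, imageTexture);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, bufferWidth, bufferHeight,
                GL_LUMINANCE, GL_UNSIGNED_BYTE, image->GetPlanePointer(0));

// Refresh the interleaved CbCr plane every frame
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, imageTextureUV);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, bufferWidth/2, bufferHeight/2,
                GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, image->GetPlanePointer(1));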

And finally, the drawFrame function:

- (void)drawFrame
{
    [self setFramebuffer];

    // Replace the implementation of this method to do your own custom drawing.
    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
        1.0f, -1.0f,
        -1.0f, 1.0f,
        1.0f, 1.0f
    };

    static const GLfloat textureVertices[] = {
//        1.0f, 1.0f,
//        1.0f, 0.0f,
//        0.0f,  1.0f,
//        0.0f,  0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f
//        0.0f, 0.8f,
//        1.0f, 0.8f,
//        0.0f, 0.2f,
//        1.0f, 0.2f
    };

    static float transY = 0.0f;


    if ([context API] == kEAGLRenderingAPIOpenGLES2) {
        // Use shader program.
        glUseProgram(program);

        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, imageTexture);

        // Update attribute values.
        glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, squareVertices);
        glEnableVertexAttribArray(ATTRIB_VERTEX);
        glVertexAttribPointer(ATTRIB_TEXTUREPOSITON, 2, GL_FLOAT, 0, 0, textureVertices);
        glEnableVertexAttribArray(ATTRIB_TEXTUREPOSITON);

        glActiveTexture(GL_TEXTURE1);
        glBindTexture(GL_TEXTURE_2D, imageTextureUV);

        // Update attribute values.
        glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, squareVertices);
        glEnableVertexAttribArray(ATTRIB_VERTEX);
        glVertexAttribPointer(ATTRIB_TEXTUREPOSITON, 2, GL_FLOAT, 0, 0, textureVertices);
        glEnableVertexAttribArray(ATTRIB_TEXTUREPOSITON);

        // Validate program before drawing. This is a good check, but only really necessary in a debug build.
        // DEBUG macro must be defined in your debug configurations if that's not already the case.
#if defined(DEBUG)
        if (![self validateProgram:program]) {
            NSLog(@"Failed to validate program: %d", program);
            return;
        }
#endif
    } else {
        glMatrixMode(GL_PROJECTION);
        glLoadIdentity();
        glMatrixMode(GL_MODELVIEW);
        glLoadIdentity();
        glTranslatef(0.0f, (GLfloat)(sinf(transY)/2.0f), 0.0f);
        transY += 0.075f;

        glVertexPointer(2, GL_FLOAT, 0, squareVertices);
        glEnableClientState(GL_VERTEX_ARRAY);
    } 

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    [self presentFramebuffer];
}
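
Two details in drawFrame are worth calling out: vertex attributes and sampler uniforms are program state rather than texture state, so the second block of glVertexAttribPointer / glEnableVertexAttribArray calls after binding the UV texture is redundant, and glUniform1i only takes effect on the program that is currently in use. A condensed sketch of the ES 2.0 draw path, using the same names as the code above:

glUseProgram(program);

// Bind each plane to its own texture unit and point the samplers at those units
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, imageTexture);
glUniform1i(uniforms[UNIFORM_VIDEOFRAME], 0);

glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, imageTextureUV);
glUniform1i(uniforms[UNIFORM_VIDEOFRAMEUV], 1);

// Attributes only need to be set once per draw, not once per texture
glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, GL_FALSE, 0, squareVertices);
glEnableVertexAttribArray(ATTRIB_VERTEX);
glVertexAttribPointer(ATTRIB_TEXTUREPOSITON, 2, GL_FLOAT, GL_FALSE, 0, textureVertices);
glEnableVertexAttribArray(ATTRIB_TEXTUREPOSITON);

glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
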
Comments:
  • Has your shader program been compiled before -renderImage: is called? You might need to put a `glUseProgram(program);` before your `glUniform1i()` calls to make sure that the program is active and that you're setting the texture uniforms on it properly. Also, make sure that you've set the current context before you do this. These are two things that have tripped me up in the past. – Brad Larson Sep 14 '12 at 16:59
  • yeah, I do various basic setup in a separate function called earlier - setting the context, compiling the shaders, and linking the program should all be done before renderImage() is called. I've moved a glUseProgram() call before the glUniform1i() calls, but doesn't seem to change anything unfortunately. – Doc Sep 14 '12 at 18:11
  • Pardon my ignorance of what is exposed in iOS's OpenGL ES, but the PowerVR GPU hardware can decode certain YUV textures (at least according to http://pandorawiki.org/OpenGLES_On_the_Pandora#Pandora_3D_acceleration_capabilities_.28PowerVR_SGX.29). Is there no OpenGL ES extension? – Simon F Oct 29 '12 at 14:22

1 Answer

Here are some snippets of code from my 'movie player for iOS' project.

1. fragment shader

varying highp vec2 v_texcoord;  
uniform sampler2D s_texture_y;  
uniform sampler2D s_texture_u;  
uniform sampler2D s_texture_v;   
void main() {  
    highp float y = texture2D(s_texture_y, v_texcoord).r;  
    highp float u = texture2D(s_texture_u, v_texcoord).r - 0.5;  
    highp float v = texture2D(s_texture_v, v_texcoord).r - 0.5;  
    highp float r = y +             1.402 * v;  
    highp float g = y - 0.344 * u - 0.714 * v;  
    highp float b = y + 1.772 * u;  
    gl_FragColor = vec4(r,g,b,1.0);  
}
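
Note that these are the full-range BT.601 coefficients, i.e. they assume the luma values already span the full 0..1 range. If the decoded frames are video-range (luma 16..235), y would first need the same scaling used in the question's shader, y = 1.1643 * (Y - 0.0625).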

2. create textures from YUV420p frame

glPixelStorei(GL_UNPACK_ALIGNMENT, 1);  
glGenTextures(3, _textures);  
const UInt8 *pixels[3] = { yuvFrame.luma.bytes, yuvFrame.chromaB.bytes, yuvFrame.chromaR.bytes };  
const NSUInteger widths[3]  = { frameWidth, frameWidth / 2, frameWidth / 2 };  
const NSUInteger heights[3] = { frameHeight, frameHeight / 2, frameHeight / 2 };  
for (int i = 0; i < 3; ++i) {  
    glBindTexture(GL_TEXTURE_2D, _textures[i]);  
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, widths[i], heights[i], 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, pixels[i]);  
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);  
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);  
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);  
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);  
}

3. init vertices and texture coords

static const GLfloat texCoords[] = { 0.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f };  
static const GLfloat vertices[]= {-1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f };

4. render frame

[EAGLContext setCurrentContext:_context];  
glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer);  
glViewport(0, 0, _backingWidth, _backingHeight);  
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);  
glClear(GL_COLOR_BUFFER_BIT);  
glUseProgram(_program);  
for (int i = 0; i < 3; ++i) {  
    glActiveTexture(GL_TEXTURE0 + i);  
    glBindTexture(GL_TEXTURE_2D, _textures[i]);  
    glUniform1i(_uniformSamplers[i], i);  
}  
glVertexAttribPointer(ATTRIBUTE_VERTEX, 2, GL_FLOAT, 0, 0, vertices);  
glEnableVertexAttribArray(ATTRIBUTE_VERTEX);  
glVertexAttribPointer(ATTRIBUTE_TEXCOORD, 2, GL_FLOAT, 0, 0, texCoords);  
glEnableVertexAttribArray(ATTRIBUTE_TEXCOORD);  
glBindRenderbuffer(GL_RENDERBUFFER, _renderbuffer);  
[_context presentRenderbuffer:GL_RENDERBUFFER];
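
These snippets assume _program, ATTRIBUTE_VERTEX, ATTRIBUTE_TEXCOORD and _uniformSamplers are prepared elsewhere. A rough sketch of that setup, under the assumption that the vertex shader declares attributes named "position" and "texcoord" (those names, the loadShader:type: helper, and the source variables are placeholders, not code from the project above):

_program = glCreateProgram();
GLuint vertShader = [self loadShader:vertexShaderSource type:GL_VERTEX_SHADER];
GLuint fragShader = [self loadShader:fragmentShaderSource type:GL_FRAGMENT_SHADER];
glAttachShader(_program, vertShader);
glAttachShader(_program, fragShader);

// Attribute locations must be bound before linking so the draw code can rely on them
glBindAttribLocation(_program, ATTRIBUTE_VERTEX, "position");
glBindAttribLocation(_program, ATTRIBUTE_TEXCOORD, "texcoord");

glLinkProgram(_program);

GLint status = GL_FALSE;
glGetProgramiv(_program, GL_LINK_STATUS, &status);
if (status == GL_FALSE) {
    NSLog(@"Failed to link program %d", _program);
}

// Sampler uniforms are looked up after a successful link
_uniformSamplers[0] = glGetUniformLocation(_program, "s_texture_y");
_uniformSamplers[1] = glGetUniformLocation(_program, "s_texture_u");
_uniformSamplers[2] = glGetUniformLocation(_program, "s_texture_v");
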

The link to the project on GitHub.

– Kolyvan