0

I have followed a tutorial on how to display an image in OpenGL, and I first had to make a square and then texture it. I have tried gl.glColor4f(0f,0f,0f,1f); and that works, I can see the square. But then when I try to texture it, the screen just stays white. What did I do wrong?

Here is the screen with the gl.glColor4f(0f,0f,0f,1f); :

image description

If I apply the texture (using the custom Texture class that the tutorial suggested), the screen is completely white.

Here is my code:

Square.java:

package com.chrypthic.android.reference;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;

import javax.microedition.khronos.opengles.GL10;

import android.content.Context;

public class Square
{
final int VERTEX_SIZE = (2+2) *4;
FloatBuffer vertices;
ShortBuffer indices;

Texture texture;

GL10 gl;
Context c;

public Square(GL10 gl, Context context)
{
    this.gl = gl;
    this.c = context;

    ByteBuffer byteBuffer = ByteBuffer.allocateDirect(VERTEX_SIZE * 4);
    byteBuffer.order(ByteOrder.nativeOrder());
    vertices = byteBuffer.asFloatBuffer();
    vertices.put(new float[]{
        10.0f, 10.0f, 0.0f, 0.0f, //bl  
        160.0f, 10.0f, 1.0f, 0.0f, //br 
        160.0f, 160.0f, 1.0f, 1.0f, //tr    
        10.0f, 160.0f, 1.0f, 0.0f, //tl 
    });
    vertices.flip();

    byteBuffer = ByteBuffer.allocateDirect(VERTEX_SIZE * 4);
    byteBuffer.order(ByteOrder.nativeOrder());
    indices = byteBuffer.asShortBuffer();
    indices.put(new short[]{
        0, 1, 2, 2, 3, 0
    });
    indices.flip();

    texture = new Texture("image/picture.png", c, gl);
}

public void draw()
{
    gl.glEnable(GL10.GL_TEXTURE_2D);
    texture.bind();
    //gl.glColor4f(0f, 0f, 0f, 1f);

    gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
    gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);

    vertices.position(0);
    gl.glVertexPointer(2, GL10.GL_FLOAT, VERTEX_SIZE, vertices);
    vertices.position(2);
    gl.glTexCoordPointer(2, GL10.GL_FLOAT, VERTEX_SIZE, vertices);

    gl.glDrawElements(GL10.GL_TRIANGLES, 6, GL10.GL_UNSIGNED_SHORT, indices);
}
}

Texture.java

package com.chrypthic.android.reference;

import java.io.InputStream;

import javax.microedition.khronos.opengles.GL10;

import android.content.Context;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLUtils;

public class Texture {

String texturePath;
Context context;
GL10 gl;

int textureId;
int minFilter;
int magFilter;
Bitmap texture;

public Texture(String texturePath, Context context, GL10 gl)
{
    this.gl = gl;
    this.texturePath = texturePath;
    this.context = context;
}

public void load()
{
    try{
        AssetManager assetManager = context.getAssets();
        InputStream is = assetManager.open(texturePath);
        texture = BitmapFactory.decodeStream(is);
        gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId);
        GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, texture, 0);
        setFilters(GL10.GL_NEAREST, GL10.GL_NEAREST);
        gl.glBindTexture(GL10.GL_TEXTURE_2D, 0);
    }catch(Exception e)
    {
        e.printStackTrace();
    }
}

public void reload()
{
    load();
    bind();
    setFilters(minFilter, magFilter);
    gl.glBindTexture(GL10.GL_TEXTURE_2D, 0);
}

public void setFilters(int minFilter, int magFilter)
{
    this.minFilter = minFilter;
    this.magFilter = magFilter;
    gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, minFilter);
    gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, magFilter);
}

public void bind()
{
    gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId);
}

public void dispose()
{
    gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId);
    int[] textureIds = {textureId};
    gl.glDeleteTextures(1, textureIds, 0);
}

}

OpenGLRenderer.java

package com.chrypthic.android.reference;

import java.util.Random;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.content.Context;
import android.opengl.GLSurfaceView.Renderer;

public class OpenGLRenderer implements Renderer 
{
Random rand = new Random();
int mWidth = 0;
int mHeight = 0;

Context c;

Square square;

public OpenGLRenderer(Context c)
{
    this.c = c;
}

@Override
public void onDrawFrame(GL10 gl) 
{
    gl.glClearColor(1, 1, 1, 1);
    gl.glClear(GL10.GL_COLOR_BUFFER_BIT);
    gl.glViewport(0, 0, mWidth, mHeight);
    gl.glMatrixMode(GL10.GL_PROJECTION);
    gl.glLoadIdentity();
    gl.glOrthof(0, mWidth, 0, mHeight, -1, 1);

    if(square != null)square.draw();
}

@Override
public void onSurfaceChanged(GL10 gl, int width, int height) 
{
    mWidth = width;
    mHeight = height;
}

@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) 
{
    square = new Square(gl, c);
}

}

I hope you can help me, if you need even more information just tell me. Thanks in advance. I did't post the white screen as a screenshot, because it seems kind of pointless with the white background that stackoverflow have.

chrypthic
  • 579
  • 1
  • 6
  • 15

2 Answers2

1

Going from a comment to an answer.

(Comment): Why are you using glVertexPointer twice rather that glVertexPointer and then glTexCoordPointer? Also there seems to be a typo in that line that says GL_FLAT instead of GL_FLOAT. Are you sure you're giving us your actual code?

-Answer starts here-

It looks like you aren't using texture coordinates at all. You have to specify which corner of the texture you'd like to assign to which corner of the triangle/quad. For example, I see that you've got a "tl" comment next to the top left vertex. You similarly need to supply a texture coordinate that says "use the top left corner of the image with this vertex" and then OpenGL figures out the rest.

Then when you go to pass the vertex pointer, you also have to pass a texture coordinate pointer that contains those texture coordinates I were just talking about in the previous paragraph. You do this with glTexCoordPointer in the exact same fashion as glVertexPointer. You can also load texture coordinates into a VBO (in fact, into the same VBO as the vertices, then you can call glTexCoordPointer with the offset into the VBO to where the tex coords start).

Let me know (via comment) if you need more elaboration/code examples or more explanation anywhere.

Edit 1:

See these links for information about texture mapping: gamedev, nehe

Edit 2:

Okay, stupid me. I didn't notice that you've actually got the texture coordinate data in with the vertex data. Try changing your second call to glVertexPointer to glTexCoordPointer. This might fix your problem entirely.

Andrew Rasmussen
  • 14,912
  • 10
  • 45
  • 81
1

You never create the texture!

public void load()
{
    try{
        int []texture = new int[1];
        gl.glGenTextures(1, texture,0);//the missing method
        textureId = texture[0];
        //now you can call
        AssetManager assetManager = context.getAssets();
        InputStream is = assetManager.open(texturePath);
        texture = BitmapFactory.decodeStream(is);
        gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId);/*this fails 
          if textureId is a 'random' number. You must/should generate a valid id. */
        GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, texture, 0);
        setFilters(GL10.GL_NEAREST, GL10.GL_NEAREST);
        gl.glBindTexture(GL10.GL_TEXTURE_2D, 0);

It is exactly the opposite of your dispose() method.

Ishtar
  • 11,542
  • 1
  • 25
  • 31
  • 1
    After posting on 2 forums and studying my code for ages, i realised that I forgot to call the load() method. GOSH IM SO STUPID. I need to concentrate better. Thanks for pointing it out anyways. Btw, I use GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, texture, 0); instead of what you thought was missing. – chrypthic Nov 26 '11 at 11:12
  • But it works fine as it is. Also another question, how do I move the origin to the top left instead of the bottom left? – chrypthic Nov 26 '11 at 12:03
  • Never mind, its all fixed. I have even written a class that converts ints of the x and y coordinates to floats and adds the width and height of the image. Now it's just like the Canvas api. Thanks so much for your help. It was inverted because I still had the coordinates set up for the origin at the bottom left. I have also enabled Alpha Blending or whatever. – chrypthic Nov 26 '11 at 12:22