
I am learning to work with OpenGL ES 1.0 (I was told that it is better to start with 1.0/1.1).

I am trying to create a transparent 3D globe on an Android device using a transparent texture and vertex indexing, but I am running into a problem that I don't understand and can't find a solution for online.

The problem is that, depending on the camera rotation angle, my globe stops rendering the "back side" of the sphere, or the back side's texture. The image below shows what happens as I keep rotating the camera in the same direction.

Link to image: CLICK HERE (I don't have enough reputation to embed the image directly, sorry)

Can you help me? What is my mistake, and why is the "back" disappearing?

"Sphere" class:

package com.example.OpenGL_Testing.openGl.geometry;

import com.example.OpenGL_Testing.openGl.Texture;

import javax.microedition.khronos.opengles.GL10;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;

public class Sphere {

    float step = 5f;            // density of sphere; step in degrees
    float[] verArray;
    float[] texIndex;
    short[] verIndex;
    private FloatBuffer vBuff;
    private FloatBuffer tBuff;
    private ShortBuffer iBuff;
    public final float mCenterX, mCenterY, mCenterZ;
    public final float mRadius;
    private final GL10 mGL;
    private int stepsPai, stepsTheta;
    private Texture texture;

    public Sphere(float mRadius, float mCenterX, float mCenterY, float mCenterZ, GL10 mGL) {
        this.mGL = mGL;

        // sphere parameters
        this.mRadius = mRadius;
        this.mCenterX = mCenterX;
        this.mCenterY = mCenterY;
        this.mCenterZ = mCenterZ;

        stepsPai = (int) (180f / step) ;             //sphere vertical 'lines'
        stepsTheta = (int) (360f / step) + 1;        //sphere horizontal 'lines'

        // create sphere 'dots'
        createVerticesBuff();
        createIndexingBuffer();

    }

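    // Builds the vertex positions: pai walks the latitude rings from bottom to top,
    // theta walks each ring around the Y axis; the spherical coordinates are
    // converted to Cartesian and offset by the sphere's centre.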
    private float[] createVerticesArray() {
        float[] vertices = new float[stepsPai * stepsTheta * 3];

        int n = 0;
        float cRadius, cHeight, co, si;
        for (float pai = 180f - step; pai > 0f; pai -= step) {

            cRadius = (float) Math.sin((pai + step) * Math.PI / 180f);
            cHeight = (float) Math.cos((pai + step) * Math.PI / 180f);

            for (float theta = 0.0f; theta <= 360f; theta += step) {
                co = (float) Math.cos(theta * Math.PI / 180f);
                si = -(float) Math.sin(theta * Math.PI / 180f);

                vertices[n * 3] = mRadius * (cRadius * co) + mCenterX;
                vertices[n * 3 + 1] = mRadius * (cHeight) + mCenterY;
                vertices[n * 3 + 2] = mRadius * (cRadius * si) + mCenterZ;

                n ++;
            }
        }

        return vertices;
    }

    public FloatBuffer createVerticesBuff() {
        // create array
        verArray = createVerticesArray();

        // create buffer
        if (vBuff == null) {
            ByteBuffer byteBuffer = ByteBuffer.allocateDirect(verArray.length * 4);
            byteBuffer.order(ByteOrder.nativeOrder());
            vBuff = byteBuffer.asFloatBuffer();
            vBuff.put(verArray);
            vBuff.flip();
        }
        return vBuff;
    }

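    // Builds the triangle index list: each cell of the latitude/longitude grid formed
    // by two neighbouring rings is split into two triangles that share a diagonal,
    // so the whole sphere can be drawn with a single glDrawElements call.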
    public short[] createIndexingArray() {
        short[] indexies = new short[verArray.length*2];
        int n=0;
        for (int i = 0; i < stepsPai-1; i++) {
            for (int j = 0; j < stepsTheta; j++) {
                indexies[n] = (short) (i*stepsTheta+j);
                indexies[n+1] = (short) ((i+1)*stepsTheta+j);
                indexies[n+2] = (short) ((i+1)*stepsTheta+j+1);
                indexies[n+3] = indexies[n+2];
                indexies[n+4] = (short) (i*stepsTheta+j+1);
                indexies[n+5] = indexies[n];
                n+=6;
            }
        }
        return indexies;
    }

    public void createIndexingBuffer(){
        // create array
        verIndex = createIndexingArray();

        //create buffer
        ByteBuffer byteBuffer = ByteBuffer.allocateDirect(verIndex.length * 2);
        byteBuffer.order(ByteOrder.nativeOrder());
        iBuff = byteBuffer.asShortBuffer();
        iBuff.put(verIndex);
        iBuff.flip();
    }

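    // Draws the sphere: enables the client-side vertex array (and the texture
    // coordinate array when a texture is set) and renders the indexed triangle list.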
    public void draw() {

        // setup vertices buffer
        mGL.glEnableClientState(GL10.GL_VERTEX_ARRAY);
        vBuff.position(0);
        mGL.glVertexPointer(3, GL10.GL_FLOAT, 0, vBuff);

        // setup texture
        if (texture != null) {
            mGL.glEnable(GL10.GL_TEXTURE_2D);
            mGL.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
            tBuff.position(0);
            mGL.glTexCoordPointer(2, GL10.GL_FLOAT, 0, tBuff);
        }

        // display polygons on screen
        mGL.glDrawElements(GL10.GL_TRIANGLES, verIndex.length, GL10.GL_UNSIGNED_SHORT, iBuff);

        // reset settings
        if (texture != null) {
            mGL.glDisable(GL10.GL_TEXTURE_2D);
            mGL.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
        } else {
            mGL.glColor4f(1, 1, 1, 1);
        }
        mGL.glDisableClientState(GL10.GL_VERTEX_ARRAY);
    }

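    // Assigns a texture and builds texture coordinates: theta maps to u (0..1 around
    // the globe) and pai maps to v (0..1 from pole to pole).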
    public void setTexture(Texture texture) {
        if (this.texture != null) {
            this.texture.dispose();
        }
        this.texture = texture;

        // create array
        texIndex = new float[stepsPai * stepsTheta * 2 * 2];
        int n = 0;
        for (float pai = 0f; pai < 180f; pai += step) {
            for (float theta = 0.0f; theta <= 360f; theta += step) {
                texIndex[n] = theta / 360f;
                texIndex[n + 1] = pai / 180f;
                n += 2;
            }
        }

        // create buffer
        ByteBuffer byteBuffer = ByteBuffer.allocateDirect(texIndex.length * 4);
        byteBuffer.order(ByteOrder.nativeOrder());
        tBuff = byteBuffer.asFloatBuffer();
        tBuff.put(texIndex);
        tBuff.flip();

        // bind texture
        texture.bind();
    }

}

"World" class

package com.example.OpenGL_Testing.openGl;

import android.opengl.GLSurfaceView;
import android.support.v4.view.GestureDetectorCompat;
import android.view.MotionEvent;
import android.view.View;
import com.example.OpenGL_Testing.MyApp;
import com.example.OpenGL_Testing.MyGestureListener;
import com.example.OpenGL_Testing.Screen;
import com.example.OpenGL_Testing.openGl.geometry.Sphere;
import com.example.OpenGL_Testing.openGl.geometry.Sphere2;

import javax.microedition.khronos.opengles.GL10;

/**
 * Created by Aleksandr.Tsatski on 11.11.2014.
 */
public class WorldMap implements Screen {

    GLSurfaceView glView;
    static final int VERTEX_SIZE = (3) * 4;
    Sphere sphere;
    GL10 gl;
    private GestureDetectorCompat mDetector;
    final Object stateChanged = new Object();

    public WorldMap(final GLSurfaceView glView, final GL10 gl) {
        this.glView = glView;
        this.gl = gl;
        sphere = new Sphere(400, 0, 0, 0, gl);

    }

    private void setupGestureListener(final GLSurfaceView glView, final GL10 gl) {
        final MyGestureListener listener = new MyGestureListener();
        listener.setGestureFeedBackListener(new MyGestureListener.Feedback() {
            @Override
            public void onFeedback(final int event, final float parameter1, final float parameter2) {
                glView.queueEvent(new Runnable() {
                    @Override
                    public void run() {
                        switch (event) {
                            case MyGestureListener.SCROLL:
                                gl.glRotatef(parameter1/10, 0f, 1f, 0f);
                                gl.glRotatef(-parameter2/10, 0f, 0f, 1f);
                                break;
                            default:
                                break;
                        }
                    }
                });
            }
        });
        glView.post(new Runnable() {
            @Override
            public void run() {
                mDetector = new GestureDetectorCompat(MyApp.getAppContext(), listener);
            }
        });
        glView.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View view, MotionEvent motionEvent) {
                return mDetector.onTouchEvent(motionEvent);
            }
        });
    }

    @Override
    public void present(float deltaTime) {

        gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);

        sphere.draw();

    }

    @Override
    public void resume() {
        gl.glClearColor(1, 0, 0, 1);
        gl.glEnable(GL10.GL_DEPTH_TEST);
        gl.glEnable(GL10.GL_BLEND);
        gl.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA);


        setupGestureListener(glView, gl);
        sphere.setTexture(new Texture(gl, "world_map.png"));
    }

    @Override
    public void pause() {

    }

    @Override
    public void dispose() {

    }
}
  • It's hard to tell from the screenshots, but do you have depth testing enabled? Try `glEnable(GL_DEPTH_TEST); glDepthMask(GL_TRUE);` before you render the globe, and make sure your `glClear` includes `GL_DEPTH_BUFFER_BIT`. If this doesn't work, make sure that the context you request supports depth buffering. – Justin Jan 05 '15 at 14:02
  • @Justin I have depth testing enabled, but it might be set up wrong... I have now added two classes from my project. Depth testing is set up in the "World" class, in the "resume()" method. – Gugalo Jan 06 '15 at 14:26
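
For reference, a minimal sketch of the setup Justin's comment describes, assuming the same `gl` (GL10) and `glView` (GLSurfaceView) objects used in the classes above; the `setEGLConfigChooser` call is only needed if the default EGL config has no depth buffer:

// request a config with a 16-bit depth buffer (must be called before setRenderer())
glView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);

// once the surface is ready (e.g. in resume())
gl.glEnable(GL10.GL_DEPTH_TEST);   // reject fragments hidden behind closer geometry
gl.glDepthMask(true);              // allow writes to the depth buffer

// every frame, before drawing the globe
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
sphere.draw();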

0 Answers