
I'm trying to port a game that uses SDL 1.2 and OpenGL 1.3 to Android and eventually iOS.

For that I had to switch to SDL 1.3, the unreleased next version that's going to become SDL 2.0, which has been ported to Android. Initial results were good, everything works fine with just SDL and SDL_image.

But when I add OpenGL ES 1.1 to the mix and try to render a textured rectangle, all I get is a white rectangle.

Any ideas what I'm doing wrong here? Like I said, just SDL/SDL_image displays the image just fine.

Edit: I've updated the code below according to Ryan Maloney's suggestions in the comments, namely using float literals where I pass GL_FLOAT and trying SDL_GL_BindTexture instead of creating the texture manually. That doesn't work either, though: I get a black texture and some errors ("No EGL config available", "EGLNativeWindowType 0x2a1b5380 already connected to another API"). glBindTexture still results in a white texture.

Edit 2: Just had the glorious idea of actually checking for errors: If I call glGetError() right after glTexImage2D, I get GL_INVALID_OPERATION.
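For reference, the check I added is roughly this, right after the glTexImage2D call (fprintf needs <cstdio>, which isn't in the listing below):

    GLenum error = glGetError();
    if (error != GL_NO_ERROR)
        fprintf(stderr, "glTexImage2D error: 0x%04x\n", error); // 0x0502 == GL_INVALID_OPERATION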

#include <GLES/gl.h>
#include <SDL.h>
#include <SDL_image.h>
#include <cmath>   // log, pow, ceil
#include <cstdlib> // atexit
#include <string>

// Set to 1 to use SDL_GL_BindTexture instead of glBindTexture
#define USE_SDL_TEXTURE 0

static const int screen_width = 640;
static const int screen_height = 480;

void init_opengl()
{
    glEnable(GL_TEXTURE_2D);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    glEnable(GL_BLEND);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glViewport(0, 0, screen_width, screen_height);
    glOrthof(0, screen_width, screen_height, 0, -1, 1);
    glMatrixMode(GL_MODELVIEW);
}

int next_power_of_two(int n)
{
    double logbase2 = log((double) n) / log(2.0);
    return (int) (pow(2, ceil(logbase2)) + 0.5);
}

SDL_Surface* convert_to_power_of_two(SDL_Surface* surface)
{
    int width = next_power_of_two(surface->w);
    int height = next_power_of_two(surface->h);

    SDL_Surface* pot_surface = SDL_CreateRGBSurface(0, width, height, 32,
                                                    0x00ff0000, 0x0000ff00,
                                                    0x000000ff, 0xff000000);
    SDL_Rect dstrect;
    dstrect.w = surface->w;
    dstrect.h = surface->h;
    dstrect.x = 0;
    dstrect.y = 0;
    SDL_SetSurfaceAlphaMod(surface, 0);
    SDL_BlitSurface(surface, NULL, pot_surface, &dstrect);
    SDL_FreeSurface(surface);

    return pot_surface;
}

GLenum get_texture_format(SDL_PixelFormat* pixel_format, GLint bpp)
{
    switch (bpp) {
    case 4:
        return GL_RGBA;
    case 3:
        return GL_RGB;
    }
    throw "Unsupported pixel format";
}

#if USE_SDL_TEXTURE
SDL_Texture* load_image(const std::string& path, SDL_Renderer* renderer)
#else
GLuint load_image(const std::string& path)
#endif
{
    SDL_Surface* surface = IMG_Load(path.c_str());
    SDL_Surface* pot_surface = convert_to_power_of_two(surface);

#if USE_SDL_TEXTURE
    return SDL_CreateTextureFromSurface(renderer, surface);
#else
    SDL_PixelFormat* pixel_format = pot_surface->format;
    GLint bpp = pixel_format->BytesPerPixel;
    GLenum texture_format = get_texture_format(pixel_format, bpp);

    GLuint texture;
    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, bpp, pot_surface->w, pot_surface->h, 0,
                 texture_format, GL_UNSIGNED_BYTE, pot_surface->pixels);

    SDL_FreeSurface(pot_surface);
    return texture;
#endif
}

#if USE_SDL_TEXTURE
void draw_texture(SDL_Texture* texture, float width, float height)
#else
void draw_texture(GLuint texture, float width, float height)
#endif
{
#if USE_SDL_TEXTURE
    SDL_GL_BindTexture(texture, NULL, NULL);
#else
    glBindTexture(GL_TEXTURE_2D, texture);
#endif

    GLfloat texture_coordinates[] = {0.0f, 1.0f,
                                     0.0f, 0.0f,
                                     1.0f, 1.0f,
                                     1.0f, 0.0f};
    glTexCoordPointer(2, GL_FLOAT, 0, texture_coordinates);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);

    GLfloat vertices[] = {0.0f, height,
                          0.0f, 0.0f,
                          width, height,
                          width, 0.0f};
    glVertexPointer(2, GL_FLOAT, 0, vertices);
    glEnableClientState(GL_VERTEX_ARRAY);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    glDisableClientState(GL_VERTEX_ARRAY);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
}

int main(int argc, char* argv[])
{
    SDL_Init(SDL_INIT_VIDEO);
    atexit(SDL_Quit);

    IMG_Init(IMG_INIT_PNG);
    atexit(IMG_Quit);

    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);

    const Uint32 flags = SDL_WINDOW_OPENGL | SDL_WINDOW_SHOWN;
    SDL_Window* window = SDL_CreateWindow("Foo", 0, 0,
                                          screen_width, screen_height, flags);

#if USE_SDL_TEXTURE
    SDL_Renderer* renderer = SDL_CreateRenderer(window, 1, SDL_RENDERER_ACCELERATED);
#endif

    SDL_GLContext context = SDL_GL_CreateContext(window);
    SDL_GL_SetSwapInterval(1);

    init_opengl();

#if USE_SDL_TEXTURE
    SDL_Texture* texture = load_image("character_editor_bg.png", renderer);
#else
    GLuint texture = load_image("character_editor_bg.png");
#endif

    SDL_Event event;
    for (;;) {
        SDL_WaitEvent(&event);
        if (event.type == SDL_QUIT)
            break;

        glClear(GL_COLOR_BUFFER_BIT);
        draw_texture(texture, screen_width, screen_height);

        SDL_GL_SwapWindow(window);
        SDL_Delay(1);
    }

    SDL_GL_DeleteContext(context);
    SDL_DestroyWindow(window);
    SDL_Quit();
    return 0;
}
  • One thing I notice is that you are specifying the vertices and texture coordinates as GL_FLOAT but then defining the GLfloat arrays with ints - try changing the definition to use floats - 0.f etc. – Ryan Maloney Dec 26 '12 at 13:32
  • Also, while what you have should work given that you are using SDL 1.3 you can use helper methods. Create an SDL_Texture object like so: SDL_Texture *sdl_texture = SDL_CreateTextureFromSurface(renderer, surface); and then glEnableClientState(GL_TEXTURE_COORD_ARRAY); SDL_GL_BindTexture(sdl_texture, &texw, &texh); – Ryan Maloney Dec 26 '12 at 13:35
  • Thanks Ryan, I've updated the code above accordingly. Sadly, it doesn't work with SDL_GL_BindTexture either, but I get a black texture now instead of a white one. (While SDL_GL_BindTexture does seem neat, I'd probably still stick to the more elaborate glBindTexture approach if both work, just using SDL for window creation, image loading and event handling.) – futlib Dec 27 '12 at 03:25
  • If I recall correctly, the "No EGL config available" in the log of an Android emulator comes from the SDL activity and means it failed to create an OpenGL ES 2.0 context and is now falling back on 1.1. This most likely means you are using the emulator and aren't using hardware acceleration. If this is the case, I was having some strange texturing problems until I enabled HW acceleration, so perhaps give that a try - see Using Hardware acceleration here: http://developer.android.com/tools/devices/emulator.html – Ryan Maloney Dec 27 '12 at 15:08
  • Indeed, I don't get that message when running the app with USE_SDL_TEXTURE on my Galaxy Nexus. However, I'm still getting the "EGLNativeWindowType [...] already connected to another API eglCreateWindowSurface: [...] error [...] (EGL_BAD_NATIVE_WINDOW)" message, and it's still a black screen. – futlib Dec 28 '12 at 03:21
  • @RyanMaloney I've just poked around a bit with glGetError() (I had forgotten it was there :D) and noticed that I get a GL_INVALID_OPERATION right after the glTexImage2D call. – futlib Dec 28 '12 at 04:26
  • It might be your texture format then, use the example here (http://content.gpwiki.org/index.php/SDL:Tutorials:Using_SDL_with_OpenGL) that checks number of colors and then the red mask: if (surface->format->Rmask == 0x000000ff) texture_format = GL_RGBA; else texture_format = GL_BGRA; – Ryan Maloney Dec 28 '12 at 13:38
  • I had very similar code to that, but since GL_BGR_EXT isn't available in OpenGL ES, I thought I'd leave out GL_BGRA_EXT as well and only support RGB(A) images for now. Still have to find a proper solution. – futlib Dec 29 '12 at 07:02

1 Answer


Finally found the problem. I was in fact using glTexImage2D completely wrong, and for some reason it worked fine in OpenGL 1.3.

Here's what the invocation ought to be:

    glTexImage2D(GL_TEXTURE_2D, 0, texture_format, pot_surface->w, pot_surface->h, 0,
                 texture_format, GL_UNSIGNED_BYTE, pot_surface->pixels);

Previously, I had passed bpp as the third parameter, which is rubbish: desktop OpenGL accepts the legacy values 3 and 4 for internalformat, which is presumably why it worked under OpenGL 1.3, but OpenGL ES 1.1 doesn't, and it also requires internalformat to match format, hence the GL_INVALID_OPERATION.
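Unrelated to the actual fix, but since the comments brought up the Rmask/texture format question: one way to sidestep it entirely is to convert the surface to a known byte layout before uploading. This is just a sketch, assuming SDL_ConvertSurfaceFormat and SDL_PIXELFORMAT_ABGR8888 are available in your SDL 1.3 build:

    // Sketch only: force a known byte layout so GL_RGBA + GL_UNSIGNED_BYTE
    // matches what glTexImage2D reads from the pixel buffer.
    // SDL_PIXELFORMAT_ABGR8888 stores bytes as R, G, B, A in memory on
    // little-endian machines, which is the layout GL_RGBA expects.
    SDL_Surface* rgba_surface =
        SDL_ConvertSurfaceFormat(pot_surface, SDL_PIXELFORMAT_ABGR8888, 0);
    SDL_FreeSurface(pot_surface);

    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, rgba_surface->w, rgba_surface->h, 0,
                 GL_RGBA, GL_UNSIGNED_BYTE, rgba_surface->pixels);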
