
I'm trying to build a simple ray tracer, and I'm mapping a texture to a quad for visualisation. The resolution I started with is 250×250, but when I change it to, say, 300×300, the program still compiles but crashes at runtime.

#include <stdlib.h>
#include <GL/glut.h>

using namespace std;

const int window_w = 250;
const int window_h = 250;

struct RGBType 
{
    float r;
    float g;
    float b;
};

GLuint tex = 0;

void init()
{
    RGBType pixels[ window_w*window_h ]; // fixed-size array with automatic (stack) storage

    RGBType* temp = pixels;
    for (int x = 0; x < window_w*window_h; x++) 
    {
            temp->r = 0;
            temp->g = 0;
            temp->b = 1;
            temp++;
    }

    glGenTextures( 1, &tex );
    glBindTexture( GL_TEXTURE_2D, tex );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
    glTexImage2D( GL_TEXTURE_2D, 0, GL_RGB, window_w, window_h, 0, GL_RGB, GL_FLOAT, NULL );
    glTexSubImage2D(GL_TEXTURE_2D,0,0,0,window_w,window_h,GL_RGB,GL_FLOAT,pixels);
}

void display(void)
{
    glClearColor(0, 0, 0, 1);
    glClear(GL_COLOR_BUFFER_BIT);

    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();

    glOrtho(0, 1, 0, 1, -1, 1);

    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();

    glColor3ub( 255, 255, 255 );
    glEnable( GL_TEXTURE_2D );
    glBindTexture( GL_TEXTURE_2D, tex );

    glBegin(GL_QUADS);
    glTexCoord2i( 0, 0 );
    glVertex2i( 0, 0 );
    glTexCoord2i( 1, 0 );
    glVertex2i( 1, 0 );
    glTexCoord2i( 1, 1 );
    glVertex2i( 1, 1 );
    glTexCoord2i( 0, 1 );
    glVertex2i( 0, 1 );
    glEnd();

    glutSwapBuffers();
}

int main(int argc, char** argv)
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE);
    glutInitWindowSize(window_w,window_h);
    glutCreateWindow("Ray Tracer");
    init();
    glutDisplayFunc(display);
    glutMainLoop();
    return 0;
}
– whoadrian

1 Answer


It could be that you're simply running out of stack space. You're allocating a fairly large array with automatic (stack) storage, which can easily consume the whole stack assigned to your thread: at 300×300 the array occupies 300 × 300 × sizeof(RGBType) = 1,080,000 bytes, which already exceeds the 1 MiB default stack size common on Windows, whereas 250 × 250 × 12 bytes = 750,000 bytes still fits. Use dynamic allocation instead. You're using C++, so that means the new and delete[] operators:

RGBType *pixels = new RGBType[ window_w*window_h ]; // heap allocation instead of a stack array

for(int x = 0; x < window_w*window_h; x++) 
{
    pixels[x].r = 0;
    pixels[x].g = 0;
    pixels[x].b = 1;
}

glGenTextures( 1, &tex );
glBindTexture( GL_TEXTURE_2D, tex );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
glTexImage2D( GL_TEXTURE_2D, 0, GL_RGB, window_w, window_h, 0, GL_RGB, GL_FLOAT, pixels );

delete[] pixels; // safe: OpenGL has copied the data by the time glTexImage2D returns
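
A std::vector would give you the same heap allocation without the manual delete[], which also keeps the code exception-safe. A minimal sketch, assuming the RGBType struct above and an added #include <vector>:

std::vector<RGBType> pixels( window_w*window_h, RGBType{ 0.0f, 0.0f, 1.0f } ); // heap-backed, filled with blue
glTexImage2D( GL_TEXTURE_2D, 0, GL_RGB, window_w, window_h, 0, GL_RGB, GL_FLOAT, pixels.data() );
// no delete[] needed; the vector frees its storage when it goes out of scope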
– datenwolf
  • I'm also wondering how I could use doubles instead of floats, because when I change the struct members to double and the pixel type to GL_DOUBLE, the screen goes white. – whoadrian Nov 16 '13 at 21:06
  • @AdrianCristea: `GL_DOUBLE` is not an accepted pixel transfer type. That is, your screen goes white because the call to `glTexImage2D (...)` fails with `GL_INVALID_ENUM`. You should be checking `glGetError (...)` at some point. – Andon M. Coleman Nov 16 '13 at 22:21
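
Following up on that last comment, a minimal error check after the texture upload could look like the sketch below (`glGetError` is core OpenGL; exactly where you call it is up to you, and it assumes an added #include <cstdio>):

GLenum err = glGetError(); // returns and clears the oldest recorded error flag
if ( err != GL_NO_ERROR )
    fprintf( stderr, "GL error after glTexImage2D: 0x%04X\n", err ); // GL_INVALID_ENUM is 0x0500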