I have a wxPython + PyOpenGL application that needs to be cross-platform. For selection I implemented a color-picking scheme, basically as follows:
if len(self.pick_color_array) == 0:
    self.init_color_buffers(len(self.points))

# Plain fixed-function state so the picking colors reach the framebuffer unmodified.
glUseProgram(0)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
glDisable(GL_BLEND)
glDisable(GL_DITHER)
glDisable(GL_FOG)
glDisable(GL_LIGHTING)
glDisable(GL_TEXTURE_1D)
glDisable(GL_TEXTURE_2D)
glDisable(GL_TEXTURE_3D)
glShadeModel(GL_FLAT)

# Same camera transform as in the normal render pass.
glPushMatrix()
glTranslate(self.x_translation_step, self.y_translation_step,
            Z_DISTANCE + z_translation)
glRotate(alpha_rotation, 0.0, 0.0, 1.0)
glRotate(beta_rotation, 0.0, 1.0, 0.0)
self.apply_connectivity_nose_correction()

# Draw each point's geometry in its unique picking color.
for i in range(len(self.points)):
    glColor3ub(self.pick_color_array[i][0],
               self.pick_color_array[i][1],
               self.pick_color_array[i][2])
    glEnableClientState(GL_VERTEX_ARRAY)
    glBindBuffer(GL_ARRAY_BUFFER, self.positions_buffers[i][0])
    glVertexPointer(3, GL_FLOAT, 0, None)
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, self.positions_buffers[i][2])
    glDrawElements(GL_TRIANGLES, 36, GL_UNSIGNED_SHORT, None)
glPopMatrix()

# Read the color under the mouse cursor (window y runs top-down, GL bottom-up).
viewport = glGetIntegerv(GL_VIEWPORT)
pixel = glReadPixels(self.control.mouse_x(), viewport[3] - self.control.mouse_y(),
                     1, 1, GL_RGB, GL_UNSIGNED_BYTE)

# Restore the normal rendering state.
glUseProgram(self.shader.shader)
glEnable(GL_BLEND)
glEnable(GL_DITHER)
glEnable(GL_FOG)
glEnable(GL_LIGHTING)
glEnable(GL_TEXTURE_1D)
glEnable(GL_TEXTURE_2D)
glEnable(GL_TEXTURE_3D)
glShadeModel(GL_SMOOTH)
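After the read-back, pixel gets decoded into its three channel values the same way as in init_color_buffers below, and the resulting tuple is looked up in pick_color_dict. Roughly like this (simplified; on_point_picked is just a stand-in name for my real selection handler):

# color_0, color_1, color_2 are decoded from `pixel` as shown in init_color_buffers
index = self.pick_color_dict.get((color_0, color_1, color_2))
if index is not None:  # None means no item was under the cursor
    self.on_point_picked(index)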
Now for init_color_buffers:
def init_color_buffers(self, nr_points):
    glMatrixMode(GL_MODELVIEW)
    glLoadIdentity()
    self.pick_color_dict = {}
    self.pick_color_array = []

    # A large helper shape (12 vertices), used only to draw each picking
    # color once so it can be read back.
    vertices = numpy.array([
        0.0, 1000.0, 0.0,
        -1000.0, -1000.0, 1000.0,
        1000.0, -1000.0, 1000.0,
        0.0, 1000.0, 0.0,
        1000.0, -1000.0, 1000.0,
        1000.0, -1000.0, -1000.0,
        0.0, 100.0, 0.0,
        1000.0, -1000.0, -1000.0,
        -1000.0, -1000.0, -1000.0,
        0.0, 1000.0, 0.0,
        -1000.0, -1000.0, -1000.0,
        -1000.0, -1000.0, 1000.0
    ], dtype=numpy.float32)
    buffer_cube = glGenBuffers(1)
    glBindBuffer(GL_ARRAY_BUFFER, buffer_cube)
    glBufferData(GL_ARRAY_BUFFER, ADT.arrayByteCount(vertices),
                 ADT.voidDataPointer(vertices), GL_STATIC_DRAW)

    # Same flat, untextured state as in the picking pass above.
    glUseProgram(0)
    glDisable(GL_BLEND)
    glDisable(GL_DITHER)
    glDisable(GL_FOG)
    glDisable(GL_LIGHTING)
    glDisable(GL_TEXTURE_1D)
    glDisable(GL_TEXTURE_2D)
    glDisable(GL_TEXTURE_3D)
    glMatrixMode(GL_MODELVIEW)
    glLoadIdentity()
    glShadeModel(GL_FLAT)

    from binascii import hexlify
    for i in range(nr_points):
        # Assign point i a unique RGB picking color.
        self.pick_color_array.append((numpy.uint8(255.0 / nr_points * i),
                                      numpy.uint8(255 - 255.0 / nr_points * i),
                                      numpy.uint8(255.0 / nr_points * i)))

        # Render the shape in that color and read one pixel back, so the
        # dictionary key is the color as it actually comes out of the framebuffer.
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
        glColor3ub(self.pick_color_array[i][0],
                   self.pick_color_array[i][1],
                   self.pick_color_array[i][2])
        glEnableClientState(GL_VERTEX_ARRAY)
        glBindBuffer(GL_ARRAY_BUFFER, buffer_cube)
        glVertexPointer(3, GL_FLOAT, 0, None)
        glDrawArrays(GL_TRIANGLES, 0, 12)
        pixel = glReadPixels(10, 10, 1, 1, GL_RGB, GL_UNSIGNED_BYTE)

        # Decode the returned pixel into its three 8-bit channels.
        bit_repr = bin(int(b"1" + hexlify(pixel), 16))[3:]
        color_0 = 0
        for bit in bit_repr[:8]:
            color_0 = color_0 * 2 + numpy.uint8(bit)
        color_1 = 0
        for bit in bit_repr[8:16]:
            color_1 = color_1 * 2 + numpy.uint8(bit)
        color_2 = 0
        for bit in bit_repr[16:]:
            color_2 = color_2 * 2 + numpy.uint8(bit)
        self.pick_color_dict[(color_0, color_1, color_2)] = i
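All the binascii bit-twiddling does is turn whatever glReadPixels returns back into three 8-bit channel values; assuming the result exposes a plain 3-byte RGB buffer, it should be equivalent to something like:

rgb = numpy.frombuffer(pixel, dtype=numpy.uint8)  # works for a byte string or a contiguous uint8 array
color_0, color_1, color_2 = int(rgb[0]), int(rgb[1]), int(rgb[2])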
This was written and works perfectly on macOS. But when I try it on Windows it doesn't work at all: the color dictionary that gets built is just {(0, 0, 0): last_index}, and clicking on any of my items also returns (0, 0, 0). I'm really stumped as to what I'm doing wrong here, especially since everything works fine on macOS. My only guess is that either glReadPixels somehow doesn't work properly on Windows, or wxPython on Windows somehow messes it up?!
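If it helps narrow things down, one sanity check I can still try on Windows is to clear to a known color, read a single pixel straight back, and look at glGetError, roughly:

glClearColor(1.0, 0.0, 0.0, 1.0)
glClear(GL_COLOR_BUFFER_BIT)
check = glReadPixels(10, 10, 1, 1, GL_RGB, GL_UNSIGNED_BYTE)
print(repr(check), glGetError())  # expecting red (255, 0, 0) and no GL error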
Any help would be greatly appreciated.
Regards, Bogdan