0

So I'm rendering a curve with OpenGL. The curve is stretched to fill a 1920 × 1080 pixel window. Given this relation, I now want to analyze the proxy position of my haptic device in world coordinates and draw a 3D/2D cursor at the corresponding window coordinates. How exactly do I have to transform GL_MODELVIEW_MATRIX, GL_PROJECTION, and GL_VIEWPORT to map my proxy world coordinates (the haptic device position) to screen coordinates? Or do I even need a transformation at all? Even after researching, I don't really understand these matrix transformations, so it would be very nice if you could help me here.

display func:

void renderDisplay() {
glClearColor(1.0, 1.0, 1.0, 1.0);
glClear(GL_COLOR_BUFFER_BIT);
glColor3f(1.0, 0.0, 1.0);
glBegin(GL_LINE_STRIP);
for (int i = 0; i < vertices.size(); i += 3) {
    glVertex2f(vertices[i], vertices[i+2]);
}
glEnd();
glFlush();

hlBeginFrame();
hlCheckEvents();
drawCursor();
hlEndFrame();

}

Draw Cursor Func:

    void drawCursor()
{
    static const double kCursorRadius = 0.5;
    static const double kCursorHeight = 1.5;
    static const int kCursorTess = 15;
    HLdouble proxyxform[16];

    GLUquadricObj *qobj = 0;

    glPushAttrib(GL_CURRENT_BIT | GL_ENABLE_BIT | GL_LIGHTING_BIT);
    glPushMatrix();

    if (!gCursorDisplayList)
    {
        gCursorDisplayList = glGenLists(1);
        glNewList(gCursorDisplayList, GL_COMPILE);
        qobj = gluNewQuadric();
               
        gluCylinder(qobj, 0.0, kCursorRadius, kCursorHeight,
                            kCursorTess, kCursorTess);
        glTranslated(0.0, 0.0, kCursorHeight);
        gluCylinder(qobj, kCursorRadius, 0.0, kCursorHeight / 5.0,
                    kCursorTess, kCursorTess);
    
        gluDeleteQuadric(qobj);
        glEndList();
    }
    
    // Get the proxy transform in world coordinates.
    hlGetDoublev(HL_PROXY_TRANSFORM, proxyxform);
    glMultMatrixd(proxyxform);

    // Apply the local cursor scale factor.
    glScaled(gCursorScale, gCursorScale, gCursorScale);

    glEnable(GL_LIGHTING);
    glEnable(GL_COLOR_MATERIAL);
    glColor3f(0.0, 0.5, 1.0);

    glCallList(gCursorDisplayList);

    glPopMatrix(); 
    glPopAttrib();
}

setup3DCursor Func:

void setup3DCursor(int width, int height)
{
    GLdouble modelview[16];
    GLdouble projection[16];
    GLint viewport[4];

    glGetDoublev(GL_MODELVIEW_MATRIX, modelview);
    glGetDoublev(GL_PROJECTION_MATRIX, projection);
    glGetIntegerv(GL_VIEWPORT, viewport);

    hlMatrixMode(HL_TOUCHWORKSPACE);
    hlLoadIdentity();
    
    // Fit haptic workspace to view volume.
    hluFitWorkspace(projection);

    // Compute cursor scale.
    gCursorScale = hluScreenToModelScale(modelview, projection, (HLint*)viewport);
    gCursorScale *= CURSOR_SIZE_PIXELS;
}

main func:

 findExtrema(); // Compute minX..maxX / minY..maxY limits for the projection

 /* Fixes over the original:
  *  - gluOrtho2D multiplies the CURRENT matrix, and the default matrix mode
  *    is GL_MODELVIEW — select (and reset) the projection stack first,
  *    otherwise the ortho volume lands on the modelview matrix.
  *  - the gluOrtho2D call was missing its semicolon.
  *  - glutReshapefunc was misspelled; the GLUT entry point is
  *    glutReshapeFunc. */
 glMatrixMode(GL_PROJECTION);
 glLoadIdentity();
 gluOrtho2D(minX, maxX, minY, maxY); // left, right, bottom, top
 glMatrixMode(GL_MODELVIEW);

 glutDisplayFunc(renderDisplay);
 glutReshapeFunc(setup3DCursor);

So what exactly am I doing wrong here?

LoveAndMercy
  • 27
  • 1
  • 1
  • 6
  • I don't know this haptic feedback library, but I assume you get some coordinates from the sensor, you convert them so they make sense on the screen, then you render something at that point on the screen. – user253751 Aug 29 '22 at 16:23
  • I'm using the OpenHaptics library, and I'm retrieving the proxy/haptic device position via "hlGetDoublev(HL_PROXY_TRANSFORM, proxyxform);", which gives me the transformation from haptic workspace coordinates to world-space coordinates. – LoveAndMercy Aug 29 '22 at 16:30
  • What is haptic workspace and world space? (Sorry if it's a basic question. I don't know haptic feedback) – user253751 Aug 29 '22 at 16:37

0 Answers0