
Note: I'm new to Android and OpenGL.

I'm building an augmented reality app based on ARToolKitX (GitHub: https://github.com/artoolkitx/artoolkitx/tree/8c6bd4e7be5e80c8439066b23473506aebbb496c/Source/ARXJ/ARXJProj/arxj/src/main/java/org/artoolkitx/arx/arxj).

The application shows the camera frame and draws objects on top of it with OpenGL.

My problem: ARToolKitX forces the app into landscape mode:

setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);

but when I change the screen orientation to SCREEN_ORIENTATION_PORTRAIT, the camera image and the OpenGL objects don't rotate to the correct orientation; they stay in landscape mode.

Inside the ARRenderer I can use the drawVideoSettings method to rotate the camera image by itself, but that doesn't apply to the OpenGL objects.
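
For illustration, rotating just the video background would look like this (a sketch based on the drawVideoSettings() call in onSurfaceChanged() below; my assumption is that the three booleans are rotate90, flipH and flipV):

    // Rotate only the camera image by 90 degrees; the OpenGL objects are unaffected.
    ARController.getInstance().drawVideoSettings(cameraIndex, width, height,
            true,   // rotate90 (assumed meaning of this flag)
            false,  // flipH
            false,  // flipV
            ARX_jni.ARW_H_ALIGN_CENTRE, ARX_jni.ARW_V_ALIGN_CENTRE,
            ARX_jni.ARW_SCALE_MODE_FILL, viewport);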

ARToolKitX also provides a surfaceChanged() method inside the CameraSurface class, with the comment: "This is where [...] to create transformation matrix to scale and then rotate surface view, if the app is going to handle orientation changes."

But I have no idea what this transformation matrix should look like or how to apply it.
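
For comparison, the only concrete "scale and then rotate" transformation I have found is the configureTransform() pattern from Google's Camera2Basic sample, which builds an android.graphics.Matrix and applies it to a TextureView. A sketch of that pattern (mTextureView and mPreviewSize are names from that sample, and it runs inside an Activity; CameraSurfaceImpl below feeds an ImageReader rather than a TextureView, so I am not sure it applies here):

    // Scale and then rotate the TextureView so the camera preview stays upright
    // when the display rotates. android.graphics.Matrix is fully qualified to
    // avoid a clash with android.opengl.Matrix.
    private void configureTransform(int viewWidth, int viewHeight) {
        // Current display rotation relative to the device's natural orientation.
        int rotation = getWindowManager().getDefaultDisplay().getRotation();
        android.graphics.Matrix matrix = new android.graphics.Matrix();
        RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
        RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
        float centerX = viewRect.centerX();
        float centerY = viewRect.centerY();
        if (rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270) {
            // Map the view rectangle onto the (rotated) buffer rectangle, scale
            // it to fill, then rotate it upright.
            bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
            matrix.setRectToRect(viewRect, bufferRect, android.graphics.Matrix.ScaleToFit.FILL);
            float scale = Math.max(
                    (float) viewHeight / mPreviewSize.getHeight(),
                    (float) viewWidth / mPreviewSize.getWidth());
            matrix.postScale(scale, scale, centerX, centerY);
            matrix.postRotate(90 * (rotation - 2), centerX, centerY);
        } else if (rotation == Surface.ROTATION_180) {
            matrix.postRotate(180, centerX, centerY);
        }
        mTextureView.setTransform(matrix);
    }

Since a GLSurfaceView has no setTransform(), I assume the equivalent here would have to be baked into the OpenGL matrices instead.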

Any help is appreciated.

ARRenderer:

public abstract class ARRenderer implements GLSurfaceView.Renderer {

private MyShaderProgram shaderProgram;
private int width, height, cameraIndex;
private int[] viewport = new int[4];
private boolean firstRun = true;

private final static String TAG = ARRenderer.class.getName();


/**
 * Allows subclasses to load markers and prepare the scene. This is called after
 * initialisation is complete.
 */
public boolean configureARScene() {
    return true;
}

public void onSurfaceCreated(GL10 unused, EGLConfig config) {

    // Transparent background
    GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.f);
    this.shaderProgram = new MyShaderProgram(new MyVertexShader(), new MyFragmentShader());
    GLES20.glUseProgram(shaderProgram.getShaderProgramHandle());
}

public void onSurfaceChanged(GL10 unused, int w, int h) {
    this.width = w;
    this.height = h;
    if(ARController.getInstance().isRunning()) {
        //Update the frame settings for native rendering
        ARController.getInstance().drawVideoSettings(cameraIndex, w, h, false, false, false, ARX_jni.ARW_H_ALIGN_CENTRE, ARX_jni.ARW_V_ALIGN_CENTRE, ARX_jni.ARW_SCALE_MODE_FILL, viewport);
    }
}

public void onDrawFrame(GL10 unused) {
    if (ARController.getInstance().isRunning()) {
        // Initialize artoolkitX video background rendering.
        if (firstRun) {
            boolean isDisplayFrameInited = ARController.getInstance().drawVideoInit(cameraIndex);
            if (!isDisplayFrameInited) {
                Log.e(TAG, "Display Frame not inited");
            }

            if (!ARController.getInstance().drawVideoSettings(cameraIndex, this.width, this.height, false, false,
                    false, ARX_jni.ARW_H_ALIGN_CENTRE, ARX_jni.ARW_V_ALIGN_CENTRE,
                    ARX_jni.ARW_SCALE_MODE_FILL, viewport)) {
                Log.e(TAG, "Error during call of displayFrameSettings.");
            } else {
                Log.i(TAG, "Viewport {" + viewport[0] + ", " + viewport[1] + ", " + viewport[2] + ", " + viewport[3] + "}.");
            }

            firstRun = false;
        }
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        if (!ARController.getInstance().drawVideoSettings(cameraIndex)) {
            Log.e(TAG, "Error during call of displayFrame.");
        }
        draw();
    }
}

/**
 * Should be overridden in subclasses and used to perform rendering.
 */
public void draw() {
    GLES20.glViewport(viewport[0], viewport[1], viewport[2], viewport[3]);

    //TODO: Check how to refactor near and far plane
    shaderProgram.setProjectionMatrix(ARController.getInstance().getProjectionMatrix(10.0f, 10000.0f));
    float[] camPosition = {1f, 1f, 1f};
    shaderProgram.render(camPosition);
}

@SuppressWarnings("unused")
public ShaderProgram getShaderProgram() {
    return shaderProgram;
}

public void setCameraIndex(int cameraIndex) {
    this.cameraIndex = cameraIndex;
}

}

CameraSurface:

class CameraSurfaceImpl implements CameraSurface {

/**
 * Android logging tag for this class.
 */
private static final String TAG = CameraSurfaceImpl.class.getSimpleName();
private CameraDevice mCameraDevice;
private ImageReader mImageReader;
private Size mImageReaderVideoSize;
private final Context mAppContext;

private final CameraDevice.StateCallback mCamera2DeviceStateCallback = new CameraDevice.StateCallback() {
    @Override
    public void onOpened(@NonNull CameraDevice camera2DeviceInstance) {
        mCameraDevice = camera2DeviceInstance;
        startCaptureAndForwardFramesSession();
    }

    @Override
    public void onDisconnected(@NonNull CameraDevice camera2DeviceInstance) {
        camera2DeviceInstance.close();
        mCameraDevice = null;
    }

    @Override
    public void onError(@NonNull CameraDevice camera2DeviceInstance, int error) {
        camera2DeviceInstance.close();
        mCameraDevice = null;
    }
};

/**
 * Listener to inform of camera related events: start, frame, and stop.
 */
private final CameraEventListener mCameraEventListener;
/**
 * Tracks if SurfaceView instance was created.
 */
private boolean mImageReaderCreated;

public CameraSurfaceImpl(CameraEventListener cameraEventListener, Context appContext){
    this.mCameraEventListener = cameraEventListener;
    this.mAppContext = appContext;
}


private final ImageReader.OnImageAvailableListener mImageAvailableAndProcessHandler = new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader)
    {

        Image imageInstance = reader.acquireLatestImage();
        if (imageInstance == null) {
            //Note: This seems to happen quite often.
            Log.v(TAG, "onImageAvailable(): unable to acquire new image");
            return;
        }

        // Get a ByteBuffer for each plane.
        final Image.Plane[] imagePlanes = imageInstance.getPlanes();
        final int imagePlaneCount = Math.min(4, imagePlanes.length); // We can handle up to 4 planes max.
        final ByteBuffer[] imageBuffers = new ByteBuffer[imagePlaneCount];
        final int[] imageBufferPixelStrides = new int[imagePlaneCount];
        final int[] imageBufferRowStrides = new int[imagePlaneCount];
        for (int i = 0; i < imagePlaneCount; i++) {
            imageBuffers[i] = imagePlanes[i].getBuffer();
            // For ImageFormat.YUV_420_888 the order of planes in the array returned by Image.getPlanes()
            // is guaranteed such that plane #0 is always Y, plane #1 is always U (Cb), and plane #2 is always V (Cr).
            // The Y-plane is guaranteed not to be interleaved with the U/V planes (in particular, pixel stride is
            // always 1 in yPlane.getPixelStride()). The U/V planes are guaranteed to have the same row stride and
            // pixel stride (in particular, uPlane.getRowStride() == vPlane.getRowStride() and uPlane.getPixelStride() == vPlane.getPixelStride(); ).
            imageBufferPixelStrides[i] = imagePlanes[i].getPixelStride();
            imageBufferRowStrides[i] = imagePlanes[i].getRowStride();
        }

        if (mCameraEventListener != null) {
            mCameraEventListener.cameraStreamFrame(imageBuffers, imageBufferPixelStrides, imageBufferRowStrides);
        }

        imageInstance.close();
    }
};

@Override
public void surfaceCreated() {
    Log.i(TAG, "surfaceCreated(): called");

    SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(mAppContext);
    int defaultCameraIndexId = mAppContext.getResources().getIdentifier("pref_defaultValue_cameraIndex","string", mAppContext.getPackageName());
    mCamera2DeviceID = Integer.parseInt(prefs.getString("pref_cameraIndex", mAppContext.getResources().getString(defaultCameraIndexId)));
    Log.i(TAG, "surfaceCreated(): will attempt to open camera \"" + mCamera2DeviceID +
            "\", set orientation, set preview surface");

    /*
    Set the resolution from the settings as size for the glView. Because the video stream capture
    is requested based on this size.

    WARNING: While coding the preferences are taken from the res/xml/preferences.xml!!!
    When building for Unity the actual used preferences are taken from the UnityARPlayer project!!!
    */
    int defaultCameraValueId = mAppContext.getResources().getIdentifier("pref_defaultValue_cameraResolution","string",mAppContext.getPackageName());
    String camResolution = prefs.getString("pref_cameraResolution", mAppContext.getResources().getString(defaultCameraValueId));
    String[] dims = camResolution.split("x", 2);
    mImageReaderVideoSize =  new Size(Integer.parseInt(dims[0]),Integer.parseInt(dims[1]));

    // Note that maxImages should be at least 2 for acquireLatestImage() to be any different than acquireNextImage() -
    // discarding all-but-the-newest Image requires temporarily acquiring two Images at once. Or more generally,
    // calling acquireLatestImage() with less than two images of margin, that is (maxImages - currentAcquiredImages < 2)
    // will not discard as expected.
    mImageReader = ImageReader.newInstance(mImageReaderVideoSize.getWidth(),mImageReaderVideoSize.getHeight(), ImageFormat.YUV_420_888, /* The maximum number of images the user will want to access simultaneously:*/ 2 );
    mImageReader.setOnImageAvailableListener(mImageAvailableAndProcessHandler, null);

    mImageReaderCreated = true;

} // end: public void surfaceCreated()

/* Interface implemented by this SurfaceView subclass
   holder: SurfaceHolder instance associated with SurfaceView instance that changed
   format: pixel format of the surface
   width: of the SurfaceView instance
   height: of the SurfaceView instance
*/
@Override
public void surfaceChanged() {
    Log.i(TAG, "surfaceChanged(): called");

    // This is where to calculate the optimal size of the display and set the aspect ratio
    // of the surface view (probably the service holder). Also where to Create transformation
    // matrix to scale and then rotate surface view, if the app is going to handle orientation
    // changes.
    if (!mImageReaderCreated) {
        surfaceCreated();
    }
    if (!isCamera2DeviceOpen()) {
        openCamera2(mCamera2DeviceID);
    }
    if (isCamera2DeviceOpen() && (null == mYUV_CaptureAndSendSession)) {
        startCaptureAndForwardFramesSession();
    }


}

private void openCamera2(int camera2DeviceID) {
    Log.i(TAG, "openCamera2(): called");
    CameraManager camera2DeviceMgr = (CameraManager) mAppContext.getSystemService(Context.CAMERA_SERVICE);
    if (null == camera2DeviceMgr) {
        // getSystemService() can return null; bail out before trying to open the camera.
        Log.e(TAG, "openCamera2(): Camera2 DeviceMgr not set");
        return;
    }

    try {
        if (PackageManager.PERMISSION_GRANTED == ContextCompat.checkSelfPermission(mAppContext, Manifest.permission.CAMERA)) {
            camera2DeviceMgr.openCamera(Integer.toString(camera2DeviceID), mCamera2DeviceStateCallback, null);
            return;
        }
    } catch (CameraAccessException ex) {
        Log.e(TAG, "openCamera2(): CameraAccessException caught, " + ex.getMessage());
    } catch (Exception ex) {
        Log.e(TAG, "openCamera2(): exception caught, " + ex.getMessage());
    }
    Log.e(TAG, "openCamera2(): abnormal exit");
}

private int mCamera2DeviceID = -1;
private CaptureRequest.Builder mCaptureRequestBuilder;
private CameraCaptureSession mYUV_CaptureAndSendSession;

private void startCaptureAndForwardFramesSession() {

    if ((null == mCameraDevice) || (!mImageReaderCreated) /*|| (null == mPreviewSize)*/) {
        return;
    }

    closeYUV_CaptureAndForwardSession();

    try {
        mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        List<Surface> surfaces = new ArrayList<>();

        Surface surfaceInstance;
        surfaceInstance = mImageReader.getSurface();
        surfaces.add(surfaceInstance);
        mCaptureRequestBuilder.addTarget(surfaceInstance);

        mCameraDevice.createCaptureSession(
                surfaces, // Output surfaces
                new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(@NonNull CameraCaptureSession session) {
                        try {
                            if (mCameraEventListener != null) {
                                mCameraEventListener.cameraStreamStarted(mImageReaderVideoSize.getWidth(), mImageReaderVideoSize.getHeight(), "YUV_420_888", mCamera2DeviceID, false);
                            }
                            mYUV_CaptureAndSendSession = session;
                            // Session to repeat request to update passed in camSensorSurface
                            mYUV_CaptureAndSendSession.setRepeatingRequest(mCaptureRequestBuilder.build(), /* CameraCaptureSession.CaptureCallback cameraEventListener: */null, /* Background thread: */ null);
                        } catch (CameraAccessException e) {
                            e.printStackTrace();
                        }
                    }

                    @Override
                    public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                        Toast.makeText(mAppContext, "Unable to setup camera sensor capture session", Toast.LENGTH_SHORT).show();
                    }
                }, // Callback for capture session state updates
                null); // Secondary thread message queue
    } catch (CameraAccessException ex) {
        ex.printStackTrace();
    }
}

@Override
public void closeCameraDevice() {
    closeYUV_CaptureAndForwardSession();
    if (null != mCameraDevice) {
        mCameraDevice.close();
        mCameraDevice = null;
    }
    if (null != mImageReader) {
        mImageReader.close();
        mImageReader = null;
    }
    if (mCameraEventListener != null) {
        mCameraEventListener.cameraStreamStopped();
    }
    mImageReaderCreated = false;
}

private void closeYUV_CaptureAndForwardSession() {
    if (mYUV_CaptureAndSendSession != null) {
        mYUV_CaptureAndSendSession.close();
        mYUV_CaptureAndSendSession = null;
    }
}

/**
 * Indicates whether or not camera2 device instance is available, opened, enabled.
 */
@Override
public boolean isCamera2DeviceOpen() {
    return (null != mCameraDevice);
}

@Override
public boolean isImageReaderCreated() {
    return mImageReaderCreated;
}

}

Edit:

/**
 * Override the draw function from ARRenderer.
 */
@Override
public void draw() {
    super.draw();
    fpsCounter.frame();
    if (maxfps < fpsCounter.getFPS()) {
        maxfps = fpsCounter.getFPS();
    }
    logger.log(Level.INFO, "FPS: " + maxfps);

    // Initialize GL
    GLES20.glEnable(GLES20.GL_CULL_FACE);
    GLES20.glEnable(GLES20.GL_DEPTH_TEST);
    GLES20.glFrontFace(GLES20.GL_CCW);
    
    // Look for trackables, and draw on each found one.
    for (int trackableUID : trackables.keySet()) {
        // If the trackable is visible, apply its transformation, and render the object
        float[] modelViewMatrix = new float[16];
        if (ARController.getInstance().queryTrackableVisibilityAndTransformation(trackableUID, modelViewMatrix)) {
            float[] projectionMatrix = ARController.getInstance().getProjectionMatrix(10.0f, 10000.0f);
            trackables.get(trackableUID).draw(projectionMatrix, modelViewMatrix);
        }
    }
}
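
To show what I mean by changing the matrices (see also my comment below): I tried pre-rotating the projection matrix before drawing, along these lines (a sketch using android.opengl.Matrix; the 90-degree angle and the z-axis were guesses on my part):

    float[] projectionMatrix = ARController.getInstance().getProjectionMatrix(10.0f, 10000.0f);
    float[] rotation = new float[16];
    float[] rotatedProjection = new float[16];
    // Rotate the projection by 90 degrees around the z-axis, then render with it.
    Matrix.setRotateM(rotation, 0, 90.0f, 0.0f, 0.0f, 1.0f);
    Matrix.multiplyMM(rotatedProjection, 0, rotation, 0, projectionMatrix, 0);
    trackables.get(trackableUID).draw(rotatedProjection, modelViewMatrix);
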
Comments:

  • Maybe you need to rotate the camera 90 degrees using Matrix.setLookAtM(viewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, 1.0f, 0.0f, 0.0f). You may also need to change the projection matrix. – alexrnov Sep 08 '20 at 09:05
  • I already tried changing the model-view matrix and projection matrix in the code section added above, but sadly that either changed nothing or resulted in no models being displayed at all. – Johannes Scheibe Sep 09 '20 at 16:16
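
For reference, the view-matrix change suggested in the first comment would look roughly like this (a sketch with android.opengl.Matrix; the eye and look-at coordinates are placeholders):

    float[] viewMatrix = new float[16];
    // An up vector of (1, 0, 0) instead of (0, 1, 0) rolls the virtual camera by 90 degrees.
    Matrix.setLookAtM(viewMatrix, 0,
            0.0f, 0.0f, 1.0f,   // eye position (placeholder)
            0.0f, 0.0f, 0.0f,   // point the camera looks at (placeholder)
            1.0f, 0.0f, 0.0f);  // up vector along +x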
