
I have been trying to perform ArUco marker detection in Unity using a custom camera that provides a texture. This is not a regular camera, so I can't use the regular WebCamTexture that comes with the package. I have tried everything I can think of, but it just doesn't work for me. Since I have tested so many things, I'll post my code for ArUco and maybe someone can help me fix it. The code doesn't throw any errors, but it also doesn't detect anything; at a minimum I was expecting the rejected corners to be detected.

using UnityEngine;
using UnityEngine.UI;
using UnityEngine.SceneManagement;
using System.Collections;
using System.Collections.Generic;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.Calib3dModule;
using OpenCVForUnity.ArucoModule;
using OpenCVForUnity.UnityUtils;
using Leap.Unity;
using Utils = OpenCVForUnity.UnityUtils.Utils;
using OpenCVForUnity.ImgprocModule;

namespace OpenCVForUnityExample
{
    /// <summary>
    /// ArUco Example
    /// An example of marker-based AR view and camera pose estimation using the aruco (ArUco Marker Detection) module.
    /// Referring to https://github.com/opencv/opencv_contrib/blob/master/modules/aruco/samples/detect_markers.cpp.
    /// http://docs.opencv.org/3.1.0/d5/dae/tutorial_aruco_detection.html
    /// </summary>
    public class ArUcoExample_LM : MonoBehaviour
    {
        /// <summary>
        /// The image texture.
        /// </summary>
        public Texture2D imgTexture;

        [Space (10)]

        /// <summary>
        /// The dictionary identifier.
        /// </summary>
        public ArUcoDictionary dictionaryId = ArUcoDictionary.DICT_6X6_250;

        /// <summary>
        /// The dictionary id dropdown.
        /// </summary>
        public Dropdown dictionaryIdDropdown;
        
        /// <summary>
        /// Determines if rejected corners are shown.
        /// </summary>
        public bool showRejectedCorners = false;

        /// <summary>
        /// The show rejected corners toggle.
        /// </summary>
        public Toggle showRejectedCornersToggle;
        
        /// <summary>
        /// Determines if pose estimation is applied.
        /// </summary>
        public bool applyEstimationPose = true;
        
        /// <summary>
        /// The length of the markers' side. Normally, unit is meters.
        /// </summary>
        public float markerLength = 0.1f;

        /// <summary>
        /// The AR game object.
        /// </summary>
        public GameObject arGameObject;
        
        /// <summary>
        /// The AR camera.
        /// </summary>
        public Camera arCamera;

        [Space (10)]

        /// <summary>
        /// Determines if the AR camera should be moved.
        /// </summary>
        public bool shouldMoveARCamera = false;

        /// <summary>
        /// The rgb mat.
        /// </summary>
        Mat rgbMat;
        Mat ids;
        List<Mat> corners;
        List<Mat> rejectedCorners;
        Mat rvecs;
        Mat tvecs;
        Mat rotMat;
        Mat gray;
        Mat bw;

        DetectorParameters detectorParams;
        Dictionary dictionary;

        /// <summary>
        /// The texture.
        /// </summary>
        public Texture2D texture;

        public LM_Texture2DToMat_ZED LMD;

        Mat hierarchy;
        List<MatOfPoint> contours;

        // Use this for initialization
        void Start ()
        {
            ids = new Mat();
            corners = new List<Mat>();
            rejectedCorners = new List<Mat>();
            rvecs = new Mat();
            tvecs = new Mat();
            rotMat = new Mat(3, 3, CvType.CV_64FC1);

            detectorParams = DetectorParameters.create();
            dictionary = Aruco.getPredefinedDictionary((int)dictionaryId);

            ///////////////////
            ///
            hierarchy = new Mat();
            contours = new List<MatOfPoint>();
            /////////////////////////////////////


        }

        // Update is called once per frame
        void Update()
        {
            imgTexture = LMD.myCroppedTex2d;
            if (imgTexture != null)
            {

                //gameObject.GetComponent<Renderer>().material.mainTexture = imgTexture;  //THIS WAS JUST TO VALIDATE THE TEXTURE WAS COMING THROUGH PROPERLY.
                if (rgbMat == null)
                {
                    //rgbMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
                    rgbMat = LMD.outputMAT;
                    rgbMat.convertTo(rgbMat, CvType.CV_8UC3);
                    Debug.LogWarning("MAT INFO: "+rgbMat+" COLS: "+rgbMat.cols());
                    texture = new Texture2D(rgbMat.cols(), rgbMat.rows(), TextureFormat.RGBA32, false);
                }
                
                dictionaryIdDropdown.value = (int)dictionaryId;
                showRejectedCornersToggle.isOn = showRejectedCorners;
                DetectMarkers();
            }
        }

        private void DetectMarkers ()
        {

            Utils.texture2DToMat (imgTexture, rgbMat);   // <-- does not work; the image breaks from here.
                                                         //Debug.Log ("imgMat dst ToString " + rgbMat.ToString ());


            /////
            //rgbMat = LMD.outputMAT;
            //rgbMat.convertTo(rgbMat, CvType.CV_32SC2);
            //rgbMat.convertTo(rgbMat, CvType.CV_8UC1);
            //Debug.Log("imgMat dst ToString " + rgbMat.ToString());
            //Utils.matToTexture2D(rgbMat, texture);      // <-- if you disable line 121,297 and 295 and comment out this 3 lines, you get a blinking image.
            //gameObject.GetComponent<Renderer>().material.mainTexture = texture;
            /////



            //gameObject.transform.localScale = new Vector3 (imgTexture.width, imgTexture.height, 1);
            //Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width = rgbMat.width ();
            float height = rgbMat.height ();

            float imageSizeScale = 1.0f;
            float widthScale = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;
            if (widthScale < heightScale) {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            } else {
                Camera.main.orthographicSize = height / 2;
            }


            // set camera parameters.
            int max_d = (int)Mathf.Max (width, height);
            double fx = max_d;
            double fy = max_d;
            double cx = width / 2.0f;
            double cy = height / 2.0f;
            Mat camMatrix = new Mat (3, 3, CvType.CV_64FC1);
            camMatrix.put (0, 0, fx);
            camMatrix.put (0, 1, 0);
            camMatrix.put (0, 2, cx);
            camMatrix.put (1, 0, 0);
            camMatrix.put (1, 1, fy);
            camMatrix.put (1, 2, cy);
            camMatrix.put (2, 0, 0);
            camMatrix.put (2, 1, 0);
            camMatrix.put (2, 2, 1.0f);
            //Debug.Log ("camMatrix " + camMatrix.dump ());  //Dont care about this right now


            MatOfDouble distCoeffs = new MatOfDouble (0, 0, 0, 0);
            //Debug.Log ("distCoeffs " + distCoeffs.dump ()); //Dont care about this right now


            // calibration camera matrix values.
            Size imageSize = new Size (width * imageSizeScale, height * imageSizeScale);
            double apertureWidth = 0;
            double apertureHeight = 0;
            double[] fovx = new double[1];
            double[] fovy = new double[1];
            double[] focalLength = new double[1];
            Point principalPoint = new Point (0, 0);
            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            //Debug.Log ("imageSize " + imageSize.ToString ());
            //Debug.Log ("apertureWidth " + apertureWidth);
            //Debug.Log ("apertureHeight " + apertureHeight);
            //Debug.Log ("fovx " + fovx [0]);
            //Debug.Log ("fovy " + fovy [0]);
            //Debug.Log ("focalLength " + focalLength [0]);
            //Debug.Log ("principalPoint " + principalPoint.ToString ());
            //Debug.Log ("aspectratio " + aspectratio [0]);


            // To convert the difference of the FOV value of the OpenCV and Unity. 
            double fovXScale = (2.0 * Mathf.Atan ((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2 ((float)cx, (float)fx) + Mathf.Atan2 ((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan ((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2 ((float)cy, (float)fy) + Mathf.Atan2 ((float)(imageSize.height - cy), (float)fy));

            //Debug.Log ("fovXScale " + fovXScale); //Dont care about this right now
            //Debug.Log ("fovYScale " + fovYScale); //Dont care about this right now


            // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
            if (widthScale < heightScale) {
                arCamera.fieldOfView = (float)(fovx [0] * fovXScale);
            } else {
                arCamera.fieldOfView = (float)(fovy [0] * fovYScale);
            }
            // Display objects near the camera.
            arCamera.nearClipPlane = 0.01f;




            //Debug.Log("RGBMAT " + rgbMat);
            //Debug.Log("Dictionary " + dictionary);
            //Debug.Log("corners " + corners);
            //Debug.Log("ids " + ids);
            //Debug.Log("detectorParams " + detectorParams);
            //Debug.Log("rejectedCorners " + rejectedCorners);
            //Debug.Log("camMatrix " + camMatrix);
            //Debug.Log("distCoeffs " + distCoeffs);



            /////////////////////////////
            if (gray == null)
            {
                gray = new Mat();
            }
            
            Imgproc.cvtColor(rgbMat, gray, Imgproc.COLOR_BGR2GRAY);

            // Convert image to binary
            if (bw == null)
            {
                bw = new Mat();
            }
            
            Imgproc.threshold(gray, bw, 50, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU);

            /////////////////////////////
            ///
            // detect markers.  ////////////////////////////////////// if you enable it breaks
            Aruco.detectMarkers (gray, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);


            // if at least one marker detected
            if (ids.total () > 0) {
                Debug.Log("some ids");
                Aruco.drawDetectedMarkers (rgbMat, corners, ids, new Scalar (0, 255, 0));

                // estimate pose.
                if (applyEstimationPose) {
                    Debug.Log("this is progress");
                    Aruco.estimatePoseSingleMarkers (corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                    for (int i = 0; i < ids.total (); i++) {
                        using (Mat rvec = new Mat (rvecs, new OpenCVForUnity.CoreModule.Rect (0, i, 1, 1)))
                        using (Mat tvec = new Mat (tvecs, new OpenCVForUnity.CoreModule.Rect (0, i, 1, 1))) {
                            
                            // In this example we are processing with RGB color image, so Axis-color correspondences are X: blue, Y: green, Z: red. (Usually X: red, Y: green, Z: blue)
                            Calib3d.drawFrameAxes(rgbMat, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                        }
                        
                        // This example can display the ARObject on only first detected marker.
                        if (i == 0) {

                            // Get translation vector
                            double[] tvecArr = tvecs.get (i, 0);

                            // Get rotation vector
                            double[] rvecArr = rvecs.get (i, 0);
                            Mat rvec = new Mat (3, 1, CvType.CV_64FC1);
                            rvec.put (0, 0, rvecArr);

                            // Convert rotation vector to rotation matrix.
                            Calib3d.Rodrigues (rvec, rotMat);
                            double[] rotMatArr = new double[rotMat.total ()];
                            rotMat.get (0, 0, rotMatArr);

                            // Convert OpenCV camera extrinsic parameters to Unity Matrix4x4.
                            Matrix4x4 transformationM = new Matrix4x4 (); // from OpenCV
                            transformationM.SetRow (0, new Vector4 ((float)rotMatArr [0], (float)rotMatArr [1], (float)rotMatArr [2], (float)tvecArr [0]));
                            transformationM.SetRow (1, new Vector4 ((float)rotMatArr [3], (float)rotMatArr [4], (float)rotMatArr [5], (float)tvecArr [1]));
                            transformationM.SetRow (2, new Vector4 ((float)rotMatArr [6], (float)rotMatArr [7], (float)rotMatArr [8], (float)tvecArr [2]));
                            transformationM.SetRow (3, new Vector4 (0, 0, 0, 1));
                            Debug.Log ("transformationM " + transformationM.ToString ());

                            Matrix4x4 invertYM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, -1, 1));
                            Debug.Log ("invertYM " + invertYM.ToString ());

                            // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                            // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
                            Matrix4x4 ARM = invertYM * transformationM * invertYM;

                            if (shouldMoveARCamera) {

                                ARM = arGameObject.transform.localToWorldMatrix * ARM.inverse;

                                Debug.Log ("ARM " + ARM.ToString ());

                                ARUtils.SetTransformFromMatrix (arCamera.transform, ref ARM);

                            } else {

                                ARM = arCamera.transform.localToWorldMatrix * ARM;

                                Debug.Log ("ARM " + ARM.ToString ());

                                ARUtils.SetTransformFromMatrix (arGameObject.transform, ref ARM);
                            }
                        }
                    }
                }
            }

            if (showRejectedCorners && rejectedCorners.Count > 0) {
                Debug.Log("Show Rejected Corners");
                Aruco.drawDetectedMarkers (rgbMat, rejectedCorners, new Mat (), new Scalar (255, 0, 0));
            }

            Utils.matToTexture2D (rgbMat, texture);
            gameObject.GetComponent<Renderer>().material.mainTexture = texture;
        }

        private void ResetObjectTransform ()
        {
            // reset AR object transform.
            Matrix4x4 i = Matrix4x4.identity;
            ARUtils.SetTransformFromMatrix (arCamera.transform, ref i);
            ARUtils.SetTransformFromMatrix (arGameObject.transform, ref i);
        }

        /// <summary>
        /// Raises the destroy event.
        /// </summary>
        void OnDestroy ()
        {
            if (rgbMat != null)
                rgbMat.Dispose ();
        }

        /// <summary>
        /// Raises the back button click event.
        /// </summary>
        public void OnBackButtonClick ()
        {
            SceneManager.LoadScene ("OpenCVForUnityExample");
        }

        /// <summary>
        /// Raises the dictionary id dropdown value changed event.
        /// </summary>
        public void OnDictionaryIdDropdownValueChanged (int result)
        {
            if ((int)dictionaryId != result) {
                dictionaryId = (ArUcoDictionary)result;

                ResetObjectTransform ();

                DetectMarkers ();
            }
        }

        /// <summary>
        /// Raises the show rejected corners toggle value changed event.
        /// </summary>
        public void OnShowRejectedCornersToggleValueChanged ()
        {
            if (showRejectedCorners != showRejectedCornersToggle.isOn) {
                showRejectedCorners = showRejectedCornersToggle.isOn;

                ResetObjectTransform ();

                DetectMarkers ();
            }
        }

        public enum ArUcoDictionary
        {
            DICT_4X4_50 = Aruco.DICT_4X4_50,
            DICT_4X4_100 = Aruco.DICT_4X4_100,
            DICT_4X4_250 = Aruco.DICT_4X4_250,
            DICT_4X4_1000 = Aruco.DICT_4X4_1000,
            DICT_5X5_50 = Aruco.DICT_5X5_50,
            DICT_5X5_100 = Aruco.DICT_5X5_100,
            DICT_5X5_250 = Aruco.DICT_5X5_250,
            DICT_5X5_1000 = Aruco.DICT_5X5_1000,
            DICT_6X6_50 = Aruco.DICT_6X6_50,
            DICT_6X6_100 = Aruco.DICT_6X6_100,
            DICT_6X6_250 = Aruco.DICT_6X6_250,
            DICT_6X6_1000 = Aruco.DICT_6X6_1000,
            DICT_7X7_50 = Aruco.DICT_7X7_50,
            DICT_7X7_100 = Aruco.DICT_7X7_100,
            DICT_7X7_250 = Aruco.DICT_7X7_250,
            DICT_7X7_1000 = Aruco.DICT_7X7_1000,
            DICT_ARUCO_ORIGINAL = Aruco.DICT_ARUCO_ORIGINAL,
        }


    }



}

1 Answer


The problem with the code was that the images were coming in as an Alpha8 texture, while OpenCV works with RGB textures. When I converted to RGB, the alpha channel was stripped from the image, and that channel was pretty much the only content in it.

The solution is posted in this other thread: Solution to change format from Alpha8 to RGBA texture.
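
For anyone hitting the same issue, here is a minimal sketch of the kind of conversion involved (the method name ConvertAlpha8ToRGBA32 and the variable names are illustrative, not the exact code from the linked thread). It copies the alpha channel of an Alpha8 Texture2D into the RGB channels of an RGBA32 texture, so that Utils.texture2DToMat produces a Mat that actually contains the image instead of an empty RGB frame. The source texture must be readable (Read/Write enabled) for GetPixels32 to work.

// Minimal sketch: turn an Alpha8 Texture2D into an RGBA32 Texture2D before handing it to OpenCV.
// In Alpha8 the image lives entirely in the alpha channel, so we copy it into R, G and B.
Texture2D ConvertAlpha8ToRGBA32(Texture2D alpha8Tex)
{
    Color32[] src = alpha8Tex.GetPixels32();   // requires a readable texture
    Color32[] dst = new Color32[src.Length];

    for (int i = 0; i < src.Length; i++)
    {
        byte v = src[i].a;                     // the only channel with real data in Alpha8
        dst[i] = new Color32(v, v, v, 255);    // grayscale RGB, opaque alpha
    }

    Texture2D rgbaTex = new Texture2D(alpha8Tex.width, alpha8Tex.height, TextureFormat.RGBA32, false);
    rgbaTex.SetPixels32(dst);
    rgbaTex.Apply();
    return rgbaTex;                            // safe to pass to Utils.texture2DToMat now
}

Once the incoming frames are converted like this, the grayscale Mat fed to Aruco.detectMarkers actually contains the markers and detection works.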
