private void UpdateARObjectTransform(Mat rvec, Mat tvec)
        {
            // Convert to Unity pose data.
            double[] rvecArr = new double[3];
            rvec.get(0, 0, rvecArr);
            double[] tvecArr = new double[3];
            tvec.get(0, 0, tvecArr);
            PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

            // Changes in pos/rot below these thresholds are ignored.
            if (enableLowPassFilter)
            {
                ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
            }
            oldPoseData = poseData;

            // Convert to transform matrix.
            ARM = ARUtils.ConvertPoseDataToMatrix(ref poseData, true, true);

            if (shouldMoveARCamera)
            {
                ARM = arGameObject.transform.localToWorldMatrix * ARM.inverse;

                ARUtils.SetTransformFromMatrix(arCamera.transform, ref ARM);
            }
            else
            {
                ARM = arCamera.transform.localToWorldMatrix * ARM;

                ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
            }
        }
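
A minimal sketch of the thresholding that ARUtils.LowpassPoseData is described as doing above (position threshold in world units, rotation threshold in degrees; the helper name is illustrative):

        private static void LowpassPose(ref Vector3 oldPos, ref Quaternion oldRot,
                                        ref Vector3 pos, ref Quaternion rot,
                                        float posThreshold, float rotThresholdDeg)
        {
            // Keep the previous position when the change is below the threshold.
            if ((pos - oldPos).magnitude < posThreshold)
            {
                pos = oldPos;
            }

            // Keep the previous rotation when the angular change is below the threshold.
            if (Quaternion.Angle(rot, oldRot) < rotThresholdDeg)
            {
                rot = oldRot;
            }
        }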
        private IEnumerator Wait(float delayTime)
        {
            yield return new WaitForSeconds(delayTime);

            ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
            deactivateCoroutine = null;
        }
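
For reference, the hand-pose example later in this collection starts this coroutine with StartCoroutine(Wait(10.0f)) and clears deactivateCoroutine when it finishes. A hypothetical restart pattern, if the timer should reset whenever the marker is seen again:

        // Hypothetical helper: restart the deactivation timer on re-detection.
        private void RestartDeactivationTimer()
        {
            if (deactivateCoroutine != null)
            {
                StopCoroutine(deactivateCoroutine);
            }
            deactivateCoroutine = StartCoroutine(Wait(10.0f));
        }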
Example #3
        private void ResetObjectTransform()
        {
            // reset AR object transform.
            Matrix4x4 i = Matrix4x4.identity;

            ARUtils.SetTransformFromMatrix(arCamera.transform, ref i);
            ARUtils.SetTransformFromMatrix(arGameObject.transform, ref i);
        }
Example #4
        private void UpdateARObjectTransform(Mat rvec, Mat tvec)
        {
            // Position
            double[] tvecArr = new double[3];
            tvec.get(0, 0, tvecArr);

            // Rotation
            Calib3d.Rodrigues(rvec, rotMat);

            double[] rotMatArr = new double[rotMat.total()];
            rotMat.get(0, 0, rotMatArr);

            transformationM.SetRow(0, new Vector4((float)rotMatArr [0], (float)rotMatArr [1], (float)rotMatArr [2], (float)tvecArr [0]));
            transformationM.SetRow(1, new Vector4((float)rotMatArr [3], (float)rotMatArr [4], (float)rotMatArr [5], (float)tvecArr [1]));
            transformationM.SetRow(2, new Vector4((float)rotMatArr [6], (float)rotMatArr [7], (float)rotMatArr [8], (float)tvecArr [2]));
            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

            // right-handed coordinate system (OpenCV) to left-handed one (Unity)
            ARM = invertYM * transformationM;

            // Apply the Z-axis inversion matrix.
            ARM = ARM * invertZM;

            if (shouldMoveARCamera)
            {
                ARM = arGameObject.transform.localToWorldMatrix * ARM.inverse;

                ARUtils.SetTransformFromMatrix(arCamera.transform, ref ARM);
            }
            else
            {
                ARM = arCamera.transform.localToWorldMatrix * ARM;

                ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
            }
        }
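
invertYM and invertZM are fields initialized outside this snippet; Example #6 below constructs them as axis-flip matrices, i.e.:

        // Axis-flip matrices as constructed in Example #6 below:
        // invertYM flips Y (right-handed OpenCV to left-handed Unity), invertZM flips Z.
        Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
        Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));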
Example #5
        private void DetectMarkers()
        {
            Utils.texture2DToMat(imgTexture, rgbMat);
            Debug.Log("imgMat dst ToString " + rgbMat.ToString());

            gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = rgbMat.width();
            float height = rgbMat.height();

            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }


            // set camera parameters.
            int    max_d     = (int)Mathf.Max(width, height);
            double fx        = max_d;
            double fy        = max_d;
            double cx        = width / 2.0f;
            double cy        = height / 2.0f;
            Mat    camMatrix = new Mat(3, 3, CvType.CV_64FC1);

            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());


            MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);

            Debug.Log("distCoeffs " + distCoeffs.dump());


            // calibration camera matrix values.
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);


            // Account for the difference between the OpenCV and Unity FOV conventions.
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            Debug.Log("fovXScale " + fovXScale);
            Debug.Log("fovYScale " + fovYScale);


            // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
            if (widthScale < heightScale)
            {
                arCamera.fieldOfView = (float)(fovx [0] * fovXScale);
            }
            else
            {
                arCamera.fieldOfView = (float)(fovy [0] * fovYScale);
            }
            // Display objects near the camera.
            arCamera.nearClipPlane = 0.01f;



            Mat        ids             = new Mat();
            List <Mat> corners         = new List <Mat> ();
            List <Mat> rejectedCorners = new List <Mat> ();
            Mat        rvecs           = new Mat();
            Mat        tvecs           = new Mat();
            Mat        rotMat          = new Mat(3, 3, CvType.CV_64FC1);

            DetectorParameters detectorParams = DetectorParameters.create();
            Dictionary         dictionary     = Aruco.getPredefinedDictionary((int)dictionaryId);


            // detect markers.
            Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

            // if at least one marker detected
            if (ids.total() > 0)
            {
                Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(0, 255, 0));

                // estimate pose.
                if (applyEstimationPose)
                {
                    Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                    for (int i = 0; i < ids.total(); i++)
                    {
                        using (Mat rvec = new Mat(rvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                            using (Mat tvec = new Mat(tvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1))) {
                                // Since we process an RGB image in this example, the axis-color correspondence is X: blue, Y: green, Z: red (usually X: red, Y: green, Z: blue).
                                Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                            }

                        // This example displays the ARObject only on the first detected marker.
                        if (i == 0)
                        {
                            // Get translation vector
                            double[] tvecArr = tvecs.get(i, 0);

                            // Get rotation vector
                            double[] rvecArr = rvecs.get(i, 0);
                            Mat      rvec    = new Mat(3, 1, CvType.CV_64FC1);
                            rvec.put(0, 0, rvecArr);

                            // Convert rotation vector to rotation matrix.
                            Calib3d.Rodrigues(rvec, rotMat);
                            double[] rotMatArr = new double[rotMat.total()];
                            rotMat.get(0, 0, rotMatArr);

                            // Convert OpenCV camera extrinsic parameters to Unity Matrix4x4.
                            Matrix4x4 transformationM = new Matrix4x4();  // from OpenCV
                            transformationM.SetRow(0, new Vector4((float)rotMatArr [0], (float)rotMatArr [1], (float)rotMatArr [2], (float)tvecArr [0]));
                            transformationM.SetRow(1, new Vector4((float)rotMatArr [3], (float)rotMatArr [4], (float)rotMatArr [5], (float)tvecArr [1]));
                            transformationM.SetRow(2, new Vector4((float)rotMatArr [6], (float)rotMatArr [7], (float)rotMatArr [8], (float)tvecArr [2]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
                            Debug.Log("transformationM " + transformationM.ToString());

                            Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
                            Debug.Log("invertYM " + invertYM.ToString());

                            // right-handed coordinate system (OpenCV) to left-handed one (Unity)
                            // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
                            Matrix4x4 ARM = invertYM * transformationM * invertYM;

                            if (shouldMoveARCamera)
                            {
                                ARM = arGameObject.transform.localToWorldMatrix * ARM.inverse;

                                Debug.Log("ARM " + ARM.ToString());

                                ARUtils.SetTransformFromMatrix(arCamera.transform, ref ARM);
                            }
                            else
                            {
                                ARM = arCamera.transform.localToWorldMatrix * ARM;

                                Debug.Log("ARM " + ARM.ToString());

                                ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
                            }
                        }
                    }
                }
            }

            if (showRejectedCorners && rejectedCorners.Count > 0)
            {
                Aruco.drawDetectedMarkers(rgbMat, rejectedCorners, new Mat(), new Scalar(255, 0, 0));
            }

            Utils.matToTexture2D(rgbMat, texture);
        }
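
For reference, the Unity FOV implied by pinhole intrinsics can also be computed directly. A minimal sketch (the helper name is illustrative) of the vertical case; the horizontal case is analogous with fx and the image width:

        // Vertical FOV in degrees from the focal length fy and the image height
        // (standard pinhole model: fov = 2 * atan(h / (2 * fy))).
        private static float VerticalFovDeg(double fy, double imageHeight)
        {
            return 2f * Mathf.Atan((float)(imageHeight / (2.0 * fy))) * Mathf.Rad2Deg;
        }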
Example #6
        // Use this for initialization
        void Start()
        {
            Mat rgbMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

            Utils.texture2DToMat(imgTexture, rgbMat);
            Debug.Log("imgMat dst ToString " + rgbMat.ToString());


            gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = rgbMat.width();
            float height = rgbMat.height();

            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }


            // set camera parameters.
            int    max_d     = (int)Mathf.Max(width, height);
            double fx        = max_d;
            double fy        = max_d;
            double cx        = width / 2.0f;
            double cy        = height / 2.0f;
            Mat    camMatrix = new Mat(3, 3, CvType.CV_64FC1);

            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());


            MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);

            Debug.Log("distCoeffs " + distCoeffs.dump());


            // calibration camera matrix values.
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);


            // Account for the difference between the OpenCV and Unity FOV conventions.
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            Debug.Log("fovXScale " + fovXScale);
            Debug.Log("fovYScale " + fovYScale);


            // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
            if (widthScale < heightScale)
            {
                ARCamera.fieldOfView = (float)(fovx [0] * fovXScale);
            }
            else
            {
                ARCamera.fieldOfView = (float)(fovy [0] * fovYScale);
            }



            Mat        ids      = new Mat();
            List <Mat> corners  = new List <Mat> ();
            List <Mat> rejected = new List <Mat> ();
            Mat        rvecs    = new Mat();
            Mat        tvecs    = new Mat();
            Mat        rotMat   = new Mat(3, 3, CvType.CV_64FC1);

            DetectorParameters detectorParams = DetectorParameters.create();
            Dictionary         dictionary     = Aruco.getPredefinedDictionary(dictionaryId);


            // detect markers.
            Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected);

            // estimate pose.
            if (applyEstimationPose && ids.total() > 0)
            {
                Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
            }

            if (ids.total() > 0)
            {
                Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(255, 0, 0));

                if (applyEstimationPose)
                {
                    for (int i = 0; i < ids.total(); i++)
                    {
//                        Debug.Log ("ids.dump() " + ids.dump ());

                        // drawAxis expects a single marker's rvec/tvec, so pass row i
                        // of the Nx1 result Mats rather than the whole Mats.
                        using (Mat rvec = rvecs.row(i))
                        using (Mat tvec = tvecs.row(i))
                        {
                            Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                        }

                        // This example displays the ARObject only on the first detected marker.
                        if (i == 0)
                        {
                            // position
                            double[] tvec = tvecs.get(i, 0);

                            // rotation
                            double[] rv   = rvecs.get(i, 0);
                            Mat      rvec = new Mat(3, 1, CvType.CV_64FC1);
                            rvec.put(0, 0, rv[0]);
                            rvec.put(1, 0, rv[1]);
                            rvec.put(2, 0, rv[2]);
                            Calib3d.Rodrigues(rvec, rotMat);

                            Matrix4x4 transformationM = new Matrix4x4();  // from OpenCV
                            transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec [0]));
                            transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec [1]));
                            transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)tvec [2]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
                            Debug.Log("transformationM " + transformationM.ToString());

                            Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
                            Debug.Log("invertZM " + invertZM.ToString());

                            Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
                            Debug.Log("invertYM " + invertYM.ToString());

                            // right-handed coordinate system (OpenCV) to left-handed one (Unity)
                            Matrix4x4 ARM = invertYM * transformationM;

                            // Apply the Z-axis inversion matrix.
                            ARM = ARM * invertZM;

                            if (shouldMoveARCamera)
                            {
                                ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;

                                Debug.Log("ARM " + ARM.ToString());

                                ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                            }
                            else
                            {
                                ARM = ARCamera.transform.localToWorldMatrix * ARM;

                                Debug.Log("ARM " + ARM.ToString());

                                ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                            }
                        }
                    }
                }
            }

            if (showRejected && rejected.Count > 0)
            {
                Aruco.drawDetectedMarkers(rgbMat, rejected, new Mat(), new Scalar(0, 0, 255));
            }


            Texture2D texture = new Texture2D(rgbMat.cols(), rgbMat.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(rgbMat, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
        }
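
The Mats allocated in Start above are native resources that this snippet never releases explicitly. A hypothetical cleanup at the end of Start could be:

            // Hypothetical explicit cleanup of the native Mats allocated above;
            // without it, their memory is reclaimed only by finalizers.
            // (The Mats inside the corners/rejected lists would need the same.)
            rgbMat.Dispose();
            camMatrix.Dispose();
            distCoeffs.Dispose();
            ids.Dispose();
            rvecs.Dispose();
            tvecs.Dispose();
            rotMat.Dispose();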
Example #7
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

                // detect markers.
                Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected);

                // estimate pose.
                if (applyEstimationPose && ids.total() > 0)
                {
                    Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
                }

                if (ids.total() > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(255, 0, 0));

                    if (applyEstimationPose)
                    {
                        for (int i = 0; i < ids.total(); i++)
                        {
                            // drawAxis expects a single marker's rvec/tvec, so pass row i
                            // of the Nx1 result Mats rather than the whole Mats.
                            using (Mat rvec = rvecs.row(i))
                            using (Mat tvec = tvecs.row(i))
                            {
                                Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                            }

                            // This example displays the ARObject only on the first detected marker.
                            if (i == 0)
                            {
                                // position
                                double[] tvec = tvecs.get(i, 0);

                                // rotation
                                double[] rv   = rvecs.get(i, 0);
                                Mat      rvec = new Mat(3, 1, CvType.CV_64FC1);
                                rvec.put(0, 0, rv [0]);
                                rvec.put(1, 0, rv [1]);
                                rvec.put(2, 0, rv [2]);
                                Calib3d.Rodrigues(rvec, rotMat);

                                transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec [0]));
                                transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec [1]));
                                transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)tvec [2]));
                                transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                                // right-handed coordinate system (OpenCV) to left-handed one (Unity)
                                ARM = invertYM * transformationM;

                                // Apply the Z-axis inversion matrix.
                                ARM = ARM * invertZM;

                                if (shouldMoveARCamera)
                                {
                                    ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;

                                    ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                                }
                                else
                                {
                                    ARM = ARCamera.transform.localToWorldMatrix * ARM;

                                    ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                                }
                            }
                        }
                    }
                }

                if (showRejected && rejected.Count > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat, rejected, new Mat(), new Scalar(0, 0, 255));
                }


                Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
Example #8
        void OnRenderObject()
        {
            if (pointCloudVisualizer == null)
            {
                return;
            }

            if (vertices == null || uintColorData == null)
            {
                return;
            }


            pointCloudBuffer.SetData(vertices);

            colorBuffer.SetData(uintColorData);

            pointCloudVisualizer.SetBuffer("_PointCloudData", pointCloudBuffer);
            pointCloudVisualizer.SetBuffer("_ColorData", colorBuffer);



            //Aruco
            if (applyAruco)
            {
                // the transformation matrix is static
                if (isStatic)
                {
                    // use earlier frames to decide transformationM
                    // (assumes the marker is visible in the buffered frame)
                    if (!matrixConfigured)
                    {
                        while (frameCount > 0)
                        {
                            imgTexture.LoadRawTextureData(byteColorData);

                            Utils.texture2DToMat(imgTexture, rgbMat);

                            // the image is upside down; flip it vertically
                            Core.flip(rgbMat, rgbMat, 0);
                            Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected, camMatrix, distCoeffs);


                            if (ids.total() == 1)
                            {
                                Debug.Log("detected");

                                frameCount--;
                            }
                        }

                        Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                        // position
                        tvec = tvecs.get(0, 0);

                        // rotation
                        rvec = new Mat(rvecs, range, range);
                        Calib3d.Rodrigues(rvec, rotMat);

                        // from marker coordinates (OpenCV) to camera coordinates (OpenCV)
                        transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)tvec[0]));
                        transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)tvec[1]));
                        transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)tvec[2]));
                        transformationM.SetRow(3, new Vector4(0, 0, 0, 1));


                        // right-handed coordinate system (OpenCV) to left-handed one (Unity):
                        // from camera coordinates (OpenCV) to camera coordinates (Unity)
                        transformationM = invertYM * transformationM;

                        // Apply the Z-axis inversion matrix:
                        // from marker coordinates (Unity) to camera coordinates (Unity)
                        transformationM = transformationM * invertZM;

                        // from camera coordinates (Unity) to marker coordinates (Unity),
                        // rotate 90 degrees around the X axis (alignM),
                        // then from marker coordinates (Unity) to world coordinates (Unity)
                        transformationM = markerOrigin.transform.localToWorldMatrix * alignM * transformationM.inverse;

                        // transformationM now maps camera coordinates (Unity) into world coordinates (Unity);
                        // move the RealSense model
                        ARUtils.SetTransformFromMatrix(viewCamera.transform, ref transformationM);

                        matrixConfigured = true;

                        Debug.Log("Transformation Matrix is configured");

                        if (drawMesh)
                        {
                            meshDrawer.draw(corners, vertices, ref transformationM, width, markerLength);
                        }
                    }
                }
                else
                {
                    imgTexture.LoadRawTextureData(byteColorData);

                    Utils.texture2DToMat(imgTexture, rgbMat);

                    Core.flip(rgbMat, rgbMat, 0);
                    Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected, camMatrix, distCoeffs);


                    if (ids.total() > 0)
                    {
                        Debug.Log("detected: " + ids.total());

                        Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                        // position
                        tvec = tvecs.get(0, 0);

                        // rotation
                        rvec = new Mat(rvecs, range, range);
                        Calib3d.Rodrigues(rvec, rotMat);

                        transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)tvec[0]));
                        transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)tvec[1]));
                        transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)tvec[2]));
                        transformationM.SetRow(3, new Vector4(0, 0, 0, 1));


                        transformationM = invertYM * transformationM;

                        transformationM = transformationM * invertZM;

                        transformationM = markerOrigin.transform.localToWorldMatrix * alignM * transformationM.inverse;

                        ARUtils.SetTransformFromMatrix(viewCamera.transform, ref transformationM);
                    }
                }
            }
            else
            {
                transformationM = Matrix4x4.TRS(viewCamera.transform.position, viewCamera.transform.rotation, viewCamera.transform.lossyScale);
            }

            pointCloudVisualizer.SetMatrix("_transformationM", transformationM);

            trs = Matrix4x4.TRS(transform.position, transform.rotation, transform.lossyScale);
            pointCloudVisualizer.SetMatrix("_trs", trs);

            pointCloudVisualizer.SetPass(0);
            Graphics.DrawProcedural(MeshTopology.Points, dataLength);
        }
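
alignM is a field defined outside this snippet; based on the in-code comment ("rotate 90 degrees around the X axis"), a plausible construction is the following (an assumption, not confirmed by the source):

        // Assumed construction of alignM per the comment above:
        // a 90-degree rotation about the X axis.
        private readonly Matrix4x4 alignM = Matrix4x4.TRS(Vector3.zero, Quaternion.Euler(90f, 0f, 0f), Vector3.one);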
        private void HandPoseEstimationProcess(Mat rgbaMat)
        {
            //Imgproc.blur(mRgba, mRgba, new Size(5,5));
            Imgproc.GaussianBlur(rgbaMat, rgbaMat, new Size(3, 3), 1, 1);
            //Imgproc.medianBlur(mRgba, mRgba, 3);

            if (!isColorSelected)
            {
                return;
            }

            // Note: GetContours is expected to return a reference to the detector's
            // internal contour list, which the Process call below refreshes.
            List <MatOfPoint> contours = detector.GetContours();

            detector.Process(rgbaMat);

            //Debug.Log ("Contours count: " + contours.Count);

            if (contours.Count <= 0)
            {
                return;
            }

            RotatedRect rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[0].toArray()));

            double boundWidth  = rect.size.width;
            double boundHeight = rect.size.height;
            int    boundPos    = 0;

            for (int i = 1; i < contours.Count; i++)
            {
                rect = Imgproc.minAreaRect(new MatOfPoint2f(contours[i].toArray()));
                if (rect.size.width * rect.size.height > boundWidth * boundHeight)
                {
                    boundWidth  = rect.size.width;
                    boundHeight = rect.size.height;
                    boundPos    = i;
                }
            }

            MatOfPoint contour = contours[boundPos];

            OpenCVForUnity.CoreModule.Rect boundRect = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));

            Imgproc.rectangle(rgbaMat, boundRect.tl(), boundRect.br(), CONTOUR_COLOR_WHITE, 2, 8, 0);

            //            Debug.Log (
            //                " Row start [" +
            //(int)boundRect.tl ().y + "] row end [" +
            //                    (int)boundRect.br ().y + "] Col start [" +
            //                    (int)boundRect.tl ().x + "] Col end [" +
            //                    (int)boundRect.br ().x + "]");

            Point topLeft     = new Point(boundRect.x, boundRect.y);
            Point topRight    = new Point(boundRect.x + boundRect.width, boundRect.y);
            Point bottomRight = new Point(boundRect.x + boundRect.width, boundRect.y + boundRect.height);
            Point bottomLeft  = new Point(boundRect.x, boundRect.y + boundRect.height);

            rectPoints = new MatOfPoint2f(topLeft, topRight, bottomRight, bottomLeft);

            //double a = boundRect.br ().y - boundRect.tl ().y;
            //a = a * 0.7;
            //a = boundRect.tl ().y + a;

            //Debug.Log (" A [" + a + "] br y - tl y = [" + (boundRect.br ().y - boundRect.tl ().y) + "]");

            //Imgproc.rectangle (rgbaMat, boundRect.tl (), new Point (boundRect.br ().x, a), CONTOUR_COLOR, 2, 8, 0);

            List <Point3> m_markerCorners3dList = new List <Point3>();

            m_markerCorners3dList.Add(new Point3(-0.5f, -0.5f, 0)); //Top, Left (A)
            m_markerCorners3dList.Add(new Point3(+0.5f, -0.5f, 0)); //Top, Right (B)
            m_markerCorners3dList.Add(new Point3(+0.5f, +0.5f, 0)); //Bottom, Right (C)
            m_markerCorners3dList.Add(new Point3(-0.5f, +0.5f, 0)); //Bottom, Left (D)
            m_markerCorners3d.fromList(m_markerCorners3dList);

            //estimate pose
            Mat Rvec = new Mat();
            Mat Tvec = new Mat();
            Mat raux = new Mat();
            Mat taux = new Mat();

            Calib3d.solvePnP(m_markerCorners3d, rectPoints, camMatrix, distCoeff, raux, taux);

            raux.convertTo(Rvec, CvType.CV_32F);
            taux.convertTo(Tvec, CvType.CV_32F);

            rotMat = new Mat(3, 3, CvType.CV_64FC1);
            Calib3d.Rodrigues(Rvec, rotMat);

            transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)Tvec.get(0, 0)[0]));
            transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)Tvec.get(1, 0)[0]));
            transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)Tvec.get(2, 0)[0]));
            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

            //Debug.Log ("transformationM " + transformationM.ToString ());

            Rvec.Dispose();
            Tvec.Dispose();
            raux.Dispose();
            taux.Dispose();
            rotMat.Dispose();

            ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
            //Debug.Log("arM " + ARM.ToString());

            if (ARGameObject != null)
            {
                ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                if (deactivateCoroutine == null)
                {
                    deactivateCoroutine = StartCoroutine(Wait(10.0f));
                }
                ARGameObject.SetActive(true);
            }

            //end pose estimation

            MatOfPoint2f pointMat = new MatOfPoint2f();

            Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), pointMat, 3, true);
            contour = new MatOfPoint(pointMat.toArray());

            MatOfInt  hull         = new MatOfInt();
            MatOfInt4 convexDefect = new MatOfInt4();

            Imgproc.convexHull(new MatOfPoint(contour.toArray()), hull);

            if (hull.toArray().Length < 3)
            {
                return;
            }

            Imgproc.convexityDefects(new MatOfPoint(contour.toArray()), hull, convexDefect);

            List <MatOfPoint> hullPoints = new List <MatOfPoint>();
            List <Point>      listPo     = new List <Point>();

            for (int j = 0; j < hull.toList().Count; j++)
            {
                listPo.Add(contour.toList()[hull.toList()[j]]);
            }

            MatOfPoint e = new MatOfPoint();

            e.fromList(listPo);
            hullPoints.Add(e);

            List <Point> listPoDefect = new List <Point>();

            if (convexDefect.rows() > 0)
            {
                List <int>   convexDefectList = convexDefect.toList();
                List <Point> contourList      = contour.toList();
                for (int j = 0; j < convexDefectList.Count; j = j + 4)
                {
                    Point farPoint = contourList[convexDefectList[j + 2]];
                    int   depth    = convexDefectList[j + 3];
                    //if (depth > threasholdSlider.value && farPoint.y < a)
                    //{
                    //    listPoDefect.Add(contourList[convexDefectList[j + 2]]);
                    //}
                    //Debug.Log ("convexDefectList [" + j + "] " + convexDefectList [j + 3]);
                }
            }


            Debug.Log("hull: " + hull.toList());
            if (convexDefect.rows() > 0)
            {
                Debug.Log("defects: " + convexDefect.toList());
            }

            // use these contours for hand detection
            Imgproc.drawContours(rgbaMat, hullPoints, -1, CONTOUR_COLOR, 3);

            int defectsTotal = (int)convexDefect.total();

            Debug.Log("Defect total " + defectsTotal);

            this.numberOfFingers = listPoDefect.Count;
            if (this.numberOfFingers > 5)
            {
                this.numberOfFingers = 5;
            }

            Debug.Log("numberOfFingers " + numberOfFingers);

            Imgproc.putText(rgbaMat, "" + numberOfFingers, new Point(rgbaMat.cols() / 2, rgbaMat.rows() / 2), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 255, 255, 255), 6, Imgproc.LINE_AA, false);
            numberOfFingersText.text = numberOfFingers.ToString();


            foreach (Point p in listPoDefect)
            {
                Imgproc.circle(rgbaMat, p, 6, new Scalar(255, 0, 255, 255), -1);
            }
        }
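
listPoDefect stays empty above because the depth filter is commented out, so numberOfFingers is always 0. A sketch of the intended filter, re-enabled from the commented-out code (thresholdSlider is an assumed UI element; 'a' is the 70%-height cutoff from the commented block):

            // Sketch of the commented-out defect filter: keep defects deeper than
            // the slider threshold that lie above the 70%-height cutoff 'a'.
            double a = boundRect.tl().y + 0.7 * (boundRect.br().y - boundRect.tl().y);
            for (int j = 0; j < convexDefectList.Count; j += 4)
            {
                Point farPoint = contourList[convexDefectList[j + 2]];
                int   depth    = convexDefectList[j + 3];
                if (depth > thresholdSlider.value && farPoint.y < a)
                {
                    listPoDefect.Add(farPoint);
                }
            }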
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

                // detect markers and estimate pose
                Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected);
                //          Aruco.detectMarkers (imgMat, dictionary, corners, ids);
                if (estimatePose && ids.total() > 0)
                {
                    Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
                }


                // draw results
                if (ids.total() > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(255, 0, 0));

                    if (estimatePose)
                    {
                        for (int i = 0; i < ids.total(); i++)
                        {
                            // drawAxis expects a single marker's rvec/tvec, so pass row i
                            // of the Nx1 result Mats rather than the whole Mats.
                            using (Mat rvecRow = rvecs.row(i))
                            using (Mat tvecRow = tvecs.row(i))
                            {
                                Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvecRow, tvecRow, markerLength * 0.5f);
                            }

                            // This example displays the ARObject only on the first detected marker.
                            if (i == 0)
                            {
                                // position
                                double[] tvec = tvecs.get(i, 0);
                                Vector4  pos  = new Vector4((float)tvec [0], (float)tvec [1], (float)tvec [2], 0); // from OpenCV
                                // right-handed coordinate system (OpenCV) to left-handed one (Unity)
                                ARGameObject.transform.localPosition = new Vector3(pos.x, -pos.y, pos.z);


                                // rotation
                                double[] rv   = rvecs.get(i, 0);
                                Mat      rvec = new Mat(3, 1, CvType.CV_64FC1);
                                rvec.put(0, 0, rv[0]);
                                rvec.put(1, 0, rv[1]);
                                rvec.put(2, 0, rv[2]);

                                Calib3d.Rodrigues(rvec, rotMat);

                                Vector3 forward = new Vector3((float)rotMat.get(0, 2) [0], (float)rotMat.get(1, 2) [0], (float)rotMat.get(2, 2) [0]); // from OpenCV
                                Vector3 up      = new Vector3((float)rotMat.get(0, 1) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(2, 1) [0]); // from OpenCV
                                // right-handed coordinate system (OpenCV) to left-handed one (Unity)
                                Quaternion rot = Quaternion.LookRotation(new Vector3(forward.x, -forward.y, forward.z), new Vector3(up.x, -up.y, up.z));
                                ARGameObject.transform.localRotation = rot;


                                // Apply the Z-axis inversion matrix.
                                invertZM        = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
                                transformationM = ARGameObject.transform.localToWorldMatrix * invertZM;

                                // Apply camera transform matrix.
                                transformationM = ARCamera.transform.localToWorldMatrix * transformationM;

                                ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref transformationM);
                            }
                        }
                    }
                }

                if (showRejected && rejected.Count > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat, rejected, new Mat(), new Scalar(0, 0, 255));
                }


                Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
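
The handedness conversion used in this last example can be packaged as a small helper. A minimal sketch (the helper name is illustrative), using the same Y-flip convention as above:

        // Minimal helper sketch: set a Unity pose from an OpenCV tvec and
        // rotation matrix, flipping Y to convert handedness as above.
        private static void SetPoseFromOpenCV(Transform t, double[] tvec, Mat rotMat)
        {
            t.localPosition = new Vector3((float)tvec[0], -(float)tvec[1], (float)tvec[2]);

            Vector3 forward = new Vector3((float)rotMat.get(0, 2)[0], -(float)rotMat.get(1, 2)[0], (float)rotMat.get(2, 2)[0]);
            Vector3 up      = new Vector3((float)rotMat.get(0, 1)[0], -(float)rotMat.get(1, 1)[0], (float)rotMat.get(2, 1)[0]);
            t.localRotation = Quaternion.LookRotation(forward, up);
        }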