        /// <summary>
        /// Creates a dictionary that maps each detected marker id to its PoseData.
        /// </summary>
        /// <param name="markerCount">Number of detected markers.</param>
        /// <param name="ids">Array of marker ids.</param>
        /// <param name="rvecs">Flattened rotation vectors, three values per marker.</param>
        /// <param name="tvecs">Flattened translation vectors, three values per marker.</param>
        /// <returns>Dictionary mapping marker id to PoseData.</returns>
        public static Dictionary<int, PoseData> CreatePoseDataDict(int markerCount, int[] ids, double[] rvecs, double[] tvecs)
        {
            Dictionary<int, PoseData> dict = new Dictionary<int, PoseData>();

            if (markerCount == 0)
            {
                return dict;
            }

            Vector3 rvec = new Vector3();

            for (int i = 0; i < markerCount; i++)
            {
                PoseData data = new PoseData();
                data.pos.Set((float)tvecs[i * 3], (float)tvecs[i * 3 + 1], (float)tvecs[i * 3 + 2]);

                rvec.Set((float)rvecs[i * 3], (float)rvecs[i * 3 + 1], (float)rvecs[i * 3 + 2]);

                // An OpenCV rvec is a Rodrigues vector: its direction is the rotation
                // axis and its magnitude is the rotation angle in radians.
                float theta = rvec.magnitude;
                rvec.Normalize();
                data.rot = Quaternion.AngleAxis(theta * Mathf.Rad2Deg, rvec);

                dict[ids[i]] = data;
            }
            return dict;
        }
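
A minimal usage sketch (the array and dictionary names here are hypothetical), assuming ids, rvecs and tvecs are the output Mats of Aruco.detectMarkers and Aruco.estimatePoseSingleMarkers:

        // Flatten the id/rvec/tvec Mats into arrays and build the id -> pose lookup.
        int markerCount = (int)ids.total();
        int[] idArr = new int[markerCount];
        ids.get(0, 0, idArr);
        double[] rvecArr = new double[markerCount * 3];
        rvecs.get(0, 0, rvecArr);
        double[] tvecArr = new double[markerCount * 3];
        tvecs.get(0, 0, tvecArr);
        Dictionary<int, PoseData> poseDict = ARUtils.CreatePoseDataDict(markerCount, idArr, rvecArr, tvecArr);
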
        /// <summary>
        /// Performs a lowpass check on the position and rotation of each marker in newDict, comparing them to those in oldDict.
        /// </summary>
        /// <param name="oldDict">Old dictionary.</param>
        /// <param name="newDict">New dictionary.</param>
        /// <param name="posThreshold">Positon threshold.</param>
        /// <param name="rotThreshold">Rotation threshold.</param>
        public static void LowpassPoseDataDict(Dictionary<int, PoseData> oldDict, Dictionary<int, PoseData> newDict, float posThreshold, float rotThreshold)
        {
            posThreshold *= posThreshold; // squared, to compare against sqrMagnitude without a square root

            List<int> keys = new List<int>(newDict.Keys);

            foreach (int key in keys)
            {
                if (!oldDict.ContainsKey(key))
                {
                    continue;
                }

                PoseData oldPose = oldDict[key];
                PoseData newPose = newDict[key];

                float posDiff = (newPose.pos - oldPose.pos).sqrMagnitude;
                float rotDiff = Quaternion.Angle(newPose.rot, oldPose.rot);

                if (posDiff < posThreshold)
                {
                    newPose.pos = oldPose.pos;
                }

                if (rotDiff < rotThreshold)
                {
                    newPose.rot = oldPose.rot;
                }

                newDict[key] = newPose;
            }
        }
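
A per-frame smoothing sketch; the oldPoses field and the threshold values (0.005 world units, 2 degrees) are illustrative, not taken from this source:

        // Build the new dictionary, snap sub-threshold jitter back to the
        // previous frame's values, then keep the result as the new reference.
        Dictionary<int, PoseData> newPoses = ARUtils.CreatePoseDataDict(markerCount, idArr, rvecArr, tvecArr);
        ARUtils.LowpassPoseDataDict(oldPoses, newPoses, 0.005f, 2f);
        oldPoses = newPoses;
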
        /// <summary>
        /// Converts rvec and tvec values to PoseData.
        /// </summary>
        /// <param name="rvec">Rvec.</param>
        /// <param name="tvec">Tvec.</param>
        /// <returns>PoseData.</returns>
        public static PoseData ConvertRvecTvecToPoseData(double[] rvec, double[] tvec)
        {
            PoseData data = new PoseData();

            data.pos = ConvertTvecToPos(tvec);
            data.rot = ConvertRvecToRot(rvec);

            return data;
        }
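
ConvertTvecToPos and ConvertRvecToRot are not shown in this section; a sketch consistent with the inline conversion in CreatePoseDataDict above would be:

        public static Vector3 ConvertTvecToPos(double[] tvec)
        {
            return new Vector3((float)tvec[0], (float)tvec[1], (float)tvec[2]);
        }

        public static Quaternion ConvertRvecToRot(double[] rvec)
        {
            // Rodrigues vector: direction = rotation axis, magnitude = angle in radians.
            Vector3 axis = new Vector3((float)rvec[0], (float)rvec[1], (float)rvec[2]);
            float theta = axis.magnitude;
            axis.Normalize();
            return Quaternion.AngleAxis(theta * Mathf.Rad2Deg, axis);
        }
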
        private void DetectARUcoMarker()
        {
            Imgproc.cvtColor(downScaleFrameMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

            // Detect markers and estimate Pose
            Aruco.detectMarkers(grayMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

            if (applyEstimationPose && ids.total() > 0)
            {
                Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                for (int i = 0; i < ids.total(); i++)
                {
                    // This example displays the ARObject only on the first detected marker.
                    if (i == 0)
                    {
                        // Convert to unity pose data.
                        double[] rvecArr = new double[3];
                        rvecs.get(0, 0, rvecArr);
                        double[] tvecArr = new double[3];
                        tvecs.get(0, 0, tvecArr);
                        // Compensate the depth estimate for the downscaled input: detection
                        // ran on a smaller image with unscaled focal length, which inflates
                        // the estimated z by the downscale ratio.
                        tvecArr[2] /= imageOptimizationHelper.downscaleRatio;
                        PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                        // Changes in pos/rot below these thresholds are ignored.
                        if (enableLowPassFilter)
                        {
                            ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                        }
                        oldPoseData = poseData;

                        // Create transform matrix.
                        transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

                        // Right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
                        ARM = invertYM * transformationM * invertYM;

                        // Apply Y-axis and Z-axis reflection matrix. (Adjust the posture of the AR object)
                        ARM = ARM * invertYM * invertZM;

                        hasUpdatedARTransformMatrix = true;

                        break;
                    }
                }
            }
        }
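
The invertYM and invertZM reflection matrices are not defined in this snippet; a sketch of the definitions assumed here (hypothetical initialization, matching the usual OpenCVForUnity sample setup):

        // Reflection matrices that flip the Y axis (OpenCV is right-handed,
        // y-down; Unity is left-handed, y-up) and the Z axis.
        private Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
        private Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
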
        /// <summary>
        /// Performs a lowpass check on the position and rotation in newPose, comparing them to oldPose.
        /// </summary>
        /// <param name="oldPose">Old PoseData.</param>
        /// <param name="newPose">New PoseData.</param>
        /// <param name="posThreshold">Positon threshold.</param>
        /// <param name="rotThreshold">Rotation threshold.</param>
        public static void LowpassPoseData(ref PoseData oldPose, ref PoseData newPose, float posThreshold, float rotThreshold)
        {
            posThreshold *= posThreshold; // squared, to compare against sqrMagnitude without a square root

            float posDiff = (newPose.pos - oldPose.pos).sqrMagnitude;
            float rotDiff = Quaternion.Angle(newPose.rot, oldPose.rot);

            if (posDiff < posThreshold)
            {
                newPose.pos = oldPose.pos;
            }

            if (rotDiff < rotThreshold)
            {
                newPose.rot = oldPose.rot;
            }
        }
Example 6
        private void DetectARUcoMarker()
        {
            // Detect markers and estimate Pose
            Aruco.detectMarkers(downScaleMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

            if (applyEstimationPose && ids.total() > 0)
            {
                Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                for (int i = 0; i < ids.total(); i++)
                {
                    // This example displays the ARObject only on the first detected marker.
                    if (i == 0)
                    {
                        // Convert to unity pose data.
                        double[] rvecArr = new double[3];
                        rvecs.get(0, 0, rvecArr);
                        double[] tvecArr = new double[3];
                        tvecs.get(0, 0, tvecArr);
                        tvecArr[2] /= DOWNSCALE_RATIO;
                        PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                        // Create transform matrix.
                        transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

                        // Right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
                        ARM = invertYM * transformationM * invertYM;

                        // Apply Y-axis and Z-axis reflection matrix. (Adjust the posture of the AR object)
                        ARM = ARM * invertYM * invertZM;

                        hasUpdatedARTransformMatrix = true;

                        break;
                    }
                }
            }
        }
Example 7
        public void OnFrameMatAcquired(Mat bgraMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix)
        {
            downScaleFrameMat = imageOptimizationHelper.GetDownScaleMat(bgraMat);

            if (enableDetection)
            {
                Imgproc.cvtColor(downScaleFrameMat, grayMat, Imgproc.COLOR_BGRA2GRAY);

                // Detect markers and estimate Pose
                Aruco.detectMarkers(grayMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

                if (applyEstimationPose && ids.total() > 0)
                {
                    Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                    for (int i = 0; i < ids.total(); i++)
                    {
                        // This example displays the ARObject only on the first detected marker.
                        if (i == 0)
                        {
                            // Convert to unity pose data.
                            double[] rvecArr = new double[3];
                            rvecs.get(0, 0, rvecArr);
                            double[] tvecArr = new double[3];
                            tvecs.get(0, 0, tvecArr);
                            tvecArr[2] /= imageOptimizationHelper.downscaleRatio;
                            PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                            // Changes in pos/rot below these thresholds are ignored.
                            if (enableLowPassFilter)
                            {
                                ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                            }
                            oldPoseData = poseData;

                            // Create transform matrix.
                            transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

                            lock (sync)
                            {
                                // Right-handed coordinates system (OpenCV) to left-handed one (Unity)
                                ARM = invertYM * transformationM;

                                // Apply Z-axis inverted matrix.
                                ARM = ARM * invertZM;
                            }

                            hasUpdatedARTransformMatrix = true;

                            break;
                        }
                    }
                }
            }

            Mat rgbMat4preview = null;

            if (displayCameraPreview)
            {
                rgbMat4preview = new Mat();
                Imgproc.cvtColor(downScaleFrameMat, rgbMat4preview, Imgproc.COLOR_BGRA2RGB);

                if (ids.total() > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat4preview, corners, ids, new Scalar(0, 255, 0));

                    if (applyEstimationPose)
                    {
                        for (int i = 0; i < ids.total(); i++)
                        {
                            using (Mat rvec = new Mat(rvecs, new OpenCVForUnity.Rect(0, i, 1, 1)))
                                using (Mat tvec = new Mat(tvecs, new OpenCVForUnity.Rect(0, i, 1, 1))) {
                                    // This example processes an RGB image, so the axis-color correspondences are X: blue, Y: green, Z: red. (With the usual BGR ordering: X: red, Y: green, Z: blue.)
                                    Aruco.drawAxis(rgbMat4preview, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                                }
                        }
                    }
                }
            }


            UnityEngine.WSA.Application.InvokeOnAppThread(() => {
                if (!webCamTextureToMatHelper.IsPlaying())
                {
                    return;
                }

                if (displayCameraPreview && rgbMat4preview != null)
                {
                    OpenCVForUnity.Utils.fastMatToTexture2D(rgbMat4preview, texture);
                }

                if (applyEstimationPose)
                {
                    if (hasUpdatedARTransformMatrix)
                    {
                        hasUpdatedARTransformMatrix = false;

                        lock (sync)
                        {
                            // Apply camera transform matrix.
                            ARM = cameraToWorldMatrix * invertZM * ARM;

                            ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
                        }
                    }
                }

                bgraMat.Dispose();
                if (rgbMat4preview != null)
                {
                    rgbMat4preview.Dispose();
                }
            }, false);
        }
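
OnFrameMatAcquired runs on a background capture thread, while the final transform is applied on the Unity app thread via InvokeOnAppThread; ARM is therefore shared state guarded by a lock. A sketch of the assumed field declarations (hypothetical; the names match their uses above):

        private readonly object sync = new object();      // guards reads/writes of ARM
        private Matrix4x4 ARM;                             // AR transform produced on the capture thread
        private bool hasUpdatedARTransformMatrix = false;  // set by the capture thread, consumed on the app thread
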
Example 8
        public void OnFrameMatAcquired(Mat grayMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix, CameraIntrinsics cameraIntrinsics)
        {
            isDetectingInFrameArrivedThread = true;

            DebugUtils.VideoTick();

            Mat   downScaleMat = null;
            float DOWNSCALE_RATIO;

            if (enableDownScale)
            {
                downScaleMat    = imageOptimizationHelper.GetDownScaleMat(grayMat);
                DOWNSCALE_RATIO = imageOptimizationHelper.downscaleRatio;
            }
            else
            {
                downScaleMat    = grayMat;
                DOWNSCALE_RATIO = 1.0f;
            }

            Mat         camMatrix  = null;
            MatOfDouble distCoeffs = null;

            if (useStoredCameraParameters)
            {
                camMatrix  = this.camMatrix;
                distCoeffs = this.distCoeffs;
            }
            else
            {
                camMatrix  = CreateCameraMatrix(cameraIntrinsics.FocalLengthX, cameraIntrinsics.FocalLengthY, cameraIntrinsics.PrincipalPointX / DOWNSCALE_RATIO, cameraIntrinsics.PrincipalPointY / DOWNSCALE_RATIO);
                distCoeffs = new MatOfDouble(cameraIntrinsics.RadialDistK1, cameraIntrinsics.RadialDistK2, cameraIntrinsics.RadialDistK3, cameraIntrinsics.TangentialDistP1, cameraIntrinsics.TangentialDistP2);
            }

            if (enableDetection)
            {
                // Detect markers and estimate Pose
                Aruco.detectMarkers(downScaleMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

                if (applyEstimationPose && ids.total() > 0)
                {
                    Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                    for (int i = 0; i < ids.total(); i++)
                    {
                        // This example displays the ARObject only on the first detected marker.
                        if (i == 0)
                        {
                            // Convert to unity pose data.
                            double[] rvecArr = new double[3];
                            rvecs.get(0, 0, rvecArr);
                            double[] tvecArr = new double[3];
                            tvecs.get(0, 0, tvecArr);
                            tvecArr[2] /= DOWNSCALE_RATIO;
                            PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                            // Create transform matrix.
                            transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

                            lock (sync)
                            {
                                // Right-handed coordinates system (OpenCV) to left-handed one (Unity)
                                ARM = invertYM * transformationM;

                                // Apply Z-axis inverted matrix.
                                ARM = ARM * invertZM;
                            }

                            hasUpdatedARTransformMatrix = true;

                            break;
                        }
                    }
                }
            }

            Mat rgbMat4preview = null;

            if (displayCameraPreview)
            {
                rgbMat4preview = new Mat();
                Imgproc.cvtColor(downScaleMat, rgbMat4preview, Imgproc.COLOR_GRAY2RGB);

                if (ids.total() > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat4preview, corners, ids, new Scalar(0, 255, 0));

                    if (applyEstimationPose)
                    {
                        for (int i = 0; i < ids.total(); i++)
                        {
                            using (Mat rvec = new Mat(rvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                                using (Mat tvec = new Mat(tvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                                {
                                    // This example processes an RGB image, so the axis-color correspondences are X: blue, Y: green, Z: red. (With the usual BGR ordering: X: red, Y: green, Z: blue.)
                                    Calib3d.drawFrameAxes(rgbMat4preview, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                                }
                        }
                    }
                }
            }

            DebugUtils.TrackTick();

            Enqueue(() =>
            {
                if (!webCamTextureToMatHelper.IsPlaying())
                {
                    return;
                }

                if (displayCameraPreview && rgbMat4preview != null)
                {
                    Utils.fastMatToTexture2D(rgbMat4preview, texture);
                    rgbMat4preview.Dispose();
                }

                if (applyEstimationPose)
                {
                    if (hasUpdatedARTransformMatrix)
                    {
                        hasUpdatedARTransformMatrix = false;

                        lock (sync)
                        {
                            // Apply camera transform matrix.
                            ARM = cameraToWorldMatrix * invertZM * ARM;

                            if (enableLerpFilter)
                            {
                                arGameObject.SetMatrix4x4(ARM);
                            }
                            else
                            {
                                ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
                            }
                        }
                    }
                }

                grayMat.Dispose();
            });

            isDetectingInFrameArrivedThread = false;
        }
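
CreateCameraMatrix is not shown in this section; a sketch consistent with its call site (focal lengths and principal point in pixels) would be:

        private Mat CreateCameraMatrix(float fx, float fy, float cx, float cy)
        {
            // 3x3 pinhole intrinsic matrix:
            // | fx  0  cx |
            // |  0 fy  cy |
            // |  0  0   1 |
            Mat camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, fx); camMatrix.put(0, 1, 0);  camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);  camMatrix.put(1, 1, fy); camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);  camMatrix.put(2, 1, 0);  camMatrix.put(2, 2, 1);
            return camMatrix;
        }
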