Example #1
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();
                Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

                foreach (MarkerSettings settings in markerSettingsList)
                {
                    settings.setAllARGameObjectsDisable();
                }

                Aruco.detectMarkers(rgbMat, dictionaryAruco, cornersAruco, idsAruco);

                if (idsAruco.total() > 0)
                {
                    SetMarker();
                }
                else
                {
                    SetMarkerLess();
                }

                Utils.fastMatToTexture2D(rgbaMat, texture);
            }
        }
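
For context, the fields this Update() relies on are declared elsewhere in the class. A minimal sketch of what it assumes (types inferred from the calls; the dictionary choice and initialization are assumptions, not from the original):

        WebCamTextureToMatHelper webCamTextureToMatHelper;      // assigned in Start(), provides camera frames
        Mat rgbMat;                                             // RGB working buffer sized to the camera frame
        Texture2D texture;                                      // display target for fastMatToTexture2D
        Dictionary dictionaryAruco = Aruco.getPredefinedDictionary(Aruco.DICT_6X6_250); // assumed dictionary id
        List<Mat> cornersAruco = new List<Mat>();               // detected marker corners (output)
        Mat idsAruco = new Mat();                               // detected marker ids (output)
        List<MarkerSettings> markerSettingsList;                // project-specific marker-to-GameObject bindings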
Example #2
    public void MarkerProcessing(Mat MatImg)
    {
        if (MatImg != null)
        {
            Imgproc.cvtColor(MatImg, MatImg, Imgproc.COLOR_RGBA2RGB);
            Aruco.detectMarkers(MatImg, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

            if (ids.total() > 0)
            {
                OpenCVForUnity.UtilsModule.Converters.Mat_to_vector_int(ids, list_ids);

                // draw markers.
                Aruco.drawDetectedMarkers(MatImg, corners, ids, new Scalar(0, 255, 0));

                // estimate pose.
                if (applyEstimationPose)
                {
                    EstimatePoseCanonicalMarker(MatImg);
                }

                if (showRejectedCorners && rejectedCorners.Count > 0)
                {
                    Aruco.drawDetectedMarkers(MatImg, rejectedCorners, new Mat(), new Scalar(255, 0, 0));
                }
            }
        }
    }
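
A possible call site for Example #2, assuming the same WebCamTextureToMatHelper pattern as the other examples here (this wiring is an assumption, not part of the original):

    void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            // MarkerProcessing converts the Mat to RGB in place, so work on a copy
            // to leave the helper's internal RGBA buffer untouched.
            using (Mat frame = webCamTextureToMatHelper.GetMat().clone())
            {
                MarkerProcessing(frame);
            }
        }
    }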
Example #3
        void ArucoDetection()
        {
            // Detect ArUco markers
            Dictionary dict = Aruco.getPredefinedDictionary(Aruco.DICT_4X4_1000);

            Aruco.detectMarkers(cached_initMat, dict, corners, ids);
            Aruco.drawDetectedMarkers(cached_initMat, corners, ids);
            // Debug.Log("AD - 93: Markers Detected");
            // Debug.LogFormat("Corners: {0}", corners.Count);

            // Get desired corner of marker
            Point[] src_point_array = new Point[POINT_COUNT];
            for (int i = 0; i < corners.Count; i++)
            {
                int aruco_id = (int)(ids.get(i, 0)[0]);
                int src_i    = arucoTosrc(aruco_id);
                int corner_i = aruco_id % 4;

                // Debug.LogFormat("AD - 101: aruco_id: {0}; corner_i: {1}; src_i: {2}", aruco_id, corner_i, src_i);

                // Store corner[i] into spa[src_i]
                src_point_array[src_i] = new Point(corners[i].get(0, corner_i)[0], corners[i].get(0, corner_i)[1]);

                // Display the corner as circle on outMat.
                Imgproc.circle(cached_initMat, src_point_array[src_i], 10, new Scalar(255, 255, 0));
            }

            // Converting to Ray values for Raycast
            Camera _cam = Camera.main;

            if (_cam != null)
            {
                for (int i = 0; i < POINT_COUNT; i++)
                {
                    if (src_point_array[i] != null)
                    {
                        src_ray_array[i] = _cam.ScreenPointToRay(
                            new Vector3((float)src_point_array[i].x, (float)src_point_array[i].y, 0)).direction;
                    }
                }
            }
            // Debug.LogFormat("Detected Direction: {0}", src_ray_array[0]);
            // Debug.LogFormat("Camera Direction: {0}", _cam.transform.forward);

            // Count non-null source points
            bool spa_full = (count_src_nulls() == 7);

            // Check whether we have valid faces
            for (int i = 0; i < FACE_COUNT; i++)
            {
                faceX_full[i] = check_faces(i);
            }

            Core.flip(cached_initMat, outMat, 0);
        }
Example #4
    public Vector4 DetectMarkers(Mat rgbMat)
    {
        Mat        ids             = new Mat();
        List <Mat> corners         = new List <Mat>();
        List <Mat> rejectedCorners = new List <Mat>();
        Mat        rvecs           = new Mat();
        //Mat rotMat = new Mat(3, 3, CvType.CV_64FC1);
        Mat tvecs = new Mat();
        DetectorParameters detectorParams = DetectorParameters.create();
        Dictionary         dictionary     = Aruco.getPredefinedDictionary((int)dictionaryId);
        Vector4            ARM            = new Vector4();

        // detect markers
        Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

        // if at least one marker detected
        if (ids.total() > 0)
        {
            // estimate pose, from marker to camera
            Aruco.estimatePoseSingleMarkers(corners, markerSize, camMatrix, distCoeffs, rvecs, tvecs);

            // Marker centre location
            double[] tvecArr = tvecs.get(0, 0);

            // image flip + coordinates transformation (OpenCV to Unity)
            ARM = new Vector4((float)tvecArr[0] + 0.005f, (float)tvecArr[1] + 0.005f, -(float)tvecArr[2], 1f);

            Debug.Log("raw ARM " + ARM.ToString("f5"));

            //// Rotation and convert to Unity matrix format
            //double[] rvecArr = rvecs.get(0, 0);
            //Mat rvec = new Mat(3, 1, CvType.CV_64FC1);
            //rvec.put(0, 0, rvecArr);
            //Calib3d.Rodrigues(rvec, rotMat);
            //double[] rotMatArr = new double[rotMat.total()];
            //rotMat.get(0, 0, rotMatArr);

            //// Transformation matrix
            //Matrix4x4 transformationM = new Matrix4x4();
            //transformationM.SetRow(0, new Vector4((float)rotMatArr[0], (float)rotMatArr[1], (float)rotMatArr[2], (float)tvecArr[0]));
            //transformationM.SetRow(1, new Vector4((float)rotMatArr[3], (float)rotMatArr[4], (float)rotMatArr[5], (float)tvecArr[1]));
            //transformationM.SetRow(2, new Vector4((float)rotMatArr[6], (float)rotMatArr[7], (float)rotMatArr[8], (float)tvecArr[2]));
            //transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
            //Debug.Log("transformationM " + transformationM.ToString());

            success = true;
        }
        else
        {
            Debug.Log("not detected");
            success = false;
        }

        return(ARM);
    }
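
The Vector4 returned above is a camera-space marker position (after the image-flip and coordinate adjustments noted in the snippet). One plausible way to consume it, assuming arCamera and arGameObject fields like those in the later examples (the wiring below is an assumption):

    Vector4 arm = DetectMarkers(rgbMat);
    if (success)
    {
        // Treat ARM as a point in the camera's local space and move the anchor there.
        arGameObject.transform.position =
            arCamera.transform.TransformPoint(new Vector3(arm.x, arm.y, arm.z));
    }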
Example #5
    void ArucoDetection()
    {
        Dictionary dict = Aruco.getPredefinedDictionary(Aruco.DICT_4X4_1000);

        Aruco.detectMarkers(cached_initMat, dict, corners, ids);
        Aruco.drawDetectedMarkers(cached_initMat, corners, ids);
        Debug.Log("AD: Markers Detected");
        src_recent_array = new Point[7];

        for (int i = 0; i < corners.Count; i++)
        {
            int aruco_id = (int)(ids.get(i, 0)[0]);
            int src_i    = arucoTosrc(aruco_id);
            int corner_i = aruco_id % 4;

            // Store corner[i] into spa[src_i]
            src_point_array[src_i]  = new Point(corners[i].get(0, corner_i)[0], corners[i].get(0, corner_i)[1]);
            src_recent_array[src_i] = new Point(corners[i].get(0, corner_i)[0], corners[i].get(0, corner_i)[1]);

            Debug.LogFormat("aruco_id: {0}; corner: {1}; src_i: {2}", aruco_id, src_point_array[src_i], src_i);

            // Display the corner as circle on outMat.
            Imgproc.circle(cached_initMat, src_point_array[src_i], 10, new Scalar(255, 255, 0));
        }

        Debug.Log("AD: src_point_array and recent populated");

        // Count non-null source points
        int markerCount = count_src_nulls();

        Debug.LogFormat("AD: markerCount = {0}", markerCount);
        m_ImageInfo.text = string.Format("Number of markers detected: {0}", markerCount);
        spa_full         = (markerCount == 7);

        // Check whether we have valid faces
        for (int i = 0; i < 3; i++)
        {
            faceX_full[i] = check_faces(i);
        }
        Debug.LogFormat("AD: full faces: 1-{0}, 2-{1}, 3-{2}",
                        faceX_full[0], faceX_full[1], faceX_full[2]);

        for (int i = 0; i < 3; i++)
        {
            faceX_recent_full[i] = check_recent_faces(i);
        }
        Debug.LogFormat("AD: recent full faces: 1-{0}, 2-{1}, 3-{2}",
                        faceX_recent_full[0], faceX_recent_full[1], faceX_recent_full[2]);

        Core.flip(cached_initMat, outMat, 0);
        Debug.Log("AD: done");
    }
Example #6
        private void DetectARUcoMarker()
        {
            if (downscaleRatio == 1)
            {
                downScaleRgbaMat = rgbaMat4Thread;
            }
            else
            {
                Imgproc.resize(rgbaMat4Thread, downScaleRgbaMat, new Size(), 1.0 / downscaleRatio, 1.0 / downscaleRatio, Imgproc.INTER_LINEAR);
            }
            Imgproc.cvtColor(downScaleRgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

            // Detect markers and estimate Pose
            Aruco.detectMarkers(grayMat, dictionary, corners, ids, detectorParams, rejected);

            if (applyEstimationPose && ids.total() > 0)
            {
                Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                for (int i = 0; i < ids.total(); i++)
                {
                    // This example displays the ARObject only on the first detected marker.
                    if (i == 0)
                    {
                        // Position
                        double[] tvec = tvecs.get(i, 0);

                        // Rotation
                        double[] rv   = rvecs.get(i, 0);
                        Mat      rvec = new Mat(3, 1, CvType.CV_64FC1);
                        rvec.put(0, 0, rv [0]);
                        rvec.put(1, 0, rv [1]);
                        rvec.put(2, 0, rv [2]);
                        Calib3d.Rodrigues(rvec, rotMat);

                        transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec [0]));
                        transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec [1]));
                        transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)(tvec [2] / downscaleRatio)));

                        transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                        // Right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        ARM = invertYM * transformationM;

                        // Apply Z axis inverted matrix.
                        ARM = ARM * invertZM;

                        hasUpdatedARTransformMatrix = true;

                        break;
                    }
                }
            }
        }
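
The invertYM and invertZM matrices used above are fields this snippet does not declare; Example #13 further down constructs them as plain axis reflections, which is presumably what is intended here as well:

        Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1)); // flip Y
        Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1)); // flip Z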
Example #7
    public Mat MarkerProcessing(Mat MatImg)
    {
        Mat resultMat = new Mat();

        Imgproc.cvtColor(MatImg, resultMat, Imgproc.COLOR_RGBA2RGB);
        Aruco.detectMarkers(resultMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

        // Accumulate elapsed time for each marker.
        // Mutating a dictionary while iterating it with foreach is not permitted, so iterate over a copy of the keys.
        List <int> temp_list = new List <int>();

        temp_list.AddRange(Timer.Keys);
        foreach (int key in temp_list)
        {
            Timer[key] = Timer[key] + Time.deltaTime;
            if (Timer[key] > waitTime)
            {
                GameObject parentObject     = GameObject.Find("ARGameObjects");
                GameObject HidearGameObject = parentObject.transform.Find(idDict[key]).gameObject;
                HidearGameObject.SetActive(false);
                Timer[key] = 0.0f;
            }
        }


        if (ids.total() > 0)
        {
            OpenCVForUnity.UtilsModule.Converters.Mat_to_vector_int(ids, list_ids);

            // draw markers.
            Aruco.drawDetectedMarkers(resultMat, corners, ids, new Scalar(0, 255, 0));

            // estimate pose.
            if (applyEstimationPose)
            {
                EstimatePoseCanonicalMarker(resultMat);
            }

            if (showRejectedCorners && rejectedCorners.Count > 0)
            {
                Aruco.drawDetectedMarkers(resultMat, rejectedCorners, new Mat(), new Scalar(255, 0, 0));
            }
        }

        // Return a black image instead if needed (BlackImage is currently unused).
        Mat BlackImage = new Mat(resultMat.rows(), resultMat.cols(), CvType.CV_8UC1, new Scalar(0, 0, 0));

        //Mat BlackImage = new Mat(resultMat.rows(), resultMat.cols(), CvType.CV_8UC1, new Scalar(255,255, 255));
        return(resultMat);
    }
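
Example #7 references several fields it does not declare. A sketch of plausible declarations (names taken from the snippet; values are illustrative only):

    // Fully qualified to avoid clashing with OpenCVForUnity's ArUco Dictionary type.
    System.Collections.Generic.Dictionary<int, float>  Timer  = new System.Collections.Generic.Dictionary<int, float>();  // marker id -> seconds since last seen
    System.Collections.Generic.Dictionary<int, string> idDict = new System.Collections.Generic.Dictionary<int, string>(); // marker id -> child name under "ARGameObjects"
    float waitTime = 0.5f;  // assumed timeout (seconds) before hiding a lost marker's object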
Example #8
        private void DetectARUcoMarker()
        {
            Imgproc.cvtColor(downScaleFrameMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

            // Detect markers and estimate Pose
            Aruco.detectMarkers(grayMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

            if (applyEstimationPose && ids.total() > 0)
            {
                Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                for (int i = 0; i < ids.total(); i++)
                {
                    // This example displays the ARObject only on the first detected marker.
                    if (i == 0)
                    {
                        // Convert to unity pose data.
                        double[] rvecArr = new double[3];
                        rvecs.get(0, 0, rvecArr);
                        double[] tvecArr = new double[3];
                        tvecs.get(0, 0, tvecArr);
                        tvecArr[2] /= imageOptimizationHelper.downscaleRatio;
                        PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                        // Changes in pos/rot below these thresholds are ignored.
                        if (enableLowPassFilter)
                        {
                            ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                        }
                        oldPoseData = poseData;

                        // Create transform matrix.
                        transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

                        // Right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
                        ARM = invertYM * transformationM * invertYM;

                        // Apply Y-axis and Z-axis reflection matrix. (Adjust the posture of the AR object)
                        ARM = ARM * invertYM * invertZM;

                        hasUpdatedARTransformMatrix = true;

                        break;
                    }
                }
            }
        }
Example #9
    void ArucoDetection()
    {
        Dictionary dict = Aruco.getPredefinedDictionary(Aruco.DICT_4X4_1000);

        Aruco.detectMarkers(cached_initMat, dict, corners, ids);
        Aruco.drawDetectedMarkers(cached_initMat, corners, ids);
        src_recent_array = new Point[7];

        for (int i = 0; i < corners.Count; i++)
        {
            int aruco_id = (int)(ids.get(i, 0)[0]);
            int src_i    = arucoTosrc(aruco_id);
            int corner_i = aruco_id % 4;

            // Store corner[i] into spa[src_i]
            src_point_array[src_i]  = new Point(corners[i].get(0, corner_i)[0], corners[i].get(0, corner_i)[1]);
            src_recent_array[src_i] = new Point(corners[i].get(0, corner_i)[0], corners[i].get(0, corner_i)[1]);

            // Display the corner as circle on outMat.
            // Imgproc.circle(cached_initMat, src_point_array[src_i], 5, new Scalar(255, 0, 255));
        }

        // Count non-null source points (now that the arrays are populated)
        int markerCount = count_src_nulls();
        spa_full = (markerCount == 7);

        // Check whether we have valid faces
        for (int i = 0; i < 3; i++)
        {
            faceX_full[i] = check_faces(i);
        }

        for (int i = 0; i < 3; i++)
        {
            faceX_recent_full[i] = check_recent_faces(i);
        }
    }
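
count_src_nulls() is not shown in any of these snippets. Judging from the spa_full check (equality with 7, the number of source points), it presumably returns the number of populated entries in src_point_array, along these lines (an assumption, not the original helper):

    int count_src_nulls()
    {
        int n = 0;
        foreach (Point p in src_point_array)
            if (p != null) n++;
        return n;
    }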
Example #10
        private void DetectARUcoMarker()
        {
            // Detect markers and estimate Pose
            Aruco.detectMarkers(downScaleMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

            if (applyEstimationPose && ids.total() > 0)
            {
                Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                for (int i = 0; i < ids.total(); i++)
                {
                    // This example displays the ARObject only on the first detected marker.
                    if (i == 0)
                    {
                        // Convert to unity pose data.
                        double[] rvecArr = new double[3];
                        rvecs.get(0, 0, rvecArr);
                        double[] tvecArr = new double[3];
                        tvecs.get(0, 0, tvecArr);
                        tvecArr[2] /= DOWNSCALE_RATIO;
                        PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                        // Create transform matrix.
                        transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

                        // Right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
                        ARM = invertYM * transformationM * invertYM;

                        // Apply Y-axis and Z-axis reflection matrix. (Adjust the posture of the AR object)
                        ARM = ARM * invertYM * invertZM;

                        hasUpdatedARTransformMatrix = true;

                        break;
                    }
                }
            }
        }
Example #11
        public void OnFrameMatAcquired(Mat grayMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix, CameraIntrinsics cameraIntrinsics)
        {
            isDetectingInFrameArrivedThread = true;

            DebugUtils.VideoTick();

            Mat   downScaleMat = null;
            float DOWNSCALE_RATIO;

            if (enableDownScale)
            {
                downScaleMat    = imageOptimizationHelper.GetDownScaleMat(grayMat);
                DOWNSCALE_RATIO = imageOptimizationHelper.downscaleRatio;
            }
            else
            {
                downScaleMat    = grayMat;
                DOWNSCALE_RATIO = 1.0f;
            }

            Mat         camMatrix  = null;
            MatOfDouble distCoeffs = null;

            if (useStoredCameraParameters)
            {
                camMatrix  = this.camMatrix;
                distCoeffs = this.distCoeffs;
            }
            else
            {
                camMatrix  = CreateCameraMatrix(cameraIntrinsics.FocalLengthX, cameraIntrinsics.FocalLengthY, cameraIntrinsics.PrincipalPointX / DOWNSCALE_RATIO, cameraIntrinsics.PrincipalPointY / DOWNSCALE_RATIO);
                distCoeffs = new MatOfDouble(cameraIntrinsics.RadialDistK1, cameraIntrinsics.RadialDistK2, cameraIntrinsics.RadialDistK3, cameraIntrinsics.TangentialDistP1, cameraIntrinsics.TangentialDistP2);
            }

            if (enableDetection)
            {
                // Detect markers and estimate Pose
                Aruco.detectMarkers(downScaleMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

                if (applyEstimationPose && ids.total() > 0)
                {
                    Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                    for (int i = 0; i < ids.total(); i++)
                    {
                        // This example displays the ARObject only on the first detected marker.
                        if (i == 0)
                        {
                            // Convert to unity pose data.
                            double[] rvecArr = new double[3];
                            rvecs.get(0, 0, rvecArr);
                            double[] tvecArr = new double[3];
                            tvecs.get(0, 0, tvecArr);
                            tvecArr[2] /= DOWNSCALE_RATIO;
                            PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                            // Create transform matrix.
                            transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

                            lock (sync)
                            {
                                // Right-handed coordinates system (OpenCV) to left-handed one (Unity)
                                ARM = invertYM * transformationM;

                                // Apply Z-axis inverted matrix.
                                ARM = ARM * invertZM;
                            }

                            hasUpdatedARTransformMatrix = true;

                            break;
                        }
                    }
                }
            }

            Mat rgbMat4preview = null;

            if (displayCameraPreview)
            {
                rgbMat4preview = new Mat();
                Imgproc.cvtColor(downScaleMat, rgbMat4preview, Imgproc.COLOR_GRAY2RGB);

                if (ids.total() > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat4preview, corners, ids, new Scalar(0, 255, 0));

                    if (applyEstimationPose)
                    {
                        for (int i = 0; i < ids.total(); i++)
                        {
                            using (Mat rvec = new Mat(rvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                                using (Mat tvec = new Mat(tvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                                {
                                    // In this example we process an RGB image, so the axis-color correspondences are X: blue, Y: green, Z: red (usually X: red, Y: green, Z: blue).
                                    Calib3d.drawFrameAxes(rgbMat4preview, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                                }
                        }
                    }
                }
            }

            DebugUtils.TrackTick();

            Enqueue(() =>
            {
                if (!webCamTextureToMatHelper.IsPlaying())
                {
                    return;
                }

                if (displayCameraPreview && rgbMat4preview != null)
                {
                    Utils.fastMatToTexture2D(rgbMat4preview, texture);
                    rgbMat4preview.Dispose();
                }

                if (applyEstimationPose)
                {
                    if (hasUpdatedARTransformMatrix)
                    {
                        hasUpdatedARTransformMatrix = false;

                        lock (sync)
                        {
                            // Apply camera transform matrix.
                            ARM = cameraToWorldMatrix * invertZM * ARM;

                            if (enableLerpFilter)
                            {
                                arGameObject.SetMatrix4x4(ARM);
                            }
                            else
                            {
                                ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
                            }
                        }
                    }
                }

                grayMat.Dispose();
            });

            isDetectingInFrameArrivedThread = false;
        }
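
Enqueue is not shown in Example #11; it marshals the closure back to the Unity main thread, since OnFrameMatAcquired runs on a background frame-arrived thread. A typical dispatcher it might correspond to (an assumed sketch, not the library's code):

        private readonly Queue<System.Action> mainThreadQueue = new Queue<System.Action>();

        private void Enqueue(System.Action action)
        {
            lock (mainThreadQueue)
                mainThreadQueue.Enqueue(action);
        }

        private void Update()
        {
            // Drain queued actions on the main thread, one frame at a time.
            lock (mainThreadQueue)
                while (mainThreadQueue.Count > 0)
                    mainThreadQueue.Dequeue().Invoke();
        }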
Example #12
        private void DetectMarkers()
        {
            Utils.texture2DToMat(imgTexture, rgbMat);
            Debug.Log("imgMat dst ToString " + rgbMat.ToString());

            gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = rgbMat.width();
            float height = rgbMat.height();

            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }


            // set camera parameters.
            int    max_d     = (int)Mathf.Max(width, height);
            double fx        = max_d;
            double fy        = max_d;
            double cx        = width / 2.0f;
            double cy        = height / 2.0f;
            Mat    camMatrix = new Mat(3, 3, CvType.CV_64FC1);

            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());


            MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);

            Debug.Log("distCoeffs " + distCoeffs.dump());


            // calibration camera matrix values.
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);


            // To convert the difference of the FOV value of the OpenCV and Unity.
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            Debug.Log("fovXScale " + fovXScale);
            Debug.Log("fovYScale " + fovYScale);


            // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
            if (widthScale < heightScale)
            {
                arCamera.fieldOfView = (float)(fovx [0] * fovXScale);
            }
            else
            {
                arCamera.fieldOfView = (float)(fovy [0] * fovYScale);
            }
            // Display objects near the camera.
            arCamera.nearClipPlane = 0.01f;



            Mat        ids             = new Mat();
            List <Mat> corners         = new List <Mat> ();
            List <Mat> rejectedCorners = new List <Mat> ();
            Mat        rvecs           = new Mat();
            Mat        tvecs           = new Mat();
            Mat        rotMat          = new Mat(3, 3, CvType.CV_64FC1);

            DetectorParameters detectorParams = DetectorParameters.create();
            Dictionary         dictionary     = Aruco.getPredefinedDictionary((int)dictionaryId);


            // detect markers.
            Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

            // if at least one marker detected
            if (ids.total() > 0)
            {
                Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(0, 255, 0));

                // estimate pose.
                if (applyEstimationPose)
                {
                    Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                    for (int i = 0; i < ids.total(); i++)
                    {
                        using (Mat rvec = new Mat(rvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                            using (Mat tvec = new Mat(tvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1))) {
                                // In this example we process an RGB image, so the axis-color correspondences are X: blue, Y: green, Z: red (usually X: red, Y: green, Z: blue).
                                Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                            }

                        // This example displays the ARObject only on the first detected marker.
                        if (i == 0)
                        {
                            // Get translation vector
                            double[] tvecArr = tvecs.get(i, 0);

                            // Get rotation vector
                            double[] rvecArr = rvecs.get(i, 0);
                            Mat      rvec    = new Mat(3, 1, CvType.CV_64FC1);
                            rvec.put(0, 0, rvecArr);

                            // Convert rotation vector to rotation matrix.
                            Calib3d.Rodrigues(rvec, rotMat);
                            double[] rotMatArr = new double[rotMat.total()];
                            rotMat.get(0, 0, rotMatArr);

                            // Convert OpenCV camera extrinsic parameters to Unity Matrix4x4.
                            Matrix4x4 transformationM = new Matrix4x4();  // from OpenCV
                            transformationM.SetRow(0, new Vector4((float)rotMatArr [0], (float)rotMatArr [1], (float)rotMatArr [2], (float)tvecArr [0]));
                            transformationM.SetRow(1, new Vector4((float)rotMatArr [3], (float)rotMatArr [4], (float)rotMatArr [5], (float)tvecArr [1]));
                            transformationM.SetRow(2, new Vector4((float)rotMatArr [6], (float)rotMatArr [7], (float)rotMatArr [8], (float)tvecArr [2]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
                            Debug.Log("transformationM " + transformationM.ToString());

                            Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
                            Debug.Log("invertYM " + invertYM.ToString());

                            // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                            // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
                            Matrix4x4 ARM = invertYM * transformationM * invertYM;

                            if (shouldMoveARCamera)
                            {
                                ARM = arGameObject.transform.localToWorldMatrix * ARM.inverse;

                                Debug.Log("ARM " + ARM.ToString());

                                ARUtils.SetTransformFromMatrix(arCamera.transform, ref ARM);
                            }
                            else
                            {
                                ARM = arCamera.transform.localToWorldMatrix * ARM;

                                Debug.Log("ARM " + ARM.ToString());

                                ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
                            }
                        }
                    }
                }
            }

            if (showRejectedCorners && rejectedCorners.Count > 0)
            {
                Aruco.drawDetectedMarkers(rgbMat, rejectedCorners, new Mat(), new Scalar(255, 0, 0));
            }

            Utils.matToTexture2D(rgbMat, texture);
        }
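
A quick sanity check of the FOV conversion above, with assumed numbers (not from the original): for a 640x480 image with fx = fy = 640 and a centered principal point (cx = 320, cy = 240), calibrationMatrixValues reports fovy = 2 * atan(480 / (2 * 640)) ≈ 41.1 degrees, and fovYScale evaluates to 1.0 because the two half-angle terms in the denominator sum to that same angle; so arCamera.fieldOfView ≈ 41.1, matching Unity's convention that fieldOfView is the vertical FOV in degrees.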
Example #13
        // Use this for initialization
        void Start()
        {
            Mat rgbMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

            Utils.texture2DToMat(imgTexture, rgbMat);
            Debug.Log("imgMat dst ToString " + rgbMat.ToString());


            gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = rgbMat.width();
            float height = rgbMat.height();

            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }


            // set camera parameters.
            int    max_d     = (int)Mathf.Max(width, height);
            double fx        = max_d;
            double fy        = max_d;
            double cx        = width / 2.0f;
            double cy        = height / 2.0f;
            Mat    camMatrix = new Mat(3, 3, CvType.CV_64FC1);

            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());


            MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);

            Debug.Log("distCoeffs " + distCoeffs.dump());


            // calibration camera matrix values.
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);


            // To convert the difference of the FOV value of the OpenCV and Unity.
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            Debug.Log("fovXScale " + fovXScale);
            Debug.Log("fovYScale " + fovYScale);


            // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
            if (widthScale < heightScale)
            {
                ARCamera.fieldOfView = (float)(fovx [0] * fovXScale);
            }
            else
            {
                ARCamera.fieldOfView = (float)(fovy [0] * fovYScale);
            }



            Mat        ids      = new Mat();
            List <Mat> corners  = new List <Mat> ();
            List <Mat> rejected = new List <Mat> ();
            Mat        rvecs    = new Mat();
            Mat        tvecs    = new Mat();
            Mat        rotMat   = new Mat(3, 3, CvType.CV_64FC1);

            DetectorParameters detectorParams = DetectorParameters.create();
            Dictionary         dictionary     = Aruco.getPredefinedDictionary(dictionaryId);


            // detect markers.
            Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected);

            // estimate pose.
            if (applyEstimationPose && ids.total() > 0)
            {
                Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
            }

            if (ids.total() > 0)
            {
                Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(255, 0, 0));

                if (applyEstimationPose)
                {
                    for (int i = 0; i < ids.total(); i++)
                    {
//                        Debug.Log ("ids.dump() " + ids.dump ());

                        // drawAxis expects a single rvec/tvec, so slice out row i
                        // (passing the full rvecs/tvecs only works when exactly one marker is detected).
                        using (Mat rvecRow = new Mat(rvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                        using (Mat tvecRow = new Mat(tvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                        {
                            Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvecRow, tvecRow, markerLength * 0.5f);
                        }

                        // This example displays the ARObject only on the first detected marker.
                        if (i == 0)
                        {
                            // position
                            double[] tvec = tvecs.get(i, 0);

                            // rotation
                            double[] rv   = rvecs.get(i, 0);
                            Mat      rvec = new Mat(3, 1, CvType.CV_64FC1);
                            rvec.put(0, 0, rv[0]);
                            rvec.put(1, 0, rv[1]);
                            rvec.put(2, 0, rv[2]);
                            Calib3d.Rodrigues(rvec, rotMat);

                            Matrix4x4 transformationM = new Matrix4x4();  // from OpenCV
                            transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec [0]));
                            transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec [1]));
                            transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)tvec [2]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
                            Debug.Log("transformationM " + transformationM.ToString());

                            Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
                            Debug.Log("invertZM " + invertZM.ToString());

                            Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
                            Debug.Log("invertYM " + invertYM.ToString());

                            // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                            Matrix4x4 ARM = invertYM * transformationM;

                            // Apply Z axis inverted matrix.
                            ARM = ARM * invertZM;

                            if (shouldMoveARCamera)
                            {
                                ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;

                                Debug.Log("ARM " + ARM.ToString());

                                ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                            }
                            else
                            {
                                ARM = ARCamera.transform.localToWorldMatrix * ARM;

                                Debug.Log("ARM " + ARM.ToString());

                                ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                            }
                        }
                    }
                }
            }

            if (showRejected && rejected.Count > 0)
            {
                Aruco.drawDetectedMarkers(rgbMat, rejected, new Mat(), new Scalar(0, 0, 255));
            }


            Texture2D texture = new Texture2D(rgbMat.cols(), rgbMat.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(rgbMat, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
        }
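
Note that Aruco.drawAxis used in this example is the older API name; Example #11 above uses its replacement, which takes the same arguments:

                            Calib3d.drawFrameAxes(rgbMat, camMatrix, distCoeffs, rvecRow, tvecRow, markerLength * 0.5f);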
Example #14
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

                // detect markers.
                Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected);

                // estimate pose.
                if (applyEstimationPose && ids.total() > 0)
                {
                    Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
                }

                if (ids.total() > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(255, 0, 0));

                    if (applyEstimationPose)
                    {
                        for (int i = 0; i < ids.total(); i++)
                        {
                            // drawAxis expects a single rvec/tvec, so slice out row i.
                            using (Mat rvecRow = new Mat(rvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                            using (Mat tvecRow = new Mat(tvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                            {
                                Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvecRow, tvecRow, markerLength * 0.5f);
                            }

                            // This example displays the ARObject only on the first detected marker.
                            if (i == 0)
                            {
                                // position
                                double[] tvec = tvecs.get(i, 0);

                                // rotation
                                double[] rv   = rvecs.get(i, 0);
                                Mat      rvec = new Mat(3, 1, CvType.CV_64FC1);
                                rvec.put(0, 0, rv [0]);
                                rvec.put(1, 0, rv [1]);
                                rvec.put(2, 0, rv [2]);
                                Calib3d.Rodrigues(rvec, rotMat);

                                transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec [0]));
                                transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec [1]));
                                transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)tvec [2]));
                                transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                                // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                                ARM = invertYM * transformationM;

                                // Apply Z axis inverted matrix.
                                ARM = ARM * invertZM;

                                if (shouldMoveARCamera)
                                {
                                    ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;

                                    ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                                }
                                else
                                {
                                    ARM = ARCamera.transform.localToWorldMatrix * ARM;

                                    ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                                }
                            }
                        }
                    }
                }

                if (showRejected && rejected.Count > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat, rejected, new Mat(), new Scalar(0, 0, 255));
                }


                // Draw the overlay on rgbMat, which is the Mat actually converted to the texture below.
                Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
Example #15
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);


                // detect markers.
                Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

                // refine marker detection.
                if (refineMarkerDetection && (markerType == MarkerType.GridBoard || markerType == MarkerType.ChArUcoBoard))
                {
                    switch (markerType)
                    {
                    case MarkerType.GridBoard:
                        Aruco.refineDetectedMarkers(rgbMat, gridBoard, corners, ids, rejectedCorners, camMatrix, distCoeffs, 10f, 3f, true, recoveredIdxs, detectorParams);
                        break;

                    case MarkerType.ChArUcoBoard:
                        Aruco.refineDetectedMarkers(rgbMat, charucoBoard, corners, ids, rejectedCorners, camMatrix, distCoeffs, 10f, 3f, true, recoveredIdxs, detectorParams);
                        break;
                    }
                }

                // if at least one marker detected
                if (ids.total() > 0)
                {
                    if (markerType != MarkerType.ChArUcoDiamondMarker)
                    {
                        if (markerType == MarkerType.ChArUcoBoard)
                        {
                            Aruco.interpolateCornersCharuco(corners, ids, rgbMat, charucoBoard, charucoCorners, charucoIds, camMatrix, distCoeffs, charucoMinMarkers);

                            // draw markers.
                            Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(0, 255, 0));
                            if (charucoIds.total() > 0)
                            {
                                Aruco.drawDetectedCornersCharuco(rgbMat, charucoCorners, charucoIds, new Scalar(0, 0, 255));
                            }
                        }
                        else
                        {
                            // draw markers.
                            Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(0, 255, 0));
                        }

                        // estimate pose.
                        if (applyEstimationPose)
                        {
                            switch (markerType)
                            {
                            default:
                            case MarkerType.CanonicalMarker:
                                EstimatePoseCanonicalMarker(rgbMat);
                                break;

                            case MarkerType.GridBoard:
                                EstimatePoseGridBoard(rgbMat);
                                break;

                            case MarkerType.ChArUcoBoard:
                                EstimatePoseChArUcoBoard(rgbMat);
                                break;
                            }
                        }
                    }
                    else
                    {
                        // detect diamond markers.
                        Aruco.detectCharucoDiamond(rgbMat, corners, ids, diamondSquareLength / diamondMarkerLength, diamondCorners, diamondIds, camMatrix, distCoeffs);

                        // draw markers.
                        Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(0, 255, 0));
                        // draw diamond markers.
                        Aruco.drawDetectedDiamonds(rgbMat, diamondCorners, diamondIds, new Scalar(0, 0, 255));

                        // estimate pose.
                        if (applyEstimationPose)
                        {
                            EstimatePoseChArUcoDiamondMarker(rgbMat);
                        }
                    }
                }

                if (showRejectedCorners && rejectedCorners.Count > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat, rejectedCorners, new Mat(), new Scalar(255, 0, 0));
                }


//                Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.fastMatToTexture2D(rgbMat, texture);
            }
        }
Example #16
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

                // detect markers and estimate pose
                Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected);
                //          Aruco.detectMarkers (imgMat, dictionary, corners, ids);
                if (estimatePose && ids.total() > 0)
                {
                    Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
                }


                // draw results
                if (ids.total() > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(255, 0, 0));

                    if (estimatePose)
                    {
                        for (int i = 0; i < ids.total(); i++)
                        {
                            // drawAxis expects a single rvec/tvec, so slice out row i.
                            using (Mat rvecRow = new Mat(rvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                            using (Mat tvecRow = new Mat(tvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)))
                            {
                                Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvecRow, tvecRow, markerLength * 0.5f);
                            }

                            // This example displays the ARObject only on the first detected marker.
                            if (i == 0)
                            {
                                // position
                                double[] tvec = tvecs.get(i, 0);
                                Vector4  pos  = new Vector4((float)tvec [0], (float)tvec [1], (float)tvec [2], 0); // from OpenCV
                                // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                                ARGameObject.transform.localPosition = new Vector3(pos.x, -pos.y, pos.z);


                                // rotation
                                double[] rv   = rvecs.get(i, 0);
                                Mat      rvec = new Mat(3, 1, CvType.CV_64FC1);
                                rvec.put(0, 0, rv[0]);
                                rvec.put(1, 0, rv[1]);
                                rvec.put(2, 0, rv[2]);

                                Calib3d.Rodrigues(rvec, rotMat);

                                Vector3 forward = new Vector3((float)rotMat.get(0, 2) [0], (float)rotMat.get(1, 2) [0], (float)rotMat.get(2, 2) [0]); // from OpenCV
                                Vector3 up      = new Vector3((float)rotMat.get(0, 1) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(2, 1) [0]); // from OpenCV
                                // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                                Quaternion rot = Quaternion.LookRotation(new Vector3(forward.x, -forward.y, forward.z), new Vector3(up.x, -up.y, up.z));
                                ARGameObject.transform.localRotation = rot;


                                // Apply Z axis inverted matrix.
                                invertZM        = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
                                transformationM = ARGameObject.transform.localToWorldMatrix * invertZM;

                                // Apply camera transform matrix.
                                transformationM = ARCamera.transform.localToWorldMatrix * transformationM;

                                ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref transformationM);
                            }
                        }
                    }
                }

                if (showRejected && rejected.Count > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat, rejected, new Mat(), new Scalar(0, 0, 255));
                }


                // Draw the overlay on rgbMat, which is the Mat actually converted to the texture below.
                Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
Example #17
        private void DrawFrame(Mat grayMat, Mat bgrMat)
        {
            Imgproc.cvtColor(grayMat, bgrMat, Imgproc.COLOR_GRAY2BGR);

            switch (markerType)
            {
            default:
            case MarkerType.ChArUcoBoard:
                // detect markers.
                Aruco.detectMarkers(grayMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

                // refine marker detection.
                if (refineMarkerDetection)
                {
                    Aruco.refineDetectedMarkers(grayMat, charucoBoard, corners, ids, rejectedCorners, camMatrix, distCoeffs, 10f, 3f, true, recoveredIdxs, detectorParams);
                }

                // if at least one marker detected
                if (ids.total() > 0)
                {
                    Aruco.interpolateCornersCharuco(corners, ids, grayMat, charucoBoard, charucoCorners, charucoIds, camMatrix, distCoeffs, charucoMinMarkers);

                    // draw markers.
                    Aruco.drawDetectedMarkers(bgrMat, corners, ids, new Scalar(0, 255, 0, 255));
                    // if at least one charuco corner detected
                    if (charucoIds.total() > 0)
                    {
                        Aruco.drawDetectedCornersCharuco(bgrMat, charucoCorners, charucoIds, new Scalar(0, 0, 255, 255));
                    }
                }
                break;

            case MarkerType.ChessBoard:
            case MarkerType.CirclesGlid:
            case MarkerType.AsymmetricCirclesGlid:
                // detect markers.
                MatOfPoint2f points = new MatOfPoint2f();
                bool         found  = false;

                switch (markerType)
                {
                default:
                case MarkerType.ChessBoard:
                    found = Calib3d.findChessboardCorners(grayMat, new Size((int)squaresX, (int)squaresY), points, Calib3d.CALIB_CB_ADAPTIVE_THRESH | Calib3d.CALIB_CB_FAST_CHECK | Calib3d.CALIB_CB_NORMALIZE_IMAGE);
                    break;

                case MarkerType.CirclesGlid:
                    found = Calib3d.findCirclesGrid(grayMat, new Size((int)squaresX, (int)squaresY), points, Calib3d.CALIB_CB_SYMMETRIC_GRID);
                    break;

                case MarkerType.AsymmetricCirclesGlid:
                    found = Calib3d.findCirclesGrid(grayMat, new Size((int)squaresX, (int)squaresY), points, Calib3d.CALIB_CB_ASYMMETRIC_GRID);
                    break;
                }

                if (found)
                {
                    if (markerType == MarkerType.ChessBoard)
                    {
                        Imgproc.cornerSubPix(grayMat, points, new Size(5, 5), new Size(-1, -1), new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 30, 0.1));
                    }

                    // draw markers.
                    Calib3d.drawChessboardCorners(bgrMat, new Size((int)squaresX, (int)squaresY), points, found);
                }
                break;
            }

            double[] camMatrixArr = new double[(int)camMatrix.total()];
            camMatrix.get(0, 0, camMatrixArr);
            double[] distCoeffsArr = new double[(int)distCoeffs.total()];
            distCoeffs.get(0, 0, distCoeffsArr);

            int    ff         = Imgproc.FONT_HERSHEY_SIMPLEX;
            double fs         = 0.4;
            Scalar c          = new Scalar(255, 255, 255, 255);
            int    t          = 0;
            int    lt         = Imgproc.LINE_AA;
            bool   blo        = false;
            int    frameCount = (markerType == MarkerType.ChArUcoBoard) ? allCorners.Count : imagePoints.Count;

            Imgproc.putText(bgrMat, frameCount + " FRAME CAPTURED", new Point(bgrMat.cols() - 300, 20), ff, fs, c, t, lt, blo);
            Imgproc.putText(bgrMat, "IMAGE_WIDTH: " + bgrMat.width(), new Point(bgrMat.cols() - 300, 40), ff, fs, c, t, lt, blo);
            Imgproc.putText(bgrMat, "IMAGE_HEIGHT: " + bgrMat.height(), new Point(bgrMat.cols() - 300, 60), ff, fs, c, t, lt, blo);
            Imgproc.putText(bgrMat, "CALIBRATION_FLAGS: " + calibrationFlags, new Point(bgrMat.cols() - 300, 80), ff, fs, c, t, lt, blo);

            Imgproc.putText(bgrMat, "CAMERA_MATRIX: ", new Point(bgrMat.cols() - 300, 100), ff, fs, c, t, lt, blo);
            for (int i = 0; i < camMatrixArr.Length; i = i + 3)
            {
                Imgproc.putText(bgrMat, "   " + camMatrixArr[i] + ", " + camMatrixArr[i + 1] + ", " + camMatrixArr[i + 2] + ",", new Point(bgrMat.cols() - 300, 120 + 20 * i / 3), ff, fs, c, t, lt, blo);
            }
            Imgproc.putText(bgrMat, "DISTORTION_COEFFICIENTS: ", new Point(bgrMat.cols() - 300, 180), ff, fs, c, t, lt, blo);
            for (int i = 0; i < distCoeffsArr.Length; ++i)
            {
                Imgproc.putText(bgrMat, "   " + distCoeffsArr[i] + ",", new Point(bgrMat.cols() - 300, 200 + 20 * i), ff, fs, c, t, lt, blo);
            }
            Imgproc.putText(bgrMat, "AVG_REPROJECTION_ERROR: " + repErr, new Point(bgrMat.cols() - 300, 300), ff, fs, c, t, lt, blo);

            if (frameCount == 0)
            {
                Imgproc.putText(bgrMat, "To calibration start, please press the calibration button or do air tap gesture!", new Point(5, bgrMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
            }
        }
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

                // detect markers and estimate pose
                Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected);
                // Alternatively, the simpler overload: Aruco.detectMarkers(rgbMat, dictionary, corners, ids);
                if (estimatePose && ids.total() > 0)
                {
                    Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
                }


                // draw results
                if (ids.total() > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(255, 0, 0));

                    if (estimatePose)
                    {
                        for (int i = 0; i < ids.total(); i++)
                        {
                                // Draw the axis for marker i only (drawAxis expects a single rvec/tvec,
                                // so take the i-th submat instead of passing the whole rvecs/tvecs Mat).
                                using (Mat rvec = new Mat(rvecs, new OpenCVForUnity.Rect(0, i, 1, 1)))
                                using (Mat tvec = new Mat(tvecs, new OpenCVForUnity.Rect(0, i, 1, 1)))
                                {
                                    Aruco.drawAxis(rgbMat, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                                }

                            //This sample can display ARObject on only first detected marker.
                            if (i == 0)
                            {
                                // Rodrigues expects a single 3x1 vector, so copy out the first marker's rvec.
                                Mat rvec0 = new Mat(3, 1, CvType.CV_64FC1);
                                rvec0.put(0, 0, rvecs.get(0, 0));
                                Calib3d.Rodrigues(rvec0, rotMat);


                                transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvecs.get(0, 0) [0]));
                                transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvecs.get(0, 0) [1]));
                                transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)tvecs.get(0, 0) [2]));
                                transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                                if (shouldMoveARCamera)
                                {
                                    ARM = ARGameObject.transform.localToWorldMatrix * invertZM * transformationM.inverse * invertYM;
                                    // Debug.Log("ARM " + ARM.ToString());

                                    ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                                }
                                else
                                {
                                    ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
                                    // Debug.Log("ARM " + ARM.ToString());

                                    ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                                }
                            }
                        }
                    }
                }

                if (showRejected && rejected.Count > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat, rejected, new Mat(), new Scalar(0, 0, 255));
                }


                Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
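        // The Update() above assumes the axis-flip matrices invertYM and invertZM were
        // initialized once at startup. A minimal sketch of that setup, reusing these
        // samples' field names (the Start() placement is an assumption, not part of
        // the original listing):
        void Start()
        {
            // Flip the Y axis to go from OpenCV's right-handed frame to Unity's left-handed frame.
            invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));

            // Flip the Z axis when composing the marker pose with the camera transform.
            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
        }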
Example #19
        public List <ZOArucoTrackerDetection> DetectMarkers(Mat rgbMat)
        {
            List <ZOArucoTrackerDetection> results = new List <ZOArucoTrackerDetection>();

            // Debug.Log("imgMat dst ToString " + rgbMat.ToString());

            float width          = rgbMat.width();
            float height         = rgbMat.height();
            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                // Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }
            else
            {
                // Camera.main.orthographicSize = height / 2;
            }

            // set camera parameters.
            int    max_d     = (int)Mathf.Max(width, height);
            double fx        = max_d;
            double fy        = max_d;
            double cx        = width / 2.0f;
            double cy        = height / 2.0f;
            Mat    camMatrix = new Mat(3, 3, CvType.CV_64FC1);

            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            // Debug.Log("camMatrix " + camMatrix.dump());


            MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);
            // Debug.Log("distCoeffs " + distCoeffs.dump());


            // calibration camera matrix values.
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            // Debug.Log("imageSize " + imageSize.ToString());
            // Debug.Log("apertureWidth " + apertureWidth);
            // Debug.Log("apertureHeight " + apertureHeight);
            // Debug.Log("fovx " + fovx[0]);
            // Debug.Log("fovy " + fovy[0]);
            // Debug.Log("focalLength " + focalLength[0]);
            // Debug.Log("principalPoint " + principalPoint.ToString());
            // Debug.Log("aspectratio " + aspectratio[0]);


            // To convert the difference of the FOV value of the OpenCV and Unity.
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            // Debug.Log("fovXScale " + fovXScale);
            // Debug.Log("fovYScale " + fovYScale);
            Mat        ids             = new Mat();
            List <Mat> corners         = new List <Mat>();
            List <Mat> rejectedCorners = new List <Mat>();
            Mat        rvecs           = new Mat();
            Mat        tvecs           = new Mat();
            Mat        rotMat          = new Mat(3, 3, CvType.CV_64FC1);

            DetectorParameters detectorParams = DetectorParameters.create();
            Dictionary         dictionary     = Aruco.getPredefinedDictionary((int)dictionaryId);


            // detect markers.
            Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

            // Debug.Log("INFO: Number of markers detected: " + ids.total());
            // if at least one marker detected
            if (ids.total() > 0)
            {
                if (_debug)
                {
                    Aruco.drawDetectedMarkers(rgbMat, corners, ids, new Scalar(0, 255, 0));
                }


                // estimate pose.
                Aruco.estimatePoseSingleMarkers(corners, _markerLengthMeters, camMatrix, distCoeffs, rvecs, tvecs);

                for (int i = 0; i < ids.total(); i++)
                {
                    // Get translation vector
                    double[] tvecArr = tvecs.get(i, 0);

                    // Get rotation vector
                    double[] rvecArr = rvecs.get(i, 0);
                    Mat      rvec    = new Mat(3, 1, CvType.CV_64FC1);
                    rvec.put(0, 0, rvecArr);

                    // Convert rotation vector to rotation matrix.
                    Calib3d.Rodrigues(rvec, rotMat);
                    double[] rotMatArr = new double[rotMat.total()];
                    rotMat.get(0, 0, rotMatArr);

                    // Convert OpenCV camera extrinsic parameters to Unity Matrix4x4.
                    Matrix4x4 transformationM = new Matrix4x4(); // from OpenCV
                    transformationM.SetRow(0, new Vector4((float)rotMatArr[0], (float)rotMatArr[1], (float)rotMatArr[2], (float)tvecArr[0]));
                    transformationM.SetRow(1, new Vector4((float)rotMatArr[3], (float)rotMatArr[4], (float)rotMatArr[5], (float)tvecArr[1]));
                    transformationM.SetRow(2, new Vector4((float)rotMatArr[6], (float)rotMatArr[7], (float)rotMatArr[8], (float)tvecArr[2]));
                    transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
                    // Debug.Log("transformationM " + transformationM.ToString());

                    ZOArucoTrackerDetection detection = new ZOArucoTrackerDetection();
                    int [] currentId = new int[1];
                    // ids.get(0, i, currentId);
                    ids.get(i, 0, currentId);
                    detection.arucoId   = currentId[0];
                    detection.transform = transformationM;
                    results.Add(detection);
                }
            }

            return(results);
        }
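        // A hypothetical consumer of DetectMarkers() above; the method name and logging
        // are illustrative and not part of the original sample:
        void LogDetections(Mat rgbMat)
        {
            List<ZOArucoTrackerDetection> detections = DetectMarkers(rgbMat);
            foreach (ZOArucoTrackerDetection detection in detections)
            {
                // detection.transform holds the marker pose (OpenCV camera space) as a Unity Matrix4x4.
                Debug.Log("ArUco id " + detection.arucoId + " pose:\n" + detection.transform);
            }
        }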

        private void ARUcoDetect()
        {
            if (DOWNSCALE_RATIO == 1)
            {
                downScaleRgbaMat = rgbaMat4Thread;
            }
            else
            {
                Imgproc.resize(rgbaMat4Thread, downScaleRgbaMat, new Size(), 1.0 / DOWNSCALE_RATIO, 1.0 / DOWNSCALE_RATIO, Imgproc.INTER_LINEAR);
            }
            Imgproc.cvtColor(downScaleRgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);

            // Detect markers and estimate Pose
            Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected);

            if (estimatePose && ids.total() > 0)
            {
                Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                for (int i = 0; i < ids.total(); i++)
                {
                    //This example can display ARObject on only first detected marker.
                    if (i == 0)
                    {
                        // Position
                        double[] tvec = tvecs.get(i, 0);

                        // Rotation
                        double[] rv   = rvecs.get(i, 0);
                        Mat      rvec = new Mat(3, 1, CvType.CV_64FC1);
                        rvec.put(0, 0, rv [0]);
                        rvec.put(1, 0, rv [1]);
                        rvec.put(2, 0, rv [2]);
                        Calib3d.Rodrigues(rvec, rotMat);

                        transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec [0]));
                        transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec [1]));
                        transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)(tvec [2] / DOWNSCALE_RATIO)));

                        transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                        // Right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        ARM = invertYM * transformationM;

                        // Apply Z axis inverted matrix.
                        ARM = ARM * invertZM;

                        ARTransMatrixUpdated = true;

                        break;
                    }
                }
            }

            /*
             * // Draw results
             * if (ids.total () > 0) {
             *  Aruco.drawDetectedMarkers (rgbMat, corners, ids, new Scalar (255, 0, 0));
             *
             *  if (estimatePose) {
             *      for (int i = 0; i < ids.total (); i++) {
             *          Aruco.drawAxis (rgbMat, camMatrix, distCoeffs, rvecs, tvecs, markerLength * 0.5f);
             *      }
             *  }
             * }
             */
        }
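        // ARUcoDetect() runs on a worker thread and only raises the ARTransMatrixUpdated
        // flag. A minimal sketch of how the main thread might consume the matrix; the
        // Update() placement and field names are assumptions modeled on the HoloLens
        // sample below:
        void Update()
        {
            if (ARTransMatrixUpdated)
            {
                ARTransMatrixUpdated = false;

                // Apply the camera transform, then move the AR object.
                ARM = ARCamera.transform.localToWorldMatrix * ARM;
                ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
            }
        }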
Example #21
        public void OnFrameMatAcquired(Mat bgraMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix)
        {
            downScaleFrameMat = imageOptimizationHelper.GetDownScaleMat(bgraMat);

            if (enableDetection)
            {
                Imgproc.cvtColor(downScaleFrameMat, grayMat, Imgproc.COLOR_BGRA2GRAY);

                // Detect markers and estimate Pose
                Aruco.detectMarkers(grayMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

                if (applyEstimationPose && ids.total() > 0)
                {
                    Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                    for (int i = 0; i < ids.total(); i++)
                    {
                        //This example can display ARObject on only first detected marker.
                        if (i == 0)
                        {
                            // Convert to unity pose data.
                            double[] rvecArr = new double[3];
                            rvecs.get(0, 0, rvecArr);
                            double[] tvecArr = new double[3];
                            tvecs.get(0, 0, tvecArr);
                            tvecArr[2] /= imageOptimizationHelper.downscaleRatio;
                            PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                            // Changes in pos/rot below these thresholds are ignored.
                            if (enableLowPassFilter)
                            {
                                ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                            }
                            oldPoseData = poseData;

                            // Create transform matrix.
                            transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

                            lock (sync){
                                // Right-handed coordinates system (OpenCV) to left-handed one (Unity)
                                ARM = invertYM * transformationM;

                                // Apply Z-axis inverted matrix.
                                ARM = ARM * invertZM;
                            }

                            hasUpdatedARTransformMatrix = true;

                            break;
                        }
                    }
                }
            }

            Mat rgbMat4preview = null;

            if (displayCameraPreview)
            {
                rgbMat4preview = new Mat();
                Imgproc.cvtColor(downScaleFrameMat, rgbMat4preview, Imgproc.COLOR_BGRA2RGB);

                if (ids.total() > 0)
                {
                    Aruco.drawDetectedMarkers(rgbMat4preview, corners, ids, new Scalar(0, 255, 0));

                    if (applyEstimationPose)
                    {
                        for (int i = 0; i < ids.total(); i++)
                        {
                            using (Mat rvec = new Mat(rvecs, new OpenCVForUnity.Rect(0, i, 1, 1)))
                                using (Mat tvec = new Mat(tvecs, new OpenCVForUnity.Rect(0, i, 1, 1))) {
                                    // In this example we are processing with RGB color image, so Axis-color correspondences are X: blue, Y: green, Z: red. (Usually X: red, Y: green, Z: blue)
                                    Aruco.drawAxis(rgbMat4preview, camMatrix, distCoeffs, rvec, tvec, markerLength * 0.5f);
                                }
                        }
                    }
                }
            }


            UnityEngine.WSA.Application.InvokeOnAppThread(() => {
                if (!webCamTextureToMatHelper.IsPlaying())
                {
                    return;
                }

                if (displayCameraPreview && rgbMat4preview != null)
                {
                    OpenCVForUnity.Utils.fastMatToTexture2D(rgbMat4preview, texture);
                }

                if (applyEstimationPose)
                {
                    if (hasUpdatedARTransformMatrix)
                    {
                        hasUpdatedARTransformMatrix = false;

                        lock (sync){
                            // Apply camera transform matrix.
                            ARM = cameraToWorldMatrix * invertZM * ARM;

                            ARUtils.SetTransformFromMatrix(arGameObject.transform, ref ARM);
                        }
                    }
                }

                bgraMat.Dispose();
                if (rgbMat4preview != null)
                {
                    rgbMat4preview.Dispose();
                }
            }, false);
        }
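        // OnFrameMatAcquired() above relies on ARUtils.LowpassPoseData to suppress pose
        // jitter. An illustrative approximation of that idea (not the plugin's actual
        // implementation):
        static void LowpassPose(ref Vector3 oldPos, ref Quaternion oldRot,
                                ref Vector3 newPos, ref Quaternion newRot,
                                float posThreshold, float rotThresholdDeg)
        {
            // Ignore translation changes smaller than the threshold.
            if (Vector3.Distance(oldPos, newPos) < posThreshold)
                newPos = oldPos;

            // Ignore rotation changes smaller than the threshold (in degrees).
            if (Quaternion.Angle(oldRot, newRot) < rotThresholdDeg)
                newRot = oldRot;
        }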
Example #22
        void OnRenderObject()
        {
            if (pointCloudVisualizer == null)
            {
                return;
            }

            if (vertices == null || uintColorData == null)
            {
                return;
            }


            pointCloudBuffer.SetData(vertices);

            colorBuffer.SetData(uintColorData);

            pointCloudVisualizer.SetBuffer("_PointCloudData", pointCloudBuffer);
            pointCloudVisualizer.SetBuffer("_ColorData", colorBuffer);



            //Aruco
            if (applyAruco)
            {
                //transformation Matrix is static
                if (isStatic)
                {
                    //using earlier frames, decide transformationM
                    if (!matrixConfigured)
                    {
                        while (frameCount > 0)
                        {
                            imgTexture.LoadRawTextureData(byteColorData);

                            Utils.texture2DToMat(imgTexture, rgbMat);

                            //Upside down
                            Core.flip(rgbMat, rgbMat, 0);
                            Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected, camMatrix, distCoeffs);


                            if (ids.total() == 1)
                            {
                                Debug.Log("detected");

                                frameCount--;
                            }
                        }

                        Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                        // position
                        tvec = tvecs.get(0, 0);

                        // rotation
                        rvec = new Mat(rvecs, range, range);
                        Calib3d.Rodrigues(rvec, rotMat);

                        //from marker coordinate(opencv) to camera coordinate(opencv)
                        transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)tvec[0]));
                        transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)tvec[1]));
                        transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)tvec[2]));
                        transformationM.SetRow(3, new Vector4(0, 0, 0, 1));


                        // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        // from camera coordinate(opencv) to camera coordinate(unity)
                        transformationM = invertYM * transformationM;

                        // Apply Z axis inverted matrix.
                        // from marker coordinate(Unity) to camera coordinate(Unity)
                        transformationM = transformationM * invertZM;

                        //from camera coordinate(unity) to marker coordinate(unity)
                        //rotate 90 degrees around the x-axis(alignM)
                        //from marker coordinate(unity) to world coordinate(unity)
                        transformationM = markerOrigin.transform.localToWorldMatrix * alignM * transformationM.inverse;

                        //transformationM: the matrix which transforms camera coordinate(unity) into world coordinate(unity)
                        //move RealSense model
                        ARUtils.SetTransformFromMatrix(viewCamera.transform, ref transformationM);

                        matrixConfigured = true;

                        Debug.Log("Transformation Matrix is configured");

                        if (drawMesh)
                        {
                            meshDrawer.draw(corners, vertices, ref transformationM, width, markerLength);
                        }
                    }
                }
                else
                {
                    imgTexture.LoadRawTextureData(byteColorData);

                    Utils.texture2DToMat(imgTexture, rgbMat);

                    Core.flip(rgbMat, rgbMat, 0);
                    Aruco.detectMarkers(rgbMat, dictionary, corners, ids, detectorParams, rejected, camMatrix, distCoeffs);


                    if (ids.total() > 0)
                    {
                        Debug.Log("detected: " + ids.total());

                        Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);

                        // position
                        tvec = tvecs.get(0, 0);

                        // rotation
                        rvec = new Mat(rvecs, range, range);
                        Calib3d.Rodrigues(rvec, rotMat);

                        transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)tvec[0]));
                        transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)tvec[1]));
                        transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)tvec[2]));
                        transformationM.SetRow(3, new Vector4(0, 0, 0, 1));


                        transformationM = invertYM * transformationM;

                        transformationM = transformationM * invertZM;

                        transformationM = markerOrigin.transform.localToWorldMatrix * alignM * transformationM.inverse;

                        ARUtils.SetTransformFromMatrix(viewCamera.transform, ref transformationM);
                    }
                }
            }
            else
            {
                transformationM = Matrix4x4.TRS(viewCamera.transform.position, viewCamera.transform.rotation, viewCamera.transform.lossyScale);
            }

            pointCloudVisualizer.SetMatrix("_transformationM", transformationM);

            trs = Matrix4x4.TRS(transform.position, transform.rotation, transform.lossyScale);
            pointCloudVisualizer.SetMatrix("_trs", trs);

            pointCloudVisualizer.SetPass(0);
            Graphics.DrawProcedural(MeshTopology.Points, dataLength);
        }

        private double CaptureFrame(Mat frameMat)
        {
            double repErr = -1;

            switch (markerType)
            {
            default:
            case MarkerType.ChArUcoBoard:
                List <Mat> corners = new List <Mat>();
                Mat        ids     = new Mat();
                Aruco.detectMarkers(frameMat, dictionary, corners, ids, detectorParams, rejectedCorners, camMatrix, distCoeffs);

                if (refineMarkerDetection)
                {
                    Aruco.refineDetectedMarkers(frameMat, charucoBoard, corners, ids, rejectedCorners, camMatrix, distCoeffs, 10f, 3f, true, recoveredIdxs, detectorParams);
                }

                if (ids.total() > 0)
                {
                    Debug.Log("Frame captured.");

                    allCorners.Add(corners);
                    allIds.Add(ids);
                    allImgs.Add(frameMat);
                }
                else
                {
                    Debug.Log("Invalid frame.");

                    frameMat.Dispose();
                    if (ids != null)
                    {
                        ids.Dispose();
                    }
                    foreach (var item in corners)
                    {
                        item.Dispose();
                    }
                    corners.Clear();

                    return(-1);
                }

                // calibrate camera using aruco markers
                //double arucoRepErr = CalibrateCameraAruco (allCorners, allIds, charucoBoard, frameMat.size(), camMatrix, distCoeffs, rvecs, tvecs, calibrationFlags);
                //Debug.Log ("arucoRepErr: " + arucoRepErr);

                // calibrate camera using charuco
                repErr = CalibrateCameraCharuco(allCorners, allIds, charucoBoard, frameMat.size(), camMatrix, distCoeffs, rvecs, tvecs, calibrationFlags, calibrationFlags);

                break;

            case MarkerType.ChessBoard:
            case MarkerType.CirclesGlid:
            case MarkerType.AsymmetricCirclesGlid:

                MatOfPoint2f points      = new MatOfPoint2f();
                Size         patternSize = new Size((int)squaresX, (int)squaresY);

                bool found = false;
                switch (markerType)
                {
                default:
                case MarkerType.ChessBoard:
                    found = Calib3d.findChessboardCorners(frameMat, patternSize, points, Calib3d.CALIB_CB_ADAPTIVE_THRESH | Calib3d.CALIB_CB_FAST_CHECK | Calib3d.CALIB_CB_NORMALIZE_IMAGE);
                    break;

                case MarkerType.CirclesGlid:
                    found = Calib3d.findCirclesGrid(frameMat, patternSize, points, Calib3d.CALIB_CB_SYMMETRIC_GRID);
                    break;

                case MarkerType.AsymmetricCirclesGlid:
                    found = Calib3d.findCirclesGrid(frameMat, patternSize, points, Calib3d.CALIB_CB_ASYMMETRIC_GRID);
                    break;
                }

                if (found)
                {
                    Debug.Log("Frame captured.");
                    if (markerType == MarkerType.ChessBoard)
                    {
                        Imgproc.cornerSubPix(frameMat, points, new Size(5, 5), new Size(-1, -1), new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 30, 0.1));
                    }

                    imagePoints.Add(points);
                    allImgs.Add(frameMat);
                }
                else
                {
                    Debug.Log("Invalid frame.");
                    frameMat.Dispose();
                    if (points != null)
                    {
                        points.Dispose();
                    }
                    return(-1);
                }

                if (imagePoints.Count < 1)
                {
                    Debug.Log("Not enough points for calibration.");
                    repErr = -1;
                }
                else
                {
                    MatOfPoint3f objectPoint = new MatOfPoint3f(new Mat(imagePoints[0].rows(), 1, CvType.CV_32FC3));
                    CalcChessboardCorners(patternSize, squareSize, objectPoint, markerType);

                    List <Mat> objectPoints = new List <Mat>();
                    for (int i = 0; i < imagePoints.Count; ++i)
                    {
                        objectPoints.Add(objectPoint);
                    }

                    repErr = Calib3d.calibrateCamera(objectPoints, imagePoints, frameMat.size(), camMatrix, distCoeffs, rvecs, tvecs, calibrationFlags);
                    objectPoint.Dispose();
                }

                break;
            }

            Debug.Log("repErr: " + repErr);
            Debug.Log("camMatrix: " + camMatrix.dump());
            Debug.Log("distCoeffs: " + distCoeffs.dump());

            return(repErr);
        }
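        // CalcChessboardCorners() (called above) fills the 3D object points for the
        // selected pattern. A minimal sketch for the plain chessboard case, assuming
        // row-major corner ordering on the Z = 0 plane:
        static void CalcChessboardCornersSketch(Size patternSize, float squareSize, MatOfPoint3f corners)
        {
            List<Point3> points = new List<Point3>();
            for (int i = 0; i < patternSize.height; i++)
            {
                for (int j = 0; j < patternSize.width; j++)
                {
                    points.Add(new Point3(j * squareSize, i * squareSize, 0));
                }
            }
            corners.fromList(points);
        }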
Example #24
    /// <summary>
    /// Looks for markers in the most recent ZED image, and updates all registered MarkerObjects accordingly.
    /// </summary>
    private void ImageUpdated(Camera cam, Mat camMat, Mat imageMat)
    {
        Dictionary predict = Aruco.getPredefinedDictionary((int)markerDictionary); //Load the selected pre-defined dictionary.

        //Create empty structures to hold the output of Aruco.detectMarkers.
        List <Mat>         corners        = new List <Mat>();
        Mat                ids            = new Mat();
        DetectorParameters detectparams   = DetectorParameters.create();
        List <Mat>         rejectedpoints = new List <Mat>(); //There is no overload for detectMarkers that will take camMat without this also.

        //Call OpenCV to tell us which markers were detected, and give their 2D positions.
        Aruco.detectMarkers(imageMat, predict, corners, ids, detectparams, rejectedpoints, camMat);

        //Make matrices to hold the output rotations and translations of each detected marker.
        Mat rotvectors   = new Mat();
        Mat transvectors = new Mat();

        //Convert the 2D corner positions into a 3D pose for each detected marker.
        Aruco.estimatePoseSingleMarkers(corners, markerWidthMeters, camMat, new Mat(), rotvectors, transvectors);

        //Now we have ids, rotvectors and transvectors, which are all vertical arrays holding info about each detection:
        // - ids: An Nx1 array (N = number of markers detected) where each slot is the ID of a detected marker in the dictionary.
        // - rotvectors: An Nx3 array where each row is for an individual detection: The first row is the rotation of the marker
        //    listed in the first row of ids, etc. The columns are the X, Y and Z angles of that marker's rotation, BUT they are not
        //    directly usable in Unity because they're calculated very differently. We'll deal with that soon.
        // - transvectors: An Nx1 array like rotvectors, with each row corresponding to a detected marker and holding a double[3] array with the X, Y and Z
        //    positions. These three values are usable in Unity - they're just relative to the camera, not the world, which is easy to fix.

        //Convert the matrix of IDs into a List, to simplify things for those not familiar with using Matrices.
        List <int> detectedIDs = new List <int>();

        for (int i = 0; i < ids.height(); i++)
        {
            int id = (int)ids.get(i, 0)[0];
            if (!detectedIDs.Contains(id))
            {
                detectedIDs.Add(id);
            }
        }

        //We'll go through every ID that's been registered into registered Markers, and see if we found any markers in the scene with that ID.
        Dictionary <int, List <sl.Pose> > detectedWorldPoses = new Dictionary <int, List <sl.Pose> >(); //Key is marker ID, value is world space poses.

        //foreach (int id in registeredMarkers.Keys)
        for (int index = 0; index < transvectors.rows(); index++)
        {
            int id = (int)ids.get(index, 0)[0];
            if (!registeredMarkers.ContainsKey(id) || registeredMarkers[id].Count == 0)
            {
                continue;                 //Don't waste time if the list is empty. Can happen if markers are added, then removed.
            }
            if (detectedIDs.Contains(id)) //At least one MarkerObject needs to be updated. Convert pose to world space and call MarkerDetected() on it.
            {
                //Translation is just pose relative to camera. But we need to flip Y because of OpenCV's different coordinate system.
                Vector3 localpos;
                localpos.x = (float)transvectors.get(index, 0)[0];
                localpos.y = -(float)transvectors.get(index, 0)[1];
                localpos.z = (float)transvectors.get(index, 0)[2];

                Vector3 worldpos = cam.transform.TransformPoint(localpos); //Convert from local to world space.

                //Because of different coordinate frame, we need to flip the Y direction, which is pointing down instead of up.
                //We need to do this before we calculate the 3x3 rotation matrix soon, as that makes it muuuch harder to work with.
                double[] flip = rotvectors.get(index, 0);
                flip[1] = -flip[1];
                rotvectors.put(index, 0, flip);

                //Convert this rotation vector to a 3x3 matrix, which will hold values we can use in Unity.
                Mat rotmatrix = new Mat();
                Calib3d.Rodrigues(rotvectors.row(index), rotmatrix);

                //This new 3x3 matrix holds a vector pointing right in the first column, a vector pointing up in the second,
                //and a vector pointing forward in the third column. Rows 0, 1 and 2 are the X, Y and Z values of each vector.
                //We'll grab the forward and up vectors, which we can put into Quaternion.LookRotation() to get a representative Quaternion.
                Vector3 forward;
                forward.x = (float)rotmatrix.get(2, 0)[0];
                forward.y = (float)rotmatrix.get(2, 1)[0];
                forward.z = (float)rotmatrix.get(2, 2)[0];

                Vector3 up;
                up.x = (float)rotmatrix.get(1, 0)[0];
                up.y = (float)rotmatrix.get(1, 1)[0];
                up.z = (float)rotmatrix.get(1, 2)[0];

                Quaternion rot = Quaternion.LookRotation(forward, up);

                //Compensate for flip on Z axis.
                rot *= Quaternion.Euler(0, 0, 180);

                //Convert from local space to world space by multiplying the camera's world rotation with it.
                Quaternion worldrot = cam.transform.rotation * rot;

                if (!detectedWorldPoses.ContainsKey(id))
                {
                    detectedWorldPoses.Add(id, new List <sl.Pose>());
                }
                detectedWorldPoses[id].Add(new sl.Pose()
                {
                    translation = worldpos, rotation = worldrot
                });

                foreach (MarkerObject marker in registeredMarkers[id])
                {
                    marker.MarkerDetectedSingle(worldpos, worldrot);
                }
            }
        }

        //Call the event that gives all marker world poses, if any listeners.
        if (OnMarkersDetected != null)
        {
            OnMarkersDetected.Invoke(detectedWorldPoses);
        }

        //foreach (int detectedid in detectedWorldPoses.Keys)
        foreach (int key in registeredMarkers.Keys)
        {
            if (detectedWorldPoses.ContainsKey(key))
            {
                foreach (MarkerObject marker in registeredMarkers[key])
                {
                    marker.MarkerDetectedAll(detectedWorldPoses[key]);
                }
            }
            else
            {
                foreach (MarkerObject marker in registeredMarkers[key])
                {
                    marker.MarkerNotDetected();
                }
            }
        }
    }
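    // A hypothetical listener for the OnMarkersDetected event fired at the end of
    // ImageUpdated(); the handler name and subscription point are illustrative:
    void OnEnable()
    {
        OnMarkersDetected += HandleMarkersDetected;
    }

    void HandleMarkersDetected(Dictionary<int, List<sl.Pose>> worldPoses)
    {
        foreach (KeyValuePair<int, List<sl.Pose>> pair in worldPoses)
        {
            Debug.Log("Marker " + pair.Key + " detected at " + pair.Value.Count + " pose(s).");
        }
    }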