Example #1
        // Use this for initialization
        void Start()
        {
            Texture2D imgTexture = Resources.Load("lena") as Texture2D;

            Mat img1Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
            Utils.texture2DToMat(imgTexture, img1Mat);
            Debug.Log("img1Mat dst ToString " + img1Mat.ToString());

            Mat img2Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
            Utils.texture2DToMat(imgTexture, img2Mat);
            Debug.Log("img2Mat dst ToString " + img2Mat.ToString());

            // Rotate the second image by a random angle around its center.
            float angle = UnityEngine.Random.Range(0, 360), scale = 1.0f;
            Point center = new Point(img2Mat.cols() * 0.5f, img2Mat.rows() * 0.5f);
            Mat affine_matrix = Imgproc.getRotationMatrix2D(center, angle, scale);
            Imgproc.warpAffine(img1Mat, img2Mat, affine_matrix, img2Mat.size());

            // Detect ORB keypoints and compute descriptors for both images.
            FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
            DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

            MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
            Mat descriptors1 = new Mat();

            detector.detect(img1Mat, keypoints1);
            extractor.compute(img1Mat, keypoints1, descriptors1);

            MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
            Mat descriptors2 = new Mat();

            detector.detect(img2Mat, keypoints2);
            extractor.compute(img2Mat, keypoints2, descriptors2);

            // Match the binary descriptors with a brute-force Hamming matcher.
            DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
            MatOfDMatch matches = new MatOfDMatch();

            matcher.match(descriptors1, descriptors2, matches);

            // Draw the matches side by side and show the result on this GameObject.
            Mat resultImg = new Mat();

            Features2d.drawMatches(img1Mat, keypoints1, img2Mat, keypoints2, matches, resultImg);

            Texture2D texture = new Texture2D(resultImg.cols(), resultImg.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(resultImg, texture);

            gameObject.GetComponent<Renderer>().material.mainTexture = texture;
        }
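Example #1 draws every raw ORB match, which is usually noisy. As a minimal sketch (not part of the original sample), the DMatch list can be sorted by descriptor distance and only the best matches drawn; it reuses the variables from Example #1, assumes System.Collections.Generic is imported alongside the OpenCVForUnity namespaces, and the cutoff of 50 matches is an arbitrary choice.

            // Sketch: sort the raw matches by descriptor distance and keep only the best ones.
            List<DMatch> matchesList = matches.toList();
            matchesList.Sort((a, b) => a.distance.CompareTo(b.distance));

            int keep = Mathf.Min(50, matchesList.Count);
            MatOfDMatch goodMatches = new MatOfDMatch();
            goodMatches.fromList(matchesList.GetRange(0, keep));

            // Draw only the retained matches into the same result image.
            Features2d.drawMatches(img1Mat, keypoints1, img2Mat, keypoints2, goodMatches, resultImg);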
Example #2
        // Use this for initialization
        void Start()
        {
            // If true, error logs from the native side of OpenCV will be displayed in the Unity Editor console.
            Utils.setDebugMode(true);

            Texture2D imgTexture = Resources.Load("lena") as Texture2D;

            Mat img = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);

            Utils.texture2DToMat(imgTexture, img);
            Debug.Log("imgMat.ToString() " + img.ToString());


            OpenCVForUnity.ShapeModule.ThinPlateSplineShapeTransformer tps = Shape.createThinPlateSplineShapeTransformer(0);
            MatOfPoint2f sourcePoints = new MatOfPoint2f(
                new Point(0, 0),
                new Point(512, 0),
                new Point(0, 512),

                new Point(250, 200),
                new Point(400, 400),
                new Point(200, 400),

                new Point(512, 512)
                );
            MatOfPoint2f targetPoints = new MatOfPoint2f(
                new Point(0, 0),
                new Point(512, 0),
                new Point(0, 599),

                new Point(250, 120),
                new Point(450, 450),
                new Point(100, 450),

                new Point(512, 512)
                );
            MatOfDMatch matches = new MatOfDMatch(
                new DMatch(0, 0, 0),
                new DMatch(1, 1, 0),
                new DMatch(2, 2, 0),
                new DMatch(3, 3, 0),
                new DMatch(4, 4, 0),
                new DMatch(5, 5, 0),
                new DMatch(6, 6, 0)
                );


            //http://stackoverflow.com/questions/32207085/shape-transformers-and-interfaces-opencv3-0
            Core.transpose(sourcePoints, sourcePoints);
            Core.transpose(targetPoints, targetPoints);

            Debug.Log("sourcePoints " + sourcePoints.ToString());
            Debug.Log("targetPoints " + targetPoints.ToString());

            tps.estimateTransformation(targetPoints, sourcePoints, matches);

            MatOfPoint2f transPoints = new MatOfPoint2f();

            tps.applyTransformation(sourcePoints, transPoints);

            Debug.Log("sourcePoints " + sourcePoints.dump());
            Debug.Log("targetPoints " + targetPoints.dump());
            Debug.Log("transPoints " + transPoints.dump());


            Mat res = new Mat();

            tps.warpImage(img, res);

            //plot points
            Point[] sourcePointsArray = sourcePoints.toArray();
            Point[] targetPointsArray = targetPoints.toArray();
            for (int i = 0; i < sourcePointsArray.Length; i++)
            {
                Imgproc.arrowedLine(res, sourcePointsArray [i], targetPointsArray [i], new Scalar(255, 255, 0, 255), 3, Imgproc.LINE_AA, 0, 0.2);

                Imgproc.circle(res, sourcePointsArray [i], 10, new Scalar(255, 0, 0, 255), -1);
                Imgproc.circle(res, targetPointsArray [i], 10, new Scalar(0, 0, 255, 255), -1);
            }


            Texture2D texture = new Texture2D(res.cols(), res.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(res, texture);

            gameObject.GetComponent<Renderer>().material.mainTexture = texture;


            Utils.setDebugMode(false);
        }
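Example #2 lists the identity correspondences (0-0, 1-1, ...) for its seven control points by hand. As a sketch only, the same MatOfDMatch can be built with fromList for any number of control points; sourcePoints is reused from the example above.

            // Sketch: generate identity DMatch entries for every control point programmatically.
            int pointCount = (int)sourcePoints.total();
            List<DMatch> matchList = new List<DMatch>();
            for (int i = 0; i < pointCount; i++)
            {
                matchList.Add(new DMatch(i, i, 0));
            }
            MatOfDMatch matches = new MatOfDMatch();
            matches.fromList(matchList);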
Example #3
        /// <summary>
        /// Refines the matches with homography.
        /// </summary>
        /// <returns><c>true</c>, if matches with homography was refined, <c>false</c> otherwise.</returns>
        /// <param name="queryKeypoints">Query keypoints.</param>
        /// <param name="trainKeypoints">Train keypoints.</param>
        /// <param name="reprojectionThreshold">Reprojection threshold.</param>
        /// <param name="matches">Matches.</param>
        /// <param name="homography">Homography.</param>
        static bool refineMatchesWithHomography
        (
            MatOfKeyPoint queryKeypoints,
            MatOfKeyPoint trainKeypoints,
            float reprojectionThreshold,
            MatOfDMatch matches,
            Mat homography
        )
        {
//              Debug.Log ("matches " + matches.ToString ());

            int minNumberMatchesAllowed = 8;

            List <KeyPoint> queryKeypointsList = queryKeypoints.toList();
            List <KeyPoint> trainKeypointsList = trainKeypoints.toList();
            List <DMatch>   matchesList        = matches.toList();

            if (matchesList.Count < minNumberMatchesAllowed)
            {
                return(false);
            }

            // Prepare data for cv::findHomography
            List <Point> srcPointsList = new List <Point> (matchesList.Count);
            List <Point> dstPointsList = new List <Point> (matchesList.Count);

            for (int i = 0; i < matchesList.Count; i++)
            {
                srcPointsList.Add(trainKeypointsList [matchesList [i].trainIdx].pt);
                dstPointsList.Add(queryKeypointsList [matchesList [i].queryIdx].pt);
            }

            // Find homography matrix and get inliers mask
            using (MatOfPoint2f srcPoints = new MatOfPoint2f())
                using (MatOfPoint2f dstPoints = new MatOfPoint2f())
                    using (MatOfByte inliersMask = new MatOfByte(new byte[srcPointsList.Count])) {
                        srcPoints.fromList(srcPointsList);
                        dstPoints.fromList(dstPointsList);

//              Debug.Log ("srcPoints " + srcPoints.ToString ());
//              Debug.Log ("dstPoints " + dstPoints.ToString ());


                        Calib3d.findHomography(srcPoints,
                                               dstPoints,
                                               Calib3d.RANSAC,
                                               reprojectionThreshold,
                                               inliersMask, 2000, 0.955).copyTo(homography);

                        if (homography.rows() != 3 || homography.cols() != 3)
                        {
                            return(false);
                        }

                        //Debug.Log ("homography " + homography.ToString ());

                        //Debug.Log ("inliersMask " + inliersMask.dump ());

                        List <byte> inliersMaskList = inliersMask.toList();

                        List <DMatch> inliers = new List <DMatch> ();
                        for (int i = 0; i < inliersMaskList.Count; i++)
                        {
                            if (inliersMaskList [i] == 1)
                            {
                                inliers.Add(matchesList [i]);
                            }
                        }

                        matches.fromList(inliers);
                        //Debug.Log ("matches " + matches.ToString ());
                    }

            // Check how many inlier matches survived the homography refinement.
            return(matches.toList().Count > minNumberMatchesAllowed);
        }
Example #4
        /// <summary>
        /// Finds the pattern.
        /// </summary>
        /// <returns><c>true</c>, if pattern was found, <c>false</c> otherwise.</returns>
        /// <param name="image">Image.</param>
        /// <param name="info">Info.</param>
        public bool findPattern(Mat image, PatternTrackingInfo info)
        {
            // Convert input image to gray
            getGray(image, m_grayImg);

            // Extract feature points from input gray image
            extractFeatures(m_grayImg, m_queryKeypoints, m_queryDescriptors);

            // Get matches with current pattern
            getMatches(m_queryDescriptors, m_matches);

//      (GameObject.Find ("DebugHelpers").GetComponent<DebugHelpers> ()).showMat (DebugHelpers.getMatchesImage (m_grayImg, m_pattern.grayImg, m_queryKeypoints, m_pattern.keypoints, m_matches, 100));


            // Find homography transformation and detect good matches
            bool homographyFound = refineMatchesWithHomography(
                m_queryKeypoints,
                m_pattern.keypoints,
                homographyReprojectionThreshold,
                m_matches,
                m_roughHomography);

            if (homographyFound)
            {
//      (GameObject.Find ("DebugHelpers").GetComponent<DebugHelpers> ()).showMat (DebugHelpers.getMatchesImage (m_grayImg, m_pattern.grayImg, m_queryKeypoints, m_pattern.keypoints, m_matches, 100));

                // If homography refinement enabled improve found transformation
                if (enableHomographyRefinement)
                {
                    // Warp image using found homography
                    Imgproc.warpPerspective(m_grayImg, m_warpedImg, m_roughHomography, m_pattern.size, Imgproc.WARP_INVERSE_MAP | Imgproc.INTER_CUBIC);


                    //(GameObject.Find ("DebugHelpers").GetComponent<DebugHelpers> ()).showMat(m_warpedImg);

                    // Get refined matches:
                    using (MatOfKeyPoint warpedKeypoints = new MatOfKeyPoint())
                        using (MatOfDMatch refinedMatches = new MatOfDMatch()) {
                            // Detect features on warped image
                            extractFeatures(m_warpedImg, warpedKeypoints, m_queryDescriptors);

                            // Match with pattern
                            getMatches(m_queryDescriptors, refinedMatches);

                            // Estimate new refinement homography
                            homographyFound = refineMatchesWithHomography(
                                warpedKeypoints,
                                m_pattern.keypoints,
                                homographyReprojectionThreshold,
                                refinedMatches,
                                m_refinedHomography);
                        }

                    //(GameObject.Find ("DebugHelpers").GetComponent<DebugHelpers> ()).showMat(DebugHelpers.getMatchesImage(m_warpedImg, m_pattern.grayImg, warpedKeypoints, m_pattern.keypoints, refinedMatches, 100));

                    // Get a result homography as result of matrix product of refined and rough homographies:
//                              info.homography = m_roughHomography * m_refinedHomography;
                    Core.gemm(m_roughHomography, m_refinedHomography, 1, new Mat(), 0, info.homography);

//              Debug.Log ("info.homography " + info.homography.ToString ());

                    // Transform contour with rough homography

//                              Core.perspectiveTransform (m_pattern.points2d, info.points2d, m_roughHomography);
//                              info.draw2dContour (image, new Scalar (200, 0, 0, 255));


                    // Transform contour with precise homography

                    Core.perspectiveTransform(m_pattern.points2d, info.points2d, info.homography);

//              info.draw2dContour (image, new Scalar (0, 200, 0, 255));
                }
                else
                {
                    info.homography = m_roughHomography;

//              Debug.Log ("m_roughHomography " + m_roughHomography.ToString ());
//              Debug.Log ("info.homography " + info.homography.ToString ());

                    // Transform contour with rough homography
                    Core.perspectiveTransform(m_pattern.points2d, info.points2d, m_roughHomography);

//              info.draw2dContour (image, new Scalar (0, 200, 0, 255));
                }
            }

//              (GameObject.Find ("DebugHelpers").GetComponent<DebugHelpers> ()).showMat (DebugHelpers.getMatchesImage (m_grayImg, m_pattern.grayImg, m_queryKeypoints, m_pattern.keypoints, m_matches, 100));
//              Debug.Log ("Features:" + m_queryKeypoints.ToString () + " Matches: " + m_matches.ToString ());


            return(homographyFound);
        }
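A sketch of how findPattern might be driven from a per-frame callback. Only findPattern, PatternTrackingInfo and draw2dContour come from this example; the patternDetector field, the parameterless PatternTrackingInfo constructor and the ProcessFrame method are hypothetical.

        PatternDetector patternDetector;                      // hypothetical instance of the class declaring findPattern
        PatternTrackingInfo info = new PatternTrackingInfo(); // assumed parameterless constructor

        void ProcessFrame(Mat rgbaMat)
        {
            if (patternDetector.findPattern(rgbaMat, info))
            {
                // findPattern has filled info.homography and info.points2d.
                info.draw2dContour(rgbaMat, new Scalar(0, 200, 0, 255));
            }
        }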
Example #5
        //
        // C++:  void cv::drawMatches(Mat img1, vector_KeyPoint keypoints1, Mat img2, vector_KeyPoint keypoints2, vector_DMatch matches1to2, Mat& outImg, Scalar matchColor = Scalar::all(-1), Scalar singlePointColor = Scalar::all(-1), vector_char matchesMask = std::vector<char>(), DrawMatchesFlags flags = DrawMatchesFlags::DEFAULT)
        //

        //javadoc: drawMatches(img1, keypoints1, img2, keypoints2, matches1to2, outImg, matchColor, singlePointColor, matchesMask)
        public static void drawMatches(Mat img1, MatOfKeyPoint keypoints1, Mat img2, MatOfKeyPoint keypoints2, MatOfDMatch matches1to2, Mat outImg, Scalar matchColor, Scalar singlePointColor, MatOfByte matchesMask)
        {
            if (img1 != null)
            {
                img1.ThrowIfDisposed();
            }
            if (keypoints1 != null)
            {
                keypoints1.ThrowIfDisposed();
            }
            if (img2 != null)
            {
                img2.ThrowIfDisposed();
            }
            if (keypoints2 != null)
            {
                keypoints2.ThrowIfDisposed();
            }
            if (matches1to2 != null)
            {
                matches1to2.ThrowIfDisposed();
            }
            if (outImg != null)
            {
                outImg.ThrowIfDisposed();
            }
            if (matchesMask != null)
            {
                matchesMask.ThrowIfDisposed();
            }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
            Mat keypoints1_mat  = keypoints1;
            Mat keypoints2_mat  = keypoints2;
            Mat matches1to2_mat = matches1to2;
            Mat matchesMask_mat = matchesMask;
            features2d_Features2d_drawMatches_10(img1.nativeObj, keypoints1_mat.nativeObj, img2.nativeObj, keypoints2_mat.nativeObj, matches1to2_mat.nativeObj, outImg.nativeObj, matchColor.val[0], matchColor.val[1], matchColor.val[2], matchColor.val[3], singlePointColor.val[0], singlePointColor.val[1], singlePointColor.val[2], singlePointColor.val[3], matchesMask_mat.nativeObj);

            return;
#else
            return;
#endif
        }
Example #6
        //javadoc: drawMatches(img1, keypoints1, img2, keypoints2, matches1to2, outImg)
        public static void drawMatches(Mat img1, MatOfKeyPoint keypoints1, Mat img2, MatOfKeyPoint keypoints2, MatOfDMatch matches1to2, Mat outImg)
        {
            if (img1 != null)
            {
                img1.ThrowIfDisposed();
            }
            if (keypoints1 != null)
            {
                keypoints1.ThrowIfDisposed();
            }
            if (img2 != null)
            {
                img2.ThrowIfDisposed();
            }
            if (keypoints2 != null)
            {
                keypoints2.ThrowIfDisposed();
            }
            if (matches1to2 != null)
            {
                matches1to2.ThrowIfDisposed();
            }
            if (outImg != null)
            {
                outImg.ThrowIfDisposed();
            }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
            Mat keypoints1_mat  = keypoints1;
            Mat keypoints2_mat  = keypoints2;
            Mat matches1to2_mat = matches1to2;
            features2d_Features2d_drawMatches_14(img1.nativeObj, keypoints1_mat.nativeObj, img2.nativeObj, keypoints2_mat.nativeObj, matches1to2_mat.nativeObj, outImg.nativeObj);

            return;
#else
            return;
#endif
        }
Example #7
    // Draw matches between two images
    public static Mat getMatchesImage(Mat query, Mat pattern, MatOfKeyPoint queryKp, MatOfKeyPoint trainKp, MatOfDMatch matches, int maxMatchesDrawn)
    {
        Mat outImg = new Mat();

        List <DMatch> matchesList = matches.toList();

        if (matchesList.Count > maxMatchesDrawn)
        {
//                      matches.resize (maxMatchesDrawn);
            matchesList.RemoveRange(maxMatchesDrawn, matchesList.Count - maxMatchesDrawn);
        }

        MatOfDMatch tmpMatches = new MatOfDMatch();

        tmpMatches.fromList(matchesList);

        Features2d.drawMatches
        (
            query,
            queryKp,
            pattern,
            trainKp,
            tmpMatches,
            outImg,
            new Scalar(0, 200, 0, 255),
            Scalar.all(-1),
            new MatOfByte(),
            Features2d.NOT_DRAW_SINGLE_POINTS
        );


        return(outImg);
    }
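A short usage sketch for getMatchesImage, displayed the same way Example #1 shows its result; the image, keypoint and match variables are assumed to come from a run of Example #1.

        // Sketch: visualize the first 25 matches between the two images from Example #1.
        Mat debugImg = getMatchesImage(img1Mat, img2Mat, keypoints1, keypoints2, matches, 25);

        Texture2D debugTexture = new Texture2D(debugImg.cols(), debugImg.rows(), TextureFormat.RGBA32, false);
        Utils.matToTexture2D(debugImg, debugTexture);
        gameObject.GetComponent<Renderer>().material.mainTexture = debugTexture;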
Example #8
        public void UpdateAttitude(Mat mat)
        {
            int LandmarksCount = 0;
            int MatchsCount    = 0;

            using (MatOfKeyPoint keypoints = new MatOfKeyPoint())
                using (Mat descriptors = new Mat())
                {
                    detector.detect(mat, keypoints);
                    extractor.compute(mat, keypoints, descriptors);

                    var trainPoints = keypoints.toArray();

                    List <List <Vector3> > newLandmarks = new List <List <Vector3> >();
                    foreach (var keyPoint in trainPoints)
                    {
                        var keyVectorL = new List <Vector3>();
                        keyVectorL.Add(ARCameraManager.Instance.ToVector(keyPoint));
                        newLandmarks.Add(keyVectorL);
                        LandmarksCount++;
                    }
                    if (Landmarks.Count > 0)
                    {
                        List <Vector3> FromVectorL = new List <Vector3>();
                        List <Vector3> ToVectorL   = new List <Vector3>();
                        using (MatOfDMatch matches = new MatOfDMatch())
                            using (MatOfDMatch crossMatches = new MatOfDMatch())
                            {
                                matcher.match(MapDescriptors, descriptors, matches);
                                matcher.match(descriptors, MapDescriptors, crossMatches);
                                var matchL      = matches.toArray();
                                var crossMatchL = crossMatches.toArray();
                                int i           = 0;
                                foreach (DMatch match in matchL)
                                {
                                    bool flag = false;
                                    foreach (DMatch crossMatch in crossMatchL)
                                    {
                                        if (match.trainIdx == crossMatch.queryIdx && match.queryIdx == crossMatch.trainIdx)
                                        {
                                            flag = true;
                                            MatchsCount++;
                                        }
                                    }
                                    if (match.distance > MatchFilter)
                                    {
                                        flag = false;
                                    }
                                    if (flag)
                                    {
                                        var trainVectors = newLandmarks[match.trainIdx];
                                        var queryVectors = Landmarks[match.queryIdx];
                                        FromVectorL.Add(trainVectors[0]);
                                        //ToVectorL.Add(queryVectors.ToArray().Median()); START
                                        double[] queryPointsX = new double[queryVectors.Count];
                                        double[] queryPointsY = new double[queryVectors.Count];
                                        for (int j = 0; j < queryVectors.Count; j++)
                                        {
                                            var queryPoint = ARCameraManager.Instance.toPoint(queryVectors[j], Attitude);
                                            queryPointsX[j] = queryPoint.x;
                                            queryPointsY[j] = queryPoint.y;
                                        }
                                        ToVectorL.Add(Attitude * ARCameraManager.Instance.ToVector(new Point(queryPointsX.Median(), queryPointsY.Median())));
                                        //ToVectorL.Add(queryVectors.ToArray().Median()); END
                                        newLandmarks[match.trainIdx].AddRange(queryVectors.ToArray());
                                    }
                                    i++;
                                }
                                Quaternion newAttitude;
                                float      error = ARCameraManager.Instance.LMedS(FromVectorL, ToVectorL, out newAttitude);
                                _matchTestEvent.Invoke(FromVectorL.Count);
                                FromVectorL.Clear();
                                ToVectorL.Clear();
                                if (error > 0 && LMedSFilter > error)
                                {
                                    Attitude = newAttitude;
                                    _trackingTestEvent.Invoke(Attitude);
                                    //ARCameraManager.Instance.UpdateCameraPosture(Attitude);
                                    if (debugMode)
                                    {
                                        Debug.Log(string.Format("Attitude = {0}\nError = {1}", Attitude, error));
                                    }
                                }
                                foreach (var newLandmark in newLandmarks)
                                {
                                    newLandmark[0] = Attitude * newLandmark[0];
                                }
                            }
                    }
                    MapDescriptors.Dispose();
                    Landmarks.Clear();
                    Landmarks      = newLandmarks;
                    MapDescriptors = descriptors.clone();
                }
            float now = Time.time;

            if (debugMode)
            {
                Debug.Log(string.Format("time : {0} Landmarks : {1}, Matchs : {2}.", 1 / (Time.time - startime), LandmarksCount, MatchsCount));
            }
            startime = Time.time;
        }
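The cross-check loop in UpdateAttitude is a symmetry test on the two match directions. Below is a minimal sketch of the same test factored into a helper; the helper itself is hypothetical, and only DMatch and its fields come from the code above.

        // Sketch: keep a forward match only if the backward matcher maps the same descriptor pair
        // and the distance is below the threshold (same filter as the loop in UpdateAttitude).
        static List<DMatch> FilterSymmetricMatches(DMatch[] forward, DMatch[] backward, float maxDistance)
        {
            List<DMatch> result = new List<DMatch>();
            foreach (DMatch m in forward)
            {
                if (m.distance > maxDistance)
                    continue;

                foreach (DMatch c in backward)
                {
                    if (m.trainIdx == c.queryIdx && m.queryIdx == c.trainIdx)
                    {
                        result.Add(m);
                        break;
                    }
                }
            }
            return result;
        }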
Example #9
        public void Tracking(Mat mat)
        {
            using (MatOfKeyPoint keypoints = new MatOfKeyPoint())
                using (Mat descriptors = new Mat())
                {
                    detector.detect(mat, keypoints);
                    extractor.compute(mat, keypoints, descriptors);

                    var trainPoints = keypoints.toArray();

                    List <List <Vector3> > newLandmarks   = new List <List <Vector3> >();
                    List <List <Mat> >     newDescriptors = new List <List <Mat> >();
                    for (int i = 0; i < trainPoints.Length; i++)
                    {
                        var keyVectorL = new List <Vector3>();
                        keyVectorL.Add(ARCameraManager.Instance.ToVector(trainPoints[i]));
                        var DescriptorL = new List <Mat>();
                        DescriptorL.Add(descriptors.clone().row(i));
                        newLandmarks.Add(keyVectorL);
                        newDescriptors.Add(DescriptorL);
                    }

                    List <Vector3> FromVectorL = new List <Vector3>();
                    List <Vector3> ToVectorL   = new List <Vector3>();
                    List <int>     FinalizingL = new List <int>();
                    bool           finLMedS    = false;

                    if (FinalizingLandmarks.Count > 0)
                    {
                        using (MatOfDMatch matchesFinal = new MatOfDMatch())
                            using (MatOfDMatch crossMatchesFinal = new MatOfDMatch())
                            {
                                matcher.match(FinalizingLandmarkDescriptors, descriptors, matchesFinal);
                                matcher.match(descriptors, FinalizingLandmarkDescriptors, crossMatchesFinal);
                                var matchLFinal      = matchesFinal.toArray();
                                var crossMatchLFinal = crossMatchesFinal.toArray();
                                int i = 0;
                                foreach (DMatch match in matchLFinal)
                                {
                                    bool flag = false;
                                    foreach (DMatch crossMatch in crossMatchLFinal)
                                    {
                                        if (match.trainIdx == crossMatch.queryIdx && match.queryIdx == crossMatch.trainIdx)
                                        {
                                            flag = true;
                                        }
                                    }
                                    if (match.distance > MatchFilter)
                                    {
                                        flag = false;
                                    }
                                    if (flag)
                                    {
                                        FromVectorL.Add(newLandmarks[match.trainIdx][0]);
                                        ToVectorL.Add(FinalizingLandmarks[match.queryIdx]);
                                        FinalizingL.Add(match.trainIdx);
                                        newLandmarks[match.trainIdx][0]   = FinalizingLandmarks[match.queryIdx];
                                        newDescriptors[match.trainIdx][0] = FinalizingLandmarkDescriptors.row(match.queryIdx);
                                    }
                                    i++;
                                }
                                Quaternion newAttitude;
                                float      error = ARCameraManager.Instance.LMedS(FromVectorL, ToVectorL, out newAttitude);
                                if (error > 0 && LMedSFilter > error)
                                {
                                    Attitude = newAttitude;
                                    _trackingEvent.Invoke(Attitude);
                                    _matchingEvent.Invoke(FromVectorL.Count);
                                    //ARCameraManager.Instance.UpdateCameraPosture(Attitude);
                                    Debug.Log(string.Format("Attitude = {0}\nError = {1}\nFinalizMatch = {2}\nAccuracy = {3}", Attitude, error, FinalizingL.Count, 100 * FinalizingL.Count / FromVectorL.Count));
                                    finLMedS = true;
                                }
                            }
                    }

                    if (ProvisioningLandmarks.Count > 0)
                    {
                        using (MatOfDMatch matches = new MatOfDMatch())
                            using (MatOfDMatch crossMatches = new MatOfDMatch())
                            {
                                Mat optimisationDescriptors = OptimisationDescriptors;
                                matcher.match(optimisationDescriptors, descriptors, matches);
                                matcher.match(descriptors, optimisationDescriptors, crossMatches);
                                var matchL      = matches.toArray();
                                var crossMatchL = crossMatches.toArray();
                                int i           = 0;
                                foreach (DMatch match in matchL)
                                {
                                    bool flag = false;
                                    foreach (DMatch crossMatch in crossMatchL)
                                    {
                                        if (match.trainIdx == crossMatch.queryIdx && match.queryIdx == crossMatch.trainIdx)
                                        {
                                            flag = true;
                                        }
                                    }
                                    if (match.distance > MatchFilter)
                                    {
                                        flag = false;
                                    }
                                    if (flag)
                                    {
                                        if (FinalizingL.IndexOf(match.trainIdx) < 0)
                                        {
                                            var     trainVectors = newLandmarks[match.trainIdx];
                                            var     queryVectors = ProvisioningLandmarks[match.queryIdx];
                                            Vector3 queryVector;
                                            int     filter = OptimisationLandmark(queryVectors, Attitude, out queryVector);
                                            if (filter > 0)
                                            {
                                                if ((filter > SufficientCount) && (matchL.Length * FinalizedPercentage < FinalizingL.Count || matchL.Length * FinalizedPercentage > FinalizingLandmarks.Count))
                                                {
                                                    FinalizingLandmarks.Add(queryVector);
                                                    if (FinalizingLandmarkDescriptors != null)
                                                    {
                                                        FinalizingLandmarkDescriptors.push_back(optimisationDescriptors.row(match.queryIdx));
                                                    }
                                                    else
                                                    {
                                                        FinalizingLandmarkDescriptors = optimisationDescriptors.row(match.queryIdx);
                                                    }

                                                    Debug.Log(string.Format("Finalizing :Landmark = {0}\nDescriptors = {1}\nCount ALL = {2}", queryVector, optimisationDescriptors.row(match.queryIdx).ToStringMat(), FinalizingLandmarks.Count));
                                                }
                                                else
                                                {
                                                    FromVectorL.Add(trainVectors[0]);
                                                    ToVectorL.Add(queryVector);
                                                    newLandmarks[match.trainIdx].AddRange(queryVectors.ToArray());
                                                    newDescriptors[match.trainIdx].AddRange(ProvisioningLandmarkDescriptors[match.queryIdx].ToArray());
                                                }
                                            }
                                        }
                                    }
                                    i++;
                                }
                            }
                    }

                    if (FromVectorL.Count == ToVectorL.Count && ToVectorL.Count > 0)
                    {
                        Quaternion newAttitude;
                        float      error = ARCameraManager.Instance.LMedS(FromVectorL, ToVectorL, out newAttitude);
                        if ((error > 0 && LMedSFilter > error) && (!finLMedS))
                        {
                            Attitude = newAttitude;
                            _trackingEvent.Invoke(Attitude);
                            //ARCameraManager.Instance.UpdateCameraPosture(Attitude);
                            Debug.Log(string.Format("Attitude = {0}\nError = {1}\nFinalizMatch = {2}\nAccuracy = {3}", Attitude, error, FinalizingL.Count, 100 * FinalizingL.Count / FromVectorL.Count));
                        }
                        for (int i = 0; i < newLandmarks.Count; i++)
                        {
                            if (FinalizingL.IndexOf(i) < 0)
                            {
                                newLandmarks[i][0] = Attitude * newLandmarks[i][0];
                            }
                        }
                    }
                    _matchingEvent.Invoke(FromVectorL.Count);
                    FromVectorL.Clear();
                    ToVectorL.Clear();
                    ProvisioningLandmarks.Clear();
                    ProvisioningLandmarks           = newLandmarks;
                    ProvisioningLandmarkDescriptors = newDescriptors;
                }
        }