Code Example #1
        // Draw matches between two images
        public static Mat getMatchesImage(Mat query, Mat pattern, MatOfKeyPoint queryKp, MatOfKeyPoint trainKp, MatOfDMatch matches, int maxMatchesDrawn)
        {
            Mat outImg = new Mat();

            List <DMatch> matchesList = matches.toList();

            if (matchesList.Count > maxMatchesDrawn)
            {
                matchesList.RemoveRange(maxMatchesDrawn, matchesList.Count - maxMatchesDrawn);
            }

            MatOfDMatch tmpMatches = new MatOfDMatch();

            tmpMatches.fromList(matchesList);

            Features2d.drawMatches
            (
                query,
                queryKp,
                pattern,
                trainKp,
                tmpMatches,
                outImg,
                new Scalar(0, 200, 0, 255),
                Scalar.all(-1),
                new MatOfByte()
            );

            return(outImg);
        }
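
A minimal usage sketch (not part of the original listing; queryMat, patternMat, queryKeyPoints, patternKeyPoints, matches, and resultRawImage are assumed names): the visualization Mat returned above can be shown in Unity by converting it to a Texture2D, the same way the later listings do.

            // Hypothetical call site: draw up to 25 matches and display the result.
            Mat matchesImg = getMatchesImage(queryMat, patternMat, queryKeyPoints, patternKeyPoints, matches, 25);
            Texture2D matchesTexture = new Texture2D(matchesImg.cols(), matchesImg.rows(), TextureFormat.RGBA32, false);
            Utils.matToTexture2D(matchesImg, matchesTexture);
            resultRawImage.texture = matchesTexture;   // resultRawImage: an assumed UnityEngine.UI.RawImage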
Code Example #2
        /// <summary>
        /// Gets the matches.
        /// </summary>
        /// <param name="queryDescriptors">Query descriptors.</param>
        /// <param name="matches">Matches.</param>
        void getMatches(Mat queryDescriptors, MatOfDMatch matches)
        {
            List <DMatch> matchesList = new List <DMatch>();

            //matches.clear();

            if (enableRatioTest)
            {
                // To avoid NaNs when the best match has zero distance, we use the inverted ratio.
                float minRatio = 1.0f / 1.5f;

                // KNN match will return 2 nearest matches for each query descriptor
                m_matcher.knnMatch(queryDescriptors, m_knnMatches, 2);

                for (int i = 0; i < m_knnMatches.Count; i++)
                {
                    List <DMatch> m_knnMatchesList = m_knnMatches[i].toList();

                    DMatch bestMatch   = m_knnMatchesList[0];
                    DMatch betterMatch = m_knnMatchesList[1];

                    float distanceRatio = bestMatch.distance / betterMatch.distance;

                    // Keep a match only when the second-nearest neighbour is at
                    // least 1.5x farther than the best match (distinctness criterion)
                    if (distanceRatio < minRatio)
                    {
                        matchesList.Add(bestMatch);
                    }
                }

                matches.fromList(matchesList);
            }
            else
            {
                matches.fromList(matchesList);

                // Perform regular match
                m_matcher.match(queryDescriptors, matches);
            }

            //Debug.Log ("getMatches " + matches.ToString ());
        }
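
The ratio test above relies on fields of the surrounding class (m_matcher already trained with the pattern descriptors, and m_knnMatches as scratch storage). A self-contained sketch of the same idea, with hypothetical queryDescriptors and trainDescriptors Mats and an explicitly created matcher, might look like this:

            // Hedged sketch of Lowe's ratio test with an explicit train descriptor set.
            DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
            List <MatOfDMatch> knnMatches = new List <MatOfDMatch>();
            matcher.knnMatch(queryDescriptors, trainDescriptors, knnMatches, 2);

            List <DMatch> goodMatches = new List <DMatch>();
            foreach (MatOfDMatch knn in knnMatches)
            {
                DMatch[] pair = knn.toArray();
                // Keep the best match only when it is clearly better than the runner-up.
                if (pair.Length >= 2 && pair[0].distance < (1.0f / 1.5f) * pair[1].distance)
                {
                    goodMatches.Add(pair[0]);
                }
            }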
Code Example #3
        /// <summary>
        /// Refines the matches with homography.
        /// </summary>
        /// <returns><c>true</c>, if matches with homography was refined, <c>false</c> otherwise.</returns>
        /// <param name="queryKeypoints">Query keypoints.</param>
        /// <param name="trainKeypoints">Train keypoints.</param>
        /// <param name="reprojectionThreshold">Reprojection threshold.</param>
        /// <param name="matches">Matches.</param>
        /// <param name="homography">Homography.</param>
        static bool refineMatchesWithHomography
        (
            MatOfKeyPoint queryKeypoints,
            MatOfKeyPoint trainKeypoints,
            float reprojectionThreshold,
            MatOfDMatch matches,
            Mat homography
        )
        {
            //Debug.Log ("matches " + matches.ToString ());

            int minNumberMatchesAllowed = 8;

            List <KeyPoint> queryKeypointsList = queryKeypoints.toList();
            List <KeyPoint> trainKeypointsList = trainKeypoints.toList();
            List <DMatch>   matchesList        = matches.toList();

            if (matchesList.Count < minNumberMatchesAllowed)
            {
                return(false);
            }

            // Prepare data for cv::findHomography
            List <Point> srcPointsList = new List <Point>(matchesList.Count);
            List <Point> dstPointsList = new List <Point>(matchesList.Count);

            for (int i = 0; i < matchesList.Count; i++)
            {
                srcPointsList.Add(trainKeypointsList[matchesList[i].trainIdx].pt);
                dstPointsList.Add(queryKeypointsList[matchesList[i].queryIdx].pt);
            }

            // Find homography matrix and get inliers mask
            using (MatOfPoint2f srcPoints = new MatOfPoint2f())
                using (MatOfPoint2f dstPoints = new MatOfPoint2f())
                    using (MatOfByte inliersMask = new MatOfByte(new byte[srcPointsList.Count]))
                    {
                        srcPoints.fromList(srcPointsList);
                        dstPoints.fromList(dstPointsList);

                        //Debug.Log ("srcPoints " + srcPoints.ToString ());
                        //Debug.Log ("dstPoints " + dstPoints.ToString ());

                        Calib3d.findHomography(srcPoints,
                                               dstPoints,
                                               Calib3d.RANSAC,
                                               reprojectionThreshold,
                                               inliersMask, 2000, 0.955).copyTo(homography);

                        if (homography.rows() != 3 || homography.cols() != 3)
                        {
                            return(false);
                        }

                        //Debug.Log ("homography " + homography.ToString ());
                        //Debug.Log ("inliersMask " + inliersMask.dump ());

                        List <byte> inliersMaskList = inliersMask.toList();

                        List <DMatch> inliers = new List <DMatch>();
                        for (int i = 0; i < inliersMaskList.Count; i++)
                        {
                            if (inliersMaskList[i] == 1)
                            {
                                inliers.Add(matchesList[i]);
                            }
                        }

                        matches.fromList(inliers);
                        //Debug.Log ("matches " + matches.ToString ());
                    }

            // Judge success by the number of surviving inliers, not the pre-refinement count.
            return(matches.total() > minNumberMatchesAllowed);
        }
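
A hedged sketch of how the two helpers above are typically chained (queryDescriptors, m_queryKeypoints, and m_patternKeypoints are assumed fields, and the 3-pixel reprojection threshold is an assumption as well):

            // Hypothetical pipeline: match descriptors, then keep only homography inliers.
            MatOfDMatch matches = new MatOfDMatch();
            getMatches(queryDescriptors, matches);

            Mat homography = new Mat();
            bool patternFound = refineMatchesWithHomography(m_queryKeypoints, m_patternKeypoints, 3.0f, matches, homography);
            Debug.Log("pattern found: " + patternFound + ", inliers: " + matches.total());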
Code Example #4
File: Match.cs  Project: iml885203/csie-seminar
    public bool descriptorsORB_Old(Mat RGB, Mat cameraFeed, string targetName)// Feature matching method three (works, but inefficient; abandoned)
    {
        if (RGB == null)
        {
            Debug.Log("RGB Mat is Null");
            return(false);
        }
        // Copy the incoming RGB frame into SrcMat
        Mat SrcMat = new Mat();

        RGB.copyTo(SrcMat);
        // Template image to match against
        Texture2D imgTexture = Resources.Load(targetName) as Texture2D;
        //  Texture2D imgTexture2 = Resources.Load("lenaK") as Texture2D;

        // Convert the Texture2D to a Mat
        Mat img1Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

        Utils.texture2DToMat(imgTexture, img1Mat);

        // Create the ORB feature detector and descriptor extractor
        FeatureDetector     detector  = FeatureDetector.create(FeatureDetector.ORB);
        DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);
        // Mats to hold the keypoints and descriptors
        MatOfKeyPoint keypoints1     = new MatOfKeyPoint();
        Mat           descriptors1   = new Mat();
        MatOfKeyPoint keypointsSrc   = new MatOfKeyPoint();
        Mat           descriptorsSrc = new Mat();

        // Detect keypoints and compute descriptors for image 1
        detector.detect(img1Mat, keypoints1);
        extractor.compute(img1Mat, keypoints1, descriptors1);
        // Detect keypoints and compute descriptors for the source frame
        detector.detect(SrcMat, keypointsSrc);
        extractor.compute(SrcMat, keypointsSrc, descriptorsSrc);

        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
        MatOfDMatch       matches = new MatOfDMatch();

        matcher.match(descriptors1, descriptorsSrc, matches);
        DMatch[] arrayDmatch = matches.toArray();

        for (int i = arrayDmatch.Length - 1; i >= 0; i--)
        {
            //   Debug.Log("match " + i + ": " + arrayDmatch[i].distance);
        }
        // Filter the matches
        double max_dist = 0;
        double min_dist = 100;
        //-- Quick calculation of max and min distances between keypoints
        double dist = new double();

        for (int i = 0; i < matches.rows(); i++)
        {
            dist = arrayDmatch[i].distance;
            if (dist < min_dist)
            {
                min_dist = dist;
            }
            if (dist > max_dist)
            {
                max_dist = dist;
            }
        }
        Debug.Log("Max dist :" + max_dist);
        Debug.Log("Min dist :" + min_dist);
        // Keep only the good matches for drawing

        List <DMatch> matchesGoodList = new List <DMatch>();

        for (int i = 0; i < matches.rows(); i++)
        {
            // Keep matches whose distance is small relative to the minimum
            // (RateDist is assumed to be a UI slider defined elsewhere in this class).
            if (arrayDmatch[i].distance < RateDist.value * min_dist)
            {
                //Debug.Log("match " + i + ": " + arrayDmatch[i].distance);
                matchesGoodList.Add(arrayDmatch[i]);
            }
        }
        MatOfDMatch matchesGood = new MatOfDMatch();

        matchesGood.fromList(matchesGoodList);

        //Draw Keypoints
        Features2d.drawKeypoints(SrcMat, keypointsSrc, SrcMat);

        // Declarations for the output conversion

        Mat resultImg = new Mat();
        // Features2d.drawMatches(img1Mat, keypoints1, SrcMat, keypointsSrc, matchesGood, resultImg);

        List <Point> P1 = new List <Point>();
        // List<Point> P2 = new List<Point>();
        List <Point> pSrc = new List <Point>();

        Debug.Log("MatchCount" + matchesGoodList.Count);
        for (int i = 0; i < matchesGoodList.Count; i++)
        {
            P1.Add(new Point(keypoints1.toArray()[matchesGoodList[i].queryIdx].pt.x, keypoints1.toArray()[matchesGoodList[i].queryIdx].pt.y));
            pSrc.Add(new Point(keypointsSrc.toArray()[matchesGoodList[i].trainIdx].pt.x, keypointsSrc.toArray()[matchesGoodList[i].trainIdx].pt.y));
            //Debug.Log("ID = " + matchesGoodList[i].queryIdx );
            //Debug.Log("x,y =" + (int)keypoints1.toArray()[matchesGoodList[i].queryIdx].pt.x + "," + (int)keypoints1.toArray()[matchesGoodList[i].queryIdx].pt.y);
            //Debug.Log("x,y =" + (int)keypoints2.toArray()[matchesGoodList[i].trainIdx].pt.x + "," + (int)keypoints2.toArray()[matchesGoodList[i].trainIdx].pt.y);
        }

        MatOfPoint2f p2fTarget = new MatOfPoint2f(P1.ToArray());
        MatOfPoint2f p2fSrc    = new MatOfPoint2f(pSrc.ToArray());

        Mat          matrixH         = Calib3d.findHomography(p2fTarget, p2fSrc, Calib3d.RANSAC, 3);
        List <Point> srcPointCorners = new List <Point>();

        srcPointCorners.Add(new Point(0, 0));
        srcPointCorners.Add(new Point(img1Mat.width(), 0));
        srcPointCorners.Add(new Point(img1Mat.width(), img1Mat.height()));
        srcPointCorners.Add(new Point(0, img1Mat.height()));

        Mat          originalRect       = Converters.vector_Point2f_to_Mat(srcPointCorners);
        List <Point> srcPointCornersEnd = new List <Point>();

        srcPointCornersEnd.Add(new Point(0, img1Mat.height()));
        srcPointCornersEnd.Add(new Point(0, 0));
        srcPointCornersEnd.Add(new Point(img1Mat.width(), 0));
        srcPointCornersEnd.Add(new Point(img1Mat.width(), img1Mat.height()));

        Mat changeRect = Converters.vector_Point2f_to_Mat(srcPointCornersEnd);

        Core.perspectiveTransform(originalRect, changeRect, matrixH);
        List <Point> srcPointCornersSave = new List <Point>();

        Converters.Mat_to_vector_Point(changeRect, srcPointCornersSave);

        if ((srcPointCornersSave[2].x - srcPointCornersSave[0].x) < 5 || (srcPointCornersSave[2].y - srcPointCornersSave[0].y) < 5)
        {
            Debug.Log("Match Out Put image is to small");
            SrcMat.copyTo(cameraFeed);
            SrcMat.release();
            Imgproc.putText(cameraFeed, "X-S", new Point(10, 50), 0, 1, new Scalar(255, 255, 255), 2);
            return(false);
        }
        //    Features2d.drawMatches(img1Mat, keypoints1, SrcMat, keypointsSrc, matchesGood, resultImg);
        Imgproc.line(SrcMat, srcPointCornersSave[0], srcPointCornersSave[1], new Scalar(255, 0, 0), 3);
        Imgproc.line(SrcMat, srcPointCornersSave[1], srcPointCornersSave[2], new Scalar(255, 0, 0), 3);
        Imgproc.line(SrcMat, srcPointCornersSave[2], srcPointCornersSave[3], new Scalar(255, 0, 0), 3);
        Imgproc.line(SrcMat, srcPointCornersSave[3], srcPointCornersSave[0], new Scalar(255, 0, 0), 3);

        SrcMat.copyTo(cameraFeed);
        keypoints1.release();
        img1Mat.release();
        SrcMat.release();
        return(true);
    }
Code Example #5
File: Match.cs  Project: iml885203/csie-seminar
//============================================================
//============ The functions below are no longer used ========
//============================================================

    // Feature matching method three (ORB keypoint matching)
    public bool descriptorsORB(Mat RGB, Mat cameraFeed, string targetName)
    {
        if (RGB == null)
        {
            Debug.Log("RGB Mat is Null");
            return(false);
        }
        // Copy the incoming RGB frame into SrcMat
        Mat SrcMat = new Mat();

        RGB.copyTo(SrcMat);
        // Load the template image to match against
        Texture2D imgTexture = Resources.Load(targetName) as Texture2D;

        // Convert the Texture2D to a Mat
        Mat targetMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

        Utils.texture2DToMat(imgTexture, targetMat);

        // Create the ORB feature detector and descriptor extractor
        FeatureDetector     detector  = FeatureDetector.create(FeatureDetector.ORB);
        DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

        // Mats to hold the keypoints and descriptors
        MatOfKeyPoint keypointsTarget   = new MatOfKeyPoint();
        Mat           descriptorsTarget = new Mat();
        MatOfKeyPoint keypointsSrc      = new MatOfKeyPoint();
        Mat           descriptorsSrc    = new Mat();

        // Detect keypoints and compute descriptors for the target image
        detector.detect(targetMat, keypointsTarget);
        extractor.compute(targetMat, keypointsTarget, descriptorsTarget);

        // Detect keypoints and compute descriptors for the source frame
        detector.detect(SrcMat, keypointsSrc);
        extractor.compute(SrcMat, keypointsSrc, descriptorsSrc);

        // Create the descriptor matcher
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
        MatOfDMatch       matches = new MatOfDMatch();

        // Match the descriptors of the two images
        matcher.match(descriptorsTarget, descriptorsSrc, matches);
        DMatch[] arrayDmatch = matches.toArray();

        // Filter the matches
        double max_dist = 0;
        double min_dist = 100;
        //-- Quick calculation of max and min distances between keypoints
        double dist = new double();

        for (int i = 0; i < matches.rows(); i++)
        {
            dist = arrayDmatch[i].distance;
            if (dist < min_dist)
            {
                min_dist = dist;
            }
            if (dist > max_dist)
            {
                max_dist = dist;
            }
        }
        Debug.Log("Max dist :" + max_dist);
        Debug.Log("Min dist :" + min_dist);

        List <DMatch> matchesGoodList = new List <DMatch>();
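
        // Keep only matches whose distance is close to the minimum; without some
        // filter, matchesGoodList stays empty and findHomography below has nothing
        // to work with. The 2 * min_dist threshold is a common heuristic and an
        // assumption here, not something taken from the original file.
        for (int i = 0; i < matches.rows(); i++)
        {
            if (arrayDmatch[i].distance < 2 * min_dist)
            {
                matchesGoodList.Add(arrayDmatch[i]);
            }
        }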

        MatOfDMatch matchesGood = new MatOfDMatch();

        matchesGood.fromList(matchesGoodList);

        //Draw Keypoints
        Features2d.drawKeypoints(SrcMat, keypointsSrc, SrcMat);

        List <Point> pTarget = new List <Point>();
        List <Point> pSrc    = new List <Point>();

        Debug.Log("MatchCount" + matchesGoodList.Count);
        for (int i = 0; i < matchesGoodList.Count; i++)
        {
            pTarget.Add(new Point(keypointsTarget.toArray()[matchesGoodList[i].queryIdx].pt.x, keypointsTarget.toArray()[matchesGoodList[i].queryIdx].pt.y));
            pSrc.Add(new Point(keypointsSrc.toArray()[matchesGoodList[i].trainIdx].pt.x, keypointsSrc.toArray()[matchesGoodList[i].trainIdx].pt.y));
        }

        MatOfPoint2f p2fTarget = new MatOfPoint2f(pTarget.ToArray());
        MatOfPoint2f p2fSrc    = new MatOfPoint2f(pSrc.ToArray());

        Mat matrixH = Calib3d.findHomography(p2fTarget, p2fSrc, Calib3d.RANSAC, 3);

        List <Point> srcPointCorners = new List <Point>();

        srcPointCorners.Add(new Point(0, 0));
        srcPointCorners.Add(new Point(targetMat.width(), 0));
        srcPointCorners.Add(new Point(targetMat.width(), targetMat.height()));
        srcPointCorners.Add(new Point(0, targetMat.height()));
        Mat originalRect = Converters.vector_Point2f_to_Mat(srcPointCorners);

        List <Point> srcPointCornersEnd = new List <Point>();

        srcPointCornersEnd.Add(new Point(0, targetMat.height()));
        srcPointCornersEnd.Add(new Point(0, 0));
        srcPointCornersEnd.Add(new Point(targetMat.width(), 0));
        srcPointCornersEnd.Add(new Point(targetMat.width(), targetMat.height()));
        Mat changeRect = Converters.vector_Point2f_to_Mat(srcPointCornersEnd);

        Core.perspectiveTransform(originalRect, changeRect, matrixH);
        List <Point> srcPointCornersSave = new List <Point>();

        Converters.Mat_to_vector_Point(changeRect, srcPointCornersSave);

        if ((srcPointCornersSave[2].x - srcPointCornersSave[0].x) < 5 || (srcPointCornersSave[2].y - srcPointCornersSave[0].y) < 5)
        {
            Debug.Log("Match Out Put image is to small");
            SrcMat.copyTo(cameraFeed);
            SrcMat.release();
            Imgproc.putText(cameraFeed, targetName, srcPointCornersSave[0], 0, 1, new Scalar(255, 255, 255), 2);
            return(false);
        }
        // Draw the bounding box
        Imgproc.line(SrcMat, srcPointCornersSave[0], srcPointCornersSave[1], new Scalar(255, 0, 0), 3);
        Imgproc.line(SrcMat, srcPointCornersSave[1], srcPointCornersSave[2], new Scalar(255, 0, 0), 3);
        Imgproc.line(SrcMat, srcPointCornersSave[2], srcPointCornersSave[3], new Scalar(255, 0, 0), 3);
        Imgproc.line(SrcMat, srcPointCornersSave[3], srcPointCornersSave[0], new Scalar(255, 0, 0), 3);
        // Draw the center point
        Point middlePoint = new Point((srcPointCornersSave[0].x + srcPointCornersSave[2].x) / 2, (srcPointCornersSave[0].y + srcPointCornersSave[2].y) / 2);

        Imgproc.line(SrcMat, middlePoint, middlePoint, new Scalar(0, 0, 255), 10);


        SrcMat.copyTo(cameraFeed);
        keypointsTarget.release();
        targetMat.release();
        SrcMat.release();
        return(true);
    }
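
findHomography can return an empty Mat when it cannot fit a model (for example, when there are fewer than four usable matches). A hedged guard that could sit before the Core.perspectiveTransform call in the two listings above might look like this (matrixH, SrcMat, and cameraFeed are the variables from those listings):

        // Bail out early when no homography could be estimated.
        if (matrixH == null || matrixH.empty())
        {
            SrcMat.copyTo(cameraFeed);
            SrcMat.release();
            return(false);
        }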
Code Example #6
File: orb.cs  Project: Hengle/OpenCVForUnity
    void Orb()
    {
        p1Mat = Imgcodecs.imread(Application.dataPath + "/Textures/1.jpg", 1);
        p2Mat = Imgcodecs.imread(Application.dataPath + "/Textures/3.jpg", 1);
        Imgproc.cvtColor(p1Mat, p1Mat, Imgproc.COLOR_BGR2RGB);
        Imgproc.cvtColor(p2Mat, p2Mat, Imgproc.COLOR_BGR2RGB);
        Imgproc.resize(p2Mat, p2Mat, new Size(p1Mat.width(), p1Mat.height()));
        Debug.Log(p2Mat);

        /*
         * //Affine transform (rotate the image about its center)
         * float angle = UnityEngine.Random.Range(0, 360), scale = 1.0f;
         * Point center = new Point(img2Mat.cols() * 0.5f, img2Mat.rows() * 0.5f);
         *
         * Mat affine_matrix = Imgproc.getRotationMatrix2D(center, angle, scale);
         * Imgproc.warpAffine(img1Mat, img2Mat, affine_matrix, img2Mat.size());
         *
         * Texture2D texture = new Texture2D(img2Mat.cols(), img2Mat.rows());
         * Utils.matToTexture2D(img2Mat, texture);
         * outputRawImage.texture = texture;
         */

        ORB detector  = ORB.create();
        ORB extractor = ORB.create();

        //Extract keypoints and descriptors from image 1
        MatOfKeyPoint keypoints1   = new MatOfKeyPoint();
        Mat           descriptors1 = new Mat();

        detector.detect(p1Mat, keypoints1);
        extractor.compute(p1Mat, keypoints1, descriptors1);

        //Extract keypoints and descriptors from image 2
        MatOfKeyPoint keypoints2   = new MatOfKeyPoint();
        Mat           descriptors2 = new Mat();

        detector.detect(p2Mat, keypoints2);
        extractor.compute(p2Mat, keypoints2, descriptors2);

        //First-pass match result (very dense)
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
        MatOfDMatch       matches = new MatOfDMatch();

        matcher.match(descriptors1, descriptors2, matches);

        //Filtering (informal heuristic)
        //Compute the max/min descriptor distances
        double max_dist = 0;
        double min_dist = 15; //The distance threshold controls which features survive.
        //(set to 10, only the two ears end up matching...)
        //(set to 15, the tail starts matching as well...)

        //Two containers for the filtered samples
        List <DMatch> matchesArray = matches.toList(); //Extra conversion step needed with the Unity API
        //Debug.Log(matchesArray.Count); //500
        List <DMatch> goodmatchesArray = new List <DMatch>();

        //Debug.Log(img1Mat.rows()); //512
        for (int i = 0; i < matchesArray.Count; i++)
        {
            Debug.Log("[" + i + "]" + matchesArray[i].distance);
            if (matchesArray[i].distance > max_dist)
            {
                //max_dist = matchesArray[i].distance;
            }
            if (matchesArray[i].distance < min_dist)
            {
                min_dist = matchesArray[i].distance;
            }
        }
        //Debug.Log("The max distance is: " + max_dist);
        Debug.Log("The min distance is: " + min_dist);

        for (int i = 0; i < matchesArray.Count; i++)
        {
            if (matchesArray[i].distance < 2 * min_dist) //
            {
                goodmatchesArray.Add(matchesArray[i]);
            }
        }
        MatOfDMatch newMatches = new MatOfDMatch();

        newMatches.fromList(goodmatchesArray);
        Debug.Log(newMatches.toList().Count); //Matches remaining after the second filtering pass

        //Draw the result of the second filtering pass
        dstMat = new Mat();
        Features2d.drawMatches(p1Mat, keypoints1, p2Mat, keypoints2, newMatches, dstMat);

        Texture2D t2d = new Texture2D(dstMat.width(), dstMat.height());

        Utils.matToTexture2D(dstMat, t2d);
        Sprite sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

        m_dstImage.sprite         = sp;
        m_dstImage.preserveAspect = true;
    }
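
One caveat about the 2 * min_dist filter above: if the best distance ever reaches 0 (an exact descriptor match), the threshold collapses to 0 and every match is rejected. A common variant clamps the threshold to a floor value; the sketch below is a hedged alternative (the floor of 30 is an arbitrary choice, and matchesArray/min_dist are the variables from the method above):

        // Clamp the threshold so an exact best match does not reject everything.
        double threshold = System.Math.Max(2 * min_dist, 30.0);
        List <DMatch> filtered = new List <DMatch>();
        foreach (DMatch m in matchesArray)
        {
            if (m.distance < threshold)
            {
                filtered.Add(m);
            }
        }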
Code Example #7
    private void Update()
    {
        inputMat = webCamTextureToMatHelper.GetMat();

        MatOfKeyPoint camKeyPoints   = new MatOfKeyPoint();
        Mat           camDescriptors = new Mat();

        Imgproc.cvtColor(inputMat, grayMat, Imgproc.COLOR_BGR2GRAY);

        detector.detect(grayMat, camKeyPoints);
        extractor.compute(grayMat, camKeyPoints, camDescriptors);

        if (camKeyPoints.toList().Count < 1)
        {
            return;
        }

        List <MatOfDMatch> matches = new List <MatOfDMatch>();

        matcher.knnMatch(makerDescriptors, camDescriptors, matches, 2);

        //-- Filter matches using Lowe's ratio test
        float         ratioThresh       = 0.75f;
        List <DMatch> listOfGoodMatches = new List <DMatch>();

        for (int i = 0; i < matches.Count; i++)
        {
            if (matches[i].rows() > 1)
            {
                DMatch[] dMatches = matches[i].toArray();
                if (dMatches[0].distance < ratioThresh * dMatches[1].distance)
                {
                    listOfGoodMatches.Add(dMatches[0]);
                }
            }
        }
        MatOfDMatch goodMatches = new MatOfDMatch();

        goodMatches.fromList(listOfGoodMatches);

        //-- Draw matches
        Mat resultImg = new Mat();

        Features2d.drawMatches(makerMat, makerKeyPoints, grayMat, camKeyPoints, goodMatches, resultImg);

        //listOfGoodMatches = goodMatches.toList();

        ////-- Localize the object
        //List<Point> obj = new List<Point>();
        //List<Point> scene = new List<Point>();
        //List<KeyPoint> listOfKeypointsObject = makerKeyPoints.toList();
        //List<KeyPoint> listOfKeypointsScene = camKeyPoints.toList();
        //for (int i = 0; i < listOfGoodMatches.Count(); i++)
        //{
        //    //-- Get the keypoints from the good matches
        //    obj.Add(listOfKeypointsObject[listOfGoodMatches[i].queryIdx].pt);
        //    scene.Add(listOfKeypointsScene[listOfGoodMatches[i].trainIdx].pt);
        //}
        //MatOfPoint2f objMat = new MatOfPoint2f();
        //MatOfPoint2f sceneMat = new MatOfPoint2f();
        //objMat.fromList(obj);
        //sceneMat.fromList(scene);
        //double ransacReprojThreshold = 3.0;
        //Mat H = Calib3d.findHomography(objMat, sceneMat, Calib3d.RANSAC, ransacReprojThreshold);

        ////-- Get the corners from the image_1 ( the object to be "detected" )
        //Mat objCorners = new Mat(4, 1, CvType.CV_32FC2);
        //Mat sceneCorners = new Mat();
        //float[] objCornersData = new float[(int)(objCorners.total() * objCorners.channels())];
        //objCorners.get(0, 0, objCornersData);
        //objCornersData[0] = 0;
        //objCornersData[1] = 0;
        //objCornersData[2] = makerMat.cols();
        //objCornersData[3] = 0;
        //objCornersData[4] = makerMat.cols();
        //objCornersData[5] = makerMat.rows();
        //objCornersData[6] = 0;
        //objCornersData[7] = makerMat.rows();
        //objCorners.put(0, 0, objCornersData);

        //Core.perspectiveTransform(objCorners, sceneCorners, H);
        //byte[] sceneCornersData = new byte[(int)(sceneCorners.total() * sceneCorners.channels())];
        //sceneCorners.get(0, 0, sceneCornersData);

        ////-- Draw lines between the corners (the mapped object in the scene - image_2 )
        //Imgproc.line(resultImg, new Point(sceneCornersData[0] + makerMat.cols(), sceneCornersData[1]),
        //        new Point(sceneCornersData[2] + makerMat.cols(), sceneCornersData[3]), new Scalar(0, 255, 0), 4);
        //Imgproc.line(resultImg, new Point(sceneCornersData[2] + makerMat.cols(), sceneCornersData[3]),
        //        new Point(sceneCornersData[4] + makerMat.cols(), sceneCornersData[5]), new Scalar(0, 255, 0), 4);
        //Imgproc.line(resultImg, new Point(sceneCornersData[4] + makerMat.cols(), sceneCornersData[5]),
        //        new Point(sceneCornersData[6] + makerMat.cols(), sceneCornersData[7]), new Scalar(0, 255, 0), 4);
        //Imgproc.line(resultImg, new Point(sceneCornersData[6] + makerMat.cols(), sceneCornersData[7]),
        //        new Point(sceneCornersData[0] + makerMat.cols(), sceneCornersData[1]), new Scalar(0, 255, 0), 4);

        if (!first)
        {
            texture = new Texture2D(resultImg.cols(), resultImg.rows(), TextureFormat.RGBA32, false);
            dstQuad.GetComponent <Renderer>().material.mainTexture = texture;
            first = true;
        }

        Utils.matToTexture2D(resultImg, texture);
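
        // Housekeeping (not part of the original listing): these Mats are created
        // on every Update call, so releasing them after the texture upload keeps
        // native memory from growing frame over frame.
        foreach (MatOfDMatch m in matches)
        {
            m.release();
        }
        resultImg.release();
        goodMatches.release();
        camDescriptors.release();
        camKeyPoints.release();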
    }