Example No. 1
        // Draw matches between two images
        public static Mat getMatchesImage(Mat query, Mat pattern, MatOfKeyPoint queryKp, MatOfKeyPoint trainKp, MatOfDMatch matches, int maxMatchesDrawn)
        {
            Mat outImg = new Mat();

            List <DMatch> matchesList = matches.toList();

            // Truncate to at most maxMatchesDrawn matches so the drawn image stays readable
            if (matchesList.Count > maxMatchesDrawn)
            {
                matchesList.RemoveRange(maxMatchesDrawn, matchesList.Count - maxMatchesDrawn);
            }

            MatOfDMatch tmpMatches = new MatOfDMatch();

            tmpMatches.fromList(matchesList);

            Features2d.drawMatches
            (
                query,
                queryKp,
                pattern,
                trainKp,
                tmpMatches,
                outImg,
                new Scalar(0, 200, 0, 255),
                Scalar.all(-1),
                new MatOfByte()
            );

            return outImg;
        }
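For context, here is a minimal sketch of how a helper like this might be called, assuming it sits in the same class as getMatchesImage. The ORB setup, the empty mask argument, and the cap of 50 drawn matches are illustrative assumptions, not part of the example above; note that the helper truncates by list position, so sorting the matches by DMatch.distance beforehand keeps the strongest ones.

        // Hypothetical caller of getMatchesImage (the 50-match cap is a placeholder value)
        public static Mat demoDrawMatches(Mat query, Mat pattern)
        {
            ORB orb = ORB.create();

            MatOfKeyPoint queryKp = new MatOfKeyPoint(), trainKp = new MatOfKeyPoint();
            Mat queryDesc = new Mat(), trainDesc = new Mat();

            // Detect keypoints and compute binary ORB descriptors for both images (empty Mat = no mask)
            orb.detectAndCompute(query, new Mat(), queryKp, queryDesc);
            orb.detectAndCompute(pattern, new Mat(), trainKp, trainDesc);

            // Hamming distance is the appropriate metric for binary descriptors such as ORB
            DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
            MatOfDMatch matches = new MatOfDMatch();
            matcher.match(queryDesc, trainDesc, matches);

            // Draw at most the first 50 matches
            return getMatchesImage(query, pattern, queryKp, trainKp, matches, 50);
        }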
Example No. 2
        // Use this for initialization
        void Start()
        {
            Texture2D imgTexture = Resources.Load("lena") as Texture2D;

            Mat img1Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

            Utils.texture2DToMat(imgTexture, img1Mat);
            Debug.Log("img1Mat dst ToString " + img1Mat.ToString());

            Mat img2Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);

            Utils.texture2DToMat(imgTexture, img2Mat);
            Debug.Log("img2Mat dst ToString " + img2Mat.ToString());



            // Rotate img1Mat by a random angle about its center into img2Mat to create a second, transformed view
            float angle = UnityEngine.Random.Range(0, 360), scale = 1.0f;

            Point center = new Point(img2Mat.cols() * 0.5f, img2Mat.rows() * 0.5f);

            Mat affine_matrix = Imgproc.getRotationMatrix2D(center, angle, scale);

            Imgproc.warpAffine(img1Mat, img2Mat, affine_matrix, img2Mat.size());


            FeatureDetector     detector  = FeatureDetector.create(FeatureDetector.ORB);
            DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

            MatOfKeyPoint keypoints1   = new MatOfKeyPoint();
            Mat           descriptors1 = new Mat();

            detector.detect(img1Mat, keypoints1);
            extractor.compute(img1Mat, keypoints1, descriptors1);

            MatOfKeyPoint keypoints2   = new MatOfKeyPoint();
            Mat           descriptors2 = new Mat();

            detector.detect(img2Mat, keypoints2);
            extractor.compute(img2Mat, keypoints2, descriptors2);


            // ORB produces binary descriptors, so a Hamming-based brute-force matcher is used
            DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
            MatOfDMatch       matches = new MatOfDMatch();

            matcher.match(descriptors1, descriptors2, matches);


            Mat resultImg = new Mat();

            Features2d.drawMatches(img1Mat, keypoints1, img2Mat, keypoints2, matches, resultImg);



            Texture2D texture = new Texture2D(resultImg.cols(), resultImg.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(resultImg, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
        }
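Not part of the example above: because the brute-force matcher returns one match per query descriptor, the drawn result is usually very dense. A small optional refinement, assuming the same variables as the Start() method above, is to sort by descriptor distance and draw only the best few (the cap of 50 is arbitrary).

            // Optional: keep only the strongest matches before drawing (50 is an arbitrary cap)
            List<DMatch> sortedMatches = matches.toList();
            sortedMatches.Sort((a, b) => a.distance.CompareTo(b.distance));   // ascending: best first

            MatOfDMatch bestMatches = new MatOfDMatch();
            bestMatches.fromList(sortedMatches.GetRange(0, Mathf.Min(50, sortedMatches.Count)));

            Features2d.drawMatches(img1Mat, keypoints1, img2Mat, keypoints2, bestMatches, resultImg);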
Example No. 3
    void Orb()
    {
        p1Mat = Imgcodecs.imread(Application.dataPath + "/Textures/1.jpg", 1);
        p2Mat = Imgcodecs.imread(Application.dataPath + "/Textures/3.jpg", 1);
        Imgproc.cvtColor(p1Mat, p1Mat, Imgproc.COLOR_BGR2RGB);
        Imgproc.cvtColor(p2Mat, p2Mat, Imgproc.COLOR_BGR2RGB);
        Imgproc.resize(p2Mat, p2Mat, new Size(p1Mat.width(), p1Mat.height()));
        Debug.Log(p2Mat);

        /*
         * // Affine transform (rotation about the image center)
         * float angle = UnityEngine.Random.Range(0, 360), scale = 1.0f;
         * Point center = new Point(img2Mat.cols() * 0.5f, img2Mat.rows() * 0.5f);
         *
         * Mat affine_matrix = Imgproc.getRotationMatrix2D(center, angle, scale);
         * Imgproc.warpAffine(img1Mat, img2Mat, affine_matrix, img2Mat.size());
         *
         * Texture2D texture = new Texture2D(img2Mat.cols(), img2Mat.rows());
         * Utils.matToTexture2D(img2Mat, texture);
         * outputRawImage.texture = texture;
         */

        ORB detector  = ORB.create();
        ORB extractor = ORB.create();

        // Extract keypoints and descriptors from image 1
        MatOfKeyPoint keypoints1   = new MatOfKeyPoint();
        Mat           descriptors1 = new Mat();

        detector.detect(p1Mat, keypoints1);
        extractor.compute(p1Mat, keypoints1, descriptors1);

        // Extract keypoints and descriptors from image 2
        MatOfKeyPoint keypoints2   = new MatOfKeyPoint();
        Mat           descriptors2 = new Mat();

        detector.detect(p2Mat, keypoints2);
        extractor.compute(p2Mat, keypoints2, descriptors2);

        // First-pass matching (far too dense on its own)
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
        MatOfDMatch       matches = new MatOfDMatch();

        matcher.match(descriptors1, descriptors2, matches);

        // Filtering (unofficial heuristic)
        // Track the maximum/minimum descriptor distance
        double max_dist = 0;
        double min_dist = 15; // distance cap that controls which features are kept.
        // (set to 10, only the two ears still match...)
        // (set to 15, the tail starts matching as well...)

        // Two lists to hold the filtered samples
        List <DMatch> matchesArray = matches.toList(); // extra conversion step needed with the Unity API
        //Debug.Log(matchesArray.Count); //500
        List <DMatch> goodmatchesArray = new List <DMatch>();

        //Debug.Log(img1Mat.rows()); //512
        for (int i = 0; i < matchesArray.Count; i++)
        {
            Debug.Log("[" + i + "]" + matchesArray[i].distance);
            if (matchesArray[i].distance > max_dist)
            {
                max_dist = matchesArray[i].distance;
            }
            if (matchesArray[i].distance < min_dist)
            {
                min_dist = matchesArray[i].distance;
            }
        }
        Debug.Log("The max distance is: " + max_dist);
        Debug.Log("The min distance is: " + min_dist);

        for (int i = 0; i < matchesArray.Count; i++)
        {
            if (matchesArray[i].distance < 2 * min_dist) // keep matches within twice the minimum distance
            {
                goodmatchesArray.Add(matchesArray[i]);
            }
        }
        MatOfDMatch newMatches = new MatOfDMatch();

        newMatches.fromList(goodmatchesArray);
        Debug.Log(newMatches.toList().Count); // number of matches that survive the second filter

        // Draw the result of the second filtering pass
        dstMat = new Mat();
        Features2d.drawMatches(p1Mat, keypoints1, p2Mat, keypoints2, newMatches, dstMat);

        Texture2D t2d = new Texture2D(dstMat.width(), dstMat.height());

        Utils.matToTexture2D(dstMat, t2d);
        Sprite sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

        m_dstImage.sprite         = sp;
        m_dstImage.preserveAspect = true;
    }
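The hard-coded min_dist above is tuned to this particular image pair. A more general heuristic, common in the OpenCV feature-matching tutorials, computes the minimum distance over all matches and keeps those below max(2 * min_dist, floor); the floor of 30.0 below is an assumed value to tune, not taken from the example.

        // Distance-based filtering with a computed minimum (the floor of 30.0 is an assumed tuning value)
        List<DMatch> allMatches = matches.toList();

        double minDist = double.MaxValue;
        foreach (DMatch m in allMatches)
        {
            if (m.distance < minDist) minDist = m.distance;
        }

        double threshold = System.Math.Max(2 * minDist, 30.0);

        List<DMatch> filtered = new List<DMatch>();
        foreach (DMatch m in allMatches)
        {
            if (m.distance < threshold) filtered.Add(m);
        }

        MatOfDMatch filteredMatches = new MatOfDMatch();
        filteredMatches.fromList(filtered);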
Example No. 4
    private void Update()
    {
        inputMat = webCamTextureToMatHelper.GetMat();

        MatOfKeyPoint camKeyPoints   = new MatOfKeyPoint();
        Mat           camDescriptors = new Mat();

        Imgproc.cvtColor(inputMat, grayMat, Imgproc.COLOR_BGR2GRAY);

        detector.detect(grayMat, camKeyPoints);
        extractor.compute(grayMat, camKeyPoints, camDescriptors);

        if (camKeyPoints.toList().Count < 1)
        {
            return;
        }

        List <MatOfDMatch> matches = new List <MatOfDMatch>();

        matcher.knnMatch(makerDescriptors, camDescriptors, matches, 2);

        //-- Filter matches using Lowe's ratio test
        float         ratioThresh       = 0.75f;
        List <DMatch> listOfGoodMatches = new List <DMatch>();

        for (int i = 0; i < matches.Count; i++)
        {
            if (matches[i].rows() > 1)
            {
                DMatch[] dMatches = matches[i].toArray();
                if (dMatches[0].distance < ratioThresh * dMatches[1].distance)
                {
                    listOfGoodMatches.Add(dMatches[0]);
                }
            }
        }
        MatOfDMatch goodMatches = new MatOfDMatch();

        goodMatches.fromList(listOfGoodMatches);

        //-- Draw matches
        Mat resultImg = new Mat();

        Features2d.drawMatches(makerMat, makerKeyPoints, grayMat, camKeyPoints, goodMatches, resultImg);

        //listOfGoodMatches = goodMatches.toList();

        ////-- Localize the object
        //List<Point> obj = new List<Point>();
        //List<Point> scene = new List<Point>();
        //List<KeyPoint> listOfKeypointsObject = makerKeyPoints.toList();
        //List<KeyPoint> listOfKeypointsScene = camKeyPoints.toList();
        //for (int i = 0; i < listOfGoodMatches.Count(); i++)
        //{
        //    //-- Get the keypoints from the good matches
        //    obj.Add(listOfKeypointsObject[listOfGoodMatches[i].queryIdx].pt);
        //    scene.Add(listOfKeypointsScene[listOfGoodMatches[i].trainIdx].pt);
        //}
        //MatOfPoint2f objMat = new MatOfPoint2f();
        //MatOfPoint2f sceneMat = new MatOfPoint2f();
        //objMat.fromList(obj);
        //sceneMat.fromList(scene);
        //double ransacReprojThreshold = 3.0;
        //Mat H = Calib3d.findHomography(objMat, sceneMat, Calib3d.RANSAC, ransacReprojThreshold);

        ////-- Get the corners from the image_1 ( the object to be "detected" )
        //Mat objCorners = new Mat(4, 1, CvType.CV_32FC2);
        //Mat sceneCorners = new Mat();
        //float[] objCornersData = new float[(int)(objCorners.total() * objCorners.channels())];
        //objCorners.get(0, 0, objCornersData);
        //objCornersData[0] = 0;
        //objCornersData[1] = 0;
        //objCornersData[2] = makerMat.cols();
        //objCornersData[3] = 0;
        //objCornersData[4] = makerMat.cols();
        //objCornersData[5] = makerMat.rows();
        //objCornersData[6] = 0;
        //objCornersData[7] = makerMat.rows();
        //objCorners.put(0, 0, objCornersData);

        //Core.perspectiveTransform(objCorners, sceneCorners, H);
        //float[] sceneCornersData = new float[(int)(sceneCorners.total() * sceneCorners.channels())];
        //sceneCorners.get(0, 0, sceneCornersData);

        ////-- Draw lines between the corners (the mapped object in the scene - image_2 )
        //Imgproc.line(resultImg, new Point(sceneCornersData[0] + makerMat.cols(), sceneCornersData[1]),
        //        new Point(sceneCornersData[2] + makerMat.cols(), sceneCornersData[3]), new Scalar(0, 255, 0), 4);
        //Imgproc.line(resultImg, new Point(sceneCornersData[2] + makerMat.cols(), sceneCornersData[3]),
        //        new Point(sceneCornersData[4] + makerMat.cols(), sceneCornersData[5]), new Scalar(0, 255, 0), 4);
        //Imgproc.line(resultImg, new Point(sceneCornersData[4] + makerMat.cols(), sceneCornersData[5]),
        //        new Point(sceneCornersData[6] + makerMat.cols(), sceneCornersData[7]), new Scalar(0, 255, 0), 4);
        //Imgproc.line(resultImg, new Point(sceneCornersData[6] + makerMat.cols(), sceneCornersData[7]),
        //        new Point(sceneCornersData[0] + makerMat.cols(), sceneCornersData[1]), new Scalar(0, 255, 0), 4);

        if (!first)
        {
            texture = new Texture2D(resultImg.cols(), resultImg.rows(), TextureFormat.RGBA32, false);
            dstQuad.GetComponent <Renderer>().material.mainTexture = texture;
            first = true;
        }

        Utils.matToTexture2D(resultImg, texture);
    }
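Update() above relies on fields that must be prepared before the first frame: detector, extractor, matcher, makerMat, makerKeyPoints, makerDescriptors, grayMat, texture, and first. A hypothetical Start() counterpart is sketched below. The field names are taken from Update(), but the asset name "maker", the ORB/matcher choice, and the initialization itself are assumptions; webCamTextureToMatHelper, inputMat, and dstQuad are assumed to be declared and set up elsewhere.

    // Hypothetical fields and Start(): names are taken from Update(); the setup itself is assumed.
    ORB detector, extractor;
    DescriptorMatcher matcher;
    Mat makerMat, grayMat, makerDescriptors;
    MatOfKeyPoint makerKeyPoints;
    Texture2D texture;
    bool first = false;

    private void Start()
    {
        Texture2D makerTexture = Resources.Load("maker") as Texture2D;   // placeholder asset name

        Mat makerRgb = new Mat(makerTexture.height, makerTexture.width, CvType.CV_8UC3);
        Utils.texture2DToMat(makerTexture, makerRgb);

        // Convert the marker to grayscale so it matches the gray camera frames used in Update()
        makerMat = new Mat();
        Imgproc.cvtColor(makerRgb, makerMat, Imgproc.COLOR_RGB2GRAY);
        grayMat = new Mat();

        detector  = ORB.create();
        extractor = ORB.create();
        matcher   = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);

        // Precompute the marker's keypoints and descriptors once
        makerKeyPoints   = new MatOfKeyPoint();
        makerDescriptors = new Mat();
        detector.detect(makerMat, makerKeyPoints);
        extractor.compute(makerMat, makerKeyPoints, makerDescriptors);
    }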