Example #1
        private static void BForceMatcherSample()
        {
            var src1 = new Mat("data/match1.png");
            var src2 = new Mat("data/match2.png");

            var gray1 = new Mat();
            var gray2 = new Mat();

            Cv2.CvtColor(src1, gray1, ColorConversion.BgrToGray);
            Cv2.CvtColor(src2, gray2, ColorConversion.BgrToGray);

            var fast = new FastFeatureDetector(10);
            var descriptorExtractor = new BriefDescriptorExtractor(32);

            var descriptors1 = new Mat();
            var descriptors2 = new Mat();

            KeyPoint[] keypoints1 = fast.Run(gray1, null);
            descriptorExtractor.Compute(gray1, ref keypoints1, descriptors1);

            KeyPoint[] keypoints2 = fast.Run(gray2, null);
            descriptorExtractor.Compute(gray2, ref keypoints2, descriptors2);

            // Match descriptor vectors (BRIEF descriptors are binary, so Hamming distance is the appropriate norm)
            var bfMatcher = new BFMatcher(NormType.Hamming, false);

            DMatch[][] bfMatches = bfMatcher.KnnMatch(descriptors1, descriptors2, 3, null, false);

            var view = new Mat();

            Cv2.DrawMatchesKnn(src1, keypoints1, src2, keypoints2, bfMatches, view);
            Window.ShowImages(view);
        }
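The k = 3 neighbour lists returned above are normally filtered before drawing. A minimal ratio-test sketch over the DMatch[][] result (the helper name and the 0.75 threshold are illustrative, not part of the original example):

        private static DMatch[] FilterByRatioTest(DMatch[][] knnMatches, float maxRatio = 0.75f)
        {
            var good = new System.Collections.Generic.List<DMatch>();
            foreach (DMatch[] m in knnMatches)
            {
                // keep the best match only when it is clearly closer than the runner-up
                if (m.Length >= 2 && m[0].Distance < maxRatio * m[1].Distance)
                {
                    good.Add(m[0]);
                }
            }
            return good.ToArray();
        }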
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            int    k = 2;
            double uniquenessThreshold = 0.80;
            double hessianThresh       = 100;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    SURF surfCPU = new SURF(hessianThresh);
                    SIFT siftCPU = new SIFT();


                    //extract features from the object image
                    UMat modelDescriptors = new UMat();

                    //surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
                    siftCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                    watch = Stopwatch.StartNew();

                    // extract features from the observed image
                    UMat observedDescriptors = new UMat();

                    //surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                    siftCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                    BFMatcher matcher = new BFMatcher(DistanceType.L2);
                    matcher.Add(modelDescriptors);

                    matcher.KnnMatch(observedDescriptors, matches, k, null);
                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                    mask.SetTo(new MCvScalar(255));

                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                    //Features2DToolbox.VoteForUniqueness(matches, 1, mask);

                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                    if (nonZeroCount >= 4)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                   matches, mask, 1.5, 20);
                        if (nonZeroCount >= 4)
                        {
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                  observedKeyPoints, matches, mask, 2);
                        }
                    }

                    watch.Stop();
                }
            matchTime = watch.ElapsedMilliseconds;
        }
Example #3
        public Image <Bgr, byte> pointComp(Image <Bgr, byte> baseImg, Image <Bgr, byte> twistedImg)
        {
            Image <Gray, byte> baseImgGray    = baseImg.Convert <Gray, byte>();
            Image <Gray, byte> twistedImgGray = twistedImg.Convert <Gray, byte>();
            Brisk            descriptor       = new Brisk();
            GFTTDetector     detector         = new GFTTDetector(40, 0.01, 5, 3, true);
            VectorOfKeyPoint GFP1             = new VectorOfKeyPoint();
            UMat             baseDesc         = new UMat();
            // note: despite the names, bimg holds the twisted image and timg the base image,
            // so GFP1/baseDesc describe the twisted image and GFP2/twistedDesc describe the base
            UMat             bimg             = twistedImgGray.Mat.GetUMat(AccessType.Read);
            VectorOfKeyPoint GFP2             = new VectorOfKeyPoint();
            UMat             twistedDesc      = new UMat();
            UMat             timg             = baseImgGray.Mat.GetUMat(AccessType.Read);

            detector.DetectRaw(bimg, GFP1);
            descriptor.Compute(bimg, GFP1, baseDesc);
            detector.DetectRaw(timg, GFP2);
            descriptor.Compute(timg, GFP2, twistedDesc);
            BFMatcher matcher = new BFMatcher(DistanceType.L2);
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            matcher.Add(baseDesc);
            matcher.KnnMatch(twistedDesc, matches, 2, null);
            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
            //int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(GFP1, GFP1, matches, mask, 1.5, 20);
            Image <Bgr, byte> res = baseImg.CopyBlank();

            Features2DToolbox.DrawMatches(twistedImg, GFP1, baseImg, GFP2, matches, res, new MCvScalar(255, 0, 0), new MCvScalar(255, 0, 0), mask);
            return(res);
        }
        public bool MatchDescriptorsWithRatioTest(BFMatcher descriptorMatcher, ref Mat descriptorsEvalImage, Mat trainDescriptors, ref VectorOfDMatch matchesFilteredOut, float maxDistanceRatio)
        {
            if (trainDescriptors.Rows < 4)
            {
                return(false);
            }

            matchesFilteredOut.Clear();
            descriptorMatcher.Add(trainDescriptors);

            VectorOfVectorOfDMatch matchesKNN = new VectorOfVectorOfDMatch();

            descriptorMatcher.KnnMatch(descriptorsEvalImage, matchesKNN, 2, null);
            for (int matchPos = 0; matchPos < matchesKNN.Size; ++matchPos)
            {
                if (matchesKNN[matchPos].Size >= 2)
                {
                    if (matchesKNN[matchPos][0].Distance <= maxDistanceRatio * matchesKNN[matchPos][1].Distance)
                    {
                        matchesFilteredOut.Push(new MDMatch[] { matchesKNN[matchPos][0] });
                    }
                }
            }

            return(matchesFilteredOut.Size > 0);
        }
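A possible call site for the helper above, as a sketch; the wrapper name is hypothetical, and Hamming distance is chosen on the assumption of binary descriptors such as ORB's or BRISK's (float descriptors like SIFT's would use DistanceType.L2):

        public bool TryMatch(Mat evalDescriptors, Mat trainDescriptors)
        {
            // assumption: binary descriptors, hence Hamming distance
            using (BFMatcher matcher = new BFMatcher(DistanceType.Hamming))
            {
                VectorOfDMatch filtered = new VectorOfDMatch();
                return MatchDescriptorsWithRatioTest(matcher, ref evalDescriptors, trainDescriptors, ref filtered, 0.75f);
            }
        }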
Example #5
        /// <summary>
        /// To avoid NaNs when the best match has zero distance, we use the inverse ratio.
        /// The KNN match returns the 2 nearest matches for each query descriptor.
        /// </summary>
        List <DMatch> GetMatches(BFMatcher matcher, Mat queryDescriptors, Mat trainDescriptors)
        {
            List <DMatch> matchesList = new List <DMatch>();

            if (enableRatioTest)
            {
                float      minRatio = 1.0f / 1.5f;
                DMatch[][] dm       = matcher.KnnMatch(queryDescriptors, trainDescriptors, 2);

                for (int i = 0; i < dm.Length; i++)
                {
                    // guard: fewer than two neighbours means the ratio test cannot be applied
                    if (dm[i].Length < 2)
                    {
                        continue;
                    }

                    DMatch bestMatch   = dm[i][0];
                    DMatch betterMatch = dm[i][1];

                    float distanceRatio = bestMatch.Distance / betterMatch.Distance;

                    if (distanceRatio < minRatio)
                    {
                        matchesList.Add(bestMatch);
                    }
                }
            }
            else
            {
                matchesList.AddRange(matcher.Match(queryDescriptors, trainDescriptors));
            }
            return(matchesList);
        }
        public void FindFeaturePointsBetweenTwoImages()
        {
            var filename            = "";
            var filename2           = "";
            var orb                 = new ORBDetector(2000);
            Image <Bgr, byte> left  = new Image <Bgr, byte>(filename);
            Image <Bgr, byte> right = new Image <Bgr, byte>(filename2);
            var vectorLeft          = new VectorOfKeyPoint();
            var vectorRight         = new VectorOfKeyPoint();
            var matLeft             = new Mat();
            var matRight            = new Mat();

            orb.DetectAndCompute(left, null, vectorLeft, matLeft, false);
            orb.DetectAndCompute(right, null, vectorRight, matRight, false);

            var matcher = new BFMatcher(DistanceType.Hamming, true); // plain Hamming suits ORB's default WTA_K of 2 (Hamming2 is for WTA_K of 3 or 4)
            var matches = new VectorOfVectorOfDMatch();

            matcher.Add(matLeft);
            matcher.KnnMatch(matRight, matches, 1, null);


            CalculateEssentialMAtrix(vectorLeft, vectorRight, camera.CameraMatrix);
            CalculateFundamentalMatrix(vectorLeft, vectorRight);
        }
        public static VectorOfVectorOfDMatch KnnMatch(this BFMatcher matcher, UMat pdesc, int k, IInputArray mask = null)
        {
            var puzzelmatches = new VectorOfVectorOfDMatch();

            matcher.KnnMatch(pdesc, puzzelmatches, k, mask);
            return(puzzelmatches);
        }
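A short usage sketch for the extension above; the method and variable names are placeholders for any pair of Emgu CV descriptor UMats:

        public static VectorOfVectorOfDMatch MatchPieces(UMat templateDescriptors, UMat pieceDescriptors)
        {
            // train on the template descriptors, then query with the piece descriptors
            var matcher = new BFMatcher(DistanceType.L2);
            matcher.Add(templateDescriptors);
            return matcher.KnnMatch(pieceDescriptors, 2);
        }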
Example #8
        private void FindMatch(Mat observedImage)
        {
            int k = 2;

            mask           = new Mat();
            homography     = null;
            matches        = new VectorOfVectorOfDMatch();
            uObservedImage = observedImage.GetUMat(AccessType.ReadWrite);

            // extract features from the observed image
            ORBCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
            matcher        = new BFMatcher(DistanceType.Hamming); // ORB yields binary descriptors, so Hamming distance is the appropriate metric
            matcher.Add(objDescriptors);
            if (objDescriptors.Size.Height > 3 && observedDescriptors.Size.Height > 3)
            {
                matcher.KnnMatch(observedDescriptors, matches, k, null);
                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));
                Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

                int nonZeroCount = CvInvoke.CountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    //nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(objKeyPoints, observedKeyPoints,
                    //matches, mask, 1, 2);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(objKeyPoints,
                                                                                              observedKeyPoints, matches, mask, 3);
                    }
                }
            }
        }
Example #9
        public static void FindMatch(string modelFileName, string observedFileName, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();
            {
                using (UMat uModelImage = CvInvoke.Imread(modelFileName, ImreadModes.Color).GetUMat(AccessType.Read))
                    using (UMat uObservedImage = CvInvoke.Imread(observedFileName, ImreadModes.Color).GetUMat(AccessType.Read))
                    {
                        SIFT sift             = new SIFT();
                        UMat modelDescriptors = new UMat();
                        sift.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                        UMat observedDescriptors = new UMat();
                        sift.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                        BFMatcher matcher = new BFMatcher(DistanceType.L2);
                        matcher.Add(modelDescriptors);

                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                        mask.SetTo(new MCvScalar(255));
                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                    }
            }
        }
        public static void GetMatches(VectorOfKeyPoint imageKeypoints, IInputArray imageDescriptors, VectorOfKeyPoint patternKeypoints, IInputArray patternDescriptors, out VectorOfVectorOfDMatch matches, out Mat homography)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;

            homography = null;

            matches = new VectorOfVectorOfDMatch();

            var matcher = new BFMatcher(DistanceType.L2);

            matcher.Add(patternDescriptors);
            matcher.KnnMatch(imageDescriptors, matches, k, null);

            var mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

            int nonZeroCount = CvInvoke.CountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(patternKeypoints, imageKeypoints, matches, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(patternKeypoints, imageKeypoints, matches, mask, 2);
                }
            }
        }
Example #11
        private static DMatch[][] GetMatches(Mat query, Mat train)
        {
            BFMatcher matcher = new BFMatcher();
            var       matches = matcher.KnnMatch(query, train, k: 2);

            return(matches);
        }
Example #12
        private void button3_Click(object sender, EventArgs e)
        {
            GFTTDetector detector = new GFTTDetector(40, 0.01, 5, 3, true);

            var baseImgGray    = baseImg.Convert <Gray, byte>();
            var twistedImgGray = twistedImg.Convert <Gray, byte>();

            //key-point descriptor generator
            Brisk descriptor = new Brisk();

            //since the inverse transformation has to be computed here,
            //the twisted image serves as the base
            VectorOfKeyPoint GFP1     = new VectorOfKeyPoint();
            UMat             baseDesc = new UMat();
            UMat             bimg     = twistedImgGray.Mat.GetUMat(AccessType.Read);

            VectorOfKeyPoint GFP2        = new VectorOfKeyPoint();
            UMat             twistedDesc = new UMat();
            UMat             timg        = baseImgGray.Mat.GetUMat(AccessType.Read);

            //detect the raw key points of the images
            detector.DetectRaw(bimg, GFP1);

            //compute descriptors for the detected key points
            descriptor.Compute(bimg, GFP1, baseDesc);
            detector.DetectRaw(timg, GFP2);
            descriptor.Compute(timg, GFP2, twistedDesc);

            //matcher class for comparing sets of key-point descriptors
            BFMatcher matcher = new BFMatcher(DistanceType.L2);

            //container for the key-point matches
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            //add the descriptors of the base points
            matcher.Add(baseDesc);
            //match against the descriptors of the twisted image
            matcher.KnnMatch(twistedDesc, matches, 2, null);
            //3rd parameter: the number of nearest neighbours to search among
            //4th parameter: the mask, not needed in this case

            //mask marking the values to discard (outliers and non-unique matches)
            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            //keep only the unique matches
            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

            Mat homography;

            //compute the homography matrix
            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(GFP1, GFP2, matches, mask, 2);

            var destImage = new Image <Bgr, byte>(baseImg.Size);

            CvInvoke.WarpPerspective(twistedImg, destImage, homography, destImage.Size);
            twistedImg      = destImage;
            imageBox2.Image = destImage.Resize(640, 480, Inter.Linear);
        }
        void OnFast()
        {
            Mat image01 = Cv2.ImRead(Application.streamingAssetsPath + "/bryce_01.jpg");
            Mat image02 = Cv2.ImRead(Application.streamingAssetsPath + "/bryce_02.jpg");

            Mat image1 = new Mat(), image2 = new Mat();

            // ImRead loads images as BGR, so convert from BGR (not RGB) to grayscale
            Cv2.CvtColor(image01, image1, ColorConversionCodes.BGR2GRAY);
            Cv2.CvtColor(image02, image2, ColorConversionCodes.BGR2GRAY);
            KeyPoint[] keyPoint1 = Cv2.FAST(image1, 50, true);
            KeyPoint[] keyPoint2 = Cv2.FAST(image2, 50, true);
            using (Mat descriptor1 = new Mat())
                using (Mat descriptor2 = new Mat())
                    using (var orb = ORB.Create(50))
                        using (var matcher = new BFMatcher())
                        {
                            orb.Compute(image1, ref keyPoint1, descriptor1);
                            orb.Compute(image2, ref keyPoint2, descriptor2);
                            Debug.Log(string.Format("keyPoints has {0},{1} items.", keyPoint1.Length, keyPoint2.Length));
                            Debug.Log(string.Format("descriptor has {0},{1} items.", descriptor1.Rows, descriptor2.Rows));

                            List <DMatch> goodMatchePoints = new List <DMatch>();
                            var           dm = matcher.KnnMatch(descriptor1, descriptor2, 2);

                            #region matched 175
                            for (int i = 0; i < dm.Length; i++)
                            {
                                if (dm[i][0].Distance < 0.6 * dm[i][1].Distance)
                                {
                                    goodMatchePoints.Add(dm[i][0]);
                                }
                            }
                            #endregion

                            // note: this stricter filter re-adds matches already collected above;
                            // in practice only one of the two ratio tests would be used
                            #region matched 90
                            float minRatio = 1.0f / 1.5f;
                            for (int i = 0; i < dm.Length; i++)
                            {
                                DMatch bestMatch     = dm[i][0];
                                DMatch betterMatch   = dm[i][1];
                                float  distanceRatio = bestMatch.Distance / betterMatch.Distance;
                                if (distanceRatio < minRatio)
                                {
                                    goodMatchePoints.Add(bestMatch);
                                }
                            }
                            #endregion

                            var dstMat = new Mat();
                            Debug.Log(string.Format("matchePoints has {0} items", goodMatchePoints.Count));
                            Cv2.DrawMatches(image01, keyPoint1, image02, keyPoint2, goodMatchePoints, dstMat);
                            t2d = Utils.MatToTexture2D(dstMat);
                        }

            Sprite dst_sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

            SrcSprite.sprite = dst_sp;
        }
        //-------------------------------------------------------------------------------------------------------

        /// <summary>
        /// This is the function
        /// </summary>
        /// <param name="modelImage"> Reference image as a Mat </param>
        /// <param name="observedImage"> The image from the camera, after preprocessing, as a Mat </param>
        /// <param name="matchTime"> Returns the time the function took to analyse the observed image. </param>
        /// <param name="modelKeyPoints"> Key points that KAZE places on the reference image </param>
        /// <param name="observedKeyPoints"> Key points that KAZE places on the observed image </param>
        /// <param name="matches">  </param>
        /// <param name="mask"></param>
        /// <param name="homography"></param>
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            int    k = 2;
            double uniquenessThreshold = 0.80;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    KAZE featureDetector = new KAZE();

                    //extract features from the object image
                    Mat modelDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                    watch = Stopwatch.StartNew();

                    // extract features from the observed image
                    Mat observedDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                    // Brute force matching is slower but more accurate
                    using (BFMatcher matcher = new BFMatcher(DistanceType.L1, false))
                    {
                        matcher.Add(modelDescriptors);

                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1); // Cv8U is the only depth that works here?
                        mask.SetTo(new MCvScalar(255));
                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                        int nonZeroCount = CvInvoke.CountNonZero(mask);
                        if (nonZeroCount >= 4)
                        {
                            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                       matches, mask, 1.5, 20);

                            if (nonZeroCount >= 4)
                            {
                                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                      observedKeyPoints, matches, mask, 2);
                            }
                        }
                    }

                    watch.Stop();
                }

            matchTime = watch.ElapsedMilliseconds;
        }
Example #15
        public static int SiftComparison(string img1, string img2)
        {
            var sift = new Emgu.CV.XFeatures2D.SIFT();

            var modelKeyPoints   = new VectorOfKeyPoint();
            Mat modelDescriptors = new Mat();

            var observedKeyPoints   = new VectorOfKeyPoint();
            Mat observedDescriptors = new Mat();
            Mat mask = new Mat();

            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
            int    k = 2;
            double uniquenessThreshold = 0.80;

            using (Mat modelImage = CvInvoke.Imread(img1, ImreadModes.Grayscale))
                using (Mat observedImage = CvInvoke.Imread(img2, ImreadModes.Grayscale))
                {
                    sift.DetectAndCompute(modelImage, null, modelKeyPoints, modelDescriptors, false);
                    sift.DetectAndCompute(observedImage, null, observedKeyPoints, observedDescriptors, false);
                    BFMatcher matcher = new BFMatcher(DistanceType.L1);

                    matcher.Add(modelDescriptors);
                    //matcher.Add(observedDescriptors);

                    matcher.KnnMatch(observedDescriptors, matches, k, null);
                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                    mask.SetTo(new MCvScalar(255));
                    try
                    {
                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                    }
                    catch (Exception ex)
                    {
                        Log(ex.Message);
                        Log("Error with SIFT algorithm, unable to compare images..");
                        return(0);
                    }
                }

            int score = 0;

            for (int i = 0; i < matches.Size; i++)
            {
                if (mask.GetData(i)[0] == 0)
                {
                    continue;
                }
                score += matches[i].Size;
            }

            return(score);
        }
        public static Mat Draw(Mat modelImage, Mat observedImage)
        {
            var sift = new SIFT();

            var modelKeyPoints    = new VectorOfKeyPoint();
            var observedKeyPoints = new VectorOfKeyPoint();

            UMat modelDescriptors    = new UMat();
            UMat observedDescriptors = new UMat();

            sift.DetectAndCompute(modelImage, null, modelKeyPoints, modelDescriptors, false);
            sift.DetectAndCompute(observedImage, null, observedKeyPoints, observedDescriptors, false);

            BFMatcher matcher = new BFMatcher(DistanceType.L2);

            matcher.Add(modelDescriptors);

            var matches = new VectorOfVectorOfDMatch();

            matcher.KnnMatch(observedDescriptors, matches, 2, null);

            var mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
            Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);

            var homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 10);

            var result = new Mat();

            Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints, matches, result,
                                          new MCvScalar(255, 255, 255),
                                          new MCvScalar(0, 0, 0),
                                          mask,
                                          Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

            Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);

            PointF[] pts =
            {
                new PointF(rect.Left,  rect.Bottom),
                new PointF(rect.Right, rect.Bottom),
                new PointF(rect.Right, rect.Top),
                new PointF(rect.Left,  rect.Top)
            };
            pts = CvInvoke.PerspectiveTransform(pts, homography);

            Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
            using (VectorOfPoint vp = new VectorOfPoint(points))
            {
                CvInvoke.Polylines(result, vp, true, new MCvScalar(0, 255, 0, 55), 2);
            }

            return(result);
        }
        static public void Match(
            MatOfFloat descriptors1,
            MatOfFloat descriptors2,
            int knnLevel,
            out DMatch[][] matches)
        {
            var bfMatcher = new BFMatcher(NormTypes.L2SQR, false);

            matches = bfMatcher.KnnMatch(descriptors1, descriptors2, knnLevel);
        }
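A sketch of consuming Match with a ratio test (requires System.Linq; the wrapper name and the 0.75 threshold are assumptions, not part of the original):

        static public DMatch[] MatchAndFilter(MatOfFloat descriptors1, MatOfFloat descriptors2)
        {
            DMatch[][] knn;
            Match(descriptors1, descriptors2, 2, out knn);

            // keep only matches whose best distance clearly beats the second best
            return knn.Where(m => m.Length == 2 && m[0].Distance < 0.75f * m[1].Distance)
                      .Select(m => m[0])
                      .ToArray();
        }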
        public static List <ImageSearchResult> SearchImageForObjects(WorldObject modelObject, string imageToSearch)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;

            int nonZeroThreshold = 10;

            ObjectFeatures targetImageFeatures = DetectFeatures_Brisk(imageToSearch);

            Mat mask;

            List <ImageSearchResult> searchResults = new List <ImageSearchResult>();

            foreach (ObjectView view in modelObject.Views)
            {
                if (view == null)
                {
                    continue;
                }

                VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

                BFMatcher matcher = new BFMatcher(DistanceType.L2);
                matcher.Add(view.Features.Descriptors);

                matcher.KnnMatch(targetImageFeatures.Descriptors, matches, k, null);

                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

                mask.SetTo(new MCvScalar(255));

                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                int nonZeroCount = CvInvoke.CountNonZero(mask);

                if (nonZeroCount >= nonZeroThreshold)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(view.Features.KeyPoints,
                                                                               targetImageFeatures.KeyPoints, matches, mask, 1.5, 20);

                    if (nonZeroCount >= nonZeroThreshold)
                    {
                        Mat homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(view.Features.KeyPoints,
                                                                                                  targetImageFeatures.KeyPoints, matches, mask, 2);

                        searchResults.Add(new ImageSearchResult(view, homography, matches, targetImageFeatures, mask));
                    }
                }
            }

            return(searchResults);
        }
Example #19
        public Image <Bgr, byte> ReturnCompared(out Image <Bgr, byte> def, out Image <Bgr, byte> twistdef)
        {
            var image      = sourceImage.Copy();
            var twistedImg = additionalImage.Copy();
            //key-point descriptor generator
            Brisk        descriptor = new Brisk();
            GFTTDetector detector   = new GFTTDetector(40, 0.01, 5, 3, true);
            //since the inverse transformation has to be computed here,
            //the twisted image serves as the base
            VectorOfKeyPoint GFP1           = new VectorOfKeyPoint();
            UMat             baseDesc       = new UMat();
            var              twistedImgGray = twistedImg.Convert <Gray, byte>();
            var              baseImgGray    = image.Convert <Gray, byte>();
            UMat             bimg           = twistedImgGray.Mat.GetUMat(AccessType.Read);
            VectorOfKeyPoint GFP2           = new VectorOfKeyPoint();
            UMat             twistedDesc    = new UMat();
            UMat             timg           = baseImgGray.Mat.GetUMat(AccessType.Read);

            //detect the raw key points of the images
            detector.DetectRaw(bimg, GFP1);
            //compute descriptors for the detected key points
            descriptor.Compute(bimg, GFP1, baseDesc);
            detector.DetectRaw(timg, GFP2);
            descriptor.Compute(timg, GFP2, twistedDesc);


            BFMatcher matcher = new BFMatcher(DistanceType.L2);

            //container for the key-point matches
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            //add the descriptors of the base points
            matcher.Add(baseDesc);
            //match against the descriptors of the twisted image
            matcher.KnnMatch(twistedDesc, matches, 2, null);


            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            Mat resM = new Mat(image.Height, image.Width * 2, DepthType.Cv8U, 3);
            var res  = resM.ToImage <Bgr, byte>();

            //keep only the unique matches
            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
            int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(GFP1, GFP2, matches, mask, 1.5, 20);

            Features2DToolbox.DrawMatches(twistedImg, GFP1, image, GFP2, matches, res, new MCvScalar(255, 0, 0), new MCvScalar(255, 0, 0), mask);
            def      = image;
            twistdef = twistedImg;
            return(res);
        }
Example #20
        /// <summary>
        ///
        ///
        /// TODO: thresholds must be set
        /// </summary>
        /// <param name="model"></param>
        /// <param name="imageToSearch"></param>
        /// <returns></returns>
        public static bool SearchImageForObjects(List <ObjectFeatures> model, Bitmap image)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;

            ObjectFeatures targetImageFeatures = DetectFeatures(image);

            Mat mask;

            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            foreach (ObjectFeatures of in model)
            {
                // use a fresh matcher per view so descriptors from previous
                // iterations do not accumulate in the training set
                BFMatcher matcher = new BFMatcher(DistanceType.L2);
                matcher.Add(of.Descriptors);

                matcher.KnnMatch(targetImageFeatures.Descriptors, matches, k, null);

                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

                mask.SetTo(new MCvScalar(255));

                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                int nonZeroCount = CvInvoke.CountNonZero(mask);

                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(of.KeyPoints,
                                                                               targetImageFeatures.KeyPoints, matches, mask, 1.5, 20);

                    if (nonZeroCount >= 4)
                    {
                        return(true);

                        /*
                         * Mat homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(view.Features.KeyPoints,
                         *  targetImageFeatures.KeyPoints, matches, mask, 2);
                         *
                         * searchResults.Add(new ImageSearchResult(view, homography, matches));
                         */
                    }
                }
            }

            return(false);
        }
Example #21
        static void Main(string[] args)
        {
            using var srcA  = new Mat("./input/A.jpg", ImreadModes.Color);
            using var srcB  = new Mat("./input/B.jpg", ImreadModes.Color);
            using var maskA = new Mat("./input/A_mask.jpg", ImreadModes.Grayscale);
            using var maskB = new Mat("./input/B_mask.jpg", ImreadModes.Grayscale);

            var sift = OpenCvSharp.Features2D.SIFT.Create();

            KeyPoint[] kp1, kp2;
            Mat        des1 = new Mat(), des2 = new Mat();

            sift.DetectAndCompute(srcA, maskA, out kp1, des1);
            sift.DetectAndCompute(srcB, maskB, out kp2, des2);

            var bf      = new BFMatcher();
            var matches = bf.KnnMatch(des1, des2, 2);

            // ratio test (materialised once, since 'good' is enumerated several times below)
            double ratio = 0.8;
            var    good  = matches
                           .Where(match => match.Length == 2 && match[0].Distance < ratio * match[1].Distance)
                           .Select(match => new DMatch[1] { match[0] })
                           .ToList();

            Mat result = new Mat();

            Cv2.DrawMatchesKnn(srcA, kp1, srcB, kp2, good, result);

            Cv2.ImWrite("result.png", result);

            var ptsA = good.Select(m => kp1[m[0].QueryIdx].Pt).ToList();
            var ptsB = good.Select(m => kp2[m[0].TrainIdx].Pt).ToList();

            var H = Cv2.FindHomography(InputArray.Create(ptsA), InputArray.Create(ptsB), HomographyMethods.Ransac, 5.0);

            Mat warped = new Mat();

            Cv2.WarpPerspective(srcA, warped, H, srcA.Size());
            Cv2.ImWrite("warped_0.png", warped);
            Cv2.ImWrite("warped_1.png", srcB);
            Console.WriteLine("Example done!");
        }
Example #22
        public static void FindMatchWM(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography, Feature2D computer, Feature2D detector)
        {
            Stopwatch watch;

            modelKeyPoints    = new VectorOfKeyPoint(); // key points on the model
            observedKeyPoints = new VectorOfKeyPoint(); // key points on the larger (observed) image
            homography        = null;
            int k = 2;


            using (Mat uModelImage = modelImage.Clone())
                using (Mat uObservedImage = observedImage.Clone())
                {
                    //get the descriptors from the first image
                    Mat modelDescriptors = new Mat();
                    DetectAndCompute(uModelImage, out modelKeyPoints, out modelDescriptors, detector, computer);

                    watch = Stopwatch.StartNew();

                    // ... and from the second image
                    Mat observedDescriptors = new Mat();
                    DetectAndCompute(uObservedImage, out observedKeyPoints, out observedDescriptors, detector, computer);


                    BFMatcher matcher = new BFMatcher(DistanceType.L2); // compares the descriptors of the two images
                    matcher.Add(modelDescriptors);

                    matcher.KnnMatch(observedDescriptors, matches, k, null); // perform the matching
                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                    mask.SetTo(new MCvScalar(255));
                    Features2DToolbox.VoteForUniqueness(matches, 0.8, mask); // build the mask (see below)

                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                    if (nonZeroCount >= 4)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                   matches, mask, 1.5, 20);
                        if (nonZeroCount >= 4)
                        {
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, // estimate the region where the model should appear
                                                                                                  observedKeyPoints, matches, mask, 2);
                        }
                    }

                    watch.Stop();
                }
            matchTime = watch.ElapsedMilliseconds;
        }
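The DetectAndCompute helper called above is not shown in the original; a plausible sketch, assuming the detector supplies the key points and the computer describes them:

        public static void DetectAndCompute(Mat image, out VectorOfKeyPoint keyPoints, out Mat descriptors, Feature2D detector, Feature2D computer)
        {
            keyPoints   = new VectorOfKeyPoint();
            descriptors = new Mat();

            detector.DetectRaw(image, keyPoints);            // detection only
            computer.Compute(image, keyPoints, descriptors); // description, possibly by a different algorithm
        }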
        public static void FindMatch(string pageFile, string templateFile)
        {
            Image <Rgb, byte> page     = getPreprocessedImage(pageFile);
            Image <Rgb, byte> template = getPreprocessedImage(templateFile);

            var detector = new ORBDetector();
            VectorOfKeyPoint templateKeyPoints = new VectorOfKeyPoint();
            Mat templateDescriptors            = new Mat();

            detector.DetectAndCompute(template, null, templateKeyPoints, templateDescriptors, false);

            VectorOfKeyPoint pageKeyPoints = new VectorOfKeyPoint();
            Mat pageDescriptors            = new Mat();

            detector.DetectAndCompute(page, null, pageKeyPoints, pageDescriptors, false);
            using (var matcher = new BFMatcher(DistanceType.L1))
            {
                matcher.Add(templateDescriptors);
                VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

                //VectorOfDMatch matches2 = new VectorOfDMatch();
                //matcher.Match(pageDescriptors, matches2);


                matcher.KnnMatch(pageDescriptors, matches, 2, null);

                Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));
                Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
                Mat homography   = new Mat();
                int nonZeroCount = CvInvoke.CountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(templateKeyPoints, pageKeyPoints, matches, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(templateKeyPoints, pageKeyPoints, matches, mask, 2);
                    }
                }

                Mat result = new Mat();
                Features2DToolbox.DrawMatches(template, templateKeyPoints, page, pageKeyPoints, matches, result, new MCvScalar(0, 255, 0), new MCvScalar(255, 0, 0), mask, Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

                //Features2DToolbox.DrawMatches(template, templateKeyPoints, page, pageKeyPoints, matches2, result, new MCvScalar(0, 255, 0), new MCvScalar(255, 0, 0), null, Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

                MainForm.This.PageBox.Image = result.ToBitmap();
            }
        }
        public MatcherResult knnMatch(KeyPoints queryDescriptor, BFMatcher matcher, string leafCategory, int distanceCutoff = int.MaxValue, PreProcessedImage trainingData = null)
        {
            MatcherResult result = new MatcherResult();

            result.Category = leafCategory;

            using (VectorOfVectorOfDMatch vectorMatchesForSift = new VectorOfVectorOfDMatch())
            {
                matcher.KnnMatch(queryDescriptor.Descriptor, vectorMatchesForSift, 2, null);

                int numberOfMatches = 0;

                Dictionary <int, int> counts      = new Dictionary <int, int>();
                List <MDMatch>        goodMatches = new List <MDMatch>(vectorMatchesForSift.Size);
                for (int i = 0; i < vectorMatchesForSift.Size; i++)
                {
                    // Ratio test: keep a match if it is the only neighbour, or its distance is below 0.75 of the second closest, and it lies within the distance cutoff
                    if (
                        (vectorMatchesForSift[i].Size == 1 ||
                         vectorMatchesForSift[i][0].Distance < 0.75 * vectorMatchesForSift[i][1].Distance) &&
                        vectorMatchesForSift[i][0].Distance < distanceCutoff)
                    {
                        goodMatches.Add(vectorMatchesForSift[i][0]);
                        numberOfMatches++;
                    }
                }

                //goodMatches = clusterBasedOnPoseEstimation(goodMatches, queryDescriptor, trainingData);

                int maxResults = int.MaxValue;
                result.MatchingPoints = goodMatches.Count;
                if (!goodMatches.Any())
                {
                    result.MatchDistance        = float.MaxValue;
                    result.AverageDistance      = 0;
                    result.MatchDistanceWeight  = 0;
                    result.MatchingPointsWeight = 0;
                }
                else
                {
                    result.MatchDistance        = goodMatches.OrderBy(item => item.Distance).Take(maxResults).Sum(item => item.Distance);
                    result.AverageDistance      = result.MatchDistance / result.MatchingPoints;
                    result.MatchDistanceWeight  = 1 / Math.Pow(result.AverageDistance, 2);
                    result.MatchingPointsWeight = result.MatchingPoints * result.MatchingPoints;
                }
            }
            return(result);
        }
Example #25
        private void CalculateSimilarity(Mat query_desc, Mat train_desc, out List <float> distances)
        {
            distances = new List <float>();
            if (train_desc.Rows >= 2)
            {
                var matches = m_matcher.KnnMatch(query_desc, train_desc, 2);

                for (int i = 0; i < matches.Length; ++i)
                {
                    if (matches[i][0].Distance < 0.75 * matches[i][1].Distance)
                    {
                        distances.Add(matches[i][0].Distance);
                    }
                }
            }
        }
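One way to collapse the filtered distances into a single similarity score, as a sketch; the aggregation formula is an assumption (requires System.Linq):

        private double SimilarityScore(Mat query_desc, Mat train_desc)
        {
            List <float> distances;
            CalculateSimilarity(query_desc, train_desc, out distances);

            // no surviving matches means no measurable similarity;
            // otherwise a smaller mean distance yields a larger score
            return distances.Count == 0 ? 0.0 : 1.0 / (1.0 + distances.Average());
        }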
        private void detectImgFeatures()
        {
            ORBDetector detector = new ORBDetector(100, 1.2f, 8);

            MKeyPoint[]      img0_keyPoints        = detector.Detect(imgs[0]);
            VectorOfKeyPoint img0_vector_keypoints = new VectorOfKeyPoint(img0_keyPoints);
            Matrix <Byte>    img0_descriptors      = new Matrix <Byte>(img0_vector_keypoints.Size, detector.DescriptorSize);

            MKeyPoint[]      img1_keyPoints        = detector.Detect(imgs[1]);
            VectorOfKeyPoint img1_vector_keypoints = new VectorOfKeyPoint(img1_keyPoints);
            Matrix <Byte>    img1_descriptors      = new Matrix <Byte>(img1_vector_keypoints.Size, detector.DescriptorSize);

            detector.Compute(imgs[0], img0_vector_keypoints, img0_descriptors);
            detector.Compute(imgs[1], img1_vector_keypoints, img1_descriptors);

            // display keypoints in red
            Image <Bgr, Byte> newImg = new Image <Bgr, Byte>(imgs[0].Width, imgs[0].Height);

            Features2DToolbox.DrawKeypoints(imgs[0], img0_vector_keypoints, newImg, new Bgr(255, 0, 255),
                                            Features2DToolbox.KeypointDrawType.DrawRichKeypoints);
            imgbox_original.Image = newImg;

            Image <Bgr, Byte> newImg2 = new Image <Bgr, Byte>(imgs[1].Width, imgs[1].Height);

            Features2DToolbox.DrawKeypoints(imgs[1], img1_vector_keypoints, newImg2, new Bgr(255, 0, 255),
                                            Features2DToolbox.KeypointDrawType.DrawRichKeypoints);
            imgbox_second.Image = newImg2;

            // apply BFMatcher to find matches in two images
            BFMatcher bfMatcher            = new BFMatcher(DistanceType.Hamming, true);
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            bfMatcher.Add(img0_descriptors);
            bfMatcher.KnnMatch(img1_descriptors, matches, 1, null);
            numberFoundPairs = matches.Size; // count the pairs after matching, not before

            // display final image as two merged images with keypoints
            Mat matched_image = new Mat();

            Features2DToolbox.DrawMatches(imgs[0], img0_vector_keypoints, imgs[1], img1_vector_keypoints,
                                          matches, matched_image, new MCvScalar(255, 0, 255), new MCvScalar(0, 255, 0));
            img_final = matched_image.ToImage <Bgr, Byte>();
        }
        private static Mat FindMatchWithoutCuda(Mat modelImage, Mat observedImage, VectorOfKeyPoint modelKeyPoints, VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, Mat homography, int k, double uniquenessThreshold, double hessianThresh)
        {
            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    SURF surfCPU = new SURF(hessianThresh, upright: true);

                    UMat modelDescriptors;
                    if (!FindDescriptors(surfCPU, modelKeyPoints, uModelImage, out modelDescriptors))
                    {
                        Logger.Log(LogType.Error, "Feature Descriptor for Model image is empty. Is the image too small?");
                        return(mask = null);
                    }

                    UMat observedDescriptors;
                    if (!FindDescriptors(surfCPU, observedKeyPoints, uObservedImage, out observedDescriptors))
                    {
                        Logger.Log(LogType.Error, "Feature Descriptor for Observed image is empty. Is the image too small?");
                        return(mask = null);
                    }

                    BFMatcher matcher = new BFMatcher(DistanceType.L2);
                    matcher.Add(modelDescriptors);
                    matcher.KnnMatch(observedDescriptors, matches, k, null);

                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                    mask.SetTo(new MCvScalar(255));
                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                    if (nonZeroCount >= 4)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                        if (nonZeroCount >= 4)
                        {
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                        }
                    }
                }

            return(homography);
        }
Example #28
        public override FeatureMatchResult Match(Mat sourceMat, Mat searchMat, FeatureMatchArgument argument)
        {
            //create the SIFT detector
            using var sift = SIFT.Create();

            //create the descriptor mats in preparation for the feature matching below
            using var sourceDescriptors = new Mat();
            using var searchDescriptors = new Mat();

            //detect the key points and compute their descriptors
            sift.DetectAndCompute(sourceMat, null, out var sourceKeyPoints, sourceDescriptors);
            sift.DetectAndCompute(searchMat, null, out var searchKeyPoints, searchDescriptors);

            //create the brute-force descriptor matcher
            using var bfMatcher = new BFMatcher();
            //add the source image descriptors as the training set
            bfMatcher.Add(new List <Mat>()
            {
                sourceDescriptors
            });
            bfMatcher.Train();
            //run the k-NN match and obtain the candidate pairs
            var matches = bfMatcher.KnnMatch(sourceDescriptors, searchDescriptors, (int)argument.MatchPoints);

            argument.OutputDebugMessage($"[FeatureMatch] [SIFT] The number of matching points is ({matches.Length}).");

            //even with SIFT, the unfiltered matches are still poor, so pick out the good matches next
            var goodMatches = SelectGoodMatches(matches, argument, sourceKeyPoints, searchKeyPoints);

            //build the match result
            var matchResult = GetMatchResult(goodMatches, sourceKeyPoints, searchKeyPoints);

            argument.OutputDebugMessage($"[FeatureMatch] [SIFT] The result of the match is ({matchResult.Success}) ({matchResult.MatchItems.Count}).");
            if (matchResult.Success)
            {
                var bestMatch = matchResult.MatchItems[0];
                argument.OutputDebugMessage($"[FeatureMatch] [SIFT] The center point of the best match is ({bestMatch.Point}), and the rect is {bestMatch.Rectangle}.");
            }
            argument.PreviewDebugFeatureMatchResult(matchResult, sourceMat, searchMat, sourceKeyPoints, searchKeyPoints, goodMatches);
            return(matchResult);
        }
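SelectGoodMatches is not shown above; a plausible ratio-test sketch (the signature and the 0.75 threshold are assumptions inferred from how the result is used; requires System.Linq):

        private static DMatch[] SelectGoodMatches(DMatch[][] matches, FeatureMatchArgument argument, KeyPoint[] sourceKeyPoints, KeyPoint[] searchKeyPoints)
        {
            // standard ratio test over each k-NN bucket
            return matches.Where(m => m.Length >= 2 && m[0].Distance < 0.75f * m[1].Distance)
                          .Select(m => m[0])
                          .ToArray();
        }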
Example #29
        public int  FindMatch1(Mat modelImage, Mat observedImage, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            int    k                   = 2;
            int    nonZeroCount        = 0;
            double uniquenessThreshold = 0.80;

            homography        = null;
            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();
            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))

                {
                    var featureDetector  = new ORBDetector(9000);
                    Mat modelDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
                    Mat observedDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                    using (var matcher = new BFMatcher(DistanceType.Hamming, false))
                    {
                        matcher.Add(modelDescriptors);

                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                        mask.SetTo(new MCvScalar(255));
                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                        nonZeroCount = CvInvoke.CountNonZero(mask);
                        if (nonZeroCount >= 4)
                        {
                            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                       matches, mask, 1.5, 20);
                            //if (nonZeroCount >= 4)
                            //    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                            //        observedKeyPoints, matches, mask, 2);
                        }
                    }
                }
            return(nonZeroCount);
        }
Example #30
        public Image <Bgr, byte> PointHomo(Image <Bgr, byte> image, Image <Bgr, byte> image2)
        {
            Image <Gray, byte> baseImgGray    = image.Convert <Gray, byte>();
            Image <Gray, byte> twistedImgGray = image2.Convert <Gray, byte>();
            Brisk            descriptor       = new Brisk();
            GFTTDetector     detector         = new GFTTDetector(40, 0.01, 5, 3, true);
            VectorOfKeyPoint GFP1             = new VectorOfKeyPoint();
            UMat             baseDesc         = new UMat();
            // note: despite the names, bimg holds the twisted image and timg the base image
            UMat             bimg             = twistedImgGray.Mat.GetUMat(AccessType.Read);
            VectorOfKeyPoint GFP2             = new VectorOfKeyPoint();
            UMat             twistedDesc      = new UMat();
            UMat             timg             = baseImgGray.Mat.GetUMat(AccessType.Read);

            detector.DetectRaw(bimg, GFP1);
            descriptor.Compute(bimg, GFP1, baseDesc);
            detector.DetectRaw(timg, GFP2);
            descriptor.Compute(timg, GFP2, twistedDesc);
            BFMatcher matcher = new BFMatcher(DistanceType.L2);
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            matcher.Add(baseDesc);
            matcher.KnnMatch(twistedDesc, matches, 2, null);
            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
            int nonZeroCount      = Features2DToolbox.VoteForSizeAndOrientation(GFP1, GFP2, matches, mask, 1.5, 20);
            Image <Bgr, byte> res = image.CopyBlank();

            Features2DToolbox.DrawMatches(image2, GFP1, image, GFP2, matches, res, new MCvScalar(255, 0, 0), new MCvScalar(255, 0, 0), mask);

            Mat homography;

            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(GFP1, GFP2, matches, mask, 2);
            var destImage = new Image <Bgr, byte>(image2.Size);

            CvInvoke.WarpPerspective(image2, destImage, homography, destImage.Size);

            return(destImage);
        }