コード例 #1
0
        // Creates the FLANN-based matcher (linear index = exhaustive search) and stores it in the 'matcher' field.
        private void InitMatcher()
        {
            var indexParams  = new LinearIndexParams();
            var searchParams = new SearchParams();
            matcher = new FlannBasedMatcher(indexParams, searchParams);
        }
コード例 #2
0
        /// <summary>
        /// Builds the descriptor matcher used by this class: a FLANN-based matcher
        /// configured with a linear (exhaustive) index. Virtual so subclasses can
        /// substitute a different matcher.
        /// </summary>
        /// <returns>A freshly constructed <see cref="FlannBasedMatcher"/>.</returns>
        /// <remarks>The historical "Descritor" spelling is kept because overrides/callers depend on it.</remarks>
        protected virtual DescriptorMatcher GetDescritorMatcher()
        {
            LinearIndexParams indexParams = new LinearIndexParams();
            SearchParams searchParams = new SearchParams();
            return new FlannBasedMatcher(indexParams, searchParams);
        }
コード例 #3
0
 // Constructs a BOWImgDescriptorExtractor from a KAZE extractor and a FLANN matcher;
 // every native wrapper is disposed when the method exits.
 public void New5()
 {
     using var indexParams  = new LinearIndexParams();
     using var searchParams = new SearchParams();
     using var extractor    = KAZE.Create();
     using var matcher      = new FlannBasedMatcher(indexParams, searchParams);
     using var bowExtractor = new BOWImgDescriptorExtractor(extractor, matcher);
 }
コード例 #4
0
        // Constructs a BOWImgDescriptorExtractor from a SURF extractor and a FLANN matcher.
        public void New4()
        {
            // LinearIndexParams/SearchParams wrap native FLANN objects and are IDisposable;
            // the original leaked both by never disposing them.
            using var ip = new LinearIndexParams();
            using var sp = new SearchParams();

            using (var descriptorExtractor = SURF.Create(100))
                using (var descriptorMatcher = new FlannBasedMatcher(ip, sp))
                    using (new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher)) { }
        }
コード例 #5
0
        // Constructs a BOWImgDescriptorExtractor from a KAZE extractor and a FLANN matcher.
        public void New5()
        {
            // Every object below wraps native memory; the original created them without
            // disposing any of them and discarded the BOWImgDescriptorExtractor entirely.
            using var descriptorExtractor = KAZE.Create();
            using var ip = new LinearIndexParams();
            using var sp = new SearchParams();
            using var descriptorMatcher = new FlannBasedMatcher(ip, sp);
            using var bowExtractor = new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher);
        }
コード例 #6
0
ファイル: FeatureMatch.cs プロジェクト: benjcp/SIFT-Demo
        // Takes a model image and an observed image, extracts KAZE features from both,
        // matches them with a FLANN-based matcher (linear index = exhaustive search),
        // filters the matches by uniqueness and size/orientation voting, and estimates
        // a homography when at least 4 consistent matches survive.
        // Out params: matchTime = ms spent on observed-image extraction + matching;
        // homography is null when no acceptable transform was found; mask flags the
        // surviving matches. 'matches' and the key-point vectors are filled for the caller.
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            int    k = 2;                      // nearest neighbours per query descriptor
            double uniquenessThreshold = 0.80; // ratio threshold for VoteForUniqueness

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                using (KAZE featureDetector = new KAZE())    // wraps native memory; the original never disposed it
                using (Mat modelDescriptors = new Mat())     // local descriptor buffers, also previously leaked
                using (Mat observedDescriptors = new Mat())
                {
                    // extract features from the object (model) image
                    featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                    watch = Stopwatch.StartNew();

                    // extract features from the observed image (only this part onward is timed)
                    featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                    // Bruteforce, slower but more accurate
                    // You can use KDTree for faster matching with slight loss in accuracy
                    using (LinearIndexParams ip = new LinearIndexParams())
                        using (SearchParams sp = new SearchParams())
                            using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                            {
                                matcher.Add(modelDescriptors);

                                matcher.KnnMatch(observedDescriptors, matches, k, null);
                                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                mask.SetTo(new MCvScalar(255)); // start with every match enabled
                                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                int nonZeroCount = CvInvoke.CountNonZero(mask);
                                if (nonZeroCount >= 4) // homography estimation needs at least 4 point pairs
                                {
                                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                               matches, mask, 1.5, 20);
                                    if (nonZeroCount >= 4)
                                    {
                                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                              observedKeyPoints, matches, mask, 2);
                                    }
                                }
                            }
                    watch.Stop();
                }
            matchTime = watch.ElapsedMilliseconds;
        }
コード例 #7
0
ファイル: FeatureMatching.cs プロジェクト: LongHK27/NNets
        /// <summary>
        /// Matches observed-image descriptors against model descriptors with a FLANN-based
        /// matcher and, when enough consistent matches survive the uniqueness and
        /// size/orientation votes, projects the model's bounding rectangle into the
        /// observed image via the estimated homography.
        /// </summary>
        /// <param name="observedKeyPoints">Key points detected in the observed image.</param>
        /// <param name="modelKeyPoints">Key points detected in the model image.</param>
        /// <param name="observedDescriptors">Descriptors of the observed image.</param>
        /// <param name="modelDescriptors">Descriptors of the model image.</param>
        /// <param name="modelSize">Pixel size of the model image (defines the projected rectangle).</param>
        /// <returns>The projected model corners, or an empty vector when no homography was found.</returns>
        public static VectorOfPoint Matching(VectorOfKeyPoint observedKeyPoints, VectorOfKeyPoint modelKeyPoints, Mat observedDescriptors, Mat modelDescriptors, Size modelSize)
        {
            // NOTE(review): the original also constructed a KAZE detector here that was
            // never used (and never disposed); it has been removed.
            using (var ip = new LinearIndexParams())
                using (var sp = new SearchParams())
                    using (var matcher = new FlannBasedMatcher(ip, sp))
                        using (var matches = new VectorOfVectorOfDMatch())
                        {
                            // Starts null instead of 'new Mat()' so the placeholder Mat is not
                            // leaked when the homography is assigned below; an empty Mat and
                            // null both fail the Size.IsEmpty guard, so behavior is unchanged.
                            Mat homography = null;
                            matcher.Add(modelDescriptors);

                            matcher.KnnMatch(observedDescriptors, matches, 2, null);
                            var mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                            mask.SetTo(new MCvScalar(255)); // start with every match enabled
                            Features2DToolbox.VoteForUniqueness(matches, 0.80, mask);

                            int nonZeroCount = CvInvoke.CountNonZero(mask);
                            if (nonZeroCount >= 4) // homography estimation needs at least 4 point pairs
                            {
                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                                if (nonZeroCount >= 4)
                                {
                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                }
                            }

                            if (homography != null && !homography.Size.IsEmpty)
                            {
                                // Project the model's bounding rectangle through the homography.
                                Rectangle rect = new Rectangle(Point.Empty, modelSize);
                                PointF[]  pts  = new PointF[]
                                {
                                    new PointF(rect.Left, rect.Bottom),
                                    new PointF(rect.Right, rect.Bottom),
                                    new PointF(rect.Right, rect.Top),
                                    new PointF(rect.Left, rect.Top)
                                };

                                pts = CvInvoke.PerspectiveTransform(pts, homography);

                                Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);

                                return(new VectorOfPoint(points));
                            }
                            else
                            {
                                return(new VectorOfPoint());
                            }
                        }
        }
コード例 #8
0
        // Builds a BoW vocabulary from SIFT descriptors of "lenna.png", then computes a
        // BoW descriptor for the same image and prints the resulting cluster assignments.
        public void RunTest()
        {
            // Disposed via using declarations; the original leaked both native FLANN objects.
            // (Kept around so the FlannBasedMatcher line below can simply be uncommented.)
            using var ip = new LinearIndexParams();
            using var sp = new SearchParams();

            using (var descriptorExtractor = SIFT.Create(500))
                //using (var descriptorMatcher = new FlannBasedMatcher(ip, sp))
                using (var descriptorMatcher = new BFMatcher())
                    using (var img = Image("lenna.png"))
                    {
                        KeyPoint[] keypoints;
                        Mat        dictionary;
                        var        tc = new TermCriteria(CriteriaType.MaxIter, 100, 0.001d);
                        // Cluster the SIFT descriptors into a 200-word vocabulary.
                        using (var bowTrainer = new BOWKMeansTrainer(200, tc, 1, KMeansFlags.PpCenters))
                        {
                            var descriptors = new Mat();
                            descriptorExtractor.DetectAndCompute(img, null, out keypoints, descriptors);

                            Mat featuresUnclustered = new Mat();
                            featuresUnclustered.PushBack(descriptors);
                            // k-means requires CV_32F input.
                            featuresUnclustered.ConvertTo(featuresUnclustered, MatType.CV_32F);
                            dictionary = bowTrainer.Cluster(featuresUnclustered);
                        }

                        using (var bowDE = new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher))
                        {
                            bowDE.SetVocabulary(dictionary);

                            try
                            {
                                int[][] arr;
                                Mat     descriptors = new Mat();
                                descriptorExtractor.Compute(img, ref keypoints, descriptors);
                                descriptors.ConvertTo(descriptors, MatType.CV_32F);
                                bowDE.Compute(descriptors, ref keypoints, descriptors, out arr);
                                Console.WriteLine(arr.Length);
                                Console.WriteLine(arr[0].Length);
                            }
                            catch (OpenCVException ex)
                            {
                                // Surface native-side failure details before rethrowing.
                                Console.WriteLine(ex.FileName);
                                Console.WriteLine(ex.FuncName);
                                Console.WriteLine(ex.Line);
                                throw;
                            }
                        }
                    }
        }
コード例 #9
0
        // Detects features (via the 'detector' field) in the cached 'current' frame and in
        // the model image, KNN-matches them with a FLANN-based matcher, and estimates a
        // homography when at least 4 matches survive the uniqueness and size/orientation
        // votes. 'homography' stays null when no acceptable transform is found.
        private void find(Mat modelImage, out VectorOfKeyPoint modelKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            int    k = 2;                      // nearest neighbours per query descriptor
            double uniquenessThreshold = 0.70; // ratio threshold for VoteForUniqueness

            homography = null;

            modelKeyPoints = new VectorOfKeyPoint(); // out param: owned by the caller

            // All locals below wrap native memory; the original never disposed any of them.
            using (VectorOfKeyPoint currentKeyPoints = new VectorOfKeyPoint())
            using (Mat currentDescriptors = new Mat())
            using (Mat modelDescriptors = new Mat())
            using (LinearIndexParams ip = new LinearIndexParams())
            using (SearchParams sp = new SearchParams())
            using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
            {
                detector.DetectAndCompute(current, null, currentKeyPoints, currentDescriptors, false);
                detector.DetectAndCompute(modelImage, null, modelKeyPoints, modelDescriptors, false);

                matcher.Add(modelDescriptors);

                matcher.KnnMatch(currentDescriptors, matches, k, null);
                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255)); // start with every match enabled
                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                int nonZeroCount = CvInvoke.CountNonZero(mask);

                if (nonZeroCount >= 4) // homography estimation needs at least 4 point pairs
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, currentKeyPoints, matches, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, currentKeyPoints, matches, mask, 2);
                    }
                }
            }
        }
コード例 #10
0
        // Locates 'template' inside 'sceneImage' using KAZE features and a FLANN-based
        // matcher. Returns the template's corners projected into scene coordinates, or
        // null when no acceptable homography was found.
        // NOTE(review): the original wrapped the body in
        //   catch (Exception ex) { throw new Exception(ex.Message); }
        // which discarded both the exception type and the stack trace; exceptions now
        // propagate unchanged (still catchable as Exception by existing callers).
        private static VectorOfPoint ProcessImageFLANN(Image <Gray, byte> template, Image <Gray, byte> sceneImage)
        {
            VectorOfPoint finalPoints = null;
            Mat           homography  = null;

            int    k = 2;                      // nearest neighbours per query descriptor
            double uniquenessthreshold = 0.80; // ratio threshold for VoteForUniqueness

            // All of these wrap native memory; the original leaked every one of them.
            using (var templateKeyPoints = new VectorOfKeyPoint())
            using (var sceneKeyPoints = new VectorOfKeyPoint())
            using (var templateDescriptor = new Mat())
            using (var sceneDescriptor = new Mat())
            using (var matches = new VectorOfVectorOfDMatch())
            using (var featureDetector = new KAZE())
            //using (var ip = new KdTreeIndexParams())     // faster alternatives, slight accuracy loss
            //using (var ip = new AutotunedIndexParams())
            using (var ip = new LinearIndexParams())
            using (var sp = new SearchParams())
            using (var matcher = new FlannBasedMatcher(ip, sp))
            {
                // feature detection and description
                featureDetector.DetectAndCompute(template, null, templateKeyPoints, templateDescriptor, false);
                featureDetector.DetectAndCompute(sceneImage, null, sceneKeyPoints, sceneDescriptor, false);

                // matching
                matcher.Add(templateDescriptor);
                matcher.KnnMatch(sceneDescriptor, matches, k);

                using (var mask = new Mat(matches.Size, 1, Emgu.CV.CvEnum.DepthType.Cv8U, 1))
                {
                    mask.SetTo(new MCvScalar(255)); // start with every match enabled

                    Features2DToolbox.VoteForUniqueness(matches, uniquenessthreshold, mask);

                    int count = Features2DToolbox.VoteForSizeAndOrientation(templateKeyPoints, sceneKeyPoints, matches, mask, 1.5, 20);

                    if (count >= 4) // homography estimation needs at least 4 point pairs
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(templateKeyPoints,
                                                                                              sceneKeyPoints, matches, mask, 5);
                    }
                }

                if (homography != null)
                {
                    // Project the template's bounding rectangle through the homography.
                    System.Drawing.Rectangle rect = new System.Drawing.Rectangle(System.Drawing.Point.Empty, template.Size);
                    PointF[] pts = new PointF[]
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };

                    pts = CvInvoke.PerspectiveTransform(pts, homography);
                    System.Drawing.Point[] points = Array.ConvertAll <PointF, System.Drawing.Point>(pts, System.Drawing.Point.Round);
                    finalPoints = new VectorOfPoint(points);
                }
            }

            return(finalPoints);
        }
コード例 #11
0
        /// <summary>
        /// Matches a list of known signs with a list of candidates. For each candidate
        /// the best-scoring known sign is kept; matches with a positive score are added
        /// to <c>Matches</c> and returned.
        /// </summary>
        /// <param name="candidates"> candidate signs </param>
        /// <param name="knownSigns"> known signs </param>
        /// <returns> matched signs (empty when inputs are unusable or nothing matched) </returns>
        public List <TrafficSignMatch> MatchSigns(List <TrafficSign> candidates, List <TrafficSign> knownSigns)
        {
            // Return empty list if parameters are unusable
            if (candidates == null || knownSigns == null || candidates.Count < 1 || knownSigns.Count < 1)
            {
                return(new List <TrafficSignMatch>());
            }

            Matches = new List <TrafficSignMatch>();
            List <TrafficSignMatch> result = new List <TrafficSignMatch>();
            // NOTE(review): the original also created LinearIndexParams/SearchParams here
            // that were never used (and never disposed); they have been removed.

            // Iterate over candidates.
            // BUGFIX: the original iterated the 'Candidates' property even though only
            // the 'candidates' parameter was validated above.
            foreach (TrafficSign candi in candidates)
            {
                // best match and score
                int bestScore = 0;
                TrafficSignMatch bestMatch = null;

                // Iterate over known signs
                foreach (TrafficSign knownsign in knownSigns)
                {
                    VectorOfVectorOfDMatch match;

                    // Match the sign or log the problem
                    try
                    {
                        match = knownsign.MatchToOtherSign(candi);
                    } catch (Exception ex)
                    {
                        Console.WriteLine(ex.Message);
                        continue; // nothing to score for this pair
                    }

                    using (Mat mask = new Mat(match.Size, 1, DepthType.Cv8U, 1))
                    {
                        mask.SetTo(new MCvScalar(255));

                        // Filter duplicate matches
                        Features2DToolbox.VoteForUniqueness(match, 0.8, mask);

                        // Compute the homography matrix
                        Mat homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(knownsign.KeyPoints, candi.KeyPoints, match, mask, 1.5);

                        // Compute a score for the match
                        int score = ScoreMatch(homography, knownsign, candi);

                        // Current score is better than previous
                        if (score > bestScore)
                        {
                            bestScore = score;
                            bestMatch = new TrafficSignMatch(candi, knownsign, score, match, homography);
                        }
                    }
                }
                if (bestMatch != null && bestMatch.MatchScore > 0)
                {
                    Matches.Add(bestMatch);
                    // BUGFIX: 'result' was returned but never populated, so the method
                    // always returned an empty list regardless of matches found.
                    result.Add(bestMatch);
                }
            }
            return(result);
        }