Example #1
        // Required namespaces: System.Diagnostics (Stopwatch), Emgu.CV, Emgu.CV.CvEnum,
        // Emgu.CV.Features2D, Emgu.CV.Flann, Emgu.CV.Structure, Emgu.CV.Util,
        // and Emgu.CV.XFeatures2D (for SURF).
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography, out long score)
        {
            int       k = 2;
            double    uniquenessThreshold = 0.80;
            Stopwatch watch;

            homography        = null;
            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    //   KAZE featureDetector = new KAZE();
                    SURF featureDetector = new SURF(100);
                    //    SIFT featureDetector = new SIFT();
                    Mat modelDescriptors = new Mat();
                    // Detect keypoints and compute descriptors in one call; OpenCV exposes these
                    // as two separate steps, which can also be used independently.
                    featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
                    watch = Stopwatch.StartNew();

                    Mat observedDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                    // KD-tree index parameters give faster matching at some cost in accuracy;
                    // ip and sp are only needed by the commented-out FlannBasedMatcher below,
                    // since a brute-force matcher (BFMatcher, L2 distance) is used here instead.
                    using (var ip = new Emgu.CV.Flann.KdTreeIndexParams())
                        using (var sp = new SearchParams())
                            //  using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp)) // start matching
                            using (BFMatcher matcher = new BFMatcher(DistanceType.L2))
                            {
                                matcher.Add(modelDescriptors);
                                matcher.KnnMatch(observedDescriptors, matches, k, null);
                                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                mask.SetTo(new MCvScalar(255));
                                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask); // reject ambiguous (non-unique) matches

                                // Calculate score based on matches size
                                // ---------------------------------------------->
                                score = 0;
                                for (int i = 0; i < matches.Size; i++)
                                {
                                    // skip query points rejected by the uniqueness vote
                                    if (mask.GetData(i)[0] == 0)
                                    {
                                        continue;
                                    }
                                    // count every surviving knn match for this query point
                                    score += matches[i].Size;
                                }
                                // <----------------------------------------------

                                int nonZeroCount = CvInvoke.CountNonZero(mask); // remaining matches, used to decide whether the model can be located in the scene
                                if (nonZeroCount >= 4)
                                {
                                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                                    if (nonZeroCount >= 4)
                                    {
                                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                    }
                                }
                            }
                    watch.Stop();
                }
            matchTime = watch.ElapsedMilliseconds;
        }
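A minimal usage sketch for FindMatch (not part of the original example): the image file names are placeholders, and the Imread and Features2DToolbox.DrawMatches calls assume a recent Emgu CV (4.x) build, so adjust them to the version in use.

        Mat modelImage    = CvInvoke.Imread("model.png", ImreadModes.Grayscale);   // placeholder path
        Mat observedImage = CvInvoke.Imread("scene.png", ImreadModes.Grayscale);   // placeholder path

        using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
        {
            FindMatch(modelImage, observedImage, out long matchTime,
                      out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints,
                      matches, out Mat mask, out Mat homography, out long score);

            // Visualize the surviving matches; homography is null when fewer than
            // four geometrically consistent matches were found.
            Mat result = new Mat();
            Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                          matches, result, new MCvScalar(255, 255, 255),
                                          new MCvScalar(0, 0, 255), mask);
            Console.WriteLine("time: " + matchTime + " ms, score: " + score +
                              ", homography found: " + (homography != null));
        }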
Example #2
        // Required namespaces: System, System.Drawing (Bitmap), Emgu.CV, Emgu.CV.CvEnum,
        // Emgu.CV.Features2D, Emgu.CV.Flann, Emgu.CV.Structure, Emgu.CV.Util.
        // This is an instance method: featureDetector, mask, lastMatches, lastObserved,
        // lastObservedKeyPoint, lastMatchFeatureData and GetLabelFromMatches are members of
        // the containing feature-collection class; CVUtil, FeatureResult and SimpleFeatureData
        // are project-local types.
        public FeatureResult GetFeature(Bitmap bmpSrc)
        {
            Mat matTest = CVUtil.BitmapToMat(bmpSrc);

            matTest = ProcessImage(matTest);

            /*Bitmap bmp = Utils.cropImage(bmpSrc, cropRect);
             * Mat matTest = CVUtil.BitmapToMat(bmp);
             * if (colorIndex != -1)
             * {
             *  matTest = matTest.Split()[colorIndex];
             * }*/
            Mat observedDescriptors            = new Mat();
            VectorOfKeyPoint observedKeyPoints = new VectorOfKeyPoint();

            featureDetector.DetectAndCompute(matTest, mask, observedKeyPoints, observedDescriptors, false);
            int    k = 2;
            double uniquenessThreshold = 0.80;

            //Mat homography = null;
            // FLANN matcher with a KD-tree index: faster than brute-force matching,
            // at a slight cost in accuracy (use BFMatcher instead for exact matching).
            using (Emgu.CV.Flann.KdTreeIndexParams ip = new Emgu.CV.Flann.KdTreeIndexParams())
                using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                    using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                    {
                        VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
                        foreach (SimpleFeatureData sd in this)
                        {
                            matcher.Add(sd.descriptors);
                            //break;
                        }

                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                        lastMatches          = matches;
                        lastObserved         = matTest;
                        lastObservedKeyPoint = observedKeyPoints;
                        //Mat mat = new Mat();
                        //Features2DToolbox.DrawKeypoints(matTest, observedKeyPoints, mat, new Bgr(Color.Blue));
                        //FormOpenCV.lstMat.Add(mat);
                        //Console.WriteLine(CVUtil.ToString(observedDescriptors));
                        //Console.WriteLine(CVUtil.ToString(observedKeyPoints));
                        //Console.WriteLine(CVUtil.ToString(matches));
                        //Console.WriteLine(MatchesToString(matches));
                        Mat uniqueMask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                        uniqueMask.SetTo(new MCvScalar(255));
                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, uniqueMask);

                        int nonZeroCount = CvInvoke.CountNonZero(uniqueMask);
                        if (nonZeroCount > 4)
                        {
                            //Console.WriteLine(CVUtil.ToString(uniqueMask));
                            String            retLabel = GetLabelFromMatches(matches, uniqueMask);
                            SimpleFeatureData mfd      = lastMatchFeatureData;
                            try
                            {
                                //int nonZeroCount2 = Features2DToolbox.VoteForSizeAndOrientation(mfd.keyPoints, observedKeyPoints, matches, uniqueMask, 1.5, 20);
                                //Console.WriteLine("nonZeroCount2=" + nonZeroCount2);
                                // With the size/orientation vote commented out, this check simply
                                // repeats the nonZeroCount > 4 test above.
                                if (nonZeroCount > 4)
                                {
                                    Mat homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(mfd.keyPoints, observedKeyPoints, matches, uniqueMask, 2);

                                    /*Console.WriteLine(CVUtil.ToString(homography));
                                     * Rectangle rect = CVUtil.GetRect(mfd.keyPoints);
                                     * PointF[] src = {
                                     *  new PointF(rect.X,rect.Y),new PointF(rect.X,rect.Y + rect.Height-1),
                                     *  new PointF(rect.X + rect.Width-1,rect.Y + rect.Height-1),new PointF(rect.X + rect.Width-1,rect.Y)
                                     * };
                                     * PointF[] points = CvInvoke.PerspectiveTransform(src, homography);
                                     * foreach (var p in points)
                                     * {
                                     *  Console.WriteLine(p.ToString());
                                     * }
                                     * Point[] ap = Array.ConvertAll(points,
                                     * new Converter<PointF, Point>(CVUtil.PointFToPoint));
                                     * Mat testImage = matTest.Clone();
                                     * CvInvoke.Polylines(testImage, ap, true, new MCvScalar(255, 0, 0));
                                     */
                                    //CvInvoke.Rectangle(testImage, new Rectangle(0, 0, 100, 100), new MCvScalar(255, 255, 0));
                                    //CvInvoke.Circle(testImage, new Point(100, 100), 50, new MCvScalar(255, 255, 0), -1);
                                    //lstMat.Add(testImage);
                                    FeatureResult ret = new FeatureResult();
                                    ret.keyPoint         = observedKeyPoints;
                                    ret.label            = retLabel;
                                    ret.homography       = homography;
                                    ret.matchFeatureData = mfd;
                                    return ret;
                                }
                            }
                            catch (Exception ex)
                            {
                                // GetHomographyMatrixFromMatchedFeatures can throw on degenerate
                                // matches; log and fall through to return null.
                                Console.WriteLine(ex.Message);
                            }
                        }
                        return null;
                    }
        }
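A hedged usage sketch for GetFeature: the containing collection class is not shown above, so the class name used here (SimpleFeatureDataList), the helper that loads it, and the file name are assumptions made purely for illustration.

        // Hypothetical usage; SimpleFeatureDataList and LoadTrainedFeatures() are
        // assumed names, not part of the original example.
        SimpleFeatureDataList features = LoadTrainedFeatures();
        using (Bitmap screenshot = new Bitmap("screen.png"))        // placeholder path
        {
            FeatureResult result = features.GetFeature(screenshot);
            if (result != null)
            {
                // label, homography and matchFeatureData are populated by GetFeature.
                Console.WriteLine("matched label: " + result.label);
            }
            else
            {
                Console.WriteLine("no feature matched");
            }
        }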