Example #1
      public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
      {
         int k = 2;
         double uniquenessThreshold = 0.80;

         Stopwatch watch;
         homography = null;

         modelKeyPoints = new VectorOfKeyPoint();
         observedKeyPoints = new VectorOfKeyPoint();

         using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
         using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
         {
            KAZE featureDetector = new KAZE();

            // extract features from the object image
            Mat modelDescriptors = new Mat();
            featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

            watch = Stopwatch.StartNew();

            // extract features from the observed image
            Mat observedDescriptors = new Mat();
            featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
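            // match the observed descriptors against the model descriptors with a brute-force
            // L2 matcher, keeping the k = 2 nearest neighbours for the uniqueness (ratio) test below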
            BFMatcher matcher = new BFMatcher(DistanceType.L2);
            
            matcher.Add(modelDescriptors);

            matcher.KnnMatch(observedDescriptors, matches, k, null);
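            // the mask starts as all 255 (every match accepted); the voting steps below zero out rejected rows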
            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

            int nonZeroCount = CvInvoke.CountNonZero(mask);
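            // a homography can only be estimated from at least 4 surviving correspondences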
            if (nonZeroCount >= 4)
            {
               nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                  matches, mask, 1.5, 20);
               if (nonZeroCount >= 4)
                  homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                     observedKeyPoints, matches, mask, 2);
            }

            watch.Stop();

         }
         matchTime = watch.ElapsedMilliseconds;
      }
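A minimal calling sketch for the FindMatch method above (an illustration, not part of the posted example: the file names and the corner-projection step using System.Drawing types and CvInvoke.PerspectiveTransform are assumptions):

      // namespaces assumed: Emgu.CV, Emgu.CV.CvEnum, Emgu.CV.Util, System.Drawing
      using (Mat modelImage = CvInvoke.Imread("model.png", ImreadModes.Grayscale))
      using (Mat observedImage = CvInvoke.Imread("scene.png", ImreadModes.Grayscale))
      using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
      {
         FindMatch(modelImage, observedImage, out long matchTime,
            out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints,
            matches, out Mat mask, out Mat homography);

         if (homography != null)
         {
            // project the model image corners into the observed image
            Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
            PointF[] corners =
            {
               new PointF(rect.Left, rect.Bottom),
               new PointF(rect.Right, rect.Bottom),
               new PointF(rect.Right, rect.Top),
               new PointF(rect.Left, rect.Top)
            };
            corners = CvInvoke.PerspectiveTransform(corners, homography);
         }
      }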
Example #2
 [DllImport(CvInvoke.ExternLibrary, CallingConvention = CvInvoke.CvCallingConvention)]
 internal extern static IntPtr cveAKAZEDetectorCreate(
    AKAZE.DescriptorType descriptorType, int descriptorSize, int descriptorChannels,
    float threshold, int octaves, int nOctaveLayers, KAZE.Diffusivity diffusivity,
    ref IntPtr feature2D);
Example #3
 [DllImport(CvInvoke.ExternLibrary, CallingConvention = CvInvoke.CvCallingConvention)]
 internal extern static IntPtr cveKAZEDetectorCreate(
    [MarshalAs(CvInvoke.BoolMarshalType)]
    bool extended, 
    [MarshalAs(CvInvoke.BoolMarshalType)]
    bool upright, 
    float threshold,
    int octaves, int sublevels, KAZE.Diffusivity diffusivity,
    ref IntPtr feature2D);
Example #4
 /// <summary>
 /// Create AKAZE using the specific values
 /// </summary>
 /// <param name="descriptorType">Type of the extracted descriptor</param>
 /// <param name="descriptorSize">Size of the descriptor in bits. 0 -> Full size</param>
 /// <param name="descriptorChannels">Number of channels in the descriptor (1, 2, 3)</param>
 /// <param name="threshold">Detector response threshold to accept point</param>
 /// <param name="nOctaveLayers"> Default number of sublevels per scale level</param>
 /// <param name="nOctaves">Maximum octave evolution of the image</param>
 /// <param name="diffusivity">Diffusivity type</param>
 public AKAZE(DescriptorType descriptorType = DescriptorType.Mldb, int descriptorSize = 0, int descriptorChannels = 3,
    float threshold = 0.001f, int nOctaves = 4, int nOctaveLayers = 4, KAZE.Diffusivity diffusivity = KAZE.Diffusivity.PmG2)
 {
    _ptr = CvInvoke.cveAKAZEDetectorCreate(
       descriptorType, descriptorSize, descriptorChannels, 
       threshold, nOctaves, nOctaveLayers, diffusivity,
       ref _feature2D);
 }
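A small usage sketch (an assumption for illustration, not taken from the posted code): constructing AKAZE with the default parameter values shown above and extracting keypoints and descriptors the same way the KAZE-based FindMatch examples do; the image path is hypothetical.

 using (AKAZE akaze = new AKAZE(AKAZE.DescriptorType.Mldb, descriptorSize: 0, descriptorChannels: 3,
    threshold: 0.001f, nOctaves: 4, nOctaveLayers: 4, diffusivity: KAZE.Diffusivity.PmG2))
 using (Mat image = CvInvoke.Imread("scene.png", ImreadModes.Grayscale))
 using (Mat descriptors = new Mat())
 using (VectorOfKeyPoint keyPoints = new VectorOfKeyPoint())
 {
    // detect keypoints and compute their MLDB descriptors in a single pass
    akaze.DetectAndCompute(image, null, keyPoints, descriptors, false);
 }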
Example #5
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography, out long score)
        {
            int    k = 2;
            double uniquenessThreshold = 0.80;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    KAZE featureDetector = new KAZE();

                    Mat modelDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                    watch = Stopwatch.StartNew();

                    Mat observedDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                    // KdTree for faster results / less accuracy
                    using (var ip = new Emgu.CV.Flann.KdTreeIndexParams())
                        using (var sp = new SearchParams())
                            using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                            {
                                matcher.Add(modelDescriptors);

                                matcher.KnnMatch(observedDescriptors, matches, k, null);
                                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                mask.SetTo(new MCvScalar(255));
                                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                // Calculate score based on matches size
                                // ---------------------------------------------->
                                score = 0;
                                for (int i = 0; i < matches.Size; i++)
                                {
                                    if (mask.GetData(i)[0] == 0)
                                    {
                                        continue;
                                    }
                                    foreach (var e in matches[i].ToArray())
                                    {
                                        ++score;
                                    }
                                }
                                // <----------------------------------------------

                                int nonZeroCount = CvInvoke.CountNonZero(mask);
                                if (nonZeroCount >= 4)
                                {
                                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                                    if (nonZeroCount >= 4)
                                    {
                                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                    }
                                }
                            }
                    watch.Stop();
                }
            matchTime = watch.ElapsedMilliseconds;
        }