Example #1
        /// <summary>
        /// Recover the homography matrix using RANSAC. If the matrix cannot be recovered, null is returned.
        /// </summary>
        /// <param name="matchedFeatures">The matched features; only the first ModelFeature will be considered</param>
        /// <returns>The homography matrix; if it cannot be found, null is returned</returns>
        public static HomographyMatrix GetHomographyMatrixFromMatchedFeatures(MatchedSURFFeature[] matchedFeatures)
        {
            if (matchedFeatures.Length < 4)
                return null;

            HomographyMatrix homography;
            if (matchedFeatures.Length < _randsacRequiredMatch)
            {
                // Too few points for RANSAC, use 4 points only
                PointF[] pts1 = new PointF[4];
                PointF[] pts2 = new PointF[4];
                for (int i = 0; i < 4; i++)
                {
                    pts1[i] = matchedFeatures[i].SimilarFeatures[0].Feature.Point.pt;
                    pts2[i] = matchedFeatures[i].ObservedFeature.Point.pt;
                }
                homography = CameraCalibration.GetPerspectiveTransform(pts1, pts2);
            }
            else
            {
                // Use RANSAC to find the homography matrix
                PointF[] pts1 = new PointF[matchedFeatures.Length];
                PointF[] pts2 = new PointF[matchedFeatures.Length];
                for (int i = 0; i < matchedFeatures.Length; i++)
                {
                    pts1[i] = matchedFeatures[i].SimilarFeatures[0].Feature.Point.pt;
                    pts2[i] = matchedFeatures[i].ObservedFeature.Point.pt;
                }

                homography = CameraCalibration.FindHomography(
                    pts1, // points on the model image
                    pts2, // points on the observed image
                    CvEnum.HOMOGRAPHY_METHOD.RANSAC,
                    3);
                if (homography == null)
                    return null;
            }

            if (homography.IsValid(10))
                return homography;
            else
            {
                homography.Dispose();
                return null;
            }
        }
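
As a hedged usage sketch (not part of the example above): once a homography has been recovered, it can be used to project the model image's corners into the observed image. The names filteredMatches and modelImage below are assumptions.

// Sketch only: filteredMatches (MatchedSURFFeature[]) and modelImage (Image<Gray, byte>) are assumed inputs.
HomographyMatrix homography = GetHomographyMatrixFromMatchedFeatures(filteredMatches);
if (homography != null)
{
    // Map the model image corners into the observed image
    PointF[] corners = new PointF[]
    {
        new PointF(0, 0),
        new PointF(modelImage.Width, 0),
        new PointF(modelImage.Width, modelImage.Height),
        new PointF(0, modelImage.Height)
    };
    homography.ProjectPoints(corners); // transforms the points in place
    homography.Dispose();
}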
Example #2
            /*
             * private static int CompareSimilarFeature(SimilarFeature f1, SimilarFeature f2)
             * {
             * if (f1.Distance < f2.Distance)
             *    return -1;
             * if (f1.Distance == f2.Distance)
             *    return 0;
             * else
             *    return 1;
             * }*/

            /// <summary>
            /// Match the SURF features from the observed image to the features from the model image
            /// </summary>
            /// <param name="observedFeatures">The SURF features from the observed image</param>
            /// <param name="k">The number of neighbors to find</param>
            /// <param name="emax">For k-d tree only: the maximum number of leaves to visit.</param>
            /// <returns>The matched features</returns>
            public MatchedSURFFeature[] MatchFeature(SURFFeature[] observedFeatures, int k, int emax)
            {
                if (observedFeatures.Length == 0)
                {
                    return new MatchedSURFFeature[0];
                }

                // Collect the descriptors of the observed features
                float[][] descriptors = new float[observedFeatures.Length][];
                for (int i = 0; i < observedFeatures.Length; i++)
                {
                    descriptors[i] = observedFeatures[i].Descriptor;
                }

                using (Matrix<int> result1 = new Matrix<int>(descriptors.Length, k))
                using (Matrix<float> dist1 = new Matrix<float>(descriptors.Length, k))
                {
                    // k-nearest-neighbour search against the model feature index
                    _modelIndex.KnnSearch(Util.GetMatrixFromDescriptors(descriptors), result1, dist1, k, emax);

                    int[,] indexes = result1.Data;
                    float[,] distances = dist1.Data;

                    MatchedSURFFeature[] res = new MatchedSURFFeature[observedFeatures.Length];
                    List<SimilarFeature> matchedFeatures = new List<SimilarFeature>();

                    for (int i = 0; i < res.Length; i++)
                    {
                        matchedFeatures.Clear();

                        // Negative indexes mark invalid neighbours and are skipped
                        for (int j = 0; j < k; j++)
                        {
                            int index = indexes[i, j];
                            if (index >= 0)
                            {
                                matchedFeatures.Add(new SimilarFeature(distances[i, j], _modelFeatures[index]));
                            }
                        }

                        res[i].ObservedFeature = observedFeatures[i];
                        res[i].SimilarFeatures = matchedFeatures.ToArray();
                    }
                    return res;
                }
            }
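
A hedged usage sketch: MatchFeature is an instance method, so it is assumed here to be called on a tracker object built from the model image's SURF features (the class holding _modelIndex and _modelFeatures is not shown in this example); tracker and observedFeatures are assumed names.

// Sketch only: tracker and observedFeatures are assumptions.
// k = 2 neighbours gives SimilarFeatures[0] and [1], which the uniqueness ratio test in Example #6 needs;
// emax = 20 limits how many k-d tree leaves are visited per query.
MatchedSURFFeature[] matched = tracker.MatchFeature(observedFeatures, 2, 20);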
Example #3
        /// <summary>
        /// Eliminate the matched features whose scale and rotation do not agree with the majority's scale and rotation.
        /// </summary>
        /// <param name="rotationBins">The number of bins for rotation; a good value might be 20 (which means each bin covers 18 degrees)</param>
        /// <param name="scaleIncrement">This determines the difference in scale for neighbouring bins; a good value might be 1.5 (which means matched features in bin i+1 are scaled 1.5 times larger than matched features in bin i)</param>
        /// <param name="matchedFeatures">The matched features that will participate in the voting. For each matched feature, only the zero-indexed ModelFeature will be considered.</param>
        public static MatchedSURFFeature[] VoteForSizeAndOrientation(MatchedSURFFeature[] matchedFeatures, double scaleIncrement, int rotationBins)
        {
            int elementsCount = matchedFeatures.Length;

            float[] scales = new float[elementsCount];
            float[] rotations = new float[elementsCount];
            float[] flags = new float[elementsCount];
            float minScale = float.MaxValue;
            float maxScale = float.MinValue;

            for (int i = 0; i < matchedFeatures.Length; i++)
            {
                // Log of the scale ratio between the observed feature and its best model match
                float scale = (float)matchedFeatures[i].ObservedFeature.Point.size / (float)matchedFeatures[i].SimilarFeatures[0].Feature.Point.size;
                scale = (float)Math.Log10(scale);
                scales[i] = scale;
                if (scale < minScale)
                {
                    minScale = scale;
                }
                if (scale > maxScale)
                {
                    maxScale = scale;
                }

                // Rotation difference, normalized to [0, 360)
                float rotation = matchedFeatures[i].ObservedFeature.Point.dir - matchedFeatures[i].SimilarFeatures[0].Feature.Point.dir;
                rotations[i] = rotation < 0.0 ? rotation + 360 : rotation;
            }

            int scaleBinSize = (int)Math.Max(((maxScale - minScale) / Math.Log10(scaleIncrement)), 1);
            int count;

            using (DenseHistogram h = new DenseHistogram(new int[] { scaleBinSize, rotationBins }, new RangeF[] { new RangeF(minScale, maxScale), new RangeF(0, 360) }))
            {
                GCHandle scaleHandle = GCHandle.Alloc(scales, GCHandleType.Pinned);
                GCHandle rotationHandle = GCHandle.Alloc(rotations, GCHandleType.Pinned);
                GCHandle flagsHandle = GCHandle.Alloc(flags, GCHandleType.Pinned);

                using (Matrix<float> flagsMat = new Matrix<float>(1, elementsCount, flagsHandle.AddrOfPinnedObject()))
                using (Matrix<float> scalesMat = new Matrix<float>(1, elementsCount, scaleHandle.AddrOfPinnedObject()))
                using (Matrix<float> rotationsMat = new Matrix<float>(1, elementsCount, rotationHandle.AddrOfPinnedObject()))
                {
                    // Build the 2D (scale, rotation) histogram and keep only the dominant bins
                    h.Calculate(new Matrix<float>[] { scalesMat, rotationsMat }, true, null);

                    float minVal, maxVal;
                    int[] minLoc, maxLoc;
                    h.MinMax(out minVal, out maxVal, out minLoc, out maxLoc);

                    h.Threshold(maxVal * 0.5);

                    // Back-project: flags[i] is non-zero if match i falls in a surviving bin
                    CvInvoke.cvCalcBackProject(new IntPtr[] { scalesMat.Ptr, rotationsMat.Ptr }, flagsMat.Ptr, h.Ptr);
                    count = CvInvoke.cvCountNonZero(flagsMat);
                }
                scaleHandle.Free();
                rotationHandle.Free();
                flagsHandle.Free();

                MatchedSURFFeature[] matchedGoodFeatures = new MatchedSURFFeature[count];
                int index = 0;
                for (int i = 0; i < matchedFeatures.Length; i++)
                {
                    if (flags[i] != 0)
                    {
                        matchedGoodFeatures[index++] = matchedFeatures[i];
                    }
                }

                return matchedGoodFeatures;
            }
        }
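
A hedged usage sketch, assuming matched is the MatchedSURFFeature[] produced by MatchFeature; 1.5 and 20 are the values suggested in the documentation comments above.

// Sketch only: matched is an assumed input.
// Keeps only matches whose scale and rotation agree with the dominant bins.
MatchedSURFFeature[] consistent = VoteForSizeAndOrientation(matched, 1.5, 20);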
Example #4
         /*
         private static int CompareSimilarFeature(SimilarFeature f1, SimilarFeature f2)
         {
            if (f1.Distance < f2.Distance)
               return -1;
            if (f1.Distance == f2.Distance)
               return 0;
            else
               return 1;
         }*/

         /// <summary>
         /// Match the SURF features from the observed image to the features from the model image
         /// </summary>
         /// <param name="observedFeatures">The SURF features from the observed image</param>
         /// <param name="k">The number of neighbors to find</param>
         /// <param name="emax">For k-d tree only: the maximum number of leaves to visit.</param>
         /// <returns>The matched features</returns>
         public MatchedSURFFeature[] MatchFeature(SURFFeature[] observedFeatures, int k, int emax)
         {
            if (observedFeatures.Length == 0) return new MatchedSURFFeature[0];

            float[][] descriptors = new float[observedFeatures.Length][];
            for (int i = 0; i < observedFeatures.Length; i++)
               descriptors[i] = observedFeatures[i].Descriptor;
            using (Matrix<int> result1 = new Matrix<int>(descriptors.Length, k))
            using (Matrix<float> dist1 = new Matrix<float>(descriptors.Length, k))
            {
               _modelIndex.KnnSearch(Util.GetMatrixFromDescriptors(descriptors), result1, dist1, k, emax);

               int[,] indexes = result1.Data;
               float[,] distances = dist1.Data;

               MatchedSURFFeature[] res = new MatchedSURFFeature[observedFeatures.Length];
               List<SimilarFeature> matchedFeatures = new List<SimilarFeature>();

               for (int i = 0; i < res.Length; i++)
               {
                  matchedFeatures.Clear();

                  for (int j = 0; j < k; j++)
                  {
                     int index = indexes[i, j];
                     if (index >= 0)
                     {
                        matchedFeatures.Add(new SimilarFeature(distances[i, j], _modelFeatures[index]));
                     }
                  }

                  res[i].ObservedFeature = observedFeatures[i];
                  res[i].SimilarFeatures = matchedFeatures.ToArray();
               }
               return res;
            }
         }
Example #5
      /// <summary>
      /// Eliminate the matched features whose scale and rotation do not agree with the majority's scale and rotation.
      /// </summary>
      /// <param name="rotationBins">The number of bins for rotation; a good value might be 20 (which means each bin covers 18 degrees)</param>
      /// <param name="scaleIncrement">This determines the difference in scale for neighbouring bins; a good value might be 1.5 (which means matched features in bin i+1 are scaled 1.5 times larger than matched features in bin i)</param>
      /// <param name="matchedFeatures">The matched features that will participate in the voting. For each matched feature, only the zero-indexed ModelFeature will be considered.</param>
      public static MatchedSURFFeature[] VoteForSizeAndOrientation(MatchedSURFFeature[] matchedFeatures, double scaleIncrement, int rotationBins)
      {
         int elementsCount = matchedFeatures.Length;
         float[] scales = new float[elementsCount];
         float[] rotations = new float[elementsCount];
         float[] flags = new float[elementsCount];
         float minScale = float.MaxValue;
         float maxScale = float.MinValue;

         for (int i = 0; i < matchedFeatures.Length; i++)
         {
            float scale = (float)matchedFeatures[i].ObservedFeature.Point.size / (float)matchedFeatures[i].SimilarFeatures[0].Feature.Point.size;
            scale = (float)Math.Log10(scale);
            scales[i] = scale;
            if (scale < minScale) minScale = scale;
            if (scale > maxScale) maxScale = scale;

            float rotation = matchedFeatures[i].ObservedFeature.Point.dir - matchedFeatures[i].SimilarFeatures[0].Feature.Point.dir;
            rotations[i] = rotation < 0.0 ? rotation + 360 : rotation;
         }

         int scaleBinSize = (int)Math.Max(((maxScale - minScale) / Math.Log10(scaleIncrement)), 1);
         int count;
         using (DenseHistogram h = new DenseHistogram(new int[] { scaleBinSize, rotationBins }, new RangeF[] { new RangeF(minScale, maxScale), new RangeF(0, 360) }))
         {
            GCHandle scaleHandle = GCHandle.Alloc(scales, GCHandleType.Pinned);
            GCHandle rotationHandle = GCHandle.Alloc(rotations, GCHandleType.Pinned);
            GCHandle flagsHandle = GCHandle.Alloc(flags, GCHandleType.Pinned);

            using (Matrix<float> flagsMat = new Matrix<float>(1, elementsCount, flagsHandle.AddrOfPinnedObject()))
            using (Matrix<float> scalesMat = new Matrix<float>(1, elementsCount, scaleHandle.AddrOfPinnedObject()))
            using (Matrix<float> rotationsMat = new Matrix<float>(1, elementsCount, rotationHandle.AddrOfPinnedObject()))
            {
               h.Calculate(new Matrix<float>[] { scalesMat, rotationsMat }, true, null);

               float minVal, maxVal;
               int[] minLoc, maxLoc;
               h.MinMax(out minVal, out maxVal, out minLoc, out maxLoc);

               h.Threshold(maxVal * 0.5);

               CvInvoke.cvCalcBackProject(new IntPtr[] { scalesMat.Ptr, rotationsMat.Ptr }, flagsMat.Ptr, h.Ptr);
               count = CvInvoke.cvCountNonZero(flagsMat);
            }
            scaleHandle.Free();
            rotationHandle.Free();
            flagsHandle.Free();

            MatchedSURFFeature[] matchedGoodFeatures = new MatchedSURFFeature[count];
            int index = 0;
            for (int i = 0; i < matchedFeatures.Length; i++)
               if (flags[i] != 0)
                  matchedGoodFeatures[index++] = matchedFeatures[i];

            return matchedGoodFeatures;
         }
      }
Example #6
 /// <summary>
 /// Filter the matched features, such that if a match is not unique, it is rejected.
 /// </summary>
 /// <param name="matchedFeatures">The matched SURF features, each of which has its model features sorted by distance (e.g. via SortMatchedFeaturesByDistance)</param>
 /// <param name="uniquenessThreshold">The distance ratio below which a match is considered unique; a good value is 0.8</param>
 /// <returns>The filtered matched SURF features</returns>
 public static MatchedSURFFeature[] VoteForUniqueness(MatchedSURFFeature[] matchedFeatures, double uniquenessThreshold)
 {
    return Array.FindAll<MatchedSURFFeature>(matchedFeatures,
       delegate(MatchedSURFFeature f)
       {
          return
             f.SimilarFeatures.Length == 1 //this is the only match
             || (f.SimilarFeatures[0].Distance / f.SimilarFeatures[1].Distance <= uniquenessThreshold); //if the first model feature is a good match 
       });
 }
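
Taken together, a hedged end-to-end sketch of how these helpers can be chained. All variable names are assumptions, the tracker instance owning MatchFeature and the extraction of SURF features from the model and observed images are not shown in these examples, and the static helpers are assumed to be callable from the same class.

// Sketch only: tracker and observedFeatures are assumed to exist already.
// 1. k-NN match observed descriptors against the model features (k = 2, emax = 20).
MatchedSURFFeature[] matched = tracker.MatchFeature(observedFeatures, 2, 20);

// 2. Keep matches whose best distance is clearly better than the second best (ratio 0.8).
matched = VoteForUniqueness(matched, 0.8);

// 3. Keep matches that agree with the dominant scale and rotation.
matched = VoteForSizeAndOrientation(matched, 1.5, 20);

// 4. Recover the model-to-observed homography; null means no valid homography was found.
HomographyMatrix homography = GetHomographyMatrixFromMatchedFeatures(matched);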