A wrapper for CvFeatureTree (Emgu CV's KD-tree feature matcher).
Inheritance: Emgu.Util.UnmanagedObject
Example 1: finding the nearest neighbor of a 1-D query point in a KD-tree.
        /// <summary>
        /// Builds a KD-tree from ten one-dimensional feature points {0, 1, ..., 9}
        /// and verifies that the nearest neighbor of the query point {5.0} is the
        /// feature at index 5, at distance 0.
        /// </summary>
        public void TestKDTree()
        {
            // One feature per value 0..9, each a single-element descriptor.
            float[][] features = new float[10][];
            for (int i = 0; i < features.Length; i++)
                features[i] = new float[] { (float)i };
            FeatureTree tree = new FeatureTree(features);

            Matrix<Int32> result;    // indices of the nearest model features
            Matrix<double> distance; // distances to those features

            // Single query point: {5.0}, which coincides exactly with features[5].
            float[][] features2 = new float[1][];
            features2[0] = new float[] { 5.0f };

            // Request 1 nearest neighbor; the last argument presumably bounds the
            // search effort (leaves inspected) — see FeatureTree.FindFeatures docs.
            tree.FindFeatures(features2, out result, out distance, 1, 20);

            // NUnit's Assert.AreEqual takes (expected, actual) — the original had
            // the arguments reversed, which makes failure messages misleading.
            Assert.AreEqual(5, result[0, 0]);
            // Exact 0.0 is safe here: the query coincides with a stored feature.
            Assert.AreEqual(0.0, distance[0, 0]);
        }
Example 2: matching SURF features between a model image and an observed scene
using per-Laplacian-sign feature trees, then locating the model via a homography.
      /// <summary>
      /// SURF feature-matching demo: extracts SURF features from "box.png" (model)
      /// and "box_in_scene.png" (observed), matches them per Laplacian sign via
      /// feature trees, estimates a homography with RANSAC, and displays the two
      /// images stacked vertically with match lines and the projected model outline.
      /// </summary>
      static void Run()
      {
         Image<Gray, Byte> modelImage = new Image<Gray, byte>("box.png");

         #region extract features from the object image
         MCvSURFParams modelParam = new MCvSURFParams(500, false);
         SURFFeature[] modelFeatures = modelImage.ExtractSURF(ref modelParam);

         // SURF descriptors are split by Laplacian sign so that only features
         // of the same sign are compared against each other.
         SURFFeature[] modelPositive = Array.FindAll<SURFFeature>(modelFeatures, f => f.Point.laplacian >= 0);
         SURFFeature[] modelNegative = Array.FindAll<SURFFeature>(modelFeatures, f => f.Point.laplacian < 0);

         // One feature tree per Laplacian sign, built from the model descriptors.
         FeatureTree treePositive = new FeatureTree(
            Array.ConvertAll<SURFFeature, Matrix<float>>(modelPositive, f => f.Descriptor));
         FeatureTree treeNegative = new FeatureTree(
            Array.ConvertAll<SURFFeature, Matrix<float>>(modelNegative, f => f.Descriptor));
         #endregion

         Image<Gray, Byte> observedImage = new Image<Gray, byte>("box_in_scene.png");

         #region extract features from the observed image
         MCvSURFParams observedParam = new MCvSURFParams(500, false);
         SURFFeature[] observedFeatures = observedImage.ExtractSURF(ref observedParam);
         SURFFeature[] observedPositive = Array.FindAll<SURFFeature>(observedFeatures, f => f.Point.laplacian >= 0);
         SURFFeature[] observedNegative = Array.FindAll<SURFFeature>(observedFeatures, f => f.Point.laplacian < 0);
         #endregion

         #region Merge the object image and the observed image into one image for display
         // Model image occupies the top strip of 'res', observed image the bottom.
         Image<Gray, Byte> res = new Image<Gray, byte>(Math.Max(modelImage.Width, observedImage.Width), modelImage.Height + observedImage.Height);
         res.ROI = new System.Drawing.Rectangle(0, 0, modelImage.Width, modelImage.Height);
         modelImage.Copy(res, null);
         res.ROI = new System.Drawing.Rectangle(0, modelImage.Height, observedImage.Width, observedImage.Height);
         observedImage.Copy(res, null);
         res.ROI = Rectangle.Empty;
         #endregion

         double matchDistanceRatio = 0.8;
         List<PointF> modelPoints = new List<PointF>();
         List<PointF> observePoints = new List<PointF>();

         #region using Feature Tree to match feature
         Matrix<float>[] observedDescriptorsPositive = Array.ConvertAll<SURFFeature, Matrix<float>>(
            observedPositive, f => f.Descriptor);
         Matrix<float>[] observedDescriptorsNegative = Array.ConvertAll<SURFFeature, Matrix<float>>(
            observedNegative, f => f.Descriptor);

         Matrix<Int32> indices;
         Matrix<double> distances;

         // For every observed descriptor, query the 2 nearest model neighbors
         // (the final argument presumably bounds the tree-search effort), then
         // let MatchSURFFeatureWithFeatureTree accumulate the accepted point
         // pairs into modelPoints / observePoints.
         treePositive.FindFeatures(observedDescriptorsPositive, out indices, out distances, 2, 20);
         MatchSURFFeatureWithFeatureTree(
            modelPositive,
            observedPositive,
            matchDistanceRatio, indices.Data, distances.Data, modelPoints, observePoints);

         treeNegative.FindFeatures(observedDescriptorsNegative, out indices, out distances, 2, 20);
         MatchSURFFeatureWithFeatureTree(
            modelNegative,
            observedNegative,
            matchDistanceRatio, indices.Data, distances.Data, modelPoints, observePoints);
         #endregion

         // Robustly fit the model-to-observed homography from the matched pairs.
         Matrix<float> homographyMatrix = CameraCalibration.FindHomography(
            modelPoints.ToArray(), //points on the object image
            observePoints.ToArray(), //points on the observed image
            HOMOGRAPHY_METHOD.RANSAC,
            3).Convert<float>();

         #region draw the projected object in observed image
         // One line per match; observed points are shifted down by the model
         // image height because the observed image sits in the lower strip.
         for (int i = 0; i < modelPoints.Count; i++)
         {
            PointF shifted = observePoints[i];
            shifted.Y += modelImage.Height;
            res.Draw(new LineSegment2DF(modelPoints[i], shifted), new Gray(0), 1);
         }

         // Project the four model-image corners (as homogeneous row vectors)
         // through the homography to outline the detected object.
         System.Drawing.Rectangle rect = modelImage.ROI;
         Matrix<float> srcCorners = new Matrix<float>(new float[,]
            {{  rect.Left, rect.Bottom, 1.0f},
               { rect.Right, rect.Bottom, 1.0f},
               { rect.Right, rect.Top, 1.0f},
               { rect.Left, rect.Top, 1.0f}});

         Matrix<float> dstCorners = homographyMatrix * srcCorners.Transpose();
         float[,] dstData = dstCorners.Data;

         Point[] outline = new Point[4];
         for (int i = 0; i < outline.Length; i++)
         {
            // De-homogenize (divide by w) and shift into the lower strip.
            float w = dstData[2, i];
            outline[i] = new Point(
               (int)(dstData[0, i] / w),
               (int)(dstData[1, i] / w) + modelImage.Height);
         }

         res.DrawPolyline(outline, true, new Gray(255.0), 5);
         #endregion

         ImageViewer.Show(res);
      }