Example #1
        public bool Match(IImageLocalFeatures feature1, IImageLocalFeatures feature2)
        {
            bool                      match = false;
            int                       k     = 2;
            double                    uniquenessThreshold = 0.8;
            Matrix <byte>             mask;
            Matrix <int>              indices;
            BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(_distanceType);

            matcher.Add(feature1.Descriptors);

            indices = new Matrix <int>(feature2.Descriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(feature2.Descriptors.Rows, k))
            {
                matcher.KnnMatch(feature2.Descriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 25)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(feature1.KeyPoints, feature2.KeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 6)
                {
                    _homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(feature1.KeyPoints, feature2.KeyPoints, indices, mask, 2);
                    match       = true;
                }
            }

            /*
             * _result = Features2DToolbox.DrawMatches(feature1.Image, feature1.KeyPoints, feature2.Image, feature2.KeyPoints,
             *                                      indices,
             *                                      new Bgr(255, 0, 0),
             *                                      new Bgr(255, 255, 255),
             *                                      mask,
             *                                      Features2DToolbox.KeypointDrawType.DEFAULT);
             *
             * if (_homography != null)
             * {
             *  Rectangle rect = feature2.Image.ROI;
             *  PointF[] pts = new PointF[]
             *  {
             *      new PointF(rect.Left, rect.Bottom),
             *      new PointF(rect.Right, rect.Bottom),
             *      new PointF(rect.Right, rect.Top),
             *      new PointF(rect.Left, rect.Top)
             *  };
             *
             *  _homography.ProjectPoints(pts);
             *  _result.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
             * }
             */

            return(match);
        }
Example #2
        private static void FindMatch(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, SurfSettings surfSettings, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, out Matrix <int> indices, out Matrix <byte> mask, out HomographyMatrix homography)
        {
            #region SURF Detector Region
            double hessianThresh       = 500;
            double uniquenessThreshold = 0.8;

            if (surfSettings != null)
            {
                hessianThresh       = surfSettings.HessianThresh.Value;
                uniquenessThreshold = surfSettings.UniquenessThreshold.Value;
            }

            SURFDetector surfCPU = new SURFDetector(hessianThresh, false);
            #endregion



            int       k = 2;
            Stopwatch watch;
            homography = null;


            //extract features from the object image
            modelKeyPoints = new VectorOfKeyPoint();
            Matrix <float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);

            watch = Stopwatch.StartNew();

            // extract features from the observed image
            observedKeyPoints = new VectorOfKeyPoint();
            Matrix <float>            observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);
            BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);
            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);
            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }

            watch.Stop();

            matchTime = watch.ElapsedMilliseconds;
        }
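
        // SurfSettings is not defined in this listing. Judging from how it is used
        // above (HessianThresh.Value and UniquenessThreshold.Value), nullable
        // properties fit. A minimal hypothetical sketch, not the original class:
        public class SurfSettings
        {
            public double? HessianThresh { get; set; }
            public double? UniquenessThreshold { get; set; }
        }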
        // This is where the main comparison takes place
        public void FindMatch(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints,
                              out VectorOfKeyPoint observedKeyPoints, out Matrix <int> indices, out Matrix <byte> mask, out HomographyMatrix homography)
        {
            int          k = 2;
            double       uniquenessThreshold = 0.8;
            SURFDetector surfCPU             = new SURFDetector(500, false); // configure how features are detected
            Stopwatch    watch;                                              // measures processing time

            homography = null;                                               // if the images match, this holds the quadrilateral mapping

            //extract features from the object image
            modelKeyPoints = new VectorOfKeyPoint();
            Matrix <float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints); // modelKeyPoints: detected keypoints; modelDescriptors: their feature descriptors



            // extract features from the observed image
            observedKeyPoints = new VectorOfKeyPoint();
            Matrix <float> observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints); // observedKeyPoints: detected keypoints

            //ImagePrecess processor = new ImagePrecess(observedImage.ToBitmap(),320,240);
            //observedDescriptors = processor.GetImageFeature();
            //observedKeyPoints=processor.GetImageVectorOfKeyPoint();


            watch = Stopwatch.StartNew();
            //
            BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null); // find the k nearest matches
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask); // inspects the matches just computed, rejects ambiguous or uncertain ones, and writes the result into mask
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }
            watch.Stop();
            matchTime = watch.ElapsedMilliseconds;
        }
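
        // A hypothetical caller for the FindMatch above, assuming the same Emgu CV 2.x
        // API used throughout these examples; the method name is illustrative only.
        // It draws the matches and, when a homography was found, the projected model
        // region (the same pattern as the commented-out block in Example #1).
        public Image <Bgr, Byte> MatchAndDraw(Image <Gray, Byte> modelImage, Image <Gray, Byte> observedImage)
        {
            long             matchTime;
            VectorOfKeyPoint modelKeyPoints, observedKeyPoints;
            Matrix <int>     indices;
            Matrix <byte>    mask;
            HomographyMatrix homography;

            FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints,
                      out observedKeyPoints, out indices, out mask, out homography);

            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                     indices, new Bgr(255, 0, 0), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            if (homography != null)
            {
                // project the model ROI corners into the observed image
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);
                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
            }

            return(result);
        }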
Example #4
        public Tuple <Image <Bgr, byte>, HomographyMatrix> DrawHomography(Image <Gray, byte> model, Image <Gray, byte> observed, double uniquenessThreshold)
        {
            HomographyMatrix  homography = null;
            Image <Bgr, Byte> result     = observed.Convert <Bgr, byte>();

            SURFDetector     surfCPU = new SURFDetector(500, false);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;
            Matrix <byte>    mask;
            int k = 2;

            modelKeyPoints = surfCPU.DetectKeyPointsRaw(model, null);             // Extract features from the object image
            Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(model, null, modelKeyPoints);

            observedKeyPoints = surfCPU.DetectKeyPointsRaw(observed, null);             // Extract features from the observed image

            if (modelKeyPoints.Size <= 0)
            {
                throw new System.ArgumentException("Can't find any keypoints in your model image!");
            }

            if (observedKeyPoints.Size > 0)
            {
                Matrix <float>            observedDescriptors = surfCPU.ComputeDescriptorsRaw(observed, null, observedKeyPoints);
                BruteForceMatcher <float> matcher             = new BruteForceMatcher <float> (DistanceType.L2);
                matcher.Add(modelDescriptors);

                indices = new Matrix <int> (observedDescriptors.Rows, k);

                using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k)) {
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                    mask = new Matrix <byte> (dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 10)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 10)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                    }
                }

                result = Features2DToolbox.DrawMatches(model, modelKeyPoints, observed, observedKeyPoints,
                                                       indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
            }

            return(new Tuple <Image <Bgr, byte>, HomographyMatrix>(result, homography));
        }
        public Tuple <Image <Bgr, byte>, HomographyMatrix> SURFMatcher_KNN(Image <Gray, byte> model, Image <Gray, byte> observed, SURFDetector surfCPU, List <VectorOfKeyPoint> keyPointsList, double uniquenessThreshold, int TM)
        {
            HomographyMatrix  homography        = null;
            Image <Bgr, Byte> result            = null;
            VectorOfKeyPoint  modelKeyPoints    = keyPointsList.First <VectorOfKeyPoint>();
            VectorOfKeyPoint  observedKeyPoints = keyPointsList.Last <VectorOfKeyPoint>();
            Matrix <int>      indices;
            Matrix <byte>     mask;
            int k = 2;
            BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(DistanceType.L2);


            try
            {
                result = observed.Convert <Bgr, byte>();
                Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(model, null, modelKeyPoints);
                matcher.Add(modelDescriptors);
                if (observedKeyPoints.Size > 0)
                {
                    Matrix <float> observedDescriptors = surfCPU.ComputeDescriptorsRaw(observed, null, observedKeyPoints);
                    indices = new Matrix <int>(observedDescriptors.Rows, k);

                    using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
                    {
                        matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                        mask = new Matrix <byte>(dist.Rows, 1);
                        mask.SetValue(255);
                        Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                    }

                    int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                    if (nonZeroCount >= TM)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                        if (nonZeroCount >= TM)
                        {
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                        }
                    }
                    result = Features2DToolbox.DrawMatches(model, modelKeyPoints, observed, observedKeyPoints,
                                                           indices, new Bgr(100, 200, 214), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
                }
                return(new Tuple <Image <Bgr, byte>, HomographyMatrix>(result, homography));
            }
            catch (Exception)
            {
                throw; // rethrow without resetting the stack trace
            }
        }
Example #6
        /// <summary>
        /// Match SURF features using a brute-force matcher (more accurate but slower)
        /// </summary>
        /// <param name="template">Feature data of the template</param>
        /// <param name="observedScene">Feature data of the observed scene</param>
        /// <returns>The matched-data result</returns>
        public static SURFMatchedData MatchSURFFeatureByBruteForce(SURFFeatureData template, SURFFeatureData observedScene)
        {
            Matrix <byte>    mask;
            int              k = 2;
            double           uniquenessThreshold = 0.5; //default:0.8
            Matrix <int>     indices;
            HomographyMatrix homography = null;
            Stopwatch        watch;

            try
            {
                watch = Stopwatch.StartNew();
                #region bruteForce match for CPU
                //match
                BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(DistanceType.L2Sqr); //default:L2
                matcher.Add(template.GetDescriptors());

                indices = new Matrix <int>(observedScene.GetDescriptors().Rows, k);
                using (Matrix <float> dist = new Matrix <float>(observedScene.GetDescriptors().Rows, k))
                {
                    matcher.KnnMatch(observedScene.GetDescriptors(), indices, dist, k, null);
                    mask = new Matrix <byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                Console.WriteLine("-----------------\nVoteForUniqueness pairCount => " + nonZeroCount.ToString() + "\n-----------------");
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(template.GetKeyPoints(), observedScene.GetKeyPoints(), indices, mask, 1.5, 30); // defaults: 1.5, 10; here: scale increment 1.5, 30 rotation bins
                    Console.WriteLine("VoteForSizeAndOrientation pairCount => " + nonZeroCount.ToString() + "\n-----------------");
                    if (nonZeroCount >= 25)                                                                                                                    // default: 4; raised to 25 here
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(template.GetKeyPoints(), observedScene.GetKeyPoints(), indices, mask, 5);
                    }
                }
                #endregion
                watch.Stop();
                Console.WriteLine("Cal SURF Match time => " + watch.ElapsedMilliseconds.ToString() + "\n-----------------");

                return(new SURFMatchedData(indices, homography, mask, nonZeroCount, template));
            }
            catch (CvException ex)
            {
                System.Windows.Forms.MessageBox.Show(ex.ErrorMessage);
                return(null);
            }
        }
        //==================== Reorganized classes below =====================

        // Determine whether two images match
        // model: the partial image (the part being searched for)
        // observed: the whole image; model may appear as one region inside it
        public bool TwoImageIsMatch(Matrix <float> modelDescriptors, Matrix <float> observedDescriptors, int width, int height, VectorOfKeyPoint modelKeyPoints, VectorOfKeyPoint observedKeyPoints)
        {
            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.8;
            Matrix <int>  indices;
            //
            BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null); // find the k nearest matches
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            HomographyMatrix homography = null;
            int nonZeroCount            = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }
            if (homography != null)
            {
                if (HomographySizeIsLegal(homography, width, height))
                {
                    return(true);
                }
            }

            return(false);
        }
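
        // HomographySizeIsLegal is called above but not included in this listing.
        // A hypothetical sketch of such a sanity check, assuming it rejects degenerate
        // fits by projecting the model corners and requiring a convex quadrilateral;
        // the original criteria are unknown:
        public bool HomographySizeIsLegal(HomographyMatrix homography, int width, int height)
        {
            PointF[] pts = new PointF[] {
                new PointF(0, height),
                new PointF(width, height),
                new PointF(width, 0),
                new PointF(0, 0)
            };
            homography.ProjectPoints(pts);

            // a plausible perspective view keeps the quadrilateral convex:
            // the cross product at every corner must have the same sign
            bool? positive = null;
            for (int i = 0; i < 4; i++)
            {
                PointF a = pts[i];
                PointF b = pts[(i + 1) % 4];
                PointF c = pts[(i + 2) % 4];
                double cross = (b.X - a.X) * (c.Y - b.Y) - (b.Y - a.Y) * (c.X - b.X);
                if (cross == 0)
                {
                    return(false);
                }
                if (positive == null)
                {
                    positive = cross > 0;
                }
                else if (positive != (cross > 0))
                {
                    return(false);
                }
            }
            return(true);
        }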
        // Get the mask by matching the descriptors of the two images
        public Matrix <byte> GetMask(Matrix <float> modelDescriptors, Matrix <float> observedDescriptors)
        {
            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.8;
            Matrix <int>  indices;
            //
            BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null); // find the k nearest matches
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }
            return(mask);
        }
Example #9
        public uint Similar(IImageLocalFeatures feature1, IImageLocalFeatures feature2)
        {
            int                       k = 2;
            double                    uniquenessThreshold = 0.8;
            Matrix <byte>             mask;
            Matrix <int>              indices;
            BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(_distanceType);

            matcher.Add(feature1.Descriptors);

            indices = new Matrix <int>(feature2.Descriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(feature2.Descriptors.Rows, k))
            {
                matcher.KnnMatch(feature2.Descriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            return((uint)CvInvoke.cvCountNonZero(mask));
        }
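
        // A hypothetical usage of Similar: normalize the surviving-match count by the
        // keypoint count of the second image and threshold it. The KeyPoints property
        // and the 0.075 threshold (cf. Example #10) are assumptions.
        public bool AreSimilar(IImageLocalFeatures feature1, IImageLocalFeatures feature2)
        {
            uint   goodMatches = Similar(feature1, feature2);
            double score       = (double)goodMatches / feature2.KeyPoints.Size;

            return(score > 0.075);
        }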
Example #10
      private static bool IsModelInObserved( Image<Gray, byte> modelImage, Image<Gray, byte> observedImage, double similarityThreshold = 0.075 )
      {
         var surfCpu = new SURFDetector(500, false);

         Matrix<byte> mask;
         int k = 2;
         double uniquenessThreshold = 0.8;

         //extract features from the object image
         var modelKeyPoints = surfCpu.DetectKeyPointsRaw( modelImage, null );
         Matrix<float> modelDescriptors = surfCpu.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

         // extract features from the observed image
         var observedKeyPoints = surfCpu.DetectKeyPointsRaw( observedImage, null );
         Matrix<float> observedDescriptors = surfCpu.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
         BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
         matcher.Add( modelDescriptors );

         var indices = new Matrix<int>( observedDescriptors.Rows, k );
         using ( var dist = new Matrix<float>( observedDescriptors.Rows, k ) )
         {
            matcher.KnnMatch( observedDescriptors, indices, dist, k, null );
            mask = new Matrix<byte>( dist.Rows, 1 );
            mask.SetValue( 255 );
            Features2DToolbox.VoteForUniqueness( dist, uniquenessThreshold, mask );
         }

         int keypointMatchCount = CvInvoke.cvCountNonZero( mask );
         if ( keypointMatchCount >= 4 )
         {
            keypointMatchCount = Features2DToolbox.VoteForSizeAndOrientation( modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20 );
            if ( keypointMatchCount >= 4 )
            {
               Features2DToolbox.GetHomographyMatrixFromMatchedFeatures( modelKeyPoints, observedKeyPoints, indices, mask, 2 );
            }
         }

         var similarity = (double)keypointMatchCount / observedKeyPoints.Size;
         return similarity > similarityThreshold;
      }
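
      // A hypothetical wrapper showing how IsModelInObserved might be called on two
      // image files; the paths and method name are illustrative only.
      private static bool ContainsModel( string modelPath, string observedPath )
      {
         using ( var modelImage = new Image<Gray, byte>( modelPath ) )
         using ( var observedImage = new Image<Gray, byte>( observedPath ) )
         {
            return IsModelInObserved( modelImage, observedImage );
         }
      }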
Example #11
        private void SURFfeature(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, out Matrix <int> indices, out Matrix <byte> mask, out HomographyMatrix homography)
        {
            int          k = 2;
            double       uniquenessThreshold = 0.8;
            SURFDetector surfCPU             = new SURFDetector(300, false);

            homography = null;

            //extract features from the object image
            modelKeyPoints = new VectorOfKeyPoint();
            Matrix <float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);

            // extract features from the observed image
            observedKeyPoints = new VectorOfKeyPoint();
            Matrix <float>            observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);
            BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }
        }
Example #12
        /// <summary>
        /// Product recognition using brute-force matching (more accurate but slower)
        /// </summary>
        /// <param name="template">Feature data of the template</param>
        /// <param name="observedScene">Feature data of the observed scene</param>
        /// <returns>The matched-data result</returns>
        public static SURFMatchedData MatchSURFFeatureByBruteForceForGoods(SURFFeatureData template, SURFFeatureData observedScene)
        {
            //This matrix indicates which row is valid for the matches.
            Matrix <byte> mask;
            //Number of nearest neighbors to search for
            int k = 2;
            //The distance ratio below which a match is considered unique; 0.8 is a good value (NNDR matching)
            double uniquenessThreshold = 0.8; //default:0.8

            //The resulting n*k matrix of descriptor index from the training descriptors
            Matrix <int>     indices;
            HomographyMatrix homography = null;
            Stopwatch        watch;

            try
            {
                watch = Stopwatch.StartNew();
                #region bruteForce match for CPU
                //match
                BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(DistanceType.L2Sqr); //default:L2
                matcher.Add(template.GetDescriptors());

                indices = new Matrix <int>(observedScene.GetDescriptors().Rows, k);
                //The resulting n*k matrix of distance value from the training descriptors
                using (Matrix <float> dist = new Matrix <float>(observedScene.GetDescriptors().Rows, k))
                {
                    matcher.KnnMatch(observedScene.GetDescriptors(), indices, dist, k, null);
                    #region Test Output
                    //for (int i = 0; i < indices.Rows; i++)
                    //{
                    //    for (int j = 0; j < indices.Cols; j++)
                    //    {
                    //        Console.Write(indices[i, j] + " ");
                    //    }
                    //    Console.Write("\n");
                    //}
                    //Console.WriteLine("\n distance");
                    //for (int i = 0; i < dist.Rows; i++)
                    //{
                    //    for (int j = 0; j < dist.Cols; j++)
                    //    {
                    //        Console.Write(dist[i, j] + " ");
                    //    }
                    //    Console.Write("\n");
                    //}
                    //Console.WriteLine("\n");
                    #endregion

                    mask = new Matrix <byte>(dist.Rows, 1);
                    mask.SetValue(255); // initialize mask to all 255: every match starts as valid
                    //http://stackoverflow.com/questions/21932861/how-does-features2dtoolbox-voteforuniqueness-work
                    //see the link above for how VoteForUniqueness works
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }
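
                // Conceptually, VoteForUniqueness (see the link above) applies Lowe's
                // ratio test: for each observed descriptor it compares the best
                // distance against the second-best and zeroes the mask row when the
                // best match is not distinctly better. Roughly equivalent to:
                //
                //   for (int i = 0; i < dist.Rows; i++)
                //       if (dist[i, 0] >= uniquenessThreshold * dist[i, 1])
                //           mask[i, 0] = 0; // ambiguous match: reject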

                int nonZeroCount = CvInvoke.cvCountNonZero(mask); // number of good matches
                Console.WriteLine("-----------------\nVoteForUniqueness pairCount => " + nonZeroCount.ToString() + "\n-----------------");
                if (nonZeroCount >= 4)
                {
                    //rotationBins = 50: rotation differences are quantized into 50 bins, and only matches that agree with the dominant rotation/scale survive
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(template.GetKeyPoints(), observedScene.GetKeyPoints(), indices, mask, 1.5, 50); //defaults: 1.5, 10
                    Console.WriteLine("VoteForSizeAndOrientation pairCount => " + nonZeroCount.ToString() + "\n-----------------");
                    if (nonZeroCount >= 15)                                                                                                                    //default: 4; raised to 15 here
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(template.GetKeyPoints(), observedScene.GetKeyPoints(), indices, mask, 5);
                    }
                }
                #endregion
                watch.Stop();
                Console.WriteLine("Cal SURF Match time => " + watch.ElapsedMilliseconds.ToString() + "\n-----------------");

                return(new SURFMatchedData(indices, homography, mask, nonZeroCount, template));
            }
            catch (CvException ex)
            {
                System.Windows.Forms.MessageBox.Show(ex.ErrorMessage);
                return(null);
            }
        }
Example #13
    public Image <Bgr, Byte> Drawtwo(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage)
    {
        HomographyMatrix homography = null;

        FastDetector     fastCPU = new FastDetector(10, true);
        VectorOfKeyPoint modelKeyPoints;
        VectorOfKeyPoint observedKeyPoints;
        Matrix <int>     indices;

        BriefDescriptorExtractor descriptor = new BriefDescriptorExtractor();

        Matrix <byte> mask;
        int           k = 2;
        double        uniquenessThreshold = 0.8;

        //extract features from the object image
        modelKeyPoints = fastCPU.DetectKeyPointsRaw(modelImage, null);
        Matrix <Byte> modelDescriptors = descriptor.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

        // extract features from the observed image
        observedKeyPoints = fastCPU.DetectKeyPointsRaw(observedImage, null);
        Matrix <Byte>            observedDescriptors = descriptor.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
        BruteForceMatcher <Byte> matcher             = new BruteForceMatcher <Byte>(DistanceType.L2); // note: for binary BRIEF descriptors, Hamming distance is usually preferred over L2

        matcher.Add(modelDescriptors);

        indices = new Matrix <int>(observedDescriptors.Rows, k);
        using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
        {
            matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
            mask = new Matrix <byte>(dist.Rows, 1);
            mask.SetValue(255);
            Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
        }

        int nonZeroCount = CvInvoke.cvCountNonZero(mask);
        //print("nonZeroCount is "+nonZeroCount);
        if (nonZeroCount >= 4)
        {
            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
            if (nonZeroCount >= 4)
            {
                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(
                    modelKeyPoints, observedKeyPoints, indices, mask, 2);
            }
        }

        //Draw the matched keypoints
        Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                 indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

        #region draw the projected region on the image
        if (homography != null)
        {  //draw a rectangle along the projected model
            Rectangle rect = modelImage.ROI;
            PointF[]  pts  = new PointF[] {
                new PointF(rect.Left, rect.Bottom),
                new PointF(rect.Right, rect.Bottom),
                new PointF(rect.Right, rect.Top),
                new PointF(rect.Left, rect.Top)
            };
            homography.ProjectPoints(pts);
            //area = Math.Abs((rect.Top - rect.Bottom) * (rect.Right - rect.Left));
            result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(System.Drawing.Color.Red), 5);
        }
        #endregion



        return(result);
    }
Example #14
        public static void FindMatch(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, out Matrix <int> indices, out Matrix <byte> mask, out HomographyMatrix homography)
        {
            int          k = 2;
            double       uniquenessThreshold = 0.8;
            SURFDetector surfCPU             = new SURFDetector(500, false);
            Stopwatch    watch;

            homography = null;

            if (GpuInvoke.HasCuda)
            {
                GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
                using (GpuImage <Gray, Byte> gpuModelImage = new GpuImage <Gray, byte>(modelImage))
                    //extract features from the object image
                    using (GpuMat <float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat <float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (GpuBruteForceMatcher <float> matcher = new GpuBruteForceMatcher <float>(DistanceType.L2))
                            {
                                modelKeyPoints = new VectorOfKeyPoint();
                                surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                watch = Stopwatch.StartNew();

                                // extract features from the observed image
                                using (GpuImage <Gray, Byte> gpuObservedImage = new GpuImage <Gray, byte>(observedImage))
                                    using (GpuMat <float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat <float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuObservedDescriptors.Size.Height, k, 1, true))
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuObservedDescriptors.Size.Height, k, 1, true))
                                                    using (GpuMat <Byte> gpuMask = new GpuMat <byte>(gpuMatchIndices.Size.Height, 1, 1))
                                                        using (Stream stream = new Stream())
                                                        {
                                                            matcher.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                                                            indices = new Matrix <int>(gpuMatchIndices.Size);
                                                            mask    = new Matrix <byte>(gpuMask.Size);

                                                            //gpu implementation of VoteForUniqueness
                                                            using (GpuMat <float> col0 = gpuMatchDist.Col(0))
                                                                using (GpuMat <float> col1 = gpuMatchDist.Col(1))
                                                                {
                                                                    GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
                                                                    GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                                                                }

                                                            observedKeyPoints = new VectorOfKeyPoint();
                                                            surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                                            //wait for the stream to complete its tasks
                                                            //We can perform other CPU-intensive work here while we are waiting for the stream to complete.
                                                            stream.WaitForCompletion();

                                                            gpuMask.Download(mask);
                                                            gpuMatchIndices.Download(indices);

                                                            if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                                                            {
                                                                int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                                                                if (nonZeroCount >= 4)
                                                                {
                                                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                                                                }
                                                            }

                                                            watch.Stop();
                                                        }
                            }
            }
            else
            {
                //extract features from the object image
                modelKeyPoints = new VectorOfKeyPoint();
                Matrix <float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);

                watch = Stopwatch.StartNew();

                // extract features from the observed image
                observedKeyPoints = new VectorOfKeyPoint();
                Matrix <float>            observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);
                BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);
                matcher.Add(modelDescriptors);

                indices = new Matrix <int>(observedDescriptors.Rows, k);
                using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
                {
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                    mask = new Matrix <byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                    }
                }

                watch.Stop();
            }
            matchTime = watch.ElapsedMilliseconds;
        }
Example #15
    public Image <Gray, byte> ObjectTrackingSurf(Image <Gray, byte> liveImg, Image <Gray, byte> templateImg, bool showOnLiveImg)
    {
        vkpLiveKeyPoint    = surfDetector.DetectKeyPointsRaw(liveImg, null);
        mtxLiveDescriptors = surfDetector.ComputeDescriptorsRaw(liveImg, null, vkpLiveKeyPoint);

        vkpTemplateKeyPoint    = surfDetector.DetectKeyPointsRaw(templateImg, null);
        mtxTemplateDescriptors = surfDetector.ComputeDescriptorsRaw(templateImg, null, vkpTemplateKeyPoint);

        bruteForceMatcher = new BruteForceMatcher <Single> (DistanceType.L2);
        bruteForceMatcher.Add(mtxTemplateDescriptors);

        mtxMatchIndices = new Matrix <int> (mtxLiveDescriptors.Rows, KNumNearestNeighbors);
        mtxDistance     = new Matrix <Single> (mtxLiveDescriptors.Rows, KNumNearestNeighbors);

        bruteForceMatcher.KnnMatch(mtxLiveDescriptors, mtxMatchIndices, mtxDistance, KNumNearestNeighbors, null);

        mtxMask = new Matrix <Byte> (mtxDistance.Rows, 1);
        mtxMask.SetValue(255);
        Features2DToolbox.VoteForUniqueness(mtxDistance, UniquenessThreshold, mtxMask);

        NumNonZeroElements = CvInvoke.cvCountNonZero(mtxMask);
        if (NumNonZeroElements >= 4)
        {
            NumNonZeroElements = Features2DToolbox.VoteForSizeAndOrientation(vkpTemplateKeyPoint,
                                                                             vkpLiveKeyPoint,
                                                                             mtxMatchIndices,
                                                                             mtxMask,
                                                                             ScaleIncrement,
                                                                             RotationBins);
            if (NumNonZeroElements >= 4)
            {
                homographyMatrix = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(vkpTemplateKeyPoint,
                                                                                            vkpLiveKeyPoint,
                                                                                            mtxMatchIndices,
                                                                                            mtxMask,
                                                                                            RansacReprojectionThreshold);
            }
        }
        //templateImgCopy = templateImg.Copy ();
        //templateImgCopy.Draw (new Rectangle (1, 1, templateImgCopy.Width - 3, templateImgCopy.Height - 3), boxGray, 2);
        liveImgCopy = liveImg.Copy();         //.ConcateHorizontal(templateImgCopy);

        if (homographyMatrix != null)
        {
            rect.X       = 0;
            rect.Y       = 0;
            rect.Width   = templateImg.Width;
            rect.Height  = templateImg.Height;
            pointsF[0].X = rect.Left; pointsF[0].Y = rect.Top;
            pointsF[1].X = rect.Right; pointsF[1].Y = rect.Top;
            pointsF[2].X = rect.Right; pointsF[2].Y = rect.Bottom;
            pointsF[3].X = rect.Left; pointsF[3].Y = rect.Bottom;

            homographyMatrix.ProjectPoints(pointsF);
            //Debug.Log("live w: "+ liveImgCopy.Width + "live h: " + liveImgCopy.Height);
            //Debug.Log ("pf0: " + pointsF[0] + "pf1: "+ pointsF[1] + " pf2: " + pointsF[2] + " pf3: " + pointsF[3]);

            centerPointF.X = 0;
            centerPointF.Y = 0;
            for (int i = 0; i < pointsF.Length; ++i)
            {
                centerPointF.X += pointsF[i].X;
                centerPointF.Y += pointsF[i].Y;
            }
            centerPointF.X = centerPointF.X / 4f;
            centerPointF.Y = centerPointF.Y / 4f;
            //Debug.Log("centerF: " + centerPointF);
            points[0] = Point.Round(pointsF[0]);
            points[1] = Point.Round(pointsF[1]);
            points[2] = Point.Round(pointsF[2]);
            points[3] = Point.Round(pointsF[3]);

            liveImgCopy.DrawPolyline(points, true, boxGray, 4);
        }
        if (showOnLiveImg)
        {
            return(liveImgCopy);
        }
        else
        {
            // note: templateImgCopy is only assigned by the commented-out code above, so it may be null here
            return(templateImgCopy);
        }
    }
Example #16
        public Image <Bgr, Byte> DrawResult(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out long matchTime, out double area, int minarea, out Point center)
        {
            center = new Point(320, 240);
            Stopwatch watch;

            area = 0;
            double modelarea = (modelImage.ROI.Right - modelImage.ROI.Left) * (modelImage.ROI.Bottom - modelImage.ROI.Top);
            //homography matrix
            HomographyMatrix homography = null;

            //SURF detector
            SURFDetector surfCPU = new SURFDetector(500, false);

            //keypoints of the model image and the observed image
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;

            Matrix <int>  indices;
            Matrix <byte> mask;

            //k for the kNN matching
            int k = 2;
            //uniqueness filtering threshold
            double uniquenessThreshold = 0.8;


            //extract SURF keypoints and descriptors from the model image
            modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
            Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

            watch = Stopwatch.StartNew();

            // extract SURF keypoints and descriptors from the observed image
            observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
            Matrix <float> observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);

            if (observedDescriptors == null)
            {
                watch.Stop(); matchTime = watch.ElapsedMilliseconds;
                return(null);
            }

            //match the feature descriptors using a brute-force (BF) matcher
            BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);
            indices = new Matrix <int>(observedDescriptors.Rows, k);
            //filter match pairs by descriptor distance
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                //2-nearest-neighbor descriptor matching
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                //successful matches are recorded in mask
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                //filter out ambiguous matches using the uniqueness threshold; the survivors remain in mask
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 10)
            {
                //filter out matches whose rotation or scale is inconsistent; the survivors remain in mask
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 10)
                {
                    //build the homography from the remaining matches
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }

            watch.Stop();

            //draw the matched feature points
            //Image<Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,indices, new Bgr(0, 0, 255), new Bgr(0, 255, 0), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
            Image <Bgr, byte> result = null;

            System.Drawing.Bitmap bm = observedImage.ToBitmap();
            result = new Image <Bgr, byte>(bm);
            #region draw the projected region on the image
            //draw the projected homography region
            if (homography != null)
            {
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                //project the four corners of the model image through the homography and draw the resulting quadrilateral
                homography.ProjectPoints(pts);
                area = getarea(pts); double xsum = 0; double ysum = 0;
                foreach (PointF point in pts)
                {
                    xsum += point.X; ysum += point.Y;
                }
                center = new Point(Convert.ToInt32(xsum / 4), Convert.ToInt32(ysum / 4));
                if (area > minarea)
                {
                    Image <Bgr, byte> temp = new Image <Bgr, Byte>(result.Width, result.Height);
                    temp.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);

                    //temp.Save("D:\\temp\\" + (++index) + ".jpg");

                    int a = CountContours(temp.ToBitmap());
                    if (a == 2)
                    {
                        result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
                    }
                    else
                    {
                        matchTime = 0; area = 0; return(result);
                    }
                }
            }
            else
            {
                area = 0;
            }
            #endregion

            matchTime = watch.ElapsedMilliseconds;

            return(result);
        }
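
        // getarea and CountContours are used above but not included in this listing.
        // A hypothetical sketch of getarea, assuming it computes the area of the
        // projected quadrilateral via the shoelace formula; the original may differ.
        private static double getarea(PointF[] pts)
        {
            double area = 0;

            for (int i = 0; i < pts.Length; i++)
            {
                PointF a = pts[i];
                PointF b = pts[(i + 1) % pts.Length];
                area += a.X * b.Y - b.X * a.Y;
            }
            return(Math.Abs(area) / 2.0);
        }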
Example #17
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        public static Image <Bgr, Byte> Draw(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, int state, out long matchTime, out int p)
        {
            Stopwatch        watch;
            HomographyMatrix homography = null;


            SURFDetector     surfCPU = new SURFDetector(500, false);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;

            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.8;

            if (state == 1)
            {
                uniquenessThreshold = 0.8; // note: no-op, same as the default set above
            }



            //extract features from the object image
            modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
            Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

            watch = Stopwatch.StartNew();

            // extract features from the observed image
            observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
            Matrix <float>            observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
            BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }



            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 1)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 1)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }

            watch.Stop();

            p = mask.ManagedArray.OfType <byte>().ToList().Where(q => q > 0).Count(); // p = number of surviving matches (non-zero mask entries)


            //Draw the matched keypoints
            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            matchTime = watch.ElapsedMilliseconds;



            return(result);
        }
Example #18
        public static Image <Bgr, Byte> FAST(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage)
        {
            bool isFound = false;

            long      matchTime;
            Stopwatch watch;

            HomographyMatrix homography = null;

            FastDetector     fastCPU = new FastDetector(10, true);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;

            BriefDescriptorExtractor descriptor = new BriefDescriptorExtractor();

            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.8;

            watch = Stopwatch.StartNew();

            //extract features from the object image
            modelKeyPoints = fastCPU.DetectKeyPointsRaw(modelImage, null);
            Matrix <Byte> modelDescriptors = descriptor.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

            // extract features from the observed image
            observedKeyPoints = fastCPU.DetectKeyPointsRaw(observedImage, null);
            Matrix <Byte>            observedDescriptors = descriptor.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
            BruteForceMatcher <Byte> matcher             = new BruteForceMatcher <Byte>(DistanceType.L2); // note: Hamming distance is usually preferred for binary BRIEF descriptors

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(
                        modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }

            watch.Stop();

            //Draw the matched keypoints
            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            #region draw the projected region on the image
            if (homography != null)
            {  //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                if (CvInvoke.cvCountNonZero(mask) >= 10)
                {
                    isFound = true;
                }


                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.LightGreen), 5);
            }
            #endregion

            matchTime = watch.ElapsedMilliseconds;
            _richTextBox1.Clear();
            _richTextBox1.AppendText("object found: " + isFound + "\n");
            _richTextBox1.AppendText("FAST detection time: " + matchTime + "ms\n");
            _richTextBox1.AppendText("model features detected: " + modelKeyPoints.Size + "\n");
            _richTextBox1.AppendText("matches found: " + CvInvoke.cvCountNonZero(mask).ToString());

            return(result);
        }
Example #19
        private Image <Bgr, byte> Match(Image <Bgr, byte> image1, Image <Bgr, byte> image2, int flag)
        {
            HomographyMatrix homography      = null;
            SURFDetector     surfDetectorCPU = new SURFDetector(500, false);

            int    k = 2;           //number of matches that we want to find between image1 and image2
            double uniquenessThreshold = 0.8;

            Matrix <int>  indices;
            Matrix <byte> mask;

            VectorOfKeyPoint KeyPointsImage1;
            VectorOfKeyPoint KeyPointsImage2;

            Image <Gray, Byte> Image1G = image1.Convert <Gray, Byte>();
            Image <Gray, Byte> Image2G = image2.Convert <Gray, Byte>();

            if (GpuInvoke.HasCuda)      //Using CUDA, the GPUs can be used for general purpose processing (i.e., not exclusively graphics), speed up performance
            {
                Console.WriteLine("Here");
                GpuSURFDetector surfDetectorGPU = new GpuSURFDetector(surfDetectorCPU.SURFParams, 0.01f);

                // extract features from Image1
                using (GpuImage <Gray, Byte> gpuImage1 = new GpuImage <Gray, byte>(Image1G))                                                     //convert CPU input image to GPUImage(greyscale)
                    using (GpuMat <float> gpuKeyPointsImage1 = surfDetectorGPU.DetectKeyPointsRaw(gpuImage1, null))                              //find key points for image
                        using (GpuMat <float> gpuDescriptorsImage1 = surfDetectorGPU.ComputeDescriptorsRaw(gpuImage1, null, gpuKeyPointsImage1)) //calculate descriptor for each key point
                            using (GpuBruteForceMatcher <float> matcher = new GpuBruteForceMatcher <float>(DistanceType.L2))                     //create a new matcher object
                            {
                                KeyPointsImage1 = new VectorOfKeyPoint();
                                surfDetectorGPU.DownloadKeypoints(gpuKeyPointsImage1, KeyPointsImage1);                                 //copy the Matrix from GPU to CPU

                                // extract features from Image2
                                using (GpuImage <Gray, Byte> gpuImage2 = new GpuImage <Gray, byte>(Image2G))
                                    using (GpuMat <float> gpuKeyPointsImage2 = surfDetectorGPU.DetectKeyPointsRaw(gpuImage2, null))
                                        using (GpuMat <float> gpuDescriptorsImage2 = surfDetectorGPU.ComputeDescriptorsRaw(gpuImage2, null, gpuKeyPointsImage2))

                                            //for each descriptor of image2, find the k best matching points and their distances among image1's descriptors

                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuDescriptorsImage2.Size.Height, k, 1, true))      //stores indices of k best matches
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuDescriptorsImage2.Size.Height, k, 1, true)) //stores distance of k best matches

                                                    using (GpuMat <Byte> gpuMask = new GpuMat <byte>(gpuMatchIndices.Size.Height, 1, 1))               //stores result of comparison
                                                        using (Stream stream = new Stream())
                                                        {
                                                            matcher.KnnMatchSingle(gpuDescriptorsImage2, gpuDescriptorsImage1, gpuMatchIndices, gpuMatchDist, k, null, stream); //matching descriptors of image2 to image1 and storing the k best indices and corresponding distances

                                                            indices = new Matrix <int>(gpuMatchIndices.Size);
                                                            mask    = new Matrix <byte>(gpuMask.Size);

                                                            //GPU implementation of VoteForUniqueness
                                                            using (GpuMat <float> col0 = gpuMatchDist.Col(0))
                                                                using (GpuMat <float> col1 = gpuMatchDist.Col(1))
                                                                {
                                                                    GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream); //passing the stream makes the call asynchronous
                                                                    GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);         //a match is good only if col0 <= 0.8 * col1 (ratio test)
                                                                }

                                                            KeyPointsImage2 = new VectorOfKeyPoint();
                                                            surfDetectorGPU.DownloadKeypoints(gpuKeyPointsImage2, KeyPointsImage2);

                                                            //wait for the stream to complete its tasks
                                                            //we could do other CPU-intensive work here while waiting for the stream to complete
                                                            stream.WaitForCompletion();

                                                            gpuMask.Download(mask);
                                                            gpuMatchIndices.Download(indices);

                                                            if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                                                            {
                                                            int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(KeyPointsImage1, KeyPointsImage2, indices, mask, 1.5, 20); //further filter the matches by voting for a consistent scale and rotation
                                                                //we can create a homography matrix only if we have at least 4 matching points
                                                                if (nonZeroCount >= 4)
                                                                {
                                                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(KeyPointsImage1, KeyPointsImage2, indices, mask, 2);
                                                                }
                                                            }
                                                        }
                            }
            }
            else
            {
                Console.WriteLine("No CUDA");
                //extract features from image1
                KeyPointsImage1 = new VectorOfKeyPoint();
                Matrix <float> DescriptorsImage1 = surfDetectorCPU.DetectAndCompute(Image1G, null, KeyPointsImage1);

                //extract features from image2
                KeyPointsImage2 = new VectorOfKeyPoint();
                Matrix <float>            DescriptorsImage2 = surfDetectorCPU.DetectAndCompute(Image2G, null, KeyPointsImage2);
                BruteForceMatcher <float> matcher           = new BruteForceMatcher <float>(DistanceType.L2);
                matcher.Add(DescriptorsImage1);

                indices = new Matrix <int>(DescriptorsImage2.Rows, k);
                using (Matrix <float> dist = new Matrix <float>(DescriptorsImage2.Rows, k))
                {
                    matcher.KnnMatch(DescriptorsImage2, indices, dist, k, null);
                    mask = new Matrix <byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(KeyPointsImage1, KeyPointsImage2, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(KeyPointsImage1, KeyPointsImage2, indices, mask, 2);
                    }
                }
            }
            Image <Bgr, Byte> mImage = image1.Convert <Bgr, Byte>();
            Image <Bgr, Byte> oImage = image2.Convert <Bgr, Byte>();
            Image <Bgr, Byte> result = new Image <Bgr, byte>(mImage.Width + oImage.Width, mImage.Height);

            //Image<Bgr, Byte> temp = Features2DToolbox.DrawMatches(image1, KeyPointsImage1, image2, KeyPointsImage2, indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            if (homography != null)
            {  //draw a rectangle along the projected model
                Rectangle rect = image1.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };

                homography.ProjectPoints(pts);

                HomographyMatrix origin = new HomographyMatrix();                //identity transform: copy the left image into the mosaic without any shift
                origin.SetIdentity();
                origin.Data[0, 2] = 0;
                origin.Data[1, 2] = 0;
                Image <Bgr, Byte> mosaic = new Image <Bgr, byte>(mImage.Width + oImage.Width, mImage.Height * 2);

                Image <Bgr, byte> warp_image = mosaic.Clone();
                mosaic = mImage.WarpPerspective(origin, mosaic.Width, mosaic.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR, Emgu.CV.CvEnum.WARP.CV_WARP_DEFAULT, new Bgr(0, 0, 0));

                warp_image = oImage.WarpPerspective(homography, warp_image.Width, warp_image.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR, Emgu.CV.CvEnum.WARP.CV_WARP_INVERSE_MAP, new Bgr(200, 0, 0));
                Image <Gray, byte> warp_image_mask = oImage.Convert <Gray, byte>();
                warp_image_mask.SetValue(new Gray(255));
                Image <Gray, byte> warp_mosaic_mask = mosaic.Convert <Gray, byte>();
                warp_mosaic_mask.SetZero();
                warp_mosaic_mask = warp_image_mask.WarpPerspective(homography, warp_mosaic_mask.Width, warp_mosaic_mask.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR, Emgu.CV.CvEnum.WARP.CV_WARP_INVERSE_MAP, new Gray(0));

                warp_image.Copy(mosaic, warp_mosaic_mask);
                if (flag == 1)
                {
                    Console.WriteLine("Using Image Blending");
                    return(blend(mosaic, warp_image, warp_mosaic_mask, 2));
                }
                else
                {
                    Console.WriteLine("No Image Blending");
                    return(mosaic);
                }
            }
            return(null);
        }
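A hypothetical caller for the stitching method above (the file names and the use of flag == 1 for blending are assumptions):

            //Hypothetical usage: stitch two overlapping frames into a mosaic with blending (flag == 1)
            Image <Bgr, byte> left   = new Image <Bgr, byte>("left.jpg");
            Image <Bgr, byte> right  = new Image <Bgr, byte>("right.jpg");
            Image <Bgr, byte> mosaic = Match(left, right, 1);
            if (mosaic == null)
            {
                Console.WriteLine("Homography could not be estimated; not enough good matches.");
            }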
Example #20
        public static Image <Bgr, Byte> Draw(String modelImageFileName, String observedImageFileName, out long matchTime)
        {
            Image <Gray, Byte> modelImage    = new Image <Gray, byte>(modelImageFileName);
            Image <Gray, Byte> observedImage = new Image <Gray, byte>(observedImageFileName);
            Stopwatch          watch;
            HomographyMatrix   homography = null;

            SURFDetector surfCPU = new SURFDetector(600, false);

            //SIFTDetector surfCPU = new SIFTDetector();
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;

            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.8;

            //extract features from the object image
            modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
            Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

            watch = Stopwatch.StartNew();

            // extract features from the observed image
            observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
            Matrix <float>            observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
            BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                //nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }

            watch.Stop();

            Image <Bgr, Byte> modelImage2    = new Image <Bgr, Byte>(modelImageFileName);
            Image <Bgr, Byte> observedImage2 = new Image <Bgr, Byte>(observedImageFileName);

            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage2, modelKeyPoints, observedImage2, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            if (homography != null)
            {
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
            }
            for (int i = 0; i < observedKeyPoints.Size; ++i)
            {
                Bgr   pixel = observedImage2[(int)observedKeyPoints[i].Point.Y, (int)observedKeyPoints[i].Point.X];
                Color color = Color.FromArgb((int)pixel.Red, (int)pixel.Green, (int)pixel.Blue);
                float hue        = color.GetHue();
                float sat        = color.GetSaturation();
                float bright     = color.GetBrightness();
                float satThr     = 0.0f / 240.0f;
                float brightThr  = 40.0f / 240.0f;
                float brightThr2 = 15.0f / 24.0f;
                if (sat < satThr && bright < brightThr)
                {
                    continue;
                }
                if (bright > brightThr2)
                {
                    result.Draw(new CircleF(observedKeyPoints[i].Point, 4), new Bgr(Color.White), -1);
                    continue;
                }
                if (hue > 230)//red
                {
                    result.Draw(new CircleF(observedKeyPoints[i].Point, 4), new Bgr(Color.Red), -1);
                }
                //else if(hue>180)//purple
                //    result.Draw(new CircleF(observedKeyPoints[i].Point, 4), new Bgr(Color.Purple), -1);
                else if (hue > 120)//blue
                {
                    result.Draw(new CircleF(observedKeyPoints[i].Point, 4), new Bgr(Color.Blue), -1);
                }
                else if (hue > 60) //green
                {
                    result.Draw(new CircleF(observedKeyPoints[i].Point, 4), new Bgr(Color.Yellow), -1);
                }
                else if (hue > 30)//yellow
                {
                    result.Draw(new CircleF(observedKeyPoints[i].Point, 4), new Bgr(Color.Yellow), -1);
                }
                else
                {
                    result.Draw(new CircleF(observedKeyPoints[i].Point, 4), new Bgr(Color.Red), -1);
                }
            }

            matchTime = watch.ElapsedMilliseconds;

            return(result);
        }
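The hue thresholds in the loop above slice the hue circle returned by System.Drawing.Color.GetHue (degrees in [0, 360)). A hypothetical helper restating the same mapping, for reference only:

        //Hypothetical helper equivalent to the branch chain above
        private static Bgr HueToMarkerColor(float hue)
        {
            if (hue > 230) { return new Bgr(Color.Red); }    //red end of the hue circle
            if (hue > 120) { return new Bgr(Color.Blue); }   //blue
            if (hue > 60)  { return new Bgr(Color.Yellow); } //green range (drawn in yellow, as above)
            if (hue > 30)  { return new Bgr(Color.Yellow); } //yellow
            return new Bgr(Color.Red);                       //low hue: red/orange
        }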
Example #21
        public static Image <Bgr, Byte> Parallelogram(String modelImageFileName, String observedImageFileName, out long matchTime)
        {
            //Image<Gray, Byte> cannyEdges = gray.Canny(cannyThreshold, cannyThresholdLinking);
            //Load the image from file
            Image <Bgr, Byte> observedImage = new Image <Bgr, byte>(observedImageFileName);
            Stopwatch         watch;
            HomographyMatrix  homography = null;

            watch = Stopwatch.StartNew();

            Image <Gray, Byte> graySoft = observedImage.Convert <Gray, Byte>();//.PyrDown().PyrUp();
            //ImageViewer.Show(graySoft, "graysoft");
            //Image<Gray, Byte> gray = graySoft.SmoothGaussian(3);
            //ImageViewer.Show(gray, "graysoft");
            //gray = gray.AddWeighted(graySoft, 1.5, -0.5, 0);
            //ImageViewer.Show(graySoft, "graysoft");

            Gray cannyThreshold             = new Gray(149);
            Gray cannyThresholdLinking      = new Gray(149);
            Gray circleAccumulatorThreshold = new Gray(1000);

            Image <Gray, Byte> cannyEdges = graySoft.Canny(cannyThreshold, cannyThresholdLinking);
            Image <Gray, Byte> modelImage = new Image <Gray, Byte>(modelImageFileName).Canny(cannyThreshold, cannyThresholdLinking);
            SURFDetector       surfCPU    = new SURFDetector(200, false);
            VectorOfKeyPoint   modelKeyPoints;
            VectorOfKeyPoint   observedKeyPoints;
            Matrix <int>       indices;

            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.99;

            //extract features from the object image
            modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
            Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

            watch = Stopwatch.StartNew();

            // extract features from the observed image
            observedKeyPoints = surfCPU.DetectKeyPointsRaw(cannyEdges, null);
            Matrix <float>            observedDescriptors = surfCPU.ComputeDescriptorsRaw(cannyEdges, null, observedKeyPoints);
            BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                //nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }

            watch.Stop();

            //Image<Bgr, Byte> modelImage2 = new Image<Bgr, Byte>(modelImageFileName);
            //Image<Bgr, Byte> observedImage2 = new Image<Bgr, Byte>(observedImageFileName);

            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, cannyEdges, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            if (homography != null)
            {
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
            }
            matchTime = watch.ElapsedMilliseconds;
            return(result);
        }
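A hypothetical caller for the Canny-based matcher above (file names are assumptions):

            long time;
            Image <Bgr, Byte> drawn = Parallelogram("model.jpg", "scene.jpg", out time);
            Emgu.CV.UI.ImageViewer.Show(drawn, String.Format("Matched in {0} ms", time));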
Example #22
        /// <summary>
        /// Environment signboard recognition using brute-force matching (more accurate but slower)
        /// </summary>
        /// <param name="template">SURF feature data of the template</param>
        /// <param name="observedScene">SURF feature data of the observed scene to match against</param>
        /// <returns>Returns the matched-data object</returns>
        public static SURFMatchedData MatchSURFFeatureByBruteForceForObjs(SURFFeatureData template, SURFFeatureData observedScene)
        {
            //This matrix indicates which row is valid for the matches.
            Matrix <byte> mask;
            //Number of nearest neighbors to search for
            int k = 5;
            //The distance ratio below which a match is considered unique; 0.8 is a common default (NNDR ratio test)
            double uniquenessThreshold = 0.5;  //default 0.8

            //The resulting n*k matrix of descriptor index from the training descriptors
            Matrix <int>     trainIdx;
            HomographyMatrix homography = null;
            Stopwatch        watch;

            try
            {
                watch = Stopwatch.StartNew();
                #region Surf for CPU
                //match
                BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(DistanceType.L2Sqr);
                matcher.Add(template.GetDescriptors());

                trainIdx = new Matrix <int>(observedScene.GetDescriptors().Rows, k);
                //The resulting n*k matrix of distance value from the training descriptors
                using (Matrix <float> distance = new Matrix <float>(observedScene.GetDescriptors().Rows, k))
                {
                    matcher.KnnMatch(observedScene.GetDescriptors(), trainIdx, distance, k, null);
                    mask = new Matrix <byte>(distance.Rows, 1);
                    mask.SetValue(255); //initialize the mask so that every match is initially considered valid
                    //how VoteForUniqueness works:
                    //http://stackoverflow.com/questions/21932861/how-does-features2dtoolbox-voteforuniqueness-work
                    Features2DToolbox.VoteForUniqueness(distance, uniquenessThreshold, mask);
                }

                Image <Bgr, byte> result = null;
                int nonZeroCount         = CvInvoke.cvCountNonZero(mask); //number of good matches
                Console.WriteLine("VoteForUniqueness nonZeroCount=> " + nonZeroCount.ToString());
                if (nonZeroCount >= (template.GetKeyPoints().Size * 0.2)) //require at least 20% of the template keypoints to survive the uniqueness vote
                {
                    //scaleIncrement = 1.2, rotationBins = 50: matched keypoints vote for a consistent scale and rotation, e.g. rotations of 60 and 50 degrees are considered similar (author's note: defaults 1.5, 10)
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(template.GetKeyPoints(), observedScene.GetKeyPoints(), trainIdx, mask, 1.2, 50);
                    Console.WriteLine("VoteForSizeAndOrientation nonZeroCount=> " + nonZeroCount.ToString());
                    if (nonZeroCount >= (template.GetKeyPoints().Size * 0.5)) //require at least 50% of the template keypoints (the usual minimum is 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(template.GetKeyPoints(), observedScene.GetKeyPoints(), trainIdx, mask, 5);
                    }

                    PointF[] matchPts = GetMatchBoundingBox(homography, template);

                    //Draw the matched keypoints
                    result = Features2DToolbox.DrawMatches(template.GetImg(), template.GetKeyPoints(), observedScene.GetImg(), observedScene.GetKeyPoints(),
                                                           trainIdx, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.NOT_DRAW_SINGLE_POINTS);
                    if (matchPts != null)
                    {
                        result.DrawPolyline(Array.ConvertAll <PointF, Point>(matchPts, Point.Round), true, new Bgr(Color.Red), 2);
                    }
                }
                #endregion
                watch.Stop();
                Console.WriteLine("\nCal SURF Match time=======\n=> " + watch.ElapsedTicks.ToString() + "ms\nCal SURF Match time=======");


                return(new SURFMatchedData(trainIdx, homography, mask, nonZeroCount, template));
            }
            catch (CvException ex)
            {
                System.Windows.Forms.MessageBox.Show(ex.ErrorMessage);
                return(null);
            }
        }
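GetMatchBoundingBox is not shown in this example; a plausible sketch, consistent with the corner-projection pattern used by the other examples (the exact body is an assumption):

        //Plausible sketch of GetMatchBoundingBox: project the template's corners
        //through the homography; returns null when no homography was found
        private static PointF[] GetMatchBoundingBox(HomographyMatrix homography, SURFFeatureData template)
        {
            if (homography == null)
            {
                return null;
            }
            Rectangle rect = template.GetImg().ROI;
            PointF[]  pts  = new PointF[] {
                new PointF(rect.Left, rect.Bottom),
                new PointF(rect.Right, rect.Bottom),
                new PointF(rect.Right, rect.Top),
                new PointF(rect.Left, rect.Top)
            };
            homography.ProjectPoints(pts);
            return pts;
        }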
Example #23
        private void ProcessFrame(object sender, EventArgs arg)
        {
            Image <Bgr, Byte>  frame         = _capture.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            Image <Gray, Byte> grayframe     = frame.Convert <Gray, Byte>();
            Image <Gray, Byte> modelImage    = new Image <Gray, byte>("DataPlate/" + 10 + ".jpg");
            Image <Gray, Byte> observedImage = grayframe;
            Stopwatch          watch;
            HomographyMatrix   homography = null;
            SURFDetector       surfCPU    = new SURFDetector(500, false);
            VectorOfKeyPoint   modelKeyPoints;
            VectorOfKeyPoint   observedKeyPoints;
            Matrix <int>       indices;
            Matrix <float>     dist;
            Matrix <byte>      mask;


            if (GpuInvoke.HasCuda)
            {
                GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
                using (GpuImage <Gray, Byte> gpuModelImage = new GpuImage <Gray, byte>(modelImage))

                    #region SURF
                    //extract features from the object image
                    using (GpuMat <float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat <float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (GpuBruteForceMatcher matcher = new GpuBruteForceMatcher(GpuBruteForceMatcher.DistanceType.L2))
                            {
                                modelKeyPoints = new VectorOfKeyPoint();
                                surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                watch = Stopwatch.StartNew();

                                // extract features from the observed image
                                using (GpuImage <Gray, Byte> gpuObservedImage = new GpuImage <Gray, byte>(observedImage))
                                    using (GpuMat <float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat <float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuObservedDescriptors.Size.Height, 2, 1))
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuMatchIndices.Size, 1))
                                                {
                                                    observedKeyPoints = new VectorOfKeyPoint();
                                                    surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);
                                                    matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, 2, null);
                                                    indices = new Matrix <int>(gpuMatchIndices.Size);
                                                    dist    = new Matrix <float>(indices.Size);
                                                    gpuMatchIndices.Download(indices);
                                                    gpuMatchDist.Download(dist);

                                                    mask = new Matrix <byte>(dist.Rows, 1);

                                                    mask.SetValue(255);

                                                    Features2DTracker.VoteForUniqueness(dist, 0.8, mask);

                                                    int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                                                    if (nonZeroCount >= 4)
                                                    {
                                                        nonZeroCount = Features2DTracker.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                                                        if (nonZeroCount >= 4)
                                                        {
                                                            homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 3);
                                                        }
                                                    }

                                                    watch.Stop();
                                                }
                            }
                #endregion
            }
            else
            {
                //extract features from the object image
                modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
                //MKeyPoint[] kpts = modelKeyPoints.ToArray();
                Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);
                watch = Stopwatch.StartNew();

                // extract features from the observed image
                observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
                Matrix <float>    observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
                BruteForceMatcher matcher             = new BruteForceMatcher(BruteForceMatcher.DistanceType.L2F32);
                matcher.Add(modelDescriptors);
                int k = 2;
                indices = new Matrix <int>(observedDescriptors.Rows, k);

                dist = new Matrix <float>(observedDescriptors.Rows, k);
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);

                mask = new Matrix <byte>(dist.Rows, 1);

                mask.SetValue(255);

                Features2DTracker.VoteForUniqueness(dist, 0.8, mask);

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 20)
                {
                    nonZeroCount = Features2DTracker.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 20)
                    {
                        homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 3);
                        XMLData();
                    }
                    else
                    {
                        textBox1.Text = string.Empty;
                        textBox2.Text = string.Empty;
                        textBox3.Text = string.Empty;
                        textBox4.Text = string.Empty;
                        textBox5.Text = string.Empty;
                    }
                }
                watch.Stop();
                #region draw the projected region on the image
                if (homography != null)
                {  //draw a rectangle along the projected model
                    Rectangle rect = modelImage.ROI;
                    PointF[]  pts  = new PointF[] {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    homography.ProjectPoints(pts);
                    frame.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 2);
                }
                #endregion
                CaptureImageBox.Image = frame;
                DataImageBox.Image    = modelImage;
            }
        }
Example #24
        private void FindStopSign(Image <Bgr, byte> img, List <Image <Gray, Byte> > stopSignList, List <Rectangle> boxList, VectorOfVectorOfPoint contours, int[,] hierachy, int idx)
        {
            for (; idx >= 0; idx = hierachy[idx, 0])
            {
                using (VectorOfPoint c = contours[idx])
                    using (VectorOfPoint approx = new VectorOfPoint())
                    {
                        CvInvoke.ApproxPolyDP(c, approx, CvInvoke.ArcLength(c, true) * 0.02, true);
                        double area = CvInvoke.ContourArea(approx);
                        if (area > 200)
                        {
                            double ratio = CvInvoke.MatchShapes(_octagon, approx, Emgu.CV.CvEnum.ContoursMatchType.I3);

                            if (ratio > 0.1) //not a good match of contour shape
                            {
                                //check children
                                if (hierachy[idx, 2] >= 0)
                                {
                                    FindStopSign(img, stopSignList, boxList, contours, hierachy, hierachy[idx, 2]);
                                }
                                continue;
                            }

                            Rectangle box = CvInvoke.BoundingRectangle(c);

                            Image <Gray, Byte> candidate;
                            using (Image <Bgr, Byte> tmp = img.Copy(box))
                                candidate = tmp.Convert <Gray, byte>();
                            //Emgu.CV.UI.ImageViewer.Show(candidate);
                            //set the value of pixels not in the contour region to zero
                            using (Image <Gray, Byte> mask = new Image <Gray, byte>(box.Size))
                            {
                                mask.Draw(contours, idx, new Gray(255), -1, LineType.EightConnected, null, int.MaxValue, new Point(-box.X, -box.Y));

                                double mean = CvInvoke.Mean(candidate, mask).V0;
                                candidate._ThresholdBinary(new Gray(mean), new Gray(255.0));
                                candidate._Not();
                                mask._Not();
                                candidate.SetValue(0, mask);
                            }

                            int              minMatchCount         = 8;
                            double           uniquenessThreshold   = 0.8;
                            VectorOfKeyPoint _observeredKeypoint   = new VectorOfKeyPoint();
                            Mat              _observeredDescriptor = new Mat();
                            _detector.DetectAndCompute(candidate, null, _observeredKeypoint, _observeredDescriptor, false);

                            if (_observeredKeypoint.Size >= minMatchCount)
                            {
                                int k = 2;
                                //Matrix<int> indices = new Matrix<int>(_observeredDescriptor.Size.Height, k);
                                Matrix <byte> mask;
                                //using (Matrix<float> dist = new Matrix<float>(_observeredDescriptor.Size.Height, k))
                                using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                                {
                                    _modelDescriptorMatcher.KnnMatch(_observeredDescriptor, matches, k, null);
                                    mask = new Matrix <byte>(matches.Size, 1);
                                    mask.SetValue(255);
                                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                                }

                                int nonZeroCount = CvInvoke.CountNonZero(mask);
                                if (nonZeroCount >= minMatchCount)
                                {
                                    boxList.Add(box);
                                    stopSignList.Add(candidate);
                                }
                            }
                        }
                    }
            }
        }
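This example uses the newer Emgu CV 3.x API (Mat, VectorOfVectorOfDMatch, CvInvoke.CountNonZero). The _detector and _modelDescriptorMatcher fields it relies on are not shown; a plausible setup, with the detector choice and the helper name being assumptions:

        //Plausible field setup for the stop-sign finder above (Emgu CV 3.x style)
        private SURF _detector = new SURF(500);                                     //Emgu.CV.XFeatures2D.SURF
        private BFMatcher _modelDescriptorMatcher = new BFMatcher(DistanceType.L2);

        private void InitModelDescriptors(Mat modelImage)
        {
            VectorOfKeyPoint modelKeyPoints = new VectorOfKeyPoint();
            Mat modelDescriptors = new Mat();
            _detector.DetectAndCompute(modelImage, null, modelKeyPoints, modelDescriptors, false);
            _modelDescriptorMatcher.Add(modelDescriptors);                          //train the matcher on the model descriptors
        }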
Example #25
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        public static Image <Bgr, Byte> Draw(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out long matchTime)
        {
            Stopwatch        watch;
            HomographyMatrix homography = null;

            SURFDetector     surfCPU = new SURFDetector(500, false);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;

            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.8;

            if (GpuInvoke.HasCuda)
            {
                GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
                using (GpuImage <Gray, Byte> gpuModelImage = new GpuImage <Gray, byte>(modelImage))
                    //extract features from the object image
                    using (GpuMat <float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat <float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (GpuBruteForceMatcher <float> matcher = new GpuBruteForceMatcher <float>(DistanceType.L2))
                            {
                                modelKeyPoints = new VectorOfKeyPoint();
                                surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                watch = Stopwatch.StartNew();

                                // extract features from the observed image
                                using (GpuImage <Gray, Byte> gpuObservedImage = new GpuImage <Gray, byte>(observedImage))
                                    using (GpuMat <float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat <float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuObservedDescriptors.Size.Height, k, 1, true))
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuObservedDescriptors.Size.Height, k, 1, true))
                                                    using (GpuMat <Byte> gpuMask = new GpuMat <byte>(gpuMatchIndices.Size.Height, 1, 1))
                                                        using (Stream stream = new Stream())
                                                        {
                                                            matcher.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                                                            indices = new Matrix <int>(gpuMatchIndices.Size);
                                                            mask    = new Matrix <byte>(gpuMask.Size);

                                                            //GPU implementation of VoteForUniqueness
                                                            using (GpuMat <float> col0 = gpuMatchDist.Col(0))
                                                                using (GpuMat <float> col1 = gpuMatchDist.Col(1))
                                                                {
                                                                    GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
                                                                    GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                                                                }

                                                            observedKeyPoints = new VectorOfKeyPoint();
                                                            surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                                            //wait for the stream to complete its tasks
                                                            //we could do other CPU-intensive work here while waiting for the stream to complete
                                                            stream.WaitForCompletion();

                                                            gpuMask.Download(mask);
                                                            gpuMatchIndices.Download(indices);

                                                            if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                                                            {
                                                                int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                                                                if (nonZeroCount >= 4)
                                                                {
                                                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                                                                }
                                                            }

                                                            watch.Stop();
                                                        }
                            }
            }
            else
            {
                //extract features from the object image
                modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
                Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

                watch = Stopwatch.StartNew();

                // extract features from the observed image
                observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
                Matrix <float>            observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
                BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);
                matcher.Add(modelDescriptors);

                indices = new Matrix <int>(observedDescriptors.Rows, k);
                using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
                {
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                    mask = new Matrix <byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                    }
                }

                watch.Stop();
            }

            //Draw the matched keypoints
            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            #region draw the projected region on the image
            if (homography != null)
            {  //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
            }
            #endregion

            matchTime = watch.ElapsedMilliseconds;

            return(result);
        }
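A hypothetical caller for the Draw example above (the image file names are assumptions):

            long matchTime;
            Image <Gray, Byte> model = new Image <Gray, Byte>("box.png");
            Image <Gray, Byte> scene = new Image <Gray, Byte>("box_in_scene.png");
            Image <Bgr, Byte>  matchResult = Draw(model, scene, out matchTime);
            Emgu.CV.UI.ImageViewer.Show(matchResult, String.Format("Matched in {0} ms", matchTime));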
Example #26
        public Image <Bgr, float> alignment(Image <Bgr, float> fImage, Image <Bgr, float> lImage)
        {
            HomographyMatrix homography = null;
            SURFDetector     surfCPU    = new SURFDetector(500, false);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;

            Matrix <byte> mask;

            int    k = 2;
            double uniquenessThreshold = 0.8;


            Image <Gray, Byte> fImageG = fImage.Convert <Gray, Byte>();
            Image <Gray, Byte> lImageG = lImage.Convert <Gray, Byte>();

            //extract features from the object image
            modelKeyPoints = new VectorOfKeyPoint();
            Matrix <float> modelDescriptors = surfCPU.DetectAndCompute(fImageG, null, modelKeyPoints);


            // extract features from the observed image
            observedKeyPoints = new VectorOfKeyPoint();
            Matrix <float>            observedDescriptors = surfCPU.DetectAndCompute(lImageG, null, observedKeyPoints);
            BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }
            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(fImageG, modelKeyPoints, lImageG, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            if (homography != null)
            {
                //draw a rectangle along the projected model
                Rectangle rect = fImageG.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);

                Image <Bgr, byte>  mosaic     = new Image <Bgr, byte>(fImageG.Width + fImageG.Width, fImageG.Height);
                Image <Bgr, byte>  warp_image = mosaic.Clone();
                Image <Bgr, float> result2    = new Image <Bgr, float>(fImage.Size);
                Image <Gray, Byte> result3    = new Image <Gray, Byte>(fImage.Size);
                CvInvoke.cvWarpPerspective(fImage.Ptr, result2, homography.Ptr, (int)INTER.CV_INTER_CUBIC + (int)WARP.CV_WARP_FILL_OUTLIERS, new MCvScalar(0));
                return(result2);
            }
            return(null);
        }
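A hypothetical caller for alignment, warping the first frame into the second frame's coordinate system (file names are assumptions):

            Image <Bgr, float> frameA  = new Image <Bgr, float>("a.jpg");
            Image <Bgr, float> frameB  = new Image <Bgr, float>("b.jpg");
            Image <Bgr, float> aligned = alignment(frameA, frameB);
            if (aligned != null)
            {
                aligned.Save("aligned.jpg");
            }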
Example #27
        /// <summary>
        /// Match feature points using symmetry test and RANSAC
        /// </summary>
        /// <param name="image1">input image1</param>
        /// <param name="image2">input image2</param>
        /// <param name="keypoints1">output keypoint1</param>
        /// <param name="keypoints2">output keypoint2</param>
        /// <returns>returns the fundamental matrix</returns>
        public Image <Bgr, Byte> Match(Image <Gray, Byte> image1, Image <Gray, Byte> image2,
                                       ref VectorOfKeyPoint keypoints1, ref VectorOfKeyPoint keypoints2, bool computeModelFeatures)
        {
            //1a. Detection of the SURF features
            keypoints2 = null;
            if (computeModelFeatures == true)
            {
                keypoints1 = this._Detector.DetectKeyPointsRaw(image1, null);
            }
            keypoints2 = this._Detector.DetectKeyPointsRaw(image2, null);

            //1b. Extraction of the SURF descriptors
            Matrix <float> descriptors1 = this._Detector.ComputeDescriptorsRaw(image1, null, keypoints1);
            Matrix <float> descriptors2 = this._Detector.ComputeDescriptorsRaw(image2, null, keypoints2);

            //2. Match the two image descriptors
            //Construction of the matcher
            BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(DistanceType.L2);

            //match image 2's descriptors against image 1's
            //based on k nearest neighbours (with k = 2)
            matcher.Add(descriptors1);
            //Number of nearest neighbors to search for
            int k = 2;
            int n = descriptors2.Rows;
            //The resulting n*k matrix of descriptor index from the training descriptors
            Matrix <int> trainIdx1 = new Matrix <int>(n, k);
            //The resulting n*k matrix of distance value from the training descriptors
            Matrix <float> distance1 = new Matrix <float>(n, k);

            matcher.KnnMatch(descriptors2, trainIdx1, distance1, k, null);
            matcher.Dispose();

            //now match image 1's descriptors against image 2's
            matcher = new BruteForceMatcher <float>(DistanceType.L2);
            matcher.Add(descriptors2);
            n = descriptors1.Rows;
            //The resulting n*k matrix of descriptor index from the training descriptors
            Matrix <int> trainIdx2 = new Matrix <int>(n, k);
            //The resulting n*k matrix of distance value from the training descriptors
            Matrix <float> distance2 = new Matrix <float>(n, k);

            matcher.KnnMatch(descriptors1, trainIdx2, distance2, k, null);

            //3. Remove matches for which the NN ratio is greater than the threshold
            int removed = RatioTest(ref trainIdx1, ref distance1);

            removed = RatioTest(ref trainIdx2, ref distance2);

            //4. Create symmetrical matches
            Matrix <float> symMatches;
            int            symNumber = SymmetryTest(trainIdx1, distance1, trainIdx2, distance2, out symMatches);

            //--------------modified code for zero matches------------
            if (symNumber == 0)  // no proper symmetrical matches, should retry in this case
            {
                return(null);
            }
            //-----------------end modified code----------------------

            Matrix <double> fundamentalMatrix = ApplyRANSAC(symMatches, keypoints1, keypoints2, symNumber);//, image2);

            //         Image<Bgr, Byte> resultImage = Features2DToolbox.DrawMatches(image1, modelKeyPoints, image2, observedKeyPoints,
            //indices, new Bgr(255, 0, 0), new Bgr(0, 255, 0), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            //         return resultImage;
            return(null);   // we do our own drawing of correspondences
        }
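RatioTest and SymmetryTest are not shown here. The symmetry (cross-check) step in part 4 keeps a pair only when the match is mutual; a minimal sketch of that idea, assuming RatioTest marks rejected rows by setting the index to -1:

        //Minimal sketch of a symmetry test: keep (i, j) only when image 2's
        //descriptor i picked image 1's descriptor j AND j picked i back
        private static int SymmetryTestSketch(Matrix <int> trainIdx1, Matrix <int> trainIdx2)
        {
            int symmetricCount = 0;
            for (int i = 0; i < trainIdx1.Rows; i++)
            {
                int j = trainIdx1[i, 0];
                if (j < 0 || j >= trainIdx2.Rows)
                {
                    continue; //rejected by the ratio test, or out of range
                }
                if (trainIdx2[j, 0] == i)
                {
                    symmetricCount++; //mutual best match
                }
            }
            return symmetricCount;
        }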
Example #28
        public static bool FindModelImageInObservedImage(Image <Gray, byte> modelImage, Image <Gray, byte> observedImage)
        {
            var surfCpu = new SURFDetector(500, false);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;

            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.8;

            if (GpuInvoke.HasCuda)
            {
                GpuSURFDetector surfGpu = new GpuSURFDetector(surfCpu.SURFParams, 0.01f);
                using (GpuImage <Gray, byte> gpuModelImage = new GpuImage <Gray, byte>(modelImage))
                    //extract features from the object image
                    using (GpuMat <float> gpuModelKeyPoints = surfGpu.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat <float> gpuModelDescriptors = surfGpu.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (GpuBruteForceMatcher <float> matcher = new GpuBruteForceMatcher <float>(DistanceType.L2))
                            {
                                modelKeyPoints = new VectorOfKeyPoint();
                                surfGpu.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);

                                // extract features from the observed image
                                using (GpuImage <Gray, byte> gpuObservedImage = new GpuImage <Gray, byte>(observedImage))
                                    using (GpuMat <float> gpuObservedKeyPoints = surfGpu.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat <float> gpuObservedDescriptors = surfGpu.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuObservedDescriptors.Size.Height, k, 1, true))
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuObservedDescriptors.Size.Height, k, 1, true))
                                                    using (GpuMat <Byte> gpuMask = new GpuMat <byte>(gpuMatchIndices.Size.Height, 1, 1))
                                                        using (var stream = new Emgu.CV.GPU.Stream())
                                                        {
                                                            matcher.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                                                            indices = new Matrix <int>(gpuMatchIndices.Size);
                                                            mask    = new Matrix <byte>(gpuMask.Size);

                                                            //GPU implementation of VoteForUniqueness
                                                            using (GpuMat <float> col0 = gpuMatchDist.Col(0))
                                                                using (GpuMat <float> col1 = gpuMatchDist.Col(1))
                                                                {
                                                                    GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
                                                                    GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                                                                }

                                                            observedKeyPoints = new VectorOfKeyPoint();
                                                            surfGpu.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                                            //wait for the stream to complete its tasks
                                                            //we could do other CPU-intensive work here while waiting for the stream to complete
                                                            stream.WaitForCompletion();

                                                            gpuMask.Download(mask);
                                                            gpuMatchIndices.Download(indices);

                                                            if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                                                            {
                                                                int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                                                                if (nonZeroCount >= 4)
                                                                {
                                                                    Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                                                                }
                                                                if ((double)nonZeroCount / mask.Height > 0.02)
                                                                {
                                                                    return(true);
                                                                }
                                                            }
                                                        }
                            }
            }
            else
            {
                //extract features from the object image
                modelKeyPoints = surfCpu.DetectKeyPointsRaw(modelImage, null);
                Matrix <float> modelDescriptors = surfCpu.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

                // extract features from the observed image
                observedKeyPoints = surfCpu.DetectKeyPointsRaw(observedImage, null);
                Matrix <float>            observedDescriptors = surfCpu.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
                BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);
                matcher.Add(modelDescriptors);

                indices = new Matrix <int>(observedDescriptors.Rows, k);
                using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
                {
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                    mask = new Matrix <byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                    }
                }

                if ((double)nonZeroCount / mask.Height > 0.02)
                {
                    return(true);
                }
            }

            //Draw the matched keypoints
            //var result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints, indices, new Bgr(0, 0, 255), new Bgr(255, 0, 0), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
            //result.Save( @"C:\Users\D.Markachev\Desktop\bleh-keypoints.jpg" );

            return(false);
        }
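
        // The VoteForUniqueness calls above implement the distance-ratio test: a k=2 match is kept
        // only when the best distance is at most uniquenessThreshold times the second-best one. A
        // minimal stand-alone sketch of the same filter (the method name and plain-array types are
        // illustrative, not part of the Emgu API):
        private static byte[] RatioTestMask(float[,] dist, double uniquenessThreshold)
        {
            // dist is an N x 2 matrix: dist[i, 0] is the best match distance, dist[i, 1] the second best
            int rows = dist.GetLength(0);
            byte[] mask = new byte[rows];
            for (int i = 0; i < rows; i++)
            {
                // keep the match only when the best neighbour is clearly better than the runner-up
                mask[i] = dist[i, 0] <= uniquenessThreshold * dist[i, 1] ? (byte)255 : (byte)0;
            }
            return mask;
        }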
//.......... method that performs the SURF detection ........................................................................
        public void DoSURFDetectAndUpdateForm(object sender, EventArgs e)
        {
            try
            {
                imgSceneColor             = captureWebcam.QueryFrame(); //try to grab a single frame from the camera
                lbPreparingCamera.Visible = false;
            }
            catch (Exception ex)                                    //if that fails, display the error
            {
                this.Text = ex.Message;
            }


            if (imgSceneColor == null)
            {
                this.Text = "error: no frame was read from the camera"; //the next frame could not be read into the image variable
                return;                                                 //without a scene frame there is nothing to process
            }
            if (imgToFindColor == null)                                 //if we do not yet have an image to find...
            {
                ibResult.Image = imgSceneColor.ToBitmap();              //...just show the scene image in the image box
            }
            //at this point the scene frame is valid; if we also have an image to find, we can start the SURF detection

            SURFDetector surfDetector = new SURFDetector(500, false);   //SURF object; the parameters are the hessian threshold (how strong a keypoint must be to count) and the extended flag

            Image <Gray, Byte> imgSceneGray  = null;                    //grayscale scene image
            Image <Gray, Byte> imgToFindGray = null;                    //grayscale image to find

            VectorOfKeyPoint vkpSceneKeyPoints;                         //vector of keypoints in the scene image
            VectorOfKeyPoint vkpToFindKeyPoints;                        //vector of keypoints in the image to find

            Matrix <Single> mtxSceneDescriptors;                        //matrix of scene descriptors used for the nearest-neighbour queries
            Matrix <Single> mtxToFindDescriptor;                        //matrix of descriptors for the image to find

            Matrix <int>    mtxMatchIndices;                            //matrix of descriptor indices, filled in when the descriptors are matched (KnnMatch())
            Matrix <Single> mtxDistance;                                //matrix of distance values, filled in as above
            Matrix <Byte>   mtxMask;                                    //input and output for VoteForUniqueness(), indicating which rows match

            BruteForceMatcher <Single> bruteForceMatcher;               //for each descriptor in the first set, the matcher finds...
                                                                        //...the closest descriptor in the second set it was trained with

            HomographyMatrix homographyMatrix = null;                   //used with ProjectPoints() to locate the found image inside the scene
            int    intKNumNearestNeighbors    = 2;                      //k, the number of nearest neighbours to query
            double dblUniquenessThreshold     = 0.8;                    //distance-ratio threshold for a match to count as unique

            int intNumNonZeroElements;                                  //receives the number of non-zero elements in the mask matrix,...
                                                                        //...from cvCountNonZero() and VoteForSizeAndOrientation()

            //parameters used in the VoteForSizeAndOrientation() calls

            double dblScaleIncrement = 1.5;                      //scale difference between neighbouring bins
            int    intRotationBins   = 20;                       //number of rotation bins covering 360 degrees (with 20 bins, each one covers 18 degrees, 20*18=360)

            double dblRansacReprojectionThreshold = 2.0;         //used with GetHomographyMatrixFromMatchedFeatures(); the maximum allowed reprojection error...
                                                                 //...for a point pair to be treated as an inlier
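            //e.g. with a threshold of 2.0, a scene point that lands more than 2 px away from where the
            //estimated homography projects its matched point is counted as an outlier by RANSAC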

            Rectangle rectImageToFind = new Rectangle();         //rectangle covering the whole image to find

            PointF [] pointsF;                                   //4 points framing the location of the found image in the scene (float)
            Point []  points;                                    //the same 4 points, but as ints

            imgSceneGray = imgSceneColor.Convert <Gray, Byte>(); //convert the scene to grayscale

            if (isImgToFind == true)
            {
                try
                {
                    imgToFindGray = imgToFindColor.Convert <Gray, Byte>();       //convert the image to find to grayscale
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.ToString());
                }

                vkpSceneKeyPoints   = surfDetector.DetectKeyPointsRaw(imgSceneGray, null);                       //detect keypoints in the scene; the second parameter is a mask, null when not needed
                mtxSceneDescriptors = surfDetector.ComputeDescriptorsRaw(imgSceneGray, null, vkpSceneKeyPoints); //compute the scene descriptors; the parameters are the scene image,...
                //...the mask, and the scene keypoints

                vkpToFindKeyPoints = surfDetector.DetectKeyPointsRaw(imgToFindGray, null);                          //detect keypoints in the image to find; the second parameter is the...
                //...mask, null because it is not needed

                mtxToFindDescriptor = surfDetector.ComputeDescriptorsRaw(imgToFindGray, null, vkpToFindKeyPoints);                //compute the descriptors of the image to find (image, mask, its keypoints)

                bruteForceMatcher = new BruteForceMatcher <Single>(DistanceType.L2);                                              //brute-force matcher using L2, the squared Euclidean distance
                bruteForceMatcher.Add(mtxToFindDescriptor);                                                                       //add the matrix of descriptors to find to the brute-force matcher

                if (mtxSceneDescriptors != null)                                                                                  //guards against a scene with no features, e.g. a blank wall
                {
                    mtxMatchIndices = new Matrix <int>(mtxSceneDescriptors.Rows, intKNumNearestNeighbors);                        //index/bin matrix (rows, columns)
                    mtxDistance     = new Matrix <Single>(mtxSceneDescriptors.Rows, intKNumNearestNeighbors);                     //the same for the distances

                    bruteForceMatcher.KnnMatch(mtxSceneDescriptors, mtxMatchIndices, mtxDistance, intKNumNearestNeighbors, null); //find the k nearest matches (null means no mask is needed)

                    mtxMask = new Matrix <Byte>(mtxDistance.Rows, 1);                                                             //mask matrix
                    mtxMask.SetValue(255);                                                                                        //set every element of the mask matrix

                    Features2DToolbox.VoteForUniqueness(mtxDistance, dblUniquenessThreshold, mtxMask);                            //filter the matched features: a match that is NOT unique is rejected

                    intNumNonZeroElements = CvInvoke.cvCountNonZero(mtxMask);                                                     //get the number of non-zero elements in the mask matrix
                    if (intNumNonZeroElements >= 4)
                    {
                        //eliminate matched features whose scale and rotation do not agree with the majority's scale and rotation
                        intNumNonZeroElements = Features2DToolbox.VoteForSizeAndOrientation(vkpToFindKeyPoints, vkpSceneKeyPoints, mtxMatchIndices, mtxMask, dblScaleIncrement, intRotationBins);
                        if (intNumNonZeroElements >= 4)             //if there are still at least 4 non-zero elements
                        {
                            //get the homography matrix using RANSAC (random sample consensus)
                            homographyMatrix = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(vkpToFindKeyPoints, vkpSceneKeyPoints, mtxMatchIndices, mtxMask, dblRansacReprojectionThreshold);
                        }
                    }

                    imgCopyOfImageToFindWithBorder = imgToFindColor.Copy();     //copy the image to find, so we can draw on the copy without changing the original

                    //draw a 2px border around the copy of the image to find, using the same colour as the box drawn around the found image
                    imgCopyOfImageToFindWithBorder.Draw(new Rectangle(1, 1, imgCopyOfImageToFindWithBorder.Width - 3, imgCopyOfImageToFindWithBorder.Height - 3), bgrFoundImageColor, 2);

                    //draw the scene image and the image to find together on the result image
                    //3 cases, depending on which check boxes are ticked (draw keypoints and/or draw matching lines)
                    if (ckDrawKeyPoints.Checked == true && ckDrawMatchingLines.Checked == true)
                    {
                        //use DrawMatches() to combine the scene image with the image to find, then draw keypoints and lines
                        imgResult = Features2DToolbox.DrawMatches(imgCopyOfImageToFindWithBorder,
                                                                  vkpToFindKeyPoints,
                                                                  imgSceneColor,
                                                                  vkpSceneKeyPoints,
                                                                  mtxMatchIndices,
                                                                  bgrMatchingLineColor,
                                                                  bgrKeyPointColor,
                                                                  mtxMask,
                                                                  Features2DToolbox.KeypointDrawType.DEFAULT);
                    }
                    else if (ckDrawKeyPoints.Checked == true && ckDrawMatchingLines.Checked == false)
                    {
                        //draw the scene with its keypoints on the result image
                        imgResult = Features2DToolbox.DrawKeypoints(imgSceneColor,
                                                                    vkpSceneKeyPoints,
                                                                    bgrKeyPointColor,
                                                                    Features2DToolbox.KeypointDrawType.DEFAULT);
                        //then draw the keypoints on the copy of the image to find
                        imgCopyOfImageToFindWithBorder = Features2DToolbox.DrawKeypoints(imgCopyOfImageToFindWithBorder,
                                                                                         vkpToFindKeyPoints,
                                                                                         bgrKeyPointColor,
                                                                                         Features2DToolbox.KeypointDrawType.DEFAULT);
                        //then append the copy of the image to find to the result image
                        imgResult = imgResult.ConcateHorizontal(imgCopyOfImageToFindWithBorder);
                    }
                    else if (ckDrawKeyPoints.Checked == false && ckDrawMatchingLines.Checked == false)
                    {
                        imgResult = imgSceneColor;                                                  //use the scene as the result image
                        imgResult = imgResult.ConcateHorizontal(imgCopyOfImageToFindWithBorder);    //and append the copy of the image to find
                    }
                    else
                    {
                        MessageBox.Show("Error");     //this point should never be reached
                    }
                }
                else
                {
                    imgResult = imgSceneColor;                                                  //use the scene as the result image
                    imgResult = imgResult.ConcateHorizontal(imgCopyOfImageToFindWithBorder);    //and append the copy of the image to find
                }

                if (homographyMatrix != null)    //make sure the matrix actually contains something
                {
                    //draw a frame on the scene part of the result image, where the image to find was located
                    rectImageToFind.X      = 0;     //start with the rectangle set to the full size of the image to find
                    rectImageToFind.Y      = 0;
                    rectImageToFind.Width  = imgToFindGray.Width;
                    rectImageToFind.Height = imgToFindGray.Height;

                    //create an array of PointF corresponding to the corners of that rectangle
                    pointsF = new PointF[] { new PointF(rectImageToFind.Left, rectImageToFind.Top),
                                             new PointF(rectImageToFind.Right, rectImageToFind.Top),
                                             new PointF(rectImageToFind.Right, rectImageToFind.Bottom),
                                             new PointF(rectImageToFind.Left, rectImageToFind.Bottom) };

                    //ProjectPoints() updates pointsF (by reference) to the location of the frame on the scene fragment where the image was found
                    homographyMatrix.ProjectPoints(pointsF);

                    //convert from PointF to Point, because ProjectPoints() works with PointF while DrawPolyline() expects Point
                    points = new Point[] { Point.Round(pointsF[0]),
                                           Point.Round(pointsF[1]),
                                           Point.Round(pointsF[2]),
                                           Point.Round(pointsF[3]) };

                    //draw the frame around the found image on the scene fragment of the result image
                    imgResult.DrawPolyline(points, true, new Bgr(0, 255, 0), 2);

                    //draw a red dash at the centre of the object
                    int x  = points[0].X;                   //first corner of the projected quad
                    int y  = points[0].Y;
                    int x1 = points[2].X;                   //diagonally opposite corner
                    int y1 = points[2].Y;

                    int xW = x + (x1 - x) / 2;              //x coordinate of the centre
                    int yW = y + (y1 - y) / 2;              //y coordinate of the centre
                    Point [] pp = new Point[] { new Point(xW, yW), new Point(xW + 10, yW) };    //mark the centre of the detected object
                    imgResult.DrawPolyline(pp, true, new Bgr(0, 0, 255), 5);

                    XX = xW.ToString();
                    YY = yW.ToString();
                    //////////when the object leaves the field of view
                    if (xW == 0 || yW == 0 || xW < -200 || yW < -200 || xW > 800 || yW > 800)
                    {
                        targetLost(-1);
                    }
                    else
                    {
                        targetLost(1);
                    }
                    //////////
                }
                else
                {
                    targetLost(-1);     //no homography: the target was not found
                }
                //end of SURF; update the whole form

                ibResult.Image = imgResult.ToBitmap();          //show the result in the image box
            }
        }
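
        // The red centre dash above is placed at the midpoint of the diagonal between corners 0 and 2
        // of the projected quadrilateral. The same computation as a small helper (the name is
        // hypothetical, shown only to make the arithmetic explicit):
        private static Point QuadCentre(Point[] corners)
        {
            // midpoint of the diagonal from corner 0 to corner 2
            int xW = corners[0].X + (corners[2].X - corners[0].X) / 2;
            int yW = corners[0].Y + (corners[2].Y - corners[0].Y) / 2;
            return new Point(xW, yW);
        }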
Example #30
0
        public static bool TestFeature2DTracker(IFeatureDetector keyPointDetector, IDescriptorExtractor descriptorGenerator)
        {
            //for (int k = 0; k < 1; k++)
            {
                Feature2D feature2D = null;
                if (keyPointDetector == descriptorGenerator)
                {
                    feature2D = keyPointDetector as Feature2D;
                }

                Image <Gray, Byte> modelImage = EmguAssert.LoadImage <Gray, byte>("box.png");
                //Image<Gray, Byte> modelImage = new Image<Gray, byte>("stop.jpg");
                //modelImage = modelImage.Resize(400, 400, true);

                //modelImage._EqualizeHist();

                #region extract features from the object image
                Stopwatch        stopwatch      = Stopwatch.StartNew();
                VectorOfKeyPoint modelKeypoints = new VectorOfKeyPoint();
                Mat modelDescriptors            = new Mat();
                if (feature2D != null)
                {
                    feature2D.DetectAndCompute(modelImage, null, modelKeypoints, modelDescriptors, false);
                }
                else
                {
                    keyPointDetector.DetectRaw(modelImage, modelKeypoints);
                    descriptorGenerator.Compute(modelImage, modelKeypoints, modelDescriptors);
                }
                stopwatch.Stop();
                EmguAssert.WriteLine(String.Format("Time to extract feature from model: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                #endregion

                //Image<Gray, Byte> observedImage = new Image<Gray, byte>("traffic.jpg");
                Image <Gray, Byte> observedImage = EmguAssert.LoadImage <Gray, byte>("box_in_scene.png");
                //Image<Gray, Byte> observedImage = modelImage.Rotate(45, new Gray(0.0));
                //image = image.Resize(400, 400, true);

                //observedImage._EqualizeHist();
                #region extract features from the observed image
                stopwatch.Reset();
                stopwatch.Start();
                VectorOfKeyPoint observedKeypoints = new VectorOfKeyPoint();
                using (Mat observedDescriptors = new Mat())
                {
                    if (feature2D != null)
                    {
                        feature2D.DetectAndCompute(observedImage, null, observedKeypoints, observedDescriptors, false);
                    }
                    else
                    {
                        keyPointDetector.DetectRaw(observedImage, observedKeypoints);
                        descriptorGenerator.Compute(observedImage, observedKeypoints, observedDescriptors);
                    }

                    stopwatch.Stop();
                    EmguAssert.WriteLine(String.Format("Time to extract feature from image: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                    #endregion

                    //Merge the object image and the observed image into one big image for display
                    Image <Gray, Byte> res = modelImage.ConcateVertical(observedImage);

                    Rectangle rect = modelImage.ROI;
                    PointF[]  pts  = new PointF[] {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };

                    HomographyMatrix homography = null;

                    stopwatch.Reset();
                    stopwatch.Start();

                    int          k  = 2;
                    DistanceType dt = modelDescriptors.Depth == CvEnum.DepthType.Cv8U ? DistanceType.Hamming : DistanceType.L2;
                    //using (Matrix<int> indices = new Matrix<int>(observedDescriptors.Rows, k))
                    //using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
                    using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                        using (BruteForceMatcher matcher = new BruteForceMatcher(dt))
                        {
                            matcher.Add(modelDescriptors);
                            matcher.KnnMatch(observedDescriptors, matches, k, null);

                            Matrix <byte> mask = new Matrix <byte>(matches.Size, 1);
                            mask.SetValue(255);
                            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

                            int nonZeroCount = CvInvoke.CountNonZero(mask);
                            if (nonZeroCount >= 4)
                            {
                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeypoints, observedKeypoints, matches, mask, 1.5, 20);
                                if (nonZeroCount >= 4)
                                {
                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeypoints, observedKeypoints, matches, mask, 2);
                                }
                            }
                        }
                    stopwatch.Stop();
                    EmguAssert.WriteLine(String.Format("Time for feature matching: {0} milli-sec", stopwatch.ElapsedMilliseconds));

                    bool success = false;
                    if (homography != null)
                    {
                        PointF[] points = pts.Clone() as PointF[];
                        homography.ProjectPoints(points);

                        for (int i = 0; i < points.Length; i++)
                        {
                            points[i].Y += modelImage.Height;
                        }

                        res.DrawPolyline(
#if NETFX_CORE
                            Extensions.
#else
                            Array.
#endif
                            ConvertAll <PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);

                        success = true;
                    }
                    //Emgu.CV.UI.ImageViewer.Show(res);
                    return(success);
                }



                /*
                 * stopwatch.Reset(); stopwatch.Start();
                 * //set the initial region to be the whole image
                 * using (Image<Gray, Single> priorMask = new Image<Gray, float>(observedImage.Size))
                 * {
                 * priorMask.SetValue(1.0);
                 * homography = tracker.CamShiftTrack(
                 *    observedFeatures,
                 *    (RectangleF)observedImage.ROI,
                 *    priorMask);
                 * }
                 * Trace.WriteLine(String.Format("Time for feature tracking: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                 *
                 * if (homography != null) //set the initial tracking window to be the whole image
                 * {
                 * PointF[] points = pts.Clone() as PointF[];
                 * homography.ProjectPoints(points);
                 *
                 * for (int i = 0; i < points.Length; i++)
                 *    points[i].Y += modelImage.Height;
                 * res.DrawPolyline(Array.ConvertAll<PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);
                 * return true;
                 * }
                 * else
                 * {
                 * return false;
                 * }*/
            }
        }
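
        // A possible caller for the test above, mirroring how Emgu's own test suite invokes it: SURF
        // implements both detection and description, so one instance can fill both roles. The
        // Emgu.CV.XFeatures2D.SURF class and its hessian-threshold constructor are an assumption
        // about the installed Emgu version; adjust to the API you actually have.
        public static void RunSurfTrackerTest()
        {
            var surf = new Emgu.CV.XFeatures2D.SURF(500);   //hessian threshold of 500, as in the examples above
            bool found = TestFeature2DTracker(surf, surf);  //same instance as detector and descriptor extractor
            EmguAssert.WriteLine("Match found: " + found);
        }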