Code Example #1
        public List <VectorOfKeyPoint> SURF_BruteForceMatcher(Image <Gray, byte> model, Image <Gray, byte> observed, int hessianThreshold, out SURFDetector surfCPU)
        {
            surfCPU = new SURFDetector(hessianThreshold, false);
            List <VectorOfKeyPoint> KeyPointsList = new List <VectorOfKeyPoint>();
            VectorOfKeyPoint        modelKeyPoints;
            VectorOfKeyPoint        observedKeyPoints;

            try
            {
                modelKeyPoints    = surfCPU.DetectKeyPointsRaw(model, null);    // Extract features from the object image
                observedKeyPoints = surfCPU.DetectKeyPointsRaw(observed, null); // Extract features from the observed image

                if (modelKeyPoints.Size <= 0)
                {
                    throw new System.ArgumentException("Can't find any keypoints in your model image!");
                }

                KeyPointsList.Add(modelKeyPoints);
                KeyPointsList.Add(observedKeyPoints);
            }
            catch (Exception e)
            {
                Log.WriteLine("SURF_BruteForceMatcher: " + e.Message);
                Console.WriteLine(e.Message);
                throw; // rethrow, preserving the original stack trace
            }
            return(KeyPointsList);
        }
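A minimal usage sketch for the helper above (a hypothetical call site, assuming an Emgu CV 2.x project; the file names are placeholders):

        // Hypothetical caller: detect keypoints in a model/observed image pair.
        Image <Gray, byte> model    = new Image <Gray, byte>("model.png");
        Image <Gray, byte> observed = new Image <Gray, byte>("scene.png");

        SURFDetector surf;
        List <VectorOfKeyPoint> keyPoints = SURF_BruteForceMatcher(model, observed, 500, out surf);
        Console.WriteLine("model keypoints: " + keyPoints[0].Size);
        Console.WriteLine("observed keypoints: " + keyPoints[1].Size);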
Code Example #2
        public Tuple <Image <Bgr, byte>, HomographyMatrix> DrawHomography(Image <Gray, byte> model, Image <Gray, byte> observed, double uniquenessThreshold)
        {
            HomographyMatrix  homography = null;
            Image <Bgr, Byte> result     = observed.Convert <Bgr, byte>();

            SURFDetector     surfCPU = new SURFDetector(500, false);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;
            Matrix <byte>    mask;
            int k = 2;

            modelKeyPoints = surfCPU.DetectKeyPointsRaw(model, null);             // Extract features from the object image
            Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(model, null, modelKeyPoints);

            observedKeyPoints = surfCPU.DetectKeyPointsRaw(observed, null);             // Extract features from the observed image

            if (modelKeyPoints.Size <= 0)
            {
                throw new System.ArgumentException("Can't find any keypoints in your model image!");
            }

            if (observedKeyPoints.Size > 0)
            {
                Matrix <float>            observedDescriptors = surfCPU.ComputeDescriptorsRaw(observed, null, observedKeyPoints);
                BruteForceMatcher <float> matcher             = new BruteForceMatcher <float> (DistanceType.L2);
                matcher.Add(modelDescriptors);

                indices = new Matrix <int> (observedDescriptors.Rows, k);

                using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k)) {
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                    mask = new Matrix <byte> (dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 10)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 10)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                    }
                }

                result = Features2DToolbox.DrawMatches(model, modelKeyPoints, observed, observedKeyPoints,
                                                       indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
            }

            return(new Tuple <Image <Bgr, byte>, HomographyMatrix>(result, homography));
        }
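For reference, Features2DToolbox.VoteForUniqueness applies Lowe's ratio test to the k = 2 distance matrix filled by KnnMatch. A conceptual sketch of the same filtering (not the library source):

        // Keep a match only when the best distance is clearly smaller than
        // the second-best distance; otherwise clear its mask entry.
        static void RatioTest(Matrix <float> dist, double uniquenessThreshold, Matrix <byte> mask)
        {
            for (int i = 0; i < dist.Rows; i++)
            {
                if (dist[i, 0] >= uniquenessThreshold * dist[i, 1])
                {
                    mask[i, 0] = 0; // ambiguous match: reject
                }
            }
        }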
Code Example #3
        public SurfDetector(String model)
        {
            surfCPU = new SURFDetector(500, false);

            Image <Bgr, Byte> color = new Image <Bgr, byte>(model);

            modelImage = color.Convert <Gray, Byte>();

            //extract features from the object image
            modelKeyPoints   = surfCPU.DetectKeyPointsRaw(modelImage, null);
            modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

            matcher = new BruteForceMatcher <float>(DistanceType.L2);
            matcher.Add(modelDescriptors);

            //for computers with awesome GPUs...
            if (GpuInvoke.HasCuda)
            {
                /*surfGPU = new GpuSURFDetector(500, 4, 4, false, 0.01f, true);
                *  gpuModelImage = new GpuImage<Gray, byte>(modelImage);
                *  gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null);
                *  gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints);
                *  matcher_gpu = new GpuBruteForceMatcher<float>(DistanceType.L2);
                *  modelKeyPoints = new VectorOfKeyPoint();
                *  surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);*/
            }
        }
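Because this constructor caches the model descriptors inside the matcher, later queries only pay for the observed frame. A sketch of such a query method, assuming the fields shown above (surfCPU, matcher) and an assumed uniqueness threshold of 0.8:

        // Sketch: match a new frame against the cached model descriptors.
        public Matrix <byte> MatchFrame(Image <Gray, byte> frame)
        {
            VectorOfKeyPoint frameKeyPoints   = surfCPU.DetectKeyPointsRaw(frame, null);
            Matrix <float>   frameDescriptors = surfCPU.ComputeDescriptorsRaw(frame, null, frameKeyPoints);

            Matrix <int>  indices = new Matrix <int>(frameDescriptors.Rows, 2);
            Matrix <byte> mask    = new Matrix <byte>(frameDescriptors.Rows, 1);
            using (Matrix <float> dist = new Matrix <float>(frameDescriptors.Rows, 2))
            {
                matcher.KnnMatch(frameDescriptors, indices, dist, 2, null);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, 0.8, mask); // 0.8 is an assumed threshold
            }
            return mask; // non-zero rows mark the surviving matches
        }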
Code Example #4
 public float[,] GetDescriptors(Image image)
 {
     using (var grayImage = new Image <Gray, byte>(new Bitmap(image, _size, _size)))
     {
         var modelKeyPoints = _surfDetector.DetectKeyPointsRaw(grayImage, null);
         return(_surfDetector.ComputeDescriptorsRaw(grayImage, null, modelKeyPoints).Data);
     }
 }
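ComputeDescriptorsRaw returns one row per keypoint; with the extended flag off, each SURF descriptor is 64 floats wide (128 when extended). A hedged sketch of consuming the returned array (someImage is a placeholder):

     // Hypothetical consumer of GetDescriptors.
     float[,] desc = GetDescriptors(someImage);
     int rows = desc.GetLength(0); // number of keypoints
     int cols = desc.GetLength(1); // 64 for standard SURF, 128 for extended
     Console.WriteLine("Got " + rows + " descriptors of length " + cols);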
Code Example #5
      private static bool IsModelInObserved( Image<Gray, byte> modelImage, Image<Gray, byte> observedImage, double similarityThreshold = 0.075 )
      {
         var surfCpu = new SURFDetector(500, false);

         Matrix<byte> mask;
         int k = 2;
         double uniquenessThreshold = 0.8;

         //extract features from the object image
         var modelKeyPoints = surfCpu.DetectKeyPointsRaw( modelImage, null );
         Matrix<float> modelDescriptors = surfCpu.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

         // extract features from the observed image
         var observedKeyPoints = surfCpu.DetectKeyPointsRaw( observedImage, null );
         Matrix<float> observedDescriptors = surfCpu.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
         BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
         matcher.Add( modelDescriptors );

         var indices = new Matrix<int>( observedDescriptors.Rows, k );
         using ( var dist = new Matrix<float>( observedDescriptors.Rows, k ) )
         {
            matcher.KnnMatch( observedDescriptors, indices, dist, k, null );
            mask = new Matrix<byte>( dist.Rows, 1 );
            mask.SetValue( 255 );
            Features2DToolbox.VoteForUniqueness( dist, uniquenessThreshold, mask );
         }

         int keypointMatchCount = CvInvoke.cvCountNonZero( mask );
         if ( keypointMatchCount >= 4 )
         {
            keypointMatchCount = Features2DToolbox.VoteForSizeAndOrientation( modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20 );
            if ( keypointMatchCount >= 4 )
            {
               Features2DToolbox.GetHomographyMatrixFromMatchedFeatures( modelKeyPoints, observedKeyPoints, indices, mask, 2 );
            }
         }

         var similarity = (double)keypointMatchCount / observedKeyPoints.Size;
         return similarity > similarityThreshold;
      }
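A hypothetical call site, assuming the two images are already grayscale (file names are placeholders):

         using (var model    = new Image <Gray, byte>("logo.png"))
         using (var observed = new Image <Gray, byte>("screenshot.png"))
         {
            bool found = IsModelInObserved(model, observed);
            Console.WriteLine(found ? "model found" : "model not found");
         }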
Code Example #6
        /// <summary>
        /// Compute SURF feature points
        /// </summary>
        /// <param name="srcImage">The source image</param>
        /// <returns>The extracted SURF feature data</returns>
        public static SURFFeatureData CalSURFFeature(Image <Bgr, Byte> srcImage)
        {
            SURFDetector     surfCPU = new SURFDetector(new MCvSURFParams(1200, false)); // default is 500
            VectorOfKeyPoint keyPoints;
            Matrix <float>   descriptors = null;

            using (Image <Gray, Byte> grayImg = srcImage.Convert <Gray, Byte>())
            {
                keyPoints   = surfCPU.DetectKeyPointsRaw(grayImg, null);
                descriptors = surfCPU.ComputeDescriptorsRaw(grayImg, null, keyPoints);
            }
            return(new SURFFeatureData(srcImage.Copy(), keyPoints, descriptors));
        }
Code Example #7
        Matrix <float> ComputeSingleDescriptors(string fileName)
        {
            Matrix <float> descs;

            detector = new SURFDetector(surfHessianThresh, surfExtendedFlag);

            using (Image <Gray, byte> img = new Image <Gray, byte>(fileName))
            {
                VectorOfKeyPoint keyPoints = detector.DetectKeyPointsRaw(img, null);
                descs = detector.ComputeDescriptorsRaw(img, null, keyPoints);
            }
            return(descs);
        }
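When descriptors from many files are needed in one matrix (for example, to train a bag-of-words vocabulary), the per-file matrices can be stacked row-wise. A sketch, assuming Matrix<float>.ConcateVertical is available in this Emgu version:

        // Sketch: stack per-file descriptor matrices vertically.
        Matrix <float> ComputeMultipleDescriptors(IEnumerable <string> fileNames)
        {
            Matrix <float> all = null;
            foreach (string fileName in fileNames)
            {
                Matrix <float> descs = ComputeSingleDescriptors(fileName);
                all = (all == null) ? descs : all.ConcateVertical(descs);
            }
            return all;
        }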
Code Example #8
        public List <Keypoint> usingSurf(Bitmap image)
        {
            SURFDetector       surf           = new SURFDetector(750, false);
            Image <Gray, Byte> modelImage     = new Image <Gray, byte>(new Bitmap(image));
            VectorOfKeyPoint   modelKeyPoints = surf.DetectKeyPointsRaw(modelImage, null);

            MKeyPoint[] keypoints = modelKeyPoints.ToArray();

            Keypoint        key;
            List <Keypoint> keypointsList = new List <Keypoint>();

            foreach (MKeyPoint keypoint in keypoints)
            {
                key = new Keypoint(keypoint.Point.X, keypoint.Point.Y, keypoint.Size);
                keypointsList.Add(key);
            }

            return(keypointsList);
        }
Code Example #9
        /// <summary>
        /// Compute SURF feature points
        /// </summary>
        /// <param name="srcImage">The source image</param>
        /// <returns>The extracted SURF feature data</returns>
        public static SURFFeatureData CalSURFFeature(Image <Bgr, Byte> srcImage)
        {
            SURFDetector     surfCPU = new SURFDetector(new MCvSURFParams(1200, false)); // default is 500
            VectorOfKeyPoint keyPoints;
            Matrix <float>   descriptors = null;
            Stopwatch        watch;

            watch = Stopwatch.StartNew();
            using (Image <Gray, Byte> grayImg = srcImage.Convert <Gray, Byte>())
            {
                keyPoints   = surfCPU.DetectKeyPointsRaw(grayImg, null);
                descriptors = surfCPU.ComputeDescriptorsRaw(grayImg, null, keyPoints);
            }
            watch.Stop();
            Console.WriteLine("\nExtract SURF time=> " + watch.ElapsedMilliseconds.ToString() + "ms");

            // number of extracted keypoints
            Console.WriteLine("keypoint size:" + keyPoints.Size);
            return(new SURFFeatureData(srcImage.Copy(), keyPoints, descriptors));
        }
Code Example #10
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        public static Image <Bgr, Byte> Draw(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out long matchTime)
        {
            Stopwatch        watch;
            HomographyMatrix homography = null;

            SURFDetector     surfCPU = new SURFDetector(500, false);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;

            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.8;

            if (GpuInvoke.HasCuda)
            {
                GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
                using (GpuImage <Gray, Byte> gpuModelImage = new GpuImage <Gray, byte>(modelImage))
                    //extract features from the object image
                    using (GpuMat <float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat <float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (GpuBruteForceMatcher <float> matcher = new GpuBruteForceMatcher <float>(DistanceType.L2))
                            {
                                modelKeyPoints = new VectorOfKeyPoint();
                                surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                watch = Stopwatch.StartNew();

                                // extract features from the observed image
                                using (GpuImage <Gray, Byte> gpuObservedImage = new GpuImage <Gray, byte>(observedImage))
                                    using (GpuMat <float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat <float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuObservedDescriptors.Size.Height, k, 1, true))
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuObservedDescriptors.Size.Height, k, 1, true))
                                                    using (GpuMat <Byte> gpuMask = new GpuMat <byte>(gpuMatchIndices.Size.Height, 1, 1))
                                                        using (Stream stream = new Stream())
                                                        {
                                                            matcher.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                                                            indices = new Matrix <int>(gpuMatchIndices.Size);
                                                            mask    = new Matrix <byte>(gpuMask.Size);

                                                            // GPU implementation of VoteForUniqueness
                                                            using (GpuMat <float> col0 = gpuMatchDist.Col(0))
                                                                using (GpuMat <float> col1 = gpuMatchDist.Col(1))
                                                                {
                                                                    GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
                                                                    GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                                                                }

                                                            observedKeyPoints = new VectorOfKeyPoint();
                                                            surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                                            //wait for the stream to complete its tasks
                                                            //We can perform other CPU-intensive work here while waiting for the stream to complete.
                                                            stream.WaitForCompletion();

                                                            gpuMask.Download(mask);
                                                            gpuMatchIndices.Download(indices);

                                                            if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                                                            {
                                                                int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                                                                if (nonZeroCount >= 4)
                                                                {
                                                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                                                                }
                                                            }

                                                            watch.Stop();
                                                        }
                            }
            }
            else
            {
                //extract features from the object image
                modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
                Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

                watch = Stopwatch.StartNew();

                // extract features from the observed image
                observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
                Matrix <float>            observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
                BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);
                matcher.Add(modelDescriptors);

                indices = new Matrix <int>(observedDescriptors.Rows, k);
                using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
                {
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                    mask = new Matrix <byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                    }
                }

                watch.Stop();
            }

            //Draw the matched keypoints
            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            #region draw the projected region on the image
            if (homography != null)
            {  //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
            }
            #endregion

            matchTime = watch.ElapsedMilliseconds;

            return(result);
        }
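A hypothetical driver for Draw, using the box.png / box_in_scene.png images that ship with the Emgu samples:

        long matchTime;
        using (Image <Gray, Byte> model = new Image <Gray, Byte>("box.png"))
        using (Image <Gray, Byte> scene = new Image <Gray, Byte>("box_in_scene.png"))
        {
            Image <Bgr, Byte> result = Draw(model, scene, out matchTime);
            Console.WriteLine("matched in " + matchTime + " ms");
        }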
Code Example #11
    public Image <Gray, byte> ObjectTrackingSurf(Image <Gray, byte> liveImg, Image <Gray, byte> templateImg, bool showOnLiveImg)
    {
        vkpLiveKeyPoint    = surfDetector.DetectKeyPointsRaw(liveImg, null);
        mtxLiveDescriptors = surfDetector.ComputeDescriptorsRaw(liveImg, null, vkpLiveKeyPoint);

        vkpTemplateKeyPoint    = surfDetector.DetectKeyPointsRaw(templateImg, null);
        mtxTemplateDescriptors = surfDetector.ComputeDescriptorsRaw(templateImg, null, vkpTemplateKeyPoint);

        bruteForceMatcher = new BruteForceMatcher <Single> (DistanceType.L2);
        bruteForceMatcher.Add(mtxTemplateDescriptors);

        mtxMatchIndices = new Matrix <int> (mtxLiveDescriptors.Rows, KNumNearestNeighbors);
        mtxDistance     = new Matrix <Single> (mtxLiveDescriptors.Rows, KNumNearestNeighbors);

        bruteForceMatcher.KnnMatch(mtxLiveDescriptors, mtxMatchIndices, mtxDistance, KNumNearestNeighbors, null);

        mtxMask = new Matrix <Byte> (mtxDistance.Rows, 1);
        mtxMask.SetValue(255);
        Features2DToolbox.VoteForUniqueness(mtxDistance, UniquenessThreshold, mtxMask);

        NumNonZeroElements = CvInvoke.cvCountNonZero(mtxMask);
        if (NumNonZeroElements >= 4)
        {
            NumNonZeroElements = Features2DToolbox.VoteForSizeAndOrientation(vkpTemplateKeyPoint,
                                                                             vkpLiveKeyPoint,
                                                                             mtxMatchIndices,
                                                                             mtxMask,
                                                                             ScaleIncrement,
                                                                             RotationBins);
            if (NumNonZeroElements >= 4)
            {
                homographyMatrix = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(vkpTemplateKeyPoint,
                                                                                            vkpLiveKeyPoint,
                                                                                            mtxMatchIndices,
                                                                                            mtxMask,
                                                                                            RansacReprojectionThreshold);
            }
        }
        //templateImgCopy = templateImg.Copy ();
        //templateImgCopy.Draw (new Rectangle (1, 1, templateImgCopy.Width - 3, templateImgCopy.Height - 3), boxGray, 2);
        liveImgCopy = liveImg.Copy();         //.ConcateHorizontal(templateImgCopy);

        if (homographyMatrix != null)
        {
            rect.X       = 0;
            rect.Y       = 0;
            rect.Width   = templateImg.Width;
            rect.Height  = templateImg.Height;
            pointsF[0].X = rect.Left; pointsF[0].Y = rect.Top;
            pointsF[1].X = rect.Right; pointsF[1].Y = rect.Top;
            pointsF[2].X = rect.Right; pointsF[2].Y = rect.Bottom;
            pointsF[3].X = rect.Left; pointsF[3].Y = rect.Bottom;

            homographyMatrix.ProjectPoints(pointsF);
            //Debug.Log("live w: "+ liveImgCopy.Width + "live h: " + liveImgCopy.Height);
            //Debug.Log ("pf0: " + pointsF[0] + "pf1: "+ pointsF[1] + " pf2: " + pointsF[2] + " pf3: " + pointsF[3]);

            centerPointF.X = 0;
            centerPointF.Y = 0;
            for (int i = 0; i < pointsF.Length; ++i)
            {
                centerPointF.X += pointsF[i].X;
                centerPointF.Y += pointsF[i].Y;
            }
            centerPointF.X = centerPointF.X / 4f;
            centerPointF.Y = centerPointF.Y / 4f;
            //Debug.Log("centerF: " + centerPointF);
            points[0] = Point.Round(pointsF[0]);
            points[1] = Point.Round(pointsF[1]);
            points[2] = Point.Round(pointsF[2]);
            points[3] = Point.Round(pointsF[3]);

            liveImgCopy.DrawPolyline(points, true, boxGray, 4);
        }
        if (showOnLiveImg)
        {
            return(liveImgCopy);
        }
        else
        {
            // NOTE: templateImgCopy is only assigned in the commented-out code above,
            // so this branch can return null unless it is set elsewhere.
            return(templateImgCopy);
        }
    }
Code Example #12
File: Form1.cs  Project: kimmyungsup/emguCV-FAST
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        public static Image <Bgr, Byte> Draw(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, int state, out long matchTime, out int p)
        {
            Stopwatch        watch;
            HomographyMatrix homography = null;


            SURFDetector     surfCPU = new SURFDetector(500, false);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;

            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.8;

            if (state == 1)
            {
                uniquenessThreshold = 0.8; // NOTE: same as the default set above; change here if state 1 needs a different threshold
            }

            //extract features from the object image
            modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
            Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

            watch = Stopwatch.StartNew();

            // extract features from the observed image
            observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
            Matrix <float>            observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
            BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 1)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 1)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }

            watch.Stop();

            p = mask.ManagedArray.OfType <byte>().Count(q => q > 0);

            //Draw the matched keypoints
            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            matchTime = watch.ElapsedMilliseconds;

            return(result);
        }
Code Example #13
        void ComputeDescriptor(Image <Gray, byte> sourceImage, ref Matrix <float> matrix)
        {
            VectorOfKeyPoint keyPoints = detector.DetectKeyPointsRaw(sourceImage, null);

            matrix = detector.ComputeDescriptorsRaw(sourceImage, null, keyPoints);
        }
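Because the descriptor matrix is passed by ref, the caller declares it first; a minimal sketch (grayImage is a placeholder for an Image<Gray, byte>):

        Matrix <float> descriptors = null;
        ComputeDescriptor(grayImage, ref descriptors); // grayImage: placeholder input
        Console.WriteLine("descriptor rows: " + descriptors.Rows);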
Code Example #14
File: DrawMatches.cs  Project: AdrianaStancu/Rubik
        public static Image <Bgr, Byte> Parallelogram(String modelImageFileName, String observedImageFileName, out long matchTime)
        {
            //Image<Gray, Byte> cannyEdges = gray.Canny(cannyThreshold, cannyThresholdLinking);
            //Load the image from file
            Image <Bgr, Byte> observedImage = new Image <Bgr, byte>(observedImageFileName);
            Stopwatch         watch;
            HomographyMatrix  homography = null;

            watch = Stopwatch.StartNew();

            Image <Gray, Byte> graySoft = observedImage.Convert <Gray, Byte>();//.PyrDown().PyrUp();
            //ImageViewer.Show(graySoft, "graysoft");
            //Image<Gray, Byte> gray = graySoft.SmoothGaussian(3);
            //ImageViewer.Show(gray, "graysoft");
            //gray = gray.AddWeighted(graySoft, 1.5, -0.5, 0);
            //ImageViewer.Show(graySoft, "graysoft");

            Gray cannyThreshold             = new Gray(149);
            Gray cannyThresholdLinking      = new Gray(149);
            Gray circleAccumulatorThreshold = new Gray(1000);

            Image <Gray, Byte> cannyEdges = graySoft.Canny(cannyThreshold, cannyThresholdLinking);
            Image <Gray, Byte> modelImage = new Image <Gray, Byte>(modelImageFileName).Canny(cannyThreshold, cannyThresholdLinking);
            SURFDetector       surfCPU    = new SURFDetector(200, false);
            VectorOfKeyPoint   modelKeyPoints;
            VectorOfKeyPoint   observedKeyPoints;
            Matrix <int>       indices;

            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.99;

            //extract features from the object image
            modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
            Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

            watch = Stopwatch.StartNew(); // restart the timer: only the matching stage is measured

            // extract features from the observed image
            observedKeyPoints = surfCPU.DetectKeyPointsRaw(cannyEdges, null);
            Matrix <float>            observedDescriptors = surfCPU.ComputeDescriptorsRaw(cannyEdges, null, observedKeyPoints);
            BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                //nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }

            watch.Stop();

            //Image<Bgr, Byte> modelImage2 = new Image<Bgr, Byte>(modelImageFileName);
            //Image<Bgr, Byte> observedImage2 = new Image<Bgr, Byte>(observedImageFileName);

            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, cannyEdges, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            if (homography != null)
            {
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
            }
            matchTime = watch.ElapsedMilliseconds;
            return(result);
        }
Code Example #15
        public override Rectangle[] find(Image <Bgr, Byte> image)
        {
            Image <Gray, Byte> gray = image.Convert <Gray, Byte>();

            if (true) // CPU path; the CUDA branch below is disabled and kept for reference
            {
                // extract features from the observed image
                observedKeyPoints = surfCPU.DetectKeyPointsRaw(gray, null);
                Matrix <float> observedDescriptors = surfCPU.ComputeDescriptorsRaw(gray, null, observedKeyPoints);


                indices = new Matrix <int>(observedDescriptors.Rows, k);
                using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k)) {
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                    mask = new Matrix <byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                    }
                }
            }
            else // USE CUDA!
            {
                // extract features from the observed image

                /*GpuImage<Gray, Byte> gpuObservedImage = new GpuImage<Gray, byte>(image);
                 * GpuMat<float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null);
                 * GpuMat<float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints);
                 * GpuMat<int> gpuMatchIndices = new GpuMat<int>(gpuObservedDescriptors.Size.Height, k, 1, true);
                 * GpuMat<float> gpuMatchDist = new GpuMat<float>(gpuObservedDescriptors.Size.Height, k, 1, true);
                 * GpuMat<Byte> gpuMask = new GpuMat<byte>(gpuMatchIndices.Size.Height, 1, 1);
                 * Stream stream = new Stream();
                 *
                 * matcher_gpu.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                 * indices = new Matrix<int>(gpuMatchIndices.Size);
                 * mask = new Matrix<byte>(gpuMask.Size);
                 *
                 * //gpu implementation of VoteForUniqueness
                 * GpuMat<float> col0 = gpuMatchDist.Col(0);
                 * GpuMat<float> col1 = gpuMatchDist.Col(1);
                 * GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
                 * GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                 *
                 * observedKeyPoints = new VectorOfKeyPoint();
                 * surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);
                 *
                 * stream.WaitForCompletion();
                 *
                 * gpuMask.Download(mask);
                 * gpuMatchIndices.Download(indices);
                 *
                 * if (GpuInvoke.CountNonZero(gpuMask) >= 4) {
                 * int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                 * if (nonZeroCount >= 4)
                 *  homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                 * }*/
            }

            if (homography != null)
            {
                Rectangle[] rect = { modelImage.ROI };
                return(rect);
            }

            return(null);
        }
Code Example #16
File: Form1.cs  Project: kittawee/Final-Project
        private void ProcessFrame(object sender, EventArgs arg)
        {
            Image <Bgr, Byte>  frame         = _capture.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            Image <Gray, Byte> grayframe     = frame.Convert <Gray, Byte>();
            Image <Gray, Byte> modelImage    = new Image <Gray, byte>("DataPlate/" + 10 + ".jpg");
            Image <Gray, Byte> observedImage = grayframe;
            Stopwatch          watch;
            HomographyMatrix   homography = null;
            SURFDetector       surfCPU    = new SURFDetector(500, false);
            VectorOfKeyPoint   modelKeyPoints;
            VectorOfKeyPoint   observedKeyPoints;
            Matrix <int>       indices;
            Matrix <float>     dist;
            Matrix <byte>      mask;


            if (GpuInvoke.HasCuda)
            {
                GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
                using (GpuImage <Gray, Byte> gpuModelImage = new GpuImage <Gray, byte>(modelImage))

                    #region SURF
                    //extract features from the object image
                    using (GpuMat <float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat <float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (GpuBruteForceMatcher matcher = new GpuBruteForceMatcher(GpuBruteForceMatcher.DistanceType.L2))
                            {
                                modelKeyPoints = new VectorOfKeyPoint();
                                surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                watch = Stopwatch.StartNew();

                                // extract features from the observed image
                                using (GpuImage <Gray, Byte> gpuObservedImage = new GpuImage <Gray, byte>(observedImage))
                                    using (GpuMat <float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat <float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuObservedDescriptors.Size.Height, 2, 1))
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuMatchIndices.Size, 1))
                                                {
                                                    observedKeyPoints = new VectorOfKeyPoint();
                                                    surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);
                                                    matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, 2, null);
                                                    indices = new Matrix <int>(gpuMatchIndices.Size);
                                                    dist    = new Matrix <float>(indices.Size);
                                                    gpuMatchIndices.Download(indices);
                                                    gpuMatchDist.Download(dist);

                                                    mask = new Matrix <byte>(dist.Rows, 1);

                                                    mask.SetValue(255);

                                                    Features2DTracker.VoteForUniqueness(dist, 0.8, mask);

                                                    int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                                                    if (nonZeroCount >= 4)
                                                    {
                                                        nonZeroCount = Features2DTracker.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                                                        if (nonZeroCount >= 4)
                                                        {
                                                            homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 3);
                                                        }
                                                    }

                                                    watch.Stop();
                                                }
                            }
                #endregion
            }
            else
            {
                //extract features from the object image
                modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
                //MKeyPoint[] kpts = modelKeyPoints.ToArray();
                Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);
                watch = Stopwatch.StartNew();

                // extract features from the observed image
                observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
                Matrix <float>    observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
                BruteForceMatcher matcher             = new BruteForceMatcher(BruteForceMatcher.DistanceType.L2F32);
                matcher.Add(modelDescriptors);
                int k = 2;
                indices = new Matrix <int>(observedDescriptors.Rows, k);

                dist = new Matrix <float>(observedDescriptors.Rows, k);
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);

                mask = new Matrix <byte>(dist.Rows, 1);

                mask.SetValue(255);

                Features2DTracker.VoteForUniqueness(dist, 0.8, mask);

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 20)
                {
                    nonZeroCount = Features2DTracker.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 20)
                    {
                        homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 3);
                        XMLData();
                    }
                    else
                    {
                        textBox1.Text = string.Empty;
                        textBox2.Text = string.Empty;
                        textBox3.Text = string.Empty;
                        textBox4.Text = string.Empty;
                        textBox5.Text = string.Empty;
                    }
                }
                watch.Stop();
                #region draw the projected region on the image
                if (homography != null)
                {  //draw a rectangle along the projected model
                    Rectangle rect = modelImage.ROI;
                    PointF[]  pts  = new PointF[] {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    homography.ProjectPoints(pts);
                    frame.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 2);
                }
                #endregion
                CaptureImageBox.Image = frame;
                DataImageBox.Image    = modelImage;
            }
        }
Code Example #17
        public static bool FindModelImageInObservedImage(Image <Gray, byte> modelImage, Image <Gray, byte> observedImage)
        {
            var surfCpu = new SURFDetector(500, false);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;

            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.8;

            if (GpuInvoke.HasCuda)
            {
                GpuSURFDetector surfGpu = new GpuSURFDetector(surfCpu.SURFParams, 0.01f);
                using (GpuImage <Gray, byte> gpuModelImage = new GpuImage <Gray, byte>(modelImage))
                    //extract features from the object image
                    using (GpuMat <float> gpuModelKeyPoints = surfGpu.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat <float> gpuModelDescriptors = surfGpu.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (GpuBruteForceMatcher <float> matcher = new GpuBruteForceMatcher <float>(DistanceType.L2))
                            {
                                modelKeyPoints = new VectorOfKeyPoint();
                                surfGpu.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);

                                // extract features from the observed image
                                using (GpuImage <Gray, byte> gpuObservedImage = new GpuImage <Gray, byte>(observedImage))
                                    using (GpuMat <float> gpuObservedKeyPoints = surfGpu.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat <float> gpuObservedDescriptors = surfGpu.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuObservedDescriptors.Size.Height, k, 1, true))
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuObservedDescriptors.Size.Height, k, 1, true))
                                                    using (GpuMat <Byte> gpuMask = new GpuMat <byte>(gpuMatchIndices.Size.Height, 1, 1))
                                                        using (var stream = new Emgu.CV.GPU.Stream())
                                                        {
                                                            matcher.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                                                            indices = new Matrix <int>(gpuMatchIndices.Size);
                                                            mask    = new Matrix <byte>(gpuMask.Size);

                                                            // GPU implementation of VoteForUniqueness
                                                            using (GpuMat <float> col0 = gpuMatchDist.Col(0))
                                                                using (GpuMat <float> col1 = gpuMatchDist.Col(1))
                                                                {
                                                                    GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
                                                                    GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                                                                }

                                                            observedKeyPoints = new VectorOfKeyPoint();
                                                            surfGpu.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                                            //wait for the stream to complete its tasks
                                                            //We can perform other CPU-intensive work here while waiting for the stream to complete.
                                                            stream.WaitForCompletion();

                                                            gpuMask.Download(mask);
                                                            gpuMatchIndices.Download(indices);

                                                            if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                                                            {
                                                                int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                                                                if (nonZeroCount >= 4)
                                                                {
                                                                    Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                                                                }
                                                                if ((double)nonZeroCount / mask.Height > 0.02)
                                                                {
                                                                    return(true);
                                                                }
                                                            }
                                                        }
                            }
            }
            else
            {
                //extract features from the object image
                modelKeyPoints = surfCpu.DetectKeyPointsRaw(modelImage, null);
                Matrix <float> modelDescriptors = surfCpu.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

                // extract features from the observed image
                observedKeyPoints = surfCpu.DetectKeyPointsRaw(observedImage, null);
                Matrix <float>            observedDescriptors = surfCpu.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
                BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);
                matcher.Add(modelDescriptors);

                indices = new Matrix <int>(observedDescriptors.Rows, k);
                using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
                {
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                    mask = new Matrix <byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                    }
                }

                if ((double)nonZeroCount / mask.Height > 0.02)
                {
                    return(true);
                }
            }

            //Draw the matched keypoints
            //var result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints, indices, new Bgr(0, 0, 255), new Bgr(255, 0, 0), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
            //result.Save( @"C:\Users\D.Markachev\Desktop\bleh-keypoints.jpg" );

            return(false);
        }
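The final acceptance rule is a coverage heuristic: the model is declared present when more than 2% of the mask rows survive the filtering. A hypothetical call site (file names are placeholders):

        bool found = FindModelImageInObservedImage(
            new Image <Gray, byte>("model.png"),
            new Image <Gray, byte>("scene.png"));
        Console.WriteLine(found ? "match" : "no match");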
Code Example #18
        public Image <Bgr, Byte> DrawResult(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out long matchTime, out double area, int minarea, out Point center)
        {
            center = new Point(320, 240);
            Stopwatch watch;

            area = 0;
            double modelarea = (modelImage.ROI.Right - modelImage.ROI.Left) * (modelImage.ROI.Bottom - modelImage.ROI.Top); // NOTE: currently unused
            // homography matrix
            HomographyMatrix homography = null;

            // SURF feature detector
            SURFDetector surfCPU = new SURFDetector(500, false);

            // keypoints of the model image and of the observed image
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;

            Matrix <int>  indices;
            Matrix <byte> mask;

            // number of nearest neighbours for the k-NN match
            int k = 2;
            // uniqueness (ratio-test) threshold
            double uniquenessThreshold = 0.8;


            // extract SURF keypoints and descriptors from the model image
            modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
            Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

            watch = Stopwatch.StartNew();

            // extract SURF keypoints and descriptors from the observed image
            observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
            Matrix <float> observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);

            if (observedDescriptors == null)
            {
                watch.Stop(); matchTime = watch.ElapsedMilliseconds;
                return(null);
            }

            // match the descriptors with a brute-force matcher
            BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);
            indices = new Matrix <int>(observedDescriptors.Rows, k);
            // filter the match pairs by descriptor distance
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                // 2-nearest-neighbour descriptor matching
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                // record candidate matches in the mask
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                // apply the uniqueness threshold; surviving matches stay set in the mask
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 10)
            {
                // reject matches with inconsistent scale or rotation; survivors stay in the mask
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 10)
                {
                    // estimate the homography from the remaining matches
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }

            watch.Stop();

            // (optionally) draw the matched keypoints:
            //Image<Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,indices, new Bgr(0, 0, 255), new Bgr(0, 255, 0), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
            Image <Bgr, byte> result = null;

            System.Drawing.Bitmap bm = observedImage.ToBitmap();
            result = new Image <Bgr, byte>(bm);
            #region draw the projected region on the image
            // draw the region projected by the homography
            if (homography != null)
            {
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                // project the four corners of the model image through the homography and draw the quadrilateral
                homography.ProjectPoints(pts);
                area = getarea(pts); double xsum = 0; double ysum = 0;
                foreach (PointF point in pts)
                {
                    xsum += point.X; ysum += point.Y;
                }
                center = new Point(Convert.ToInt32(xsum / 4), Convert.ToInt32(ysum / 4));
                if (area > minarea)
                {
                    Image <Bgr, byte> temp = new Image <Bgr, Byte>(result.Width, result.Height);
                    temp.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);

                    //temp.Save("D:\\temp\\" + (++index) + ".jpg");

                    int a = CountContours(temp.ToBitmap());
                    if (a == 2)
                    {
                        result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
                    }
                    else
                    {
                        matchTime = 0; area = 0; return(result);
                    }
                }
            }
            else
            {
                area = 0;
            }
            #endregion

            matchTime = watch.ElapsedMilliseconds;

            return(result);
        }
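A hypothetical caller that uses the area and center outputs for tracking (model and frame are placeholder Image<Gray, Byte> instances):

        long matchTime; double area; Point center;
        Image <Bgr, Byte> vis = DrawResult(model, frame, out matchTime, out area, 500, out center); // model/frame: placeholders
        if (vis != null && area > 0)
        {
            Console.WriteLine("target at " + center + ", area " + area);
        }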
Code Example #19
        static void Run()
        {
            Image <Gray, Byte> modelImage    = new Image <Gray, byte>("box.png");
            Image <Gray, Byte> observedImage = new Image <Gray, byte>("box_in_scene.png");
            Stopwatch          watch;
            HomographyMatrix   homography = null;

            SURFDetector surfCPU = new SURFDetector(500, false);

            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;
            Matrix <float>   dist;
            Matrix <byte>    mask;

            if (GpuInvoke.HasCuda)
            {
                GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
                using (GpuImage <Gray, Byte> gpuModelImage = new GpuImage <Gray, byte>(modelImage))
                    //extract features from the object image
                    using (GpuMat <float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat <float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (GpuBruteForceMatcher matcher = new GpuBruteForceMatcher(GpuBruteForceMatcher.DistanceType.L2))
                            {
                                modelKeyPoints = new VectorOfKeyPoint();
                                surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                watch = Stopwatch.StartNew();

                                // extract features from the observed image
                                using (GpuImage <Gray, Byte> gpuObservedImage = new GpuImage <Gray, byte>(observedImage))
                                    using (GpuMat <float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat <float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuObservedDescriptors.Size.Height, 2, 1))
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuMatchIndices.Size, 1))
                                                {
                                                    observedKeyPoints = new VectorOfKeyPoint();
                                                    surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                                    matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, 2, null);

                                                    indices = new Matrix <int>(gpuMatchIndices.Size);
                                                    dist    = new Matrix <float>(indices.Size);
                                                    gpuMatchIndices.Download(indices);
                                                    gpuMatchDist.Download(dist);

                                                    mask = new Matrix <byte>(dist.Rows, 1);

                                                    mask.SetValue(255);

                                                    Features2DTracker.VoteForUniqueness(dist, 0.8, mask);

                                                    int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                                                    if (nonZeroCount >= 4)
                                                    {
                                                        nonZeroCount = Features2DTracker.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                                                        if (nonZeroCount >= 4)
                                                        {
                                                            homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 3);
                                                        }
                                                    }

                                                    watch.Stop();
                                                }
                            }
            }
            else
            {
                //extract features from the object image
                modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
                //MKeyPoint[] kpts = modelKeyPoints.ToArray();
                Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

                watch = Stopwatch.StartNew();

                // extract features from the observed image
                observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
                Matrix <float> observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);

                BruteForceMatcher matcher = new BruteForceMatcher(BruteForceMatcher.DistanceType.L2F32);
                matcher.Add(modelDescriptors);
                int k = 2;
                indices = new Matrix <int>(observedDescriptors.Rows, k);
                dist    = new Matrix <float>(observedDescriptors.Rows, k);
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);

                mask = new Matrix <byte>(dist.Rows, 1);

                mask.SetValue(255);

                Features2DTracker.VoteForUniqueness(dist, 0.8, mask);

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
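                // at least 4 point correspondences are needed to estimate a homography, hence the check below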
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DTracker.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 3);
                    }
                }

                watch.Stop();
            }

            //Draw the matched keypoints
            Image <Bgr, Byte> result = Features2DTracker.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DTracker.KeypointDrawType.NOT_DRAW_SINGLE_POINTS);

            #region draw the projected region on the image
            if (homography != null)
            { //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
            }
            #endregion

            ImageViewer.Show(result, String.Format("Matched using {0} in {1} milliseconds", GpuInvoke.HasCuda ? "GPU" : "CPU", watch.ElapsedMilliseconds));
        }
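
Both branches above prune the k = 2 match distances with VoteForUniqueness(dist, 0.8, mask) before attempting any geometry. Functionally this is the distance-ratio ("uniqueness") test; a minimal sketch of the idea follows, with a helper name of our own rather than an Emgu CV API:

        //Sketch of the distance-ratio test that the VoteForUniqueness call
        //applies, assuming the k = 2 layout of dist: column 0 holds the best
        //distance, column 1 the second best.
        static void RatioTestSketch(Matrix <float> dist, double threshold, Matrix <byte> mask)
        {
            for (int i = 0; i < dist.Rows; i++)
            {
                //keep a match only if its best neighbour is clearly better
                //than the runner-up
                if (dist[i, 0] > threshold * dist[i, 1])
                {
                    mask[i, 0] = 0; //ambiguous match: reject
                }
            }
        }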
コード例 #20
//.......... method that performs the SURF detection and updates the form ..........
        public void DoSURFDetectAndUpdateForm(object sender, EventArgs e)
        {
            try
            {
                imgSceneColor             = captureWebcam.QueryFrame(); //try to grab one frame from the camera image
                lbPreparingCamera.Visible = false;
            }
            catch (Exception ex)                                    //if that fails, display the error
            {
                this.Text = ex.Message;
            }


            if (imgSceneColor == null)
            {
                this.Text = "error: no frame was read from the camera";     //the next frame could not be read into the image variable
                return;                                                     //without a frame there is nothing to process below
            }
            if (imgToFindColor == null)                                 //if there is no image to find yet...
            {
                ibResult.Image = imgSceneColor.ToBitmap();              //...just show the scene image in the image box
            }
            //once we get here, both images are OK and the SURF detection can start

            SURFDetector surfDetector = new SURFDetector(500, false);   //SURF object; the parameters are the hessian threshold (how strong a keypoint must be) and the extended flag

            Image <Gray, Byte> imgSceneGray  = null;                    //grayscale scene image
            Image <Gray, Byte> imgToFindGray = null;                    //grayscale image to find

            VectorOfKeyPoint vkpSceneKeyPoints;                         //keypoint vector for the scene image
            VectorOfKeyPoint vkpToFindKeyPoints;                        //keypoint vector for the image to find

            Matrix <Single> mtxSceneDescriptors;                        //scene descriptor matrix, queried for nearest neighbours
            Matrix <Single> mtxToFindDescriptor;                        //descriptor matrix of the image being searched for

            Matrix <int>    mtxMatchIndices;                            //matrix of descriptor indices, filled in by the matcher (KnnMatch())
            Matrix <Single> mtxDistance;                                //matrix of the corresponding match distances, filled in likewise
            Matrix <Byte>   mtxMask;                                    //input and output of VoteForUniqueness(), marking which rows still match

            BruteForceMatcher <Single> bruteForceMatcher;               //for every descriptor in the first set, the matcher finds...
                                                                        //...the closest descriptor in the second set it was trained with

            HomographyMatrix homographyMatrix = null;                   //used with ProjectPoints() to locate the found image in the scene
            int    intKNumNearestNeighbors    = 2;                      //k, the number of nearest neighbours to search
            double dblUniquenessThreshold     = 0.8;                    //distance-ratio threshold a match must beat to count as unique

            int intNumNonZeroElements;                                  //holds the number of non-zero elements in the mask matrix,...
                                                                        //...as returned by the voting calls and by GetHomographyMatrixFromMatchedFeatures()

            //parameters for the VoteForSizeAndOrientation() call

            double dblScareIncrement = 1.5;                      //scale difference between neighbouring bins
            int    intRotationBins   = 20;                       //number of rotation bins over 360 degrees (with 20 bins, each covers 18 degrees; 20*18=360)

            double dblRansacReprojectionThreshold = 2.0;         //for GetHomographyMatrixFromMatchedFeatures(): the maximum reprojection error...
                                                                 //...allowed for a point pair to count as an inlier

            Rectangle rectImageToFind = new Rectangle();         //rectangle covering the whole image to find

            PointF [] pointsF;                                   //4 points framing the location of the found image in the scene (float)
            Point []  points;                                    //the same 4 points as integers

            imgSceneGray = imgSceneColor.Convert <Gray, Byte>(); //convert the scene to grayscale

            if (isImgToFind == true)
            {
                try
                {
                    imgToFindGray = imgToFindColor.Convert <Gray, Byte>();       //convert the image to find to grayscale
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.ToString());
                }

                vkpSceneKeyPoints   = surfDetector.DetectKeyPointsRaw(imgSceneGray, null);                       //detect keypoints in the scene; the second parameter is an optional mask (null = not needed)
                mtxSceneDescriptors = surfDetector.ComputeDescriptorsRaw(imgSceneGray, null, vkpSceneKeyPoints); //compute the scene descriptors from the scene image,...
                //...its mask and its keypoints

                vkpToFindKeyPoints = surfDetector.DetectKeyPointsRaw(imgToFindGray, null);                          //detect keypoints on the image to find; the mask is...
                //...null because none is needed

                mtxToFindDescriptor = surfDetector.ComputeDescriptorsRaw(imgToFindGray, null, vkpToFindKeyPoints);                //compute the descriptors of the image to find (image, mask, its keypoints)

                bruteForceMatcher = new BruteForceMatcher <Single>(DistanceType.L2);                                              //brute-force matcher using L2, the squared Euclidean distance
                bruteForceMatcher.Add(mtxToFindDescriptor);                                                                       //train the matcher on the descriptors of the image to find

                if (mtxSceneDescriptors != null)                                                                                  //guard against a featureless scene, e.g. a blank wall
                {
                    mtxMatchIndices = new Matrix <int>(mtxSceneDescriptors.Rows, intKNumNearestNeighbors);                        //index matrix (one row per scene descriptor, one column per neighbour)
                    mtxDistance     = new Matrix <Single>(mtxSceneDescriptors.Rows, intKNumNearestNeighbors);                     //the same shape for the distances

                    bruteForceMatcher.KnnMatch(mtxSceneDescriptors, mtxMatchIndices, mtxDistance, intKNumNearestNeighbors, null); //find the k nearest matches (null = no mask needed)

                    mtxMask = new Matrix <Byte>(mtxDistance.Rows, 1);                                                             //mask matrix
                    mtxMask.SetValue(255);                                                                                        //initialise every element of the mask to 255

                    Features2DToolbox.VoteForUniqueness(mtxDistance, dblUniquenessThreshold, mtxMask);                            //filter the matches: any match that is NOT unique gets rejected

                    intNumNonZeroElements = CvInvoke.cvCountNonZero(mtxMask);                                                     //count the non-zero elements left in the mask
                    if (intNumNonZeroElements >= 4)
                    {
                        //eliminate matched features whose scale and rotation disagree with those of the majority
                        intNumNonZeroElements = Features2DToolbox.VoteForSizeAndOrientation(vkpToFindKeyPoints, vkpSceneKeyPoints, mtxMatchIndices, mtxMask, dblScareIncrement, intRotationBins);
                        if (intNumNonZeroElements >= 4)             //if at least 4 non-zero elements remain (the minimum for estimating a homography)
                        {
                            //get the homography matrix using RANSAC (random sample consensus)
                            homographyMatrix = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(vkpToFindKeyPoints, vkpSceneKeyPoints, mtxMatchIndices, mtxMask, dblRansacReprojectionThreshold);
                        }
                    }

                    imgCopyOfImageToFindWithBorder = imgToFindColor.Copy();     //copy the image to find, so drawing happens on the copy and the original stays untouched

                    //draw a 2 px border around the copy, in the same colour used for the box around the found image
                    imgCopyOfImageToFindWithBorder.Draw(new Rectangle(1, 1, imgCopyOfImageToFindWithBorder.Width - 3, imgCopyOfImageToFindWithBorder.Height - 3), bgrFoundImageColor, 2);

                    //draw the scene image and the image to find together on the result image
                    //three cases depending on which checkboxes are ticked (draw keypoints and/or draw matching lines)
                    if (ckDrawKeyPoints.Checked == true && ckDrawMatchingLines.Checked == true)
                    {
                        //use DrawMatches() to join the scene image with the image to find, then draw keypoints and lines
                        imgResult = Features2DToolbox.DrawMatches(imgCopyOfImageToFindWithBorder,
                                                                  vkpToFindKeyPoints,
                                                                  imgSceneColor,
                                                                  vkpSceneKeyPoints,
                                                                  mtxMatchIndices,
                                                                  bgrMatchingLineColor,
                                                                  bgrKeyPointColor,
                                                                  mtxMask,
                                                                  Features2DToolbox.KeypointDrawType.DEFAULT);
                    }
                    else if (ckDrawKeyPoints.Checked == true && ckDrawMatchingLines.Checked == false)
                    {
                        //draw the scene with its keypoints onto the result image
                        imgResult = Features2DToolbox.DrawKeypoints(imgSceneColor,
                                                                    vkpSceneKeyPoints,
                                                                    bgrKeyPointColor,
                                                                    Features2DToolbox.KeypointDrawType.DEFAULT);
                        //then draw the keypoints on the copy of the image to find
                        imgCopyOfImageToFindWithBorder = Features2DToolbox.DrawKeypoints(imgCopyOfImageToFindWithBorder,
                                                                                         vkpToFindKeyPoints,
                                                                                         bgrKeyPointColor,
                                                                                         Features2DToolbox.KeypointDrawType.DEFAULT);
                        //then append the copy of the image to find to the result image
                        imgResult = imgResult.ConcateHorizontal(imgCopyOfImageToFindWithBorder);
                    }
                    else if (ckDrawKeyPoints.Checked == false && ckDrawMatchingLines.Checked == false)
                    {
                        imgResult = imgSceneColor;                                                  //use the scene as the result image
                        imgResult = imgResult.ConcateHorizontal(imgCopyOfImageToFindWithBorder);    //and append the copy of the searched image to it
                    }
                    else
                    {
                        MessageBox.Show("Error");     //this should never be reached
                    }
                }
                else
                {
                    imgResult = imgSceneColor;                                                  //use the scene as the result image
                    imgResult = imgResult.ConcateHorizontal(imgCopyOfImageToFindWithBorder);    //and append the copy of the searched image to it
                }

                if (homographyMatrix != null)    //make sure the matrix actually holds something
                {
                    //draw a frame on the scene part of the result image, at the place where the searched image was found
                    rectImageToFind.X      = 0;     //start with the rectangle at the full size of the image to find
                    rectImageToFind.Y      = 0;
                    rectImageToFind.Width  = imgToFindGray.Width;
                    rectImageToFind.Height = imgToFindGray.Height;

                    //build an array of PointF holding the rectangle's corners
                    pointsF = new PointF[] { new PointF(rectImageToFind.Left, rectImageToFind.Top),
                                             new PointF(rectImageToFind.Right, rectImageToFind.Top),
                                             new PointF(rectImageToFind.Right, rectImageToFind.Bottom),
                                             new PointF(rectImageToFind.Left, rectImageToFind.Bottom) };

                    //ProjectPoints() rewrites pointsF (by reference) to the frame's location on the part of the scene where the image was found
                    homographyMatrix.ProjectPoints(pointsF);

                    //convert from PointF to Point: ProjectPoints() works with PointF while DrawPolyline() takes Point
                    points = new Point[] { Point.Round(pointsF[0]),
                                           Point.Round(pointsF[1]),
                                           Point.Round(pointsF[2]),
                                           Point.Round(pointsF[3]) };

                    //draw the frame around the found image on the scene part of the result image
                    imgResult.DrawPolyline(points, true, new Bgr(0, 255, 0), 2);

                    //draw a red dash at the centre of the object
                    int x, y, x1, y1, xW, yW;

                    x  = Convert.ToInt32(points[0].X);
                    y  = Convert.ToInt32(points[0].Y);
                    x1 = Convert.ToInt32(points[2].X);
                    y1 = Convert.ToInt32(points[2].Y);

                    xW  = x1 - x;
                    xW /= 2;
                    xW += x;
                    yW  = y1 - y;
                    yW /= 2;
                    yW += y;
                    Point [] pp = new Point[] { new Point(xW, yW), new Point(xW + 10, yW) };    //marks the centre of the detected object
                    imgResult.DrawPolyline(pp, true, new Bgr(0, 0, 255), 5);

                    XX = xW.ToString();
                    YY = yW.ToString();
                    //////////when the object leaves the field of view
                    if (xW == 0 || yW == 0 || xW < -200 || yW < -200 || xW > 800 || yW > 800)
                    {
                        targetLost(-1);
                    }
                    else
                    {
                        targetLost(1);
                    }
                    //////////
                }
                else
                {
                    targetLost(-1);     //no homography, so the target is lost
                }
                //SURF done; update the whole form

                ibResult.Image = imgResult.ToBitmap();          //show the result in the image box
            }
        }
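
The object centre above is taken as the midpoint of two opposite corners (points[0] and points[2]). For a strongly skewed projection, averaging all four projected corners, as the earlier example does, is slightly more robust; a minimal sketch with a hypothetical helper name:

        //Hypothetical alternative to the two-corner midpoint used above:
        //the centroid of all four projected corners.
        private static Point CentreOfQuad(PointF[] pts)
        {
            float xsum = 0, ysum = 0;
            foreach (PointF p in pts)
            {
                xsum += p.X;
                ysum += p.Y;
            }
            return new Point((int)Math.Round(xsum / 4f), (int)Math.Round(ysum / 4f));
        }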
コード例 #21
File: DrawMatches.cs Project: AdrianaStancu/Rubik
        public static Image <Bgr, Byte> Draw(String modelImageFileName, String observedImageFileName, out long matchTime)
        {
            Image <Gray, Byte> modelImage    = new Image <Gray, byte>(modelImageFileName);
            Image <Gray, Byte> observedImage = new Image <Gray, byte>(observedImageFileName);
            Stopwatch          watch;
            HomographyMatrix   homography = null;

            SURFDetector surfCPU = new SURFDetector(600, false);

            //SIFTDetector surfCPU = new SIFTDetector();
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;

            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.8;

            //extract features from the object image
            modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
            Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

            watch = Stopwatch.StartNew();

            // extract features from the observed image
            observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
            Matrix <float>            observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
            BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                //nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }

            watch.Stop();

            Image <Bgr, Byte> modelImage2    = new Image <Bgr, Byte>(modelImageFileName);
            Image <Bgr, Byte> observedImage2 = new Image <Bgr, Byte>(observedImageFileName);

            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage2, modelKeyPoints, observedImage2, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            if (homography != null)
            {
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
            }
            for (int i = 0; i < observedKeyPoints.Size; ++i)
            {
                Bgr   pixel = observedImage2[(int)observedKeyPoints[i].Point.Y, (int)observedKeyPoints[i].Point.X]; //sample the pixel under the keypoint once
                Color color = Color.FromArgb((int)pixel.Red, (int)pixel.Green, (int)pixel.Blue);
                float hue        = color.GetHue();
                float sat        = color.GetSaturation();
                float bright     = color.GetBrightness();
                float satThr     = 0.0f / 240.0f;
                float brightTrh  = 40.0f / 240.0f;
                float brightThr2 = 15.0f / 24.0f;
                if (sat < satThr && bright < brightTrh)
                {
                    continue;
                }
                if (bright > brightThr2)
                {
                    result.Draw(new CircleF(observedKeyPoints[i].Point, 4), new Bgr(Color.White), -1);
                    continue;
                }
                if (hue > 230)//red
                {
                    result.Draw(new CircleF(observedKeyPoints[i].Point, 4), new Bgr(Color.Red), -1);
                }
                //else if(hue>180)//purple
                //    result.Draw(new CircleF(observedKeyPoints[i].Point, 4), new Bgr(Color.Purple), -1);
                else if (hue > 120)//blue
                {
                    result.Draw(new CircleF(observedKeyPoints[i].Point, 4), new Bgr(Color.Blue), -1);
                }
                else if (hue > 60) //green (note: drawn in yellow here)
                {
                    result.Draw(new CircleF(observedKeyPoints[i].Point, 4), new Bgr(Color.Yellow), -1);
                }
                else if (hue > 30)//yellow
                {
                    result.Draw(new CircleF(observedKeyPoints[i].Point, 4), new Bgr(Color.Yellow), -1);
                }
                else
                {
                    result.Draw(new CircleF(observedKeyPoints[i].Point, 4), new Bgr(Color.Red), -1);
                }
            }

            matchTime = watch.ElapsedMilliseconds;

            return(result);
        }
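
The hue cut-offs in the colouring loop above (230, 120, 60, 30) can be folded into a single helper. A sketch of that refactor, keeping the example's colour choices (including yellow for the 60-120 green band); the helper name is ours:

        //Hypothetical consolidation of the hue thresholds used above; the
        //cut-offs and output colours mirror the example exactly.
        private static Color ClassifyStickerByHue(float hue)
        {
            if (hue > 230) return Color.Red;    //reds wrap past the top of the hue circle
            if (hue > 120) return Color.Blue;
            if (hue > 60)  return Color.Yellow; //the example paints the green band yellow
            if (hue > 30)  return Color.Yellow;
            return Color.Red;
        }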