Example #1
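FindMatch2 matches a model image against a template whose SURF key points and descriptors were computed ahead of time (template.keyPointsSurf / template.descriptorSurf). It detects features in the model image, matches them with a FLANN matcher in linear (brute-force) mode, filters the matches by uniqueness and by scale/orientation consistency, and estimates a homography once enough votes survive. The return value is the number of surviving votes.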
        public static int FindMatch2(Mat modelImage, TemplateContainer.ImageData template, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            Stopwatch watch;

            // matching parameters used below (typical values assumed here)
            int k = 2;                          // nearest neighbours per query descriptor
            double uniquenessThreshold = 0.80;  // ratio threshold for VoteForUniqueness
            int nonZeroCount = 0;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
            //using (UMat uObservedImage = template.image.Mat.GetUMat(AccessType.Read))
            {
                // extract features from the model image; featureDetector is assumed to be
                // a class-level detector (e.g. a SURF instance, matching the precomputed
                // SURF data on the template)
                Mat modelDescriptors = new Mat();
                featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                watch = Stopwatch.StartNew();

                // reuse the observed image's features, precomputed and cached on the
                // template, instead of detecting them again
                //featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                observedKeyPoints       = template.keyPointsSurf;
                Mat observedDescriptors = template.descriptorSurf;
                // LinearIndexParams gives brute-force (linear) matching: slower but exact.
                // KdTreeIndexParams can be used instead for faster matching with a slight
                // loss in accuracy.
                using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                {
                    matcher.Add(modelDescriptors);

                    matcher.KnnMatch(observedDescriptors, matches, k, null);
                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                    mask.SetTo(new MCvScalar(255));
                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                    nonZeroCount = CvInvoke.CountNonZero(mask);
                    if (nonZeroCount >= 10)   // enough unique matches to continue
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                   matches, mask, 1.8, 18);
                        if (nonZeroCount >= 12)   // enough scale/rotation-consistent matches
                        {
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                  observedKeyPoints, matches, mask, 2);
                        }
                    }
                }
                watch.Stop();
            }
            matchTime = watch.ElapsedMilliseconds;
            return nonZeroCount;
        }
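For reference, a call might look like the sketch below. ReportMatch, scene and template are hypothetical names standing in for the caller's own code; the template is assumed to have its keyPointsSurf/descriptorSurf fields filled when it was loaded.

        // Hypothetical caller for FindMatch2 (sketch, not part of the original code)
        public static void ReportMatch(Mat scene, TemplateContainer.ImageData template)
        {
            long matchTime;
            VectorOfKeyPoint modelKeyPoints, observedKeyPoints;
            Mat mask, homography;
            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                int votes = FindMatch2(scene, template, out matchTime, out modelKeyPoints,
                                       out observedKeyPoints, matches, out mask, out homography);
                if (homography != null)
                    Console.WriteLine("Matched with " + votes + " votes in " + matchTime + " ms");
                else
                    Console.WriteLine("No reliable match found");
            }
        }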
Example #2
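This example keeps an earlier Draw helper (which rendered the matched features and the projected model outline) commented out for reference, and adds MatchResult2, a thin wrapper around FindMatch2 that reduces the match to a single score: the surviving vote count when a homography was found, zero otherwise.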
        // /// <summary>
        // /// Draw the model image and observed image, the matched features and homography projection.
        // /// </summary>
        // /// <param name="modelImage">The model image</param>
        // /// <param name="observedImage">The observed image</param>
        // /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        // /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        //        public static Mat Draw(Mat modelImage, Mat observedImage, out long matchTime)
        //        {

        //            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
        //            {
        //                Mat mask;
        //                FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
        //                   out mask, out homography);

        //                //Draw the matched keypoint
        //                Mat result = new Mat();
        //                Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
        //                   matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);

        //                #region draw the projected region on the image

        //                if (homography != null)
        //                {
        //                    //draw a rectangle along the projected model
        //                    Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
        //                    PointF[] pts = new PointF[]
        //                    {
        //                  new PointF(rect.Left, rect.Bottom),
        //                  new PointF(rect.Right, rect.Bottom),
        //                  new PointF(rect.Right, rect.Top),
        //                  new PointF(rect.Left, rect.Top)
        //                    };
        //                    pts = CvInvoke.PerspectiveTransform(pts, homography);

        //#if NETFX_CORE
        //               Point[] points = Extensions.ConvertAll<PointF, Point>(pts, Point.Round);
        //#else
        //                    Point[] points = Array.ConvertAll<PointF, Point>(pts, Point.Round);
        //#endif
        //                    using (VectorOfPoint vp = new VectorOfPoint(points))
        //                    {
        //                        CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
        //                    }
        //                }
        //                #endregion

        //                return result;

        //            }
        //        }



        public static int MatchResult2(Mat modelImage, TemplateContainer.ImageData observedImage, out long matchTime)
        {
            Mat homography;
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                Mat mask;
                int nonZeroCount = FindMatch2(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                                              out mask, out homography);

                // a non-null homography means the match is geometrically consistent:
                // report its vote count, otherwise report no match
                if (homography != null)
                {
                    return nonZeroCount;
                }
                else
                {
                    return 0;
                }
            }
        }
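Because MatchResult2 collapses the comparison to one integer, it lends itself to scanning several templates and keeping the best score. The sketch below assumes the usual System.Collections.Generic using; FindBestTemplate, scene and templates are illustrative names, not part of the original code.

        // Hypothetical template scan (sketch): returns the best-scoring template, or null
        public static TemplateContainer.ImageData FindBestTemplate(Mat scene, IEnumerable<TemplateContainer.ImageData> templates)
        {
            long time;
            int bestScore = 0;
            TemplateContainer.ImageData best = null;
            foreach (TemplateContainer.ImageData template in templates)
            {
                int score = MatchResult2(scene, template, out time);
                if (score > bestScore)
                {
                    bestScore = score;
                    best = template;
                }
            }
            return best;
        }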