Example No. 1
        public Tuple <Image <Bgr, byte>, HomographyMatrix> DrawHomography(Image <Gray, byte> model, Image <Gray, byte> observed, double uniquenessThreshold)
        {
            HomographyMatrix  homography = null;
            Image <Bgr, Byte> result     = observed.Convert <Bgr, byte>();

            SURFDetector     surfCPU = new SURFDetector(500, false);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;
            Matrix <byte>    mask;
            int k = 2;

            modelKeyPoints = surfCPU.DetectKeyPointsRaw(model, null);             // Extract features from the object image
            Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(model, null, modelKeyPoints);

            observedKeyPoints = surfCPU.DetectKeyPointsRaw(observed, null);             // Extract features from the observed image

            if (modelKeyPoints.Size <= 0)
            {
                throw new System.ArgumentException("Can't find any keypoints in your model image!");
            }

            if (observedKeyPoints.Size > 0)
            {
                Matrix <float>            observedDescriptors = surfCPU.ComputeDescriptorsRaw(observed, null, observedKeyPoints);
                BruteForceMatcher <float> matcher             = new BruteForceMatcher <float> (DistanceType.L2);
                matcher.Add(modelDescriptors);

                indices = new Matrix <int> (observedDescriptors.Rows, k);

                using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k)) {
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                    mask = new Matrix <byte> (dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 10)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 10)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                    }
                }

                result = Features2DToolbox.DrawMatches(model, modelKeyPoints, observed, observedKeyPoints,
                                                       indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
            }

            return(new Tuple <Image <Bgr, byte>, HomographyMatrix>(result, homography));
        }
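A minimal usage sketch for DrawHomography above, assuming the same Emgu CV 2.x API; "model.png" and "scene.png" are placeholder paths, and System.Drawing is assumed to be in scope. When the returned HomographyMatrix is non-null, it can project the model's corners into the observed image via ProjectPoints:

        public void DrawHomographyUsageSketch()
        {
            using (Image <Gray, byte> model = new Image <Gray, byte>("model.png"))    // placeholder path
            using (Image <Gray, byte> observed = new Image <Gray, byte>("scene.png")) // placeholder path
            {
                Tuple <Image <Bgr, byte>, HomographyMatrix> match = DrawHomography(model, observed, 0.8);
                if (match.Item2 != null) // a homography was found
                {
                    Rectangle rect = model.ROI;
                    PointF[]  pts  = new PointF[]
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    match.Item2.ProjectPoints(pts); // project the model corners into the observed image (in place)
                    match.Item1.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 2);
                }
            }
        }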
        //This is where the matching is mainly done
        public void FindMatch(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints,
                              out VectorOfKeyPoint observedKeyPoints, out Matrix <int> indices, out Matrix <byte> mask, out HomographyMatrix homography)
        {
            int          k = 2;
            double       uniquenessThreshold = 0.8;
            SURFDetector surfCPU             = new SURFDetector(500, false); //configure the SURF feature detector
            Stopwatch    watch;                                              //measures the processing time

            homography = null;                                               //set when the images match; used to obtain the bounding quadrilateral

            //extract features from the object image
            modelKeyPoints = new VectorOfKeyPoint();
            Matrix <float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints); //modelKeyPoints: detected keypoints; modelDescriptors: their feature descriptors



            // extract features from the observed image
            observedKeyPoints = new VectorOfKeyPoint();
            Matrix <float> observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints); //observedKeyPoints: detected keypoints

            //ImagePrecess processor = new ImagePrecess(observedImage.ToBitmap(),320,240);
            //observedDescriptors = processor.GetImageFeature();
            //observedKeyPoints=processor.GetImageVectorOfKeyPoint();


            watch = Stopwatch.StartNew();
            //
            BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null); //get the k-nearest matches
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);//checks whether each match from the KNN step is ambiguous or uncertain; the result is written into mask
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }
            watch.Stop();
            matchTime = watch.ElapsedMilliseconds;
        }
        public static List <ImageSearchResult> SearchImageForObjects(WorldObject modelObject, string imageToSearch)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;

            int nonZeroThreshold = 10;

            ObjectFeatures targetImageFeatures = DetectFeatures_Brisk(imageToSearch);

            Mat mask;

            List <ImageSearchResult> searchResults = new List <ImageSearchResult>();

            foreach (ObjectView view in modelObject.Views)
            {
                if (view == null)
                {
                    continue;
                }

                VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

                BFMatcher matcher = new BFMatcher(DistanceType.L2);
                matcher.Add(view.Features.Descriptors);

                matcher.KnnMatch(targetImageFeatures.Descriptors, matches, k, null);

                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

                mask.SetTo(new MCvScalar(255));

                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                int nonZeroCount = CvInvoke.CountNonZero(mask);

                if (nonZeroCount >= nonZeroThreshold)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(view.Features.KeyPoints,
                                                                               targetImageFeatures.KeyPoints, matches, mask, 1.5, 20);

                    if (nonZeroCount >= nonZeroThreshold)
                    {
                        Mat homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(view.Features.KeyPoints,
                                                                                                  targetImageFeatures.KeyPoints, matches, mask, 2);

                        searchResults.Add(new ImageSearchResult(view, homography, matches, targetImageFeatures, mask));
                    }
                }
            }

            return(searchResults);
        }
Example No. 4
        /// <summary>
        /// Detects KAZE features in both images, matches them with a FLANN-based matcher,
        /// and estimates a homography when enough matches survive the filtering votes.
        /// </summary>
        /// <param name="modelImage">The model (object) image.</param>
        /// <param name="observedImage">The observed (scene) image.</param>
        /// <param name="matchTime">The matching time, in milliseconds.</param>
        /// <param name="modelKeyPoints">The keypoints detected in the model image.</param>
        /// <param name="observedKeyPoints">The keypoints detected in the observed image.</param>
        /// <param name="matches">The k-nearest-neighbour matches.</param>
        /// <param name="mask">The mask of matches that survived the votes.</param>
        /// <param name="homography">The estimated homography, or null when none was found.</param>
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            int    k = 2;
            double uniquenessThreshold = 0.80;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    KAZE featureDetector = new KAZE();

                    //extract features from the object image
                    Mat modelDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                    watch = Stopwatch.StartNew();

                    // extract features from the observed image
                    Mat observedDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                    // Bruteforce, slower but more accurate
                    // You can use KDTree for faster matching with slight loss in accuracy
                    using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                        using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                            using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                            {
                                matcher.Add(modelDescriptors);
                                matcher.KnnMatch(observedDescriptors, matches, k, null);
                                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                mask.SetTo(new MCvScalar(255));
                                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                int nonZeroCount = CvInvoke.CountNonZero(mask);
                                if (nonZeroCount >= 4)
                                {
                                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                                    if (nonZeroCount >= 4)
                                    {
                                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                    }
                                }
                            }
                    watch.Stop();
                }
            matchTime = watch.ElapsedMilliseconds;
        }
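For reference, a hedged sketch of how the out parameters of the KAZE-based FindMatch above are typically consumed (Emgu CV 3.x-style API assumed): draw the surviving matches, then project the model rectangle through the homography.

        public static Mat DrawFindMatchResult(Mat modelImage, Mat observedImage)
        {
            long matchTime;
            VectorOfKeyPoint modelKeyPoints, observedKeyPoints;
            Mat mask, homography;
            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches, out mask, out homography);

                Mat result = new Mat();
                Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                              matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);

                if (homography != null)
                {
                    // project the model rectangle into the observed image
                    Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                    PointF[]  pts  = new PointF[]
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    pts = CvInvoke.PerspectiveTransform(pts, homography);
                    Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
                    using (VectorOfPoint vp = new VectorOfPoint(points))
                    {
                        CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                    }
                }
                return result;
            }
        }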
Example No. 5
        public Image <Bgr, byte> ReturnCompared(out Image <Bgr, byte> def, out Image <Bgr, byte> twistdef)
        {
            var image      = sourceImage.Copy();
            var twistedImg = additionalImage.Copy();
            //keypoint descriptor extractor
            Brisk        descriptor = new Brisk();
            GFTTDetector detector   = new GFTTDetector(40, 0.01, 5, 3, true);
            //since in this case the inverse transform has to be computed,
            //the modified (twisted) image serves as the base
            VectorOfKeyPoint GFP1           = new VectorOfKeyPoint();
            UMat             baseDesc       = new UMat();
            var              twistedImgGray = twistedImg.Convert <Gray, byte>();
            var              baseImgGray    = image.Convert <Gray, byte>();
            UMat             bimg           = twistedImgGray.Mat.GetUMat(AccessType.Read);
            VectorOfKeyPoint GFP2           = new VectorOfKeyPoint();
            UMat             twistedDesc    = new UMat();
            UMat             timg           = baseImgGray.Mat.GetUMat(AccessType.Read);

            //detect raw keypoint information in the images
            detector.DetectRaw(bimg, GFP1);
            //compute descriptors for the detected keypoints
            descriptor.Compute(bimg, GFP1, baseDesc);
            detector.DetectRaw(timg, GFP2);
            descriptor.Compute(timg, GFP2, twistedDesc);


            BFMatcher matcher = new BFMatcher(DistanceType.L2);

            //container for the keypoint matches
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            //add the descriptors of the base points
            matcher.Add(baseDesc);
            //match against the descriptors of the modified image
            matcher.KnnMatch(twistedDesc, matches, 2, null);


            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            //identify the unique matches
            Mat resM = new Mat(image.Height, image.Width * 2, DepthType.Cv8U, 3);
            var res  = resM.ToImage <Bgr, byte>();

            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
            int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(GFP1, GFP2, matches, mask, 1.5, 20);

            Features2DToolbox.DrawMatches(twistedImg, GFP1, image, GFP2, matches, res, new MCvScalar(255, 0,
                                                                                                     0), new MCvScalar(255, 0, 0), mask);
            def      = image;
            twistdef = twistedImg;
            return(res);
        }
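The comments above state that the goal is the inverse transform; a hedged follow-up sketch (same Emgu CV 3.x API, assuming enough matches survive the votes so that a homography can be estimated) that warps the twisted image back into the base image's frame:

        public Image <Bgr, byte> UntwistSketch(Image <Bgr, byte> image, Image <Bgr, byte> twistedImg,
                                               VectorOfKeyPoint GFP1, VectorOfKeyPoint GFP2,
                                               VectorOfVectorOfDMatch matches, Mat mask)
        {
            //the homography maps model (twisted) coordinates into base-image coordinates
            Mat homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(GFP1, GFP2, matches, mask, 2);
            var destImage = new Image <Bgr, byte>(image.Size);
            CvInvoke.WarpPerspective(twistedImg, destImage, homography, image.Size);
            return destImage;
        }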
Example No. 6
        public static void FindMatch(
            Mat observedImage,
            out long matchTime,
            out VectorOfKeyPoint observedKeyPoints,
            VectorOfVectorOfDMatch matches,
            out Mat mask,
            out Mat homography)
        {
            int    k = 2;
            double uniquenessThreshold = 0.80;

            Stopwatch watch;

            homography = null;

            //modelKeyPoints and modelDescriptors are assumed to be class-level fields
            //populated from the model image beforehand; this snippet only processes the observed image
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
            {
                KAZE featureDetector = new KAZE();

                watch = Stopwatch.StartNew();

                Mat observedDescriptors = new Mat();
                featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                using (LinearIndexParams ip = new LinearIndexParams())
                    using (SearchParams sp = new SearchParams())
                        using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                        {
                            matcher.Add(modelDescriptors);

                            matcher.KnnMatch(observedDescriptors, matches, k, null);
                            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                            mask.SetTo(new MCvScalar(255));
                            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                            int nonZeroCount = CvInvoke.CountNonZero(mask);
                            if (nonZeroCount >= 4)
                            {
                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                                if (nonZeroCount >= 4)
                                {
                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                }
                            }
                            watch.Stop();
                        }
                matchTime = watch.ElapsedMilliseconds;
            }
        }
Example No. 7
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;
            double hessianThresh       = 500;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.ToUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.ToUMat(AccessType.Read))
                {
                    SURF surfCPU = new SURF(hessianThresh);
                    //extract features from the object image
                    UMat modelDescriptors = new UMat();
                    surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                    watch = Stopwatch.StartNew();

                    // extract features from the observed image
                    UMat observedDescriptors = new UMat();
                    surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                    BFMatcher matcher = new BFMatcher(DistanceType.L2);
                    matcher.Add(modelDescriptors);

                    matcher.KnnMatch(observedDescriptors, matches, k, null);
                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                    mask.SetTo(new MCvScalar(255));
                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                    if (nonZeroCount >= 4)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                   matches, mask, 1.5, 20);
                        if (nonZeroCount >= 4)
                        {
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                  observedKeyPoints, matches, mask, 2);
                        }
                    }

                    watch.Stop();
                }

            matchTime = watch.ElapsedMilliseconds;
        }
Example No. 8
        /// <summary>
        /// Faster matching, but with lower accuracy
        /// </summary>
        /// <param name="template">Feature data of the template</param>
        /// <param name="observedScene">Feature data of the observed scene to match against</param>
        /// <returns>Returns the matched-data class</returns>
        public static SURFMatchedData MatchSURFFeatureByFLANN(SURFFeatureData template, SURFFeatureData observedScene)
        {
            Matrix <byte>    mask;
            int              k = 2;
            double           uniquenessThreshold = 0.3;
            Matrix <int>     indices;
            HomographyMatrix homography = null;
            Stopwatch        watch;
            Matrix <float>   dists;

            try
            {
                watch = Stopwatch.StartNew();
                #region FLANN Match CPU
                //match
                Index flann = new Index(template.GetDescriptors(), 4);

                indices = new Matrix <int>(observedScene.GetDescriptors().Rows, k);
                using (dists = new Matrix <float>(observedScene.GetDescriptors().Rows, k))
                {
                    flann.KnnSearch(observedScene.GetDescriptors(), indices, dists, k, 2);
                    mask = new Matrix <byte>(dists.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dists, uniquenessThreshold, mask);
                }
                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                Console.WriteLine("-----------------\nVoteForUniqueness pairCount => " + nonZeroCount.ToString() + "\n-----------------");
                if (nonZeroCount >= 4) //originally 4
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(template.GetKeyPoints(), observedScene.GetKeyPoints(), indices, mask, 1.2, 30);
                    Console.WriteLine("VoteForSizeAndOrientation pairCount => " + nonZeroCount.ToString() + "\n-----------------");
                    //filter out all unnecessary pairs based on distance between pairs

                    if (nonZeroCount >= 30) //originally 4
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(template.GetKeyPoints(), observedScene.GetKeyPoints(), indices, mask, 5); //originally 5
                    }
                }
                #endregion
                watch.Stop();
                Console.WriteLine("Cal SURF Match time => " + watch.ElapsedMilliseconds.ToString() + "\n-----------------");


                return(new SURFMatchedData(indices, homography, mask, nonZeroCount, template));
            }
            catch (CvException ex)
            {
                System.Windows.Forms.MessageBox.Show(ex.ErrorMessage);
                return(null);
            }
        }
Example No. 9
        private void CalculateMatch(string username, string keyDescClass)
        {
            try
            {
                string decKeyClass = DecryptText(keyDescClass);

                KeypointsDescriptors kpDescriptors = DeSerializeObject <KeypointsDescriptors>(decKeyClass);

                Matrix <byte>    useDescriptors = kpDescriptors.descriptors;
                VectorOfKeyPoint useKeyPoints   = kpDescriptors.keyPoints;


                using (var db = new BioMexDatabaseEntities1())
                {
                    BFMatcher     matcher = new Emgu.CV.Features2D.BFMatcher(DistanceType.L2);
                    Matrix <byte> mask;

                    User use = (from person in db.Users where (person.US_USERNAME == username) select person).FirstOrDefault();

                    if (use != null && use.US_USERNAME.Equals(username))
                    {
                        //build the user profile only after confirming a matching user exists
                        var regUser = new UserClass(use.US_USERNAME, use.US_PASSWORD, use.IdealFeatures.FirstOrDefault().IF_PASSWORD_COUNT, use.IdealFeatures.FirstOrDefault().IF_SHIFT_CLASS,
                                                    use.IdealFeatures.FirstOrDefault().IF_TYPING_SPEED, 0, DeserializeIntegers(use.IdealFeatures.FirstOrDefault().IF_KEY_ORDER.ToString()).ToArray(), DeserializeIntegers(use.IdealFeatures.FirstOrDefault().IF_KEY_LATENCIES.ToString()).ToArray(),
                                                    DeserializeIntegers(use.IdealFeatures.FirstOrDefault().IF_KEY_PRESS_DURATION.ToString()).ToArray(), DeserializeIntegers(use.IdealFeatures.FirstOrDefault().IF_PAIRED_KEYS.ToString()).ToArray());
                        matcher.Add(useDescriptors);

                        Matrix <byte>    observedDesc      = DeSerializeObject <Matrix <byte> >(use.Features.FirstOrDefault().FaceFeature.FF_DESCRIPTORS);
                        VectorOfKeyPoint observedKeypoints = DeSerializeObject <VectorOfKeyPoint>(use.Features.FirstOrDefault().FaceFeature.FF_KEY_POINTS);

                        VectorOfVectorOfDMatch vectMatch = new VectorOfVectorOfDMatch();

                        matcher.KnnMatch(observedDesc, vectMatch, 2, null);
                        mask = new Matrix <byte>(vectMatch.Size, 1);
                        mask.SetValue(255);
                        //pass the underlying Mat directly; converting via ToUMat().ToMat() would write
                        //the uniqueness vote into a temporary copy and discard it
                        Features2DToolbox.VoteForUniqueness(vectMatch, 0.8, mask.Mat);
                    }
                }
            }
            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine("Error while matching");
                System.Diagnostics.Debug.WriteLine(ex.Message);
            }
        }
Example No. 10
        /// <summary>
        /// Matches the observed descriptors against the model descriptors and, when a homography
        /// is found, returns the model rectangle projected into the observed image.
        /// </summary>
        /// <param name="observedKeyPoints">The keypoints of the observed image.</param>
        /// <param name="modelKeyPoints">The keypoints of the model image.</param>
        /// <param name="observedDescriptors">The descriptors of the observed image.</param>
        /// <param name="modelDescriptors">The descriptors of the model image.</param>
        /// <param name="modelSize">The size of the model image.</param>
        /// <returns>The projected model outline, or an empty vector when no homography was found.</returns>
        public static VectorOfPoint Matching(VectorOfKeyPoint observedKeyPoints, VectorOfKeyPoint modelKeyPoints, Mat observedDescriptors, Mat modelDescriptors, Size modelSize)
        {

            using (var ip = new LinearIndexParams())
                using (var sp = new SearchParams())
                    using (var matcher = new FlannBasedMatcher(ip, sp))
                        using (var matches = new VectorOfVectorOfDMatch())
                        {
                            Mat homography = new Mat();
                            matcher.Add(modelDescriptors);

                            matcher.KnnMatch(observedDescriptors, matches, 2, null);
                            var mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                            mask.SetTo(new MCvScalar(255));
                            Features2DToolbox.VoteForUniqueness(matches, 0.80, mask);

                            int nonZeroCount = CvInvoke.CountNonZero(mask);
                            if (nonZeroCount >= 4)
                            {
                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                                if (nonZeroCount >= 4)
                                {
                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                }
                            }

                            if (homography != null && !homography.Size.IsEmpty)
                            {
                                Rectangle rect = new Rectangle(Point.Empty, modelSize);
                                PointF[]  pts  = new PointF[]
                                {
                                    new PointF(rect.Left, rect.Bottom),
                                    new PointF(rect.Right, rect.Bottom),
                                    new PointF(rect.Right, rect.Top),
                                    new PointF(rect.Left, rect.Top)
                                };

                                pts = CvInvoke.PerspectiveTransform(pts, homography);

                                Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);

                                return(new VectorOfPoint(points));
                            }
                            else
                            {
                                return(new VectorOfPoint());
                            }
                        }
        }
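A short, hedged usage sketch for Matching above: when the returned VectorOfPoint is non-empty, it outlines the detected object and can be drawn directly onto the observed image.

        public static void DrawDetectedObject(Mat observedImage, VectorOfKeyPoint observedKeyPoints, VectorOfKeyPoint modelKeyPoints,
                                              Mat observedDescriptors, Mat modelDescriptors, Size modelSize)
        {
            using (VectorOfPoint contour = Matching(observedKeyPoints, modelKeyPoints, observedDescriptors, modelDescriptors, modelSize))
            {
                if (contour.Size > 0)
                {
                    CvInvoke.Polylines(observedImage, contour, true, new MCvScalar(0, 0, 255), 3); // red outline around the match
                }
            }
        }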
Example No. 11
        /// <summary>
        /// Searches the given image for any of the model's feature sets.
        ///
        /// TODO: thresholds must be set
        /// </summary>
        /// <param name="model">The feature sets of the model object.</param>
        /// <param name="image">The image to search.</param>
        /// <returns>True when any model feature set passes the match votes.</returns>
        public static bool SearchImageForObjects(List <ObjectFeatures> model, Bitmap image)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;

            ObjectFeatures targetImageFeatures = DetectFeatures(image);

            Mat mask;

            foreach (ObjectFeatures of in model)
            {
                //use a fresh matcher and match list for each feature set; reusing one matcher
                //and calling Add inside the loop would accumulate descriptors across iterations
                VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

                BFMatcher matcher = new BFMatcher(DistanceType.L2);
                matcher.Add(of.Descriptors);

                matcher.KnnMatch(targetImageFeatures.Descriptors, matches, k, null);

                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

                mask.SetTo(new MCvScalar(255));

                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                int nonZeroCount = CvInvoke.CountNonZero(mask);

                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(of.KeyPoints,
                                                                               targetImageFeatures.KeyPoints, matches, mask, 1.5, 20);

                    if (nonZeroCount >= 4)
                    {
                        return(true);

                        /*
                         * Mat homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(view.Features.KeyPoints,
                         *  targetImageFeatures.KeyPoints, matches, mask, 2);
                         *
                         * searchResults.Add(new ImageSearchResult(view, homography, matches));
                         */
                    }
                }
            }

            return(false);
        }
        public Tuple <Image <Bgr, byte>, HomographyMatrix> SURFMatcher_KNN(Image <Gray, byte> model, Image <Gray, byte> observed, SURFDetector surfCPU, List <VectorOfKeyPoint> keyPointsList, double uniquenessThreshold, int TM)
        {
            HomographyMatrix  homography        = null;
            Image <Bgr, Byte> result            = null;
            VectorOfKeyPoint  modelKeyPoints    = keyPointsList.First <VectorOfKeyPoint>();
            VectorOfKeyPoint  observedKeyPoints = keyPointsList.Last <VectorOfKeyPoint>();
            Matrix <int>      indices;
            Matrix <byte>     mask;
            int k = 2;
            BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(DistanceType.L2);


            try
            {
                result = observed.Convert <Bgr, byte>();
                Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(model, null, modelKeyPoints);
                matcher.Add(modelDescriptors);
                if (observedKeyPoints.Size > 0)
                {
                    Matrix <float> observedDescriptors = surfCPU.ComputeDescriptorsRaw(observed, null, observedKeyPoints);
                    indices = new Matrix <int>(observedDescriptors.Rows, k);

                    using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
                    {
                        matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                        mask = new Matrix <byte>(dist.Rows, 1);
                        mask.SetValue(255);
                        Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                    }

                    int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                    if (nonZeroCount >= TM)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                        if (nonZeroCount >= TM)
                        {
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                        }
                    }
                    result = Features2DToolbox.DrawMatches(model, modelKeyPoints, observed, observedKeyPoints,
                                                           indices, new Bgr(100, 200, 214), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
                }
                return(new Tuple <Image <Bgr, byte>, HomographyMatrix>(result, homography));
            }
            catch (Exception)
            {
                throw; //rethrow, preserving the original stack trace
            }
        }
Example No. 13
        /// <summary>
        /// Match using BruteForce (more accurate, but slower)
        /// </summary>
        /// <param name="template">Feature data of the template</param>
        /// <param name="observedScene">Feature data of the observed scene to match against</param>
        /// <returns>Returns the matched-data class</returns>
        public static SURFMatchedData MatchSURFFeatureByBruteForce(SURFFeatureData template, SURFFeatureData observedScene)
        {
            Matrix <byte>    mask;
            int              k = 2;
            double           uniquenessThreshold = 0.5; //default:0.8
            Matrix <int>     indices;
            HomographyMatrix homography = null;
            Stopwatch        watch;

            try
            {
                watch = Stopwatch.StartNew();
                #region bruteForce match for CPU
                //match
                BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(DistanceType.L2Sqr); //default:L2
                matcher.Add(template.GetDescriptors());

                indices = new Matrix <int>(observedScene.GetDescriptors().Rows, k);
                using (Matrix <float> dist = new Matrix <float>(observedScene.GetDescriptors().Rows, k))
                {
                    matcher.KnnMatch(observedScene.GetDescriptors(), indices, dist, k, null);
                    mask = new Matrix <byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                Console.WriteLine("-----------------\nVoteForUniqueness pairCount => " + nonZeroCount.ToString() + "\n-----------------");
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(template.GetKeyPoints(), observedScene.GetKeyPoints(), indices, mask, 1.5, 30); //defaults: 1.5, 10; scale increment and rotation bins tuned here
                    Console.WriteLine("VoteForSizeAndOrientation pairCount => " + nonZeroCount.ToString() + "\n-----------------");
                    if (nonZeroCount >= 25) //default: 4, raised here
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(template.GetKeyPoints(), observedScene.GetKeyPoints(), indices, mask, 5);
                    }
                }
                #endregion
                watch.Stop();
                Console.WriteLine("Cal SURF Match time => " + watch.ElapsedMilliseconds.ToString() + "\n-----------------");

                return(new SURFMatchedData(indices, homography, mask, nonZeroCount, template));
            }
            catch (CvException ex)
            {
                System.Windows.Forms.MessageBox.Show(ex.ErrorMessage);
                return(null);
            }
        }
        public static void FindMatch(string pageFile, string templateFile)
        {
            Image <Rgb, byte> page     = getPreprocessedImage(pageFile);
            Image <Rgb, byte> template = getPreprocessedImage(templateFile);

            var detector = new ORBDetector();
            VectorOfKeyPoint templateKeyPoints = new VectorOfKeyPoint();
            Mat templateDescriptors            = new Mat();

            detector.DetectAndCompute(template, null, templateKeyPoints, templateDescriptors, false);

            VectorOfKeyPoint pageKeyPoints = new VectorOfKeyPoint();
            Mat pageDescriptors            = new Mat();

            detector.DetectAndCompute(page, null, pageKeyPoints, pageDescriptors, false);
            using (var matcher = new BFMatcher(DistanceType.L1))
            {
                matcher.Add(templateDescriptors);
                VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

                //VectorOfDMatch matches2 = new VectorOfDMatch();
                //matcher.Match(pageDescriptors, matches2);


                matcher.KnnMatch(pageDescriptors, matches, 2, null);

                Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));
                Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
                Mat homography   = new Mat();
                int nonZeroCount = CvInvoke.CountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(templateKeyPoints, pageKeyPoints, matches, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(templateKeyPoints, pageKeyPoints, matches, mask, 2);
                    }
                }

                Mat result = new Mat();
                Features2DToolbox.DrawMatches(template, templateKeyPoints, page, pageKeyPoints, matches, result, new MCvScalar(0, 255, 0), new MCvScalar(255, 0, 0), mask, Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

                //Features2DToolbox.DrawMatches(template, templateKeyPoints, page, pageKeyPoints, matches2, result, new MCvScalar(0, 255, 0), new MCvScalar(255, 0, 0), null, Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

                MainForm.This.PageBox.Image = result.ToBitmap();
            }
        }
Example No. 15
        public void FindMatch(Image <Gray, byte> modelImage, Image <Gray, byte> observedImage, double hessianThresh, int k,
                              double uniquenessThreshold, VectorOfVectorOfDMatch matches, out VectorOfKeyPoint modelKeyPoints,
                              out VectorOfKeyPoint observedKeyPoints, out Mat mask, out Mat homography)
        {
            homography        = null;
            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            CudaSURFDetector surfCuda = new CudaSURFDetector((float)hessianThresh);

            using (GpuMat gpuModelImage = new GpuMat(modelImage))
                //extract features from the object image
                using (GpuMat gpuModelKeyPoints = surfCuda.DetectKeyPointsRaw(gpuModelImage, null))
                    using (
                        GpuMat gpuModelDescriptors = surfCuda.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                        using (CudaBFMatcher matcher = new CudaBFMatcher(DistanceType.L2))
                        {
                            surfCuda.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);

                            // extract features from the observed image
                            using (GpuMat gpuObservedImage = new GpuMat(observedImage))
                                using (GpuMat gpuObservedKeyPoints = surfCuda.DetectKeyPointsRaw(gpuObservedImage, null))
                                    using (
                                        GpuMat gpuObservedDescriptors = surfCuda.ComputeDescriptorsRaw(gpuObservedImage, null,
                                                                                                       gpuObservedKeyPoints))
                                    {
                                        matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);

                                        surfCuda.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                        mask.SetTo(new MCvScalar(255));
                                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                        int nonZeroCount = CvInvoke.CountNonZero(mask);
                                        if (nonZeroCount >= 4)
                                        {
                                            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                                       matches, mask, 1.5, 20);
                                            if (nonZeroCount >= 4)
                                            {
                                                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                                      observedKeyPoints, matches, mask, 2);
                                            }
                                        }
                                    }
                        }
        }
Example No. 16
File: SURF.cs Project: okeanz/IPS
        public static void FindMatchWM(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography, Feature2D computer, Feature2D detector)
        {
            Stopwatch watch;

            modelKeyPoints    = new VectorOfKeyPoint(); // keypoints on the model
            observedKeyPoints = new VectorOfKeyPoint(); // keypoints on the larger (observed) image
            homography        = null;
            int k = 2;


            using (Mat uModelImage = modelImage.Clone())
                using (Mat uObservedImage = observedImage.Clone())
                {
                    //get the descriptors from the first image
                    Mat modelDescriptors = new Mat();
                    DetectAndCompute(uModelImage, out modelKeyPoints, out modelDescriptors, detector, computer);

                    watch = Stopwatch.StartNew();

                    // ... and from the second image
                    Mat observedDescriptors = new Mat();
                    DetectAndCompute(uObservedImage, out observedKeyPoints, out observedDescriptors, detector, computer);


                    BFMatcher matcher = new BFMatcher(DistanceType.L2); // matcher that compares the descriptors of the two images
                    matcher.Add(modelDescriptors);

                    matcher.KnnMatch(observedDescriptors, matches, k, null); // perform the matching
                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                    mask.SetTo(new MCvScalar(255));
                    Features2DToolbox.VoteForUniqueness(matches, 0.8, mask); // build the mask (see below)

                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                    if (nonZeroCount >= 4)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                   matches, mask, 1.5, 20);
                        if (nonZeroCount >= 4)
                        {
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, // estimate the region where the model should appear
                                                                                                  observedKeyPoints, matches, mask, 2);
                        }
                    }

                    watch.Stop();
                }
            matchTime = watch.ElapsedMilliseconds;
        }
Example No. 17
        public IDrawer FindMatch(KeyFrame keyFrame, Image <Bgr, Byte> observedImage, List <KeyFrame> keyframes = null)
        {
            //CPU (the detector), descriptor, matcher, matches, observedDescriptors, mask,
            //homography, k and uniquenessThreshold are assumed to be class-level fields
            if (keyFrame.KeyPoints == null)
            {
                keyFrame.KeyPoints = new VectorOfKeyPoint(CPU.Detect(keyFrame.Frame));
            }
            if (keyFrame.Descriptors == null)
            {
                keyFrame.Descriptors = new Mat();
                descriptor.Compute(keyFrame.Frame, keyFrame.KeyPoints, keyFrame.Descriptors);
            }
            // extract features from the observed image
            observedKeyPoints = new VectorOfKeyPoint(CPU.Detect(observedImage));
            descriptor.Compute(observedImage, observedKeyPoints, observedDescriptors);
            matcher.Add(keyFrame.Descriptors);

            matcher.KnnMatch(observedDescriptors, matches, k, null);
            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

            int nonZeroCount = CvInvoke.CountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(keyFrame.KeyPoints, observedKeyPoints,
                                                                           matches, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(keyFrame.KeyPoints, observedKeyPoints, matches, mask, 2);
                }

                nonZeroCount = CvInvoke.CountNonZero(mask);

                if (nonZeroCount < 9)
                {
                    homography = null;
                }

                //if (keyframes != null && homography == null)
                //    keyframes.Add(new KeyFrame() { Frame = observedImage, KeyPoints = observedKeyPoints, Descriptors = observedDescriptors });
            }

            return(this);
        }
Example No. 18
        protected Mat GetHomography(VectorOfKeyPoint keyPoints1, VectorOfKeyPoint keyPoints2, VectorOfVectorOfDMatch matches, Mat mask)
        {
            Mat homography = null;

            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, 0.9, mask);
            int i = CvInvoke.CountNonZero(mask);

            if (i >= 4)
            {
                i = Features2DToolbox.VoteForSizeAndOrientation(keyPoints1, keyPoints2, matches, mask, 1.5, 20);
                if (i >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(keyPoints1, keyPoints2, matches, mask, 2);
                }
            }
            return(homography);
        }
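GetHomography above assumes the caller has already run the matcher and allocated the mask with one row per match; a hedged calling sketch:

        protected Mat MatchAndGetHomography(Mat modelDescriptors, Mat observedDescriptors,
                                            VectorOfKeyPoint modelKeyPoints, VectorOfKeyPoint observedKeyPoints)
        {
            using (BFMatcher matcher = new BFMatcher(DistanceType.L2))
            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                matcher.Add(modelDescriptors);
                matcher.KnnMatch(observedDescriptors, matches, 2, null);
                Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1); // GetHomography itself sets it to 255
                return GetHomography(modelKeyPoints, observedKeyPoints, matches, mask);
            }
        }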
Example No. 19
        private void MatchKeypoints(Mat frame, out Mat mask, out Mat homography)
        {
            homography = null;
            FindFeatures(frame, out frameKeyPoints, out frameDescriptors);

            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            featureMatcher.KnnMatch(frameDescriptors, matches, 2, null);

            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255.0));

            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
            //only estimate the homography when enough matches survive the uniqueness vote
            if (CvInvoke.CountNonZero(mask) >= 4)
            {
                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                      frameKeyPoints, matches, mask, 2);
            }

            featureMatcher.Clear();
            featureMatcher.Add(modelDescriptors);
        }
        //==================== The classes below are the reorganized versions =====================

        //Determine whether two images match
        //model : the partial image (the piece to search for)
        //observed : the whole image; the model may appear as one of the regions inside it
        public bool TwoImageIsMatch(Matrix <float> modelDescriptors, Matrix <float> observedDescriptors, int width, int Height, VectorOfKeyPoint modelKeyPoints, VectorOfKeyPoint observedKeyPoints)
        {
            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.8;
            Matrix <int>  indices;
            //
            BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null); //get the k-nearest matches
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            //indices = new Matrix<int>(observedDescriptors.Rows, k);
            HomographyMatrix homography = null;
            int nonZeroCount            = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }
            if (homography != null)
            {
                if (HomographySizeIsLegal(homography, width, Height))
                {
                    return(true);
                }
            }

            return(false);
        }
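HomographySizeIsLegal is referenced above but not shown; a hypothetical, hedged sketch of such a check (Emgu CV 2.x API, System.Linq assumed in scope): project the model rectangle through the homography and require the projected quadrilateral's bounding box to stay within assumed sanity bounds relative to the model size.

        public bool HomographySizeIsLegal(HomographyMatrix homography, int width, int height)
        {
            PointF[] pts = new PointF[]
            {
                new PointF(0, 0),
                new PointF(width, 0),
                new PointF(width, height),
                new PointF(0, height)
            };
            homography.ProjectPoints(pts); // project the model corners (in place)

            float minX = pts.Min(p => p.X), maxX = pts.Max(p => p.X);
            float minY = pts.Min(p => p.Y), maxY = pts.Max(p => p.Y);
            //assumed sanity bounds: the projected region should be neither tiny nor wildly inflated
            return (maxX - minX) > width * 0.5 && (maxX - minX) < width * 2 &&
                   (maxY - minY) > height * 0.5 && (maxY - minY) < height * 2;
        }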
Example No. 21
        private static Mat FindMatchWithoutCuda(Mat modelImage, Mat observedImage, VectorOfKeyPoint modelKeyPoints, VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, Mat homography, int k, double uniquenessThreshold, double hessianThresh)
        {
            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    SURF surfCPU = new SURF(hessianThresh, upright: true);

                    UMat modelDescriptors;
                    if (!FindDescriptors(surfCPU, modelKeyPoints, uModelImage, out modelDescriptors))
                    {
                        Logger.Log(LogType.Error, "Feature Descriptor for Model image is empty. Is the image too small?");
                        return(mask = null);
                    }

                    UMat observedDescriptors;
                    if (!FindDescriptors(surfCPU, observedKeyPoints, uObservedImage, out observedDescriptors))
                    {
                        Logger.Log(LogType.Error, "Feature Descriptor for Observed image is empty. Is the image too small?");
                        return(mask = null);
                    }

                    BFMatcher matcher = new BFMatcher(DistanceType.L2);
                    matcher.Add(modelDescriptors);
                    matcher.KnnMatch(observedDescriptors, matches, k, null);

                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                    mask.SetTo(new MCvScalar(255));
                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                    if (nonZeroCount >= 4)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                        if (nonZeroCount >= 4)
                        {
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                        }
                    }
                }

            return(homography);
        }
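FindDescriptors is referenced above but not shown; a hypothetical sketch of such a helper (not the original project's code) that detects keypoints, computes descriptors, and reports whether anything was found:

        //Hypothetical helper, hedged: the original FindDescriptors implementation is not shown above
        private static bool FindDescriptors(SURF surf, VectorOfKeyPoint keyPoints, UMat image, out UMat descriptors)
        {
            descriptors = new UMat();
            surf.DetectAndCompute(image, null, keyPoints, descriptors, false);
            return !descriptors.IsEmpty; // false when no features were detected
        }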
Example No. 22
        private void FindMatch(
            string modelImagePath, Mat observedImage,
            VectorOfVectorOfDMatch matches,
            out Mat homography
            )
        {
            int    k = 20;
            double uniquenessThreshold = 0.80;

            //Stopwatch watch;
            homography = null;
            var matcher = _modelMatcher[modelImagePath];

            using (var observedFeature = ExtractFeatures(observedImage))
            {
                matcher.KnnMatch(observedFeature.Descriptors, matches, k, null);
                using (var mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1))
                {
                    mask.SetTo(new MCvScalar(255));
                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                    if (nonZeroCount >= k / 2)
                    {
                        foreach (var modelFeature in matcher.Features)
                        {
                            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelFeature.KeyPoints, observedFeature.KeyPoints,
                                                                                       matches, mask, 1.5, 20);
                            //Console.WriteLine("2:" + nonZeroCount.ToString());
                            if (nonZeroCount >= k)
                            {
                                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelFeature.KeyPoints,
                                                                                                      observedFeature.KeyPoints, matches, mask, 2);
                                break;
                            }
                        }
                        //Console.WriteLine("1:"+nonZeroCount.ToString());
                    }
                }
            }
        }
Example No. 23
        public void FindMatches(Image <Rgb, byte> SubMap, out VectorOfKeyPoint VectorSubMapKeyPoint,
                                out Mat SubMapDiscriptors, out VectorOfVectorOfDMatch matches,
                                out Mat mask, out System.Drawing.Rectangle zone, out Mat homography, int k, double uniquenessThreshold, SIFTParametrs parametrs)
        {
            VectorSubMapKeyPoint = new VectorOfKeyPoint();
            SubMapDiscriptors    = new Mat();
            matches = new VectorOfVectorOfDMatch();
            zone    = new System.Drawing.Rectangle();
            using (SIFT siftCPU = new SIFT(parametrs.nFeatures, parametrs.nOctaveLayers,
                                           parametrs.contrastThreshold, parametrs.edgeThreshold, parametrs.sigma))
            {
                siftCPU.DetectAndCompute(SubMap, null, VectorSubMapKeyPoint, SubMapDiscriptors, false);
            }
            using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                    using (Emgu.CV.Features2D.DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                    {
                        matcher.Add(SubMapDiscriptors);
                        //MapDiscriptors and VectorMapKeyPoint are assumed to be class-level fields holding the full map's features
                        matcher.KnnMatch(MapDiscriptors, matches, k, null);
                    }

            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

            homography = null;

            int nonZeroCount = CvInvoke.CountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(VectorSubMapKeyPoint, VectorMapKeyPoint,
                                                                           matches, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(
                        VectorSubMapKeyPoint, VectorMapKeyPoint, matches, mask, 2);
                }
            }
        }
        //obtain the mask by matching the keypoints of the two images
        public Matrix <byte> GetMask(Matrix <float> modelDescriptors, Matrix <float> observedDescriptors)
        {
            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.8;
            Matrix <int>  indices;
            //
            BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null); //get the k-nearest matches
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }
            return(mask);
        }
Example No. 25
        public uint Similar(IImageLocalFeatures feature1, IImageLocalFeatures feature2)
        {
            int                       k = 2;
            double                    uniquenessThreshold = 0.8;
            Matrix <byte>             mask;
            Matrix <int>              indices;
            BruteForceMatcher <float> matcher = new BruteForceMatcher <float>(_distanceType);

            matcher.Add(feature1.Descriptors);

            indices = new Matrix <int>(feature2.Descriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(feature2.Descriptors.Rows, k))
            {
                matcher.KnnMatch(feature2.Descriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            return((uint)CvInvoke.cvCountNonZero(mask));
        }
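Because Similar returns a raw count, its value is only comparable between image pairs with similar keypoint counts; a caller might normalize it, for example:

// Hypothetical normalization of the raw match count.
uint raw = Similar(feature1, feature2);
double similarity = (double)raw / feature2.Descriptors.Rows;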
Exemplo n.º 26
0
        public int  FindMatch1(Mat modelImage, Mat observedImage, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            int    k                   = 2;
            int    nonZeroCount        = 0;
            double uniquenessThreshold = 0.80;

            homography        = null;
            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();
            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))

                {
                    var featureDetector  = new ORBDetector(9000);
                    Mat modelDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
                    Mat observedDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                    using (var matcher = new BFMatcher(DistanceType.Hamming, false))
                    {
                        matcher.Add(modelDescriptors);

                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                        mask.SetTo(new MCvScalar(255));
                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                        nonZeroCount = CvInvoke.CountNonZero(mask);
                        if (nonZeroCount >= 4)
                        {
                            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                       matches, mask, 1.5, 20);
                            //if (nonZeroCount >= 4)
                            //    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                            //        observedKeyPoints, matches, mask, 2);
                        }
                    }
                }
            return(nonZeroCount);
        }
Exemplo n.º 27
0
        public Image <Bgr, byte> PointHomo(Image <Bgr, byte> image, Image <Bgr, byte> image2)
        {
            Image <Gray, byte> baseImgGray    = image.Convert <Gray, byte>();
            Image <Gray, byte> twistedImgGray = image2.Convert <Gray, byte>();
            Brisk            descriptor       = new Brisk();
            GFTTDetector     detector         = new GFTTDetector(40, 0.01, 5, 3, true);
            VectorOfKeyPoint kpTwisted   = new VectorOfKeyPoint();                       // keypoints of the twisted image
            UMat             descTwisted = new UMat();
            UMat             uTwisted    = twistedImgGray.Mat.GetUMat(AccessType.Read);
            VectorOfKeyPoint kpBase      = new VectorOfKeyPoint();                       // keypoints of the base image
            UMat             descBase    = new UMat();
            UMat             uBase       = baseImgGray.Mat.GetUMat(AccessType.Read);

            detector.DetectRaw(uTwisted, kpTwisted);
            descriptor.Compute(uTwisted, kpTwisted, descTwisted);
            detector.DetectRaw(uBase, kpBase);
            descriptor.Compute(uBase, kpBase, descBase);
            // BRISK produces binary descriptors, so Hamming is the appropriate distance
            BFMatcher matcher = new BFMatcher(DistanceType.Hamming);
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            matcher.Add(descTwisted);                       // model: the twisted image
            matcher.KnnMatch(descBase, matches, 2, null);   // query: the base image
            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
            int nonZeroCount      = Features2DToolbox.VoteForSizeAndOrientation(kpTwisted, kpBase, matches, mask, 1.5, 20);
            Image <Bgr, byte> res = image.CopyBlank();

            Features2DToolbox.DrawMatches(image2, kpTwisted, image, kpBase, matches, res, new MCvScalar(255, 0, 0), new MCvScalar(255, 0, 0), mask);

            // The homography maps the twisted image into the base image's frame
            Mat homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(kpTwisted, kpBase, matches, mask, 2);
            var destImage = new Image <Bgr, byte>(image2.Size);

            CvInvoke.WarpPerspective(image2, destImage, homography, destImage.Size);

            return(destImage);
        }
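A hypothetical call (the file names are placeholders): PointHomo estimates the twisted-to-base homography and returns image2 re-warped into the base image's frame.

// Hypothetical usage: align a twisted photo to its reference.
var baseImg    = new Image<Bgr, byte>("base.png");      // placeholder path
var twistedImg = new Image<Bgr, byte>("twisted.png");   // placeholder path
Image<Bgr, byte> aligned = PointHomo(baseImg, twistedImg);
aligned.Save("aligned.png");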
Exemplo n.º 28
0
      private static bool IsModelInObserved( Image<Gray, byte> modelImage, Image<Gray, byte> observedImage, double similarityThreshold = 0.075 )
      {
         var surfCpu = new SURFDetector(500, false);

         Matrix<byte> mask;
         int k = 2;
         double uniquenessThreshold = 0.8;

         //extract features from the object image
         var modelKeyPoints = surfCpu.DetectKeyPointsRaw( modelImage, null );
         Matrix<float> modelDescriptors = surfCpu.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

         // extract features from the observed image
         var observedKeyPoints = surfCpu.DetectKeyPointsRaw( observedImage, null );
         Matrix<float> observedDescriptors = surfCpu.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
         BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
         matcher.Add( modelDescriptors );

         var indices = new Matrix<int>( observedDescriptors.Rows, k );
         using ( var dist = new Matrix<float>( observedDescriptors.Rows, k ) )
         {
            matcher.KnnMatch( observedDescriptors, indices, dist, k, null );
            mask = new Matrix<byte>( dist.Rows, 1 );
            mask.SetValue( 255 );
            Features2DToolbox.VoteForUniqueness( dist, uniquenessThreshold, mask );
         }

         int keypointMatchCount = CvInvoke.cvCountNonZero( mask );
         if ( keypointMatchCount >= 4 )
         {
            keypointMatchCount = Features2DToolbox.VoteForSizeAndOrientation( modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20 );
            if ( keypointMatchCount >= 4 )
            {
               Features2DToolbox.GetHomographyMatrixFromMatchedFeatures( modelKeyPoints, observedKeyPoints, indices, mask, 2 );
            }
         }

         var similarity = (double)keypointMatchCount / observedKeyPoints.Size;
         return similarity > similarityThreshold;
      }
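A minimal sketch of how IsModelInObserved might be called from within its declaring class; the paths are placeholders, and 0.075 is simply the default threshold made explicit:

// Hypothetical usage: is the logo visible in the screenshot?
var model    = new Image<Gray, byte>("logo.png");        // placeholder path
var observed = new Image<Gray, byte>("screenshot.png");  // placeholder path
bool found = IsModelInObserved(model, observed, 0.075);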
Exemplo n.º 29
0
        private void SURFfeature(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, out Matrix <int> indices, out Matrix <byte> mask, out HomographyMatrix homography)
        {
            int          k = 2;
            double       uniquenessThreshold = 0.8;
            SURFDetector surfCPU             = new SURFDetector(300, false);

            homography = null;

            //extract features from the object image
            modelKeyPoints = new VectorOfKeyPoint();
            Matrix <float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);

            // extract features from the observed image
            observedKeyPoints = new VectorOfKeyPoint();
            Matrix <float>            observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);
            BruteForceMatcher <float> matcher             = new BruteForceMatcher <float>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }
        }
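With a non-null homography from SURFfeature, the model outline can be projected into the observed image via HomographyMatrix.ProjectPoints, which transforms the points in place (as in the classic Emgu 2.x SURF sample). A sketch, assuming modelImage and observedImage are already loaded:

// Hypothetical caller: project the model rectangle into the observed image.
SURFfeature(modelImage, observedImage, out VectorOfKeyPoint modelKp, out VectorOfKeyPoint observedKp,
            out Matrix<int> indices, out Matrix<byte> mask, out HomographyMatrix homography);
if (homography != null)
{
    Rectangle rect = modelImage.ROI;
    PointF[] pts =
    {
        new PointF(rect.Left,  rect.Top),
        new PointF(rect.Right, rect.Top),
        new PointF(rect.Right, rect.Bottom),
        new PointF(rect.Left,  rect.Bottom)
    };
    homography.ProjectPoints(pts);   // in place: pts now holds observed-image coordinates
}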
Exemplo n.º 30
0
        private double DetectTemplate(Mat observedImage, TemplateContainer.ImageData template)
        {
            orbDetector.DetectAndCompute(observedImage, null, observedKeyPoints, observedDescriptor, false);

            if (template.keyPointsOrb.Size > 0 && observedKeyPoints.Size > 0)
            {
                BFMatcher matcher = new BFMatcher(DistanceType.Hamming);   // ORB descriptors are binary, so use Hamming distance
                matcher.Add(template.descriptorOrb);

                VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
                matcher.KnnMatch(observedDescriptor, matches, 2, null);

                // Build an all-pass mask, then knock out ambiguous matches with the uniqueness (ratio) test
                Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));
                Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

                if (matches.Size == 0)
                {
                    return(0.0);
                }

                int nonZeroCount = CvInvoke.CountNonZero(mask);
                if (nonZeroCount > 3)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(template.keyPointsOrb, observedKeyPoints, matches, mask, 1.8, 18);
                    // Return the score normalized by the template's keypoint count,
                    // so results are comparable across templates of different sizes
                    return(1.0 * nonZeroCount / template.keyPointsOrb.Size);
                }
                return(0.0);
            }
            else
            {
                return(0.0);
            }
        }
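Given the normalized score, a caller would typically scan every template and keep the best one; a minimal sketch, assuming a hypothetical `templates` collection of TemplateContainer.ImageData and an already-loaded observedImage:

// Hypothetical caller: pick the best-matching template.
TemplateContainer.ImageData bestTemplate = null;
double bestScore = 0.0;
foreach (TemplateContainer.ImageData template in templates)   // hypothetical collection
{
    double score = DetectTemplate(observedImage, template);
    if (score > bestScore)
    {
        bestScore    = score;
        bestTemplate = template;
    }
}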