Example #1
        static (KeyPoint[], Mat) FeatureCommand(Mat source)
        {
            // Feature detection algorithm
            var feature = KAZE.Create();

            var magnification = 2;

            using var _ex = source.Resize(new Size(source.Width * magnification, source.Height * magnification));
            // Gamma correction
            var gamma = 1.8;

            // Build a 256-entry LUT: output = 255 * (input / 255)^(1 / gamma)
            byte[] _gammaLut = new byte[256];
            for (int i = 0; i < _gammaLut.Length; i++)
            {
                _gammaLut[i] = (byte)(255d * Math.Pow(i / 255d, 1d / gamma));
            }
            using Mat _temp_gammaImage = new Mat();
            Cv2.LUT(_ex, _gammaLut, _temp_gammaImage);
            // Compute key points and feature descriptors
            KeyPoint[] keyPoints;              // key points
            Mat        descriptor = new Mat(); // feature descriptors

            feature.DetectAndCompute(_temp_gammaImage, null, out keyPoints, descriptor);
            //var _featureImage = new Mat();
            //Cv2.DrawKeypoints(_temp_gammaImage, _keypoint, _featureImage);

            return(keyPoints, descriptor);
        }
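A minimal usage sketch for FeatureCommand above (OpenCvSharp), with a hypothetical model.png as input; the caller owns the returned descriptor Mat and should dispose it:

        static void FeatureCommandDemo()
        {
            // Hypothetical input path; FeatureCommand upscales, gamma-corrects,
            // then runs KAZE detection on the preprocessed image.
            using var source = new Mat("model.png", ImreadModes.Grayscale);
            var (keyPoints, descriptor) = FeatureCommand(source);
            using (descriptor)
            {
                Console.WriteLine($"{keyPoints.Length} key points, descriptor: {descriptor.Rows}x{descriptor.Cols}");
            }
        }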
        public static void FindMatch(
            Mat modelImage,
            Mat observedImage,
            out VectorOfKeyPoint modelKeyPoints,
            out VectorOfKeyPoint observedKeyPoints,
            VectorOfVectorOfDMatch matches,
            out Mat mask,
            out Mat homography)
        {
            int    k = 9;                    // nearest neighbours per KNN query (other examples on this page use 2)
            double uniquenessThreshold = 0.80;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    KAZE featureDetector = new KAZE();

                    //extract features from the object image
                    Mat modelDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                    watch = Stopwatch.StartNew();

                    // extract features from the observed image
                    Mat observedDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                    using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                        using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                            using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                            {
                                matcher.Add(modelDescriptors);

                                matcher.KnnMatch(observedDescriptors, matches, k, null);
                                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                mask.SetTo(new MCvScalar(255));
                                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                int nonZeroCount = CvInvoke.CountNonZero(mask);
                                if (nonZeroCount >= 4)
                                {
                                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                               matches, mask, 1.5, 20);
                                    if (nonZeroCount >= 4)
                                    {
                                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                              observedKeyPoints, matches, mask, 2);
                                    }
                                }
                            }
                    watch.Stop();
                }
        }
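A usage sketch for FindMatch above (Emgu CV; Emgu 4.x ImreadModes naming and file names are assumptions). homography stays null when fewer than 4 matches survive the voting, so callers must check it before projecting the model outline:

        public static void FindMatchDemo()
        {
            using (Mat model = CvInvoke.Imread("box.png", ImreadModes.Grayscale))
            using (Mat observed = CvInvoke.Imread("box_in_scene.png", ImreadModes.Grayscale))
            using (var matches = new VectorOfVectorOfDMatch())
            {
                FindMatch(model, observed, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints,
                          matches, out Mat mask, out Mat homography);
                if (homography != null)
                {
                    Console.WriteLine("Model located in the observed image.");
                }
            }
        }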
        public void New3()
        {
            var descriptorExtractor = KAZE.Create();
            var descriptorMatcher   = new BFMatcher();

            new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher);
        }
Example #4
        public override void RunTest()
        {
            var gray  = new Mat(ImagePath.Lenna, ImreadModes.Grayscale);
            var kaze  = KAZE.Create();
            var akaze = AKAZE.Create();

            var kazeDescriptors  = new Mat();
            var akazeDescriptors = new Mat();

            KeyPoint[] kazeKeyPoints = null, akazeKeyPoints = null;
            var        kazeTime      = MeasureTime(() =>
                                                   kaze.DetectAndCompute(gray, null, out kazeKeyPoints, kazeDescriptors));
            var akazeTime = MeasureTime(() =>
                                        akaze.DetectAndCompute(gray, null, out akazeKeyPoints, akazeDescriptors));

            var dstKaze  = new Mat();
            var dstAkaze = new Mat();

            Cv2.DrawKeypoints(gray, kazeKeyPoints, dstKaze);
            Cv2.DrawKeypoints(gray, akazeKeyPoints, dstAkaze);

            using (new Window(String.Format("KAZE [{0:F2}ms]", kazeTime.TotalMilliseconds), dstKaze))
                using (new Window(String.Format("AKAZE [{0:F2}ms]", akazeTime.TotalMilliseconds), dstAkaze))
                {
                    Cv2.WaitKey();
                }
        }
Example #5
    // Use this for initialization
    void Start()
    {
        Texture2D boxTexture        = Resources.Load <Texture2D>("box");
        Texture2D boxInSceneTexture = Resources.Load <Texture2D>("box_in_scene");

        Mat box = new Mat();

        boxTexture.ToOutputArray(box, FlipType.Vertical, typeof(Gray));

        Mat boxInScene = new Mat();

        boxInSceneTexture.ToOutputArray(boxInScene, FlipType.Vertical, typeof(Gray));

        long      time;
        Texture2D texture;

        using (Emgu.CV.Features2D.Feature2D f2d = new KAZE())
            using (Mat img = FeatureMatchingExample.DrawMatches.Draw(box, boxInScene, f2d, out time))
            {
                texture = img.ToTexture2D();
            }

        RenderTexture(texture);
        ResizeTexture(texture);
    }
Example #6
        public void TestBOWKmeansTrainer()
        {
            Image <Gray, byte> box      = EmguAssert.LoadImage <Gray, byte>("box.png");
            Feature2D          detector = new KAZE();
            VectorOfKeyPoint   kpts     = new VectorOfKeyPoint();
            Mat descriptors             = new Mat();

            detector.DetectAndCompute(box, null, kpts, descriptors, false);

            BOWKMeansTrainer trainer = new BOWKMeansTrainer(100, new MCvTermCriteria(), 3, CvEnum.KMeansInitType.PPCenters);

            trainer.Add(descriptors);
            Mat vocabulary = new Mat();

            trainer.Cluster(vocabulary);

            BFMatcher matcher = new BFMatcher(DistanceType.L2);

            BOWImgDescriptorExtractor extractor = new BOWImgDescriptorExtractor(detector, matcher);

            extractor.SetVocabulary(vocabulary);

            Mat descriptors2 = new Mat();

            extractor.Compute(box, kpts, descriptors2);
        }
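The BoW descriptor computed above is a single row with one bin per vocabulary word (100 here), so every image becomes a fixed-length vector that can be compared directly. A sketch under that assumption, reusing the detector and extractor from the test (box_in_scene.png is a hypothetical second image):

        void CompareByBow(Feature2D detector, BOWImgDescriptorExtractor extractor, Mat firstImageBow)
        {
            // Describe a second image with the same vocabulary, then compare
            // the two bag-of-words histograms by L2 distance.
            Image<Gray, byte> scene = EmguAssert.LoadImage<Gray, byte>("box_in_scene.png");
            VectorOfKeyPoint sceneKpts = new VectorOfKeyPoint();
            detector.DetectRaw(scene, sceneKpts);
            Mat sceneBow = new Mat();
            extractor.Compute(scene, sceneKpts, sceneBow);
            double distance = CvInvoke.Norm(firstImageBow, sceneBow, NormType.L2);
        }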
Example #7
        public void New5()
        {
            using var ip = new LinearIndexParams();
            using var sp = new SearchParams();
            using (var descriptorExtractor = KAZE.Create())
                using (var descriptorMatcher = new FlannBasedMatcher(ip, sp))
                    using (new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher)) { }
        }
        public void New5()
        {
            var descriptorExtractor = KAZE.Create();
            LinearIndexParams ip    = new LinearIndexParams();
            SearchParams      sp    = new SearchParams();
            var descriptorMatcher   = new FlannBasedMatcher(ip, sp);

            new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher);
        }
Example #9
        public void GetKeypoints(Mat gray)
        {
            var kaze            = KAZE.Create();
            var kazeDescriptors = new Mat();

            kaze.DetectAndCompute(gray, null, out kazeKeyPoints, kazeDescriptors);

            var dstKaze = new Mat();

            Cv2.DrawKeypoints(gray, kazeKeyPoints, dstKaze);
        }
Example #10
        /// <summary>
        /// Detects KAZE key points in the given image.
        /// </summary>
        /// <param name="source">The image to detect features in.</param>
        /// <returns>The detected key points.</returns>
        public static VectorOfKeyPoint FindFeature(Mat source)
        {
            var vectorOfKeyPoint = new VectorOfKeyPoint();

            using (UMat uImage = source.GetUMat(AccessType.Read))
                using (var imageDescriptors = new Mat())
                {
                    var kaze = new KAZE();
                    kaze.DetectAndCompute(uImage, null, vectorOfKeyPoint, imageDescriptors, false);
                }
            return(vectorOfKeyPoint);
        }
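Note that FindFeature disposes the descriptors before returning, so its result cannot feed a matcher such as the Matching method in the next example, which needs the descriptor Mats as well. A sketch of a variant that keeps both (the caller disposes the returned Mat):

        public static (VectorOfKeyPoint keyPoints, Mat descriptors) FindFeatureWithDescriptors(Mat source)
        {
            var keyPoints   = new VectorOfKeyPoint();
            var descriptors = new Mat(); // not disposed here: ownership passes to the caller
            using (UMat uImage = source.GetUMat(AccessType.Read))
            using (var kaze = new KAZE())
            {
                kaze.DetectAndCompute(uImage, null, keyPoints, descriptors, false);
            }
            return (keyPoints, descriptors);
        }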
Example #11
        /// <summary>
        /// Matches the observed descriptors against the model descriptors and, when a homography
        /// can be estimated, returns the model outline projected into the observed image.
        /// </summary>
        /// <param name="observedKeyPoints">Key points of the observed image.</param>
        /// <param name="modelKeyPoints">Key points of the model image.</param>
        /// <param name="observedDescriptors">Descriptors of the observed image.</param>
        /// <param name="modelDescriptors">Descriptors of the model image.</param>
        /// <param name="modelSize">Size of the model image.</param>
        /// <returns>The projected model corners, or an empty vector when no homography is found.</returns>
        public static VectorOfPoint Matching(VectorOfKeyPoint observedKeyPoints, VectorOfKeyPoint modelKeyPoints, Mat observedDescriptors, Mat modelDescriptors, Size modelSize)
        {
            using (var ip = new LinearIndexParams())
                using (var sp = new SearchParams())
                    using (var matcher = new FlannBasedMatcher(ip, sp))
                        using (var matches = new VectorOfVectorOfDMatch())
                        {
                            Mat homography = new Mat();
                            matcher.Add(modelDescriptors);

                            matcher.KnnMatch(observedDescriptors, matches, 2, null);
                            var mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                            mask.SetTo(new MCvScalar(255));
                            Features2DToolbox.VoteForUniqueness(matches, 0.80, mask);

                            int nonZeroCount = CvInvoke.CountNonZero(mask);
                            if (nonZeroCount >= 4)
                            {
                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                                if (nonZeroCount >= 4)
                                {
                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                }
                            }

                            if (homography != null && !homography.Size.IsEmpty)
                            {
                                Rectangle rect = new Rectangle(Point.Empty, modelSize);
                                PointF[]  pts  = new PointF[]
                                {
                                    new PointF(rect.Left, rect.Bottom),
                                    new PointF(rect.Right, rect.Bottom),
                                    new PointF(rect.Right, rect.Top),
                                    new PointF(rect.Left, rect.Top)
                                };

                                pts = CvInvoke.PerspectiveTransform(pts, homography);

                                Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);

                                return(new VectorOfPoint(points));
                            }
                            else
                            {
                                return(new VectorOfPoint());
                            }
                        }
        }
Example #12
        public static void Init(Mat ModelImage)
        {
            uModelImage      = ModelImage.GetUMat(AccessType.Read);
            modelImage       = ModelImage.Clone();
            KAZE featureDetector = new KAZE();

            modelKeyPoints   = new VectorOfKeyPoint();
            modelDescriptors = new Mat();

            featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
        }
Example #13
        /// <summary>
        /// Detects the feature set and caches it
        /// </summary>
        /// <param name="featureDetector"></param>
        /// <param name="image"></param>
        /// <returns></returns>
        public static FeatureSet Detect(KAZE featureDetector, Image <Gray, byte> image)
        {
            using (UMat uModelImage = image.ToUMat())
            {
                Mat descriptors = new Mat();
                var keyPoints   = new VectorOfKeyPoint();
                featureDetector.DetectAndCompute(uModelImage, null, keyPoints, descriptors, false);

                return(new FeatureSet()
                {
                    image = image,
                    descriptors = descriptors,
                    keyPoints = keyPoints,
                });
            }
        }
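FeatureSet itself is not shown in this example; a minimal sketch of a compatible container, with fields inferred from the object initializer above:

        public class FeatureSet
        {
            public Image<Gray, byte> image;
            public Mat descriptors;
            public VectorOfKeyPoint keyPoints;
        }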
Example #14
        public Frame BuildFrameFromFile(FileInfo file)
        {
            IEnumerable <MKeyPoint> kazeKeyPoints;

            using (var frame = new Image <Bgr, byte>(file.FullName)) {
                using (var kaze = new KAZE(true, true)) {
                    kazeKeyPoints = kaze.Detect(frame);
                }
            }

            return(new Frame()
            {
                Id = ResolveFrameId(file),
                KazePoints = kazeKeyPoints.ToList(),
            });
        }
Example #15
        private static void Feature()
        {
            Mat  img  = new Mat("data/lenna.png", ImreadModes.Grayscale);
            KAZE kaze = KAZE.Create();


            KeyPoint[] keyPoints;
            Mat        descriptors = new Mat();

            kaze.DetectAndCompute(img, null, out keyPoints, descriptors);

            Mat dst = new Mat();

            Cv2.DrawKeypoints(img, keyPoints, dst);
            Window.ShowImages(dst);
        }
Example #16
        private async void OnButtonClicked(Object sender, EventArgs args)
        {
            Mat[] images = await LoadImages(new String[] { "box.png", "box_in_scene.png" }, new string[] { "Pick a model image from", "Pick an observed image from" });

            if (images == null || images[0] == null || images[1] == null)
            {
                return;
            }
            SetMessage("Please wait...");
            SetImage(null);
            Task <Tuple <Mat, long> > t = new Task <Tuple <Mat, long> >(
                () =>
            {
                long time;
                Emgu.CV.Features2D.Feature2D featureDetectorExtractor;
                String pickedFeature2D = GetSelectedFeatrure2D();

                if (pickedFeature2D.Equals("SIFT"))
                {
                    featureDetectorExtractor = new SIFT();
                }
                else
                {
                    featureDetectorExtractor = new KAZE();
                }

                Mat matchResult = DrawMatches.Draw(images[0], images[1], featureDetectorExtractor, out time);
                featureDetectorExtractor.Dispose();
                return(new Tuple <Mat, long>(matchResult, time));
            });

            t.Start();

            var result = await t;

            foreach (var img in images)
            {
                img.Dispose();
            }

            SetImage(result.Item1);
            String computeDevice = CvInvoke.UseOpenCL ? "OpenCL: " + Ocl.Device.Default.Name : "CPU";

            SetMessage(String.Format("Detected with {1} using {2} in {0} milliseconds.", result.Item2, computeDevice, GetSelectedFeatrure2D()));
        }
        public AlgorithmResult DetectKaze(
            string filename,
            KeypointType kpsType,
            float threshold,
            int octaves,
            int sublevels)
        {
            AlgorithmResult   result      = new AlgorithmResult();
            Image <Bgr, byte> image       = ImageHelper.GetImage(filename);
            Image <Bgr, byte> resultImage = new Image <Bgr, byte>(filename);

            // Get features from image
            var kaze      = new KAZE(false, false, threshold, octaves, sublevels, Diffusivity.PmG2);
            var keyPoints = kaze.Detect(image);

            DrawKeypoints(
                image,
                new VectorOfKeyPoint(keyPoints),
                resultImage,
                new Bgr(Color.FromArgb(255, 77, 77)),
                GetKeypointDraw(kpsType));

            result.ImageArray = ImageHelper.SetImage(resultImage);
            result.KeyDatas   = new List <KeyPointModel>();
            result.KeyDatas.AddRange(keyPoints.Select(k => new KeyPointModel()
            {
                X        = k.Point.X,
                Y        = k.Point.Y,
                Size     = k.Size,
                Angle    = k.Angle,
                Response = k.Response,
                Octave   = k.Octave,
                ClassId  = k.ClassId
            }));

            return(result);
        }
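A hypothetical call, using values close to the Emgu KAZE defaults (threshold 0.001f, 4 octaves, 4 sublevels); the file name is an assumption, and default(KeypointType) stands in for whatever draw style the surrounding project defines:

            // Sketch: detect KAZE key points in a file and report how many were found.
            AlgorithmResult result = DetectKaze("box.png", default(KeypointType), 0.001f, 4, 4);
            Console.WriteLine($"{result.KeyDatas.Count} key points detected");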
Example #18
        public Mat Run(Mat img1, Mat img2)
        {
            Mat img3 = new Mat(Math.Max(img1.Height, img2.Height), img2.Width + img1.Width, MatType.CV_8UC3).SetTo(0);

            using (var descriptors1 = new Mat())
                using (var descriptors2 = new Mat())
                    using (var matcher = new BFMatcher(NormTypes.L2SQR))
                        using (var kaze = KAZE.Create())
                        {
                            kaze.DetectAndCompute(img1, null, out keypoints1, descriptors1);
                            kaze.DetectAndCompute(img2, null, out keypoints2, descriptors2);

                            if (descriptors1.Width > 0 && descriptors2.Width > 0)
                            {
                                DMatch[][] matches = matcher.KnnMatch(descriptors1, descriptors2, 2);
                                using (Mat mask = new Mat(matches.Length, 1, MatType.CV_8U))
                                {
                                    mask.SetTo(Scalar.White);
                                    int nonZero = Cv2.CountNonZero(mask);
                                    VoteForUniqueness(matches, mask);
                                    nonZero = Cv2.CountNonZero(mask);
                                    nonZero = VoteForSizeAndOrientation(keypoints2, keypoints1, matches, mask, 1.5f, 10);

                                    List <Point2f> obj             = new List <Point2f>();
                                    List <Point2f> scene           = new List <Point2f>();
                                    List <DMatch>  goodMatchesList = new List <DMatch>();
                                    //iterate through the mask only pulling out nonzero items because they're matches
                                    MatIndexer <byte> maskIndexer = mask.GetGenericIndexer <byte>();
                                    for (int i = 0; i < mask.Rows; i++)
                                    {
                                        if (maskIndexer[i] > 0)
                                        {
                                            obj.Add(keypoints1[matches[i][0].QueryIdx].Pt);
                                            scene.Add(keypoints2[matches[i][0].TrainIdx].Pt);
                                            goodMatchesList.Add(matches[i][0]);
                                        }
                                    }

                                    List <Point2d> objPts   = obj.ConvertAll(Point2fToPoint2d);
                                    List <Point2d> scenePts = scene.ConvertAll(Point2fToPoint2d);
                                    if (nonZero >= 4)
                                    {
                                        Mat homography = Cv2.FindHomography(objPts, scenePts, HomographyMethods.Ransac, 1.5, mask);
                                        nonZero = Cv2.CountNonZero(mask);

                                        if (homography != null && homography.Width > 0)
                                        {
                                            Point2f[] objCorners = { new Point2f(0,                 0),
                                                                     new Point2f(img1.Cols,         0),
                                                                     new Point2f(img1.Cols, img1.Rows),
                                                                     new Point2f(0,         img1.Rows) };

                                            Point2d[] sceneCorners = MyPerspectiveTransform3(objCorners, homography);

                                            //This is a good concat horizontal
                                            using (Mat left = new Mat(img3, new Rect(0, 0, img1.Width, img1.Height)))
                                                using (Mat right = new Mat(img3, new Rect(img1.Width, 0, img2.Width, img2.Height)))
                                                {
                                                    img1.CopyTo(left);
                                                    img2.CopyTo(right);

                                                    byte[] maskBytes = new byte[mask.Rows * mask.Cols];
                                                    mask.GetArray(out maskBytes);
                                                    Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, goodMatchesList, img3, Scalar.All(-1), Scalar.All(-1), maskBytes, DrawMatchesFlags.NotDrawSinglePoints);

                                                    List <List <Point> > listOfListOfPoint2D = new List <List <Point> >();
                                                    List <Point>         listOfPoint2D       = new List <Point>();
                                                    listOfPoint2D.Add(new Point(sceneCorners[0].X + img1.Cols, sceneCorners[0].Y));
                                                    listOfPoint2D.Add(new Point(sceneCorners[1].X + img1.Cols, sceneCorners[1].Y));
                                                    listOfPoint2D.Add(new Point(sceneCorners[2].X + img1.Cols, sceneCorners[2].Y));
                                                    listOfPoint2D.Add(new Point(sceneCorners[3].X + img1.Cols, sceneCorners[3].Y));
                                                    listOfListOfPoint2D.Add(listOfPoint2D);
                                                    img3.Polylines(listOfListOfPoint2D, true, Scalar.LimeGreen, 2);

                                                    //This works too
                                                    //Cv2.Line(img3, scene_corners[0] + new Point2d(img1.Cols, 0), scene_corners[1] + new Point2d(img1.Cols, 0), Scalar.LimeGreen);
                                                    //Cv2.Line(img3, scene_corners[1] + new Point2d(img1.Cols, 0), scene_corners[2] + new Point2d(img1.Cols, 0), Scalar.LimeGreen);
                                                    //Cv2.Line(img3, scene_corners[2] + new Point2d(img1.Cols, 0), scene_corners[3] + new Point2d(img1.Cols, 0), Scalar.LimeGreen);
                                                    //Cv2.Line(img3, scene_corners[3] + new Point2d(img1.Cols, 0), scene_corners[0] + new Point2d(img1.Cols, 0), Scalar.LimeGreen);
                                                }
                                        }
                                    }
                                }
                            }
                            return(img3);
                        }
        }
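Run depends on several helpers defined elsewhere (VoteForUniqueness, VoteForSizeAndOrientation, MyPerspectiveTransform3, Point2fToPoint2d). The converter, at least, is unambiguous from its usage; a one-line sketch:

        static Point2d Point2fToPoint2d(Point2f p) => new Point2d(p.X, p.Y);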
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints,
                                     VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography, out long score)
        {
            int    k = 2;
            double uniquenessThreshold = 0.80;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    KAZE featureDetector = new KAZE();

                    Mat modelDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                    watch = Stopwatch.StartNew();

                    Mat observedDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                    // KdTree for faster results / less accuracy
                    using (var ip = new Emgu.CV.Flann.KdTreeIndexParams())
                        using (var sp = new SearchParams())
                            using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                            {
                                matcher.Add(modelDescriptors);

                                matcher.KnnMatch(observedDescriptors, matches, k, null);
                                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                mask.SetTo(new MCvScalar(255));
                                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                // Calculate score based on match size
                                score = 0;
                                for (int i = 0; i < matches.Size; i++)
                                {
                                    if (mask.GetData(i)[0] == 0)
                                    {
                                        continue;
                                    }
                                    foreach (var e in matches[i].ToArray())
                                    {
                                        ++score;
                                    }
                                }

                                int nonZeroCount = CvInvoke.CountNonZero(mask);
                                if (nonZeroCount >= 4)
                                {
                                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                                    if (nonZeroCount >= 4)
                                    {
                                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                    }
                                }
                            }
                    watch.Stop();
                }
            matchTime = watch.ElapsedMilliseconds;
        }
Example #20
        private static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            int    k = 2;
            double uniquenessThreshold = 0.80;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            try
            {
                using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                    using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                    {
                        KAZE featureDetector = new KAZE();

                        //extract features from the object image
                        Mat modelDescriptors = new Mat();
                        featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                        watch = Stopwatch.StartNew();

                        // extract features from the observed image
                        Mat observedDescriptors = new Mat();
                        featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                        // Bruteforce, slower but more accurate
                        // You can use KDTree for faster matching with slight loss in accuracy
                        using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                            using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                                using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                                {
                                    matcher.Add(modelDescriptors);

                                    matcher.KnnMatch(observedDescriptors, matches, k, null);
                                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                    mask.SetTo(new MCvScalar(255));
                                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                                    if (nonZeroCount >= 4)
                                    {
                                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                                   matches, mask, 1.5, 20);
                                        if (nonZeroCount >= 4)
                                        {
                                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                                  observedKeyPoints, matches, mask, 2);
                                        }
                                    }
                                }
                        watch.Stop();
                    }
            }
            catch (Exception)
            {
                throw;
            }

            matchTime = watch.ElapsedMilliseconds;
        }
Example #21
        public static void FindMatch(Mat modelImage, Mat modelImage2, Mat observedImage,
                                     out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography,
                                     out VectorOfKeyPoint modelKeyPoints2, VectorOfVectorOfDMatch matches2, out Mat mask2, out Mat homography2)
        {
            int    k = 2;
            double uniquenessThreshold = 0.80;

            Stopwatch watch;

            homography  = null;
            homography2 = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            modelKeyPoints2   = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read)) // Create the model image object
                using (UMat uModelImage2 = modelImage2.GetUMat(AccessType.Read))
                    using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                    {
                        KAZE featureDetector = new KAZE();

                        //extract interest points from the object image
                        Mat modelDescriptors = new Mat();
                        featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                        Mat modelDescriptors2 = new Mat();
                        featureDetector.DetectAndCompute(uModelImage2, null, modelKeyPoints2, modelDescriptors2, false);

                        watch = Stopwatch.StartNew();

                        // extract interest points from the observed image
                        Mat observedDescriptors = new Mat();
                        featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);


                        // Bruteforce, slower but more accurate
                        // You can use KDTree for faster matching with slight loss in accuracy
                        using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                            using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                                using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                                {
                                    matcher.Add(modelDescriptors);

                                    matcher.KnnMatch(observedDescriptors, matches, k, null);
                                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                    mask.SetTo(new MCvScalar(255));
                                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                                    if (nonZeroCount >= 4)
                                    {
                                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                                   matches, mask, 1.5, 20);
                                        if (nonZeroCount >= 4)
                                        {
                                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                                  observedKeyPoints, matches, mask, 2);
                                        }
                                    }
                                }

                        using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                            using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                                using (DescriptorMatcher matcher2 = new FlannBasedMatcher(ip, sp))
                                {
                                    matcher2.Add(modelDescriptors2);

                                    matcher2.KnnMatch(observedDescriptors, matches2, k, null);
                                    mask2 = new Mat(matches2.Size, 1, DepthType.Cv8U, 1);
                                    mask2.SetTo(new MCvScalar(255));
                                    Features2DToolbox.VoteForUniqueness(matches2, uniquenessThreshold, mask2);

                                    int nonZeroCount = CvInvoke.CountNonZero(mask2);
                                    if (nonZeroCount >= 4)
                                    {
                                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints2, observedKeyPoints,
                                                                                                   matches2, mask2, 1.5, 20);
                                        if (nonZeroCount >= 4)
                                        {
                                            homography2 = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints2,
                                                                                                                   observedKeyPoints, matches2, mask2, 2);
                                        }
                                    }
                                }


                        watch.Stop();
                    }
            matchTime = watch.ElapsedMilliseconds;
        }
Example #22
        public void New3()
        {
            using (var descriptorExtractor = KAZE.Create())
                using (var descriptorMatcher = new BFMatcher())
                    using (new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher)) { }
        }
Example #23
        public static VectorOfPoint Process(Mat logo, Mat observedImage)
        {
            VectorOfPoint    vp                = null;
            Mat              homography        = null;
            VectorOfKeyPoint logoKeyPoints     = new VectorOfKeyPoint();
            VectorOfKeyPoint observedKeyPoints = new VectorOfKeyPoint();
            Mat              mask;
            int              k = 2;
            double           uniquenessThreshold = 0.80;

            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                using (UMat uModelImage = logo.GetUMat(AccessType.Read))
                    using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                    {
                        KAZE featureDetector = new KAZE();

                        //extract features from the object image
                        Mat modelDescriptors = new Mat();
                        featureDetector.DetectAndCompute(uModelImage, null, logoKeyPoints, modelDescriptors, false);


                        // extract features from the observed image
                        Mat observedDescriptors = new Mat();
                        featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                        // Bruteforce, slower but more accurate
                        // You can use KDTree for faster matching with slight loss in accuracy
                        using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                            using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                                using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                                {
                                    matcher.Add(modelDescriptors);

                                    matcher.KnnMatch(observedDescriptors, matches, k, null);
                                    mask = new Mat(matches.Size, 1, Emgu.CV.CvEnum.DepthType.Cv8U, 1);
                                    mask.SetTo(new MCvScalar(255));
                                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                    int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(logoKeyPoints, observedKeyPoints,
                                                                                                   matches, mask, 1.5, 20);
                                    if (nonZeroCount >= 4)
                                    {
                                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(logoKeyPoints,
                                                                                                              observedKeyPoints, matches, mask, 2);
                                    }
                                }
                    }

                if (homography != null)
                {
                    //draw a rectangle along the projected model
                    Rectangle rect = new Rectangle(Point.Empty, logo.Size);
                    PointF[]  pts  = new PointF[]
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };

                    pts = CvInvoke.PerspectiveTransform(pts, homography);
                    Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
                    vp = new VectorOfPoint(points);
                }
                return(vp);
            }
        }
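The VectorOfPoint returned by Process outlines the detected logo in observed-image coordinates. A sketch of drawing it (Emgu CV; assumes the Polylines overload accepting an IInputArray of points):

            VectorOfPoint outline = Process(logo, observedImage);
            if (outline != null)
            {
                // Draw the projected model outline as a closed green polygon.
                CvInvoke.Polylines(observedImage, outline, true, new MCvScalar(0, 255, 0), 2);
            }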
Example #24
            public ImageData(string path, string correspondingValue, Emgu.CV.Features2D.ORBDetector orbDetector, KAZE featureDetector)
            {
                this.path  = path;
                this.image = new Image <Bgr, byte>(path);
                orbDetector.DetectAndCompute(image, null, keyPointsOrb, descriptorOrb, false);
                UMat uObservedImage = image.Mat.GetUMat(AccessType.Read);

                //featureDetector.DetectAndCompute(image, null, keyPointsSurf, descriptorSurf, false);
                featureDetector.DetectAndCompute(uObservedImage, null, keyPointsSurf, descriptorSurf, false);
                this.correspondingValue = correspondingValue;
            }
Example #25
        private static VectorOfPoint ProcessImageFLANN(Image <Gray, byte> template, Image <Gray, byte> sceneImage)
        {
            try
            {
                // initialization
                VectorOfPoint    finalPoints        = null;
                Mat              homography         = null;
                VectorOfPoint    finalPoints        = null;
                Mat              homography         = null;
                VectorOfKeyPoint templateKeyPoints  = new VectorOfKeyPoint();
                VectorOfKeyPoint sceneKeyPoints     = new VectorOfKeyPoint();
                Mat              templateDescriptor = new Mat();
                Mat              sceneDescriptor    = new Mat();

                Mat    mask;
                int    k = 2;
                double uniquenessThreshold    = 0.80;
                VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

                // feature detection and description
                KAZE featureDetector = new KAZE();
                featureDetector.DetectAndCompute(template, null, templateKeyPoints, templateDescriptor, false);
                featureDetector.DetectAndCompute(sceneImage, null, sceneKeyPoints, sceneDescriptor, false);


                // Matching

                //KdTreeIndexParams ip = new KdTreeIndexParams();
                //var ip = new AutotunedIndexParams();
                var               ip      = new LinearIndexParams();
                SearchParams      sp      = new SearchParams();
                FlannBasedMatcher matcher = new FlannBasedMatcher(ip, sp);


                matcher.Add(templateDescriptor);
                matcher.KnnMatch(sceneDescriptor, matches, k);

                mask = new Mat(matches.Size, 1, Emgu.CV.CvEnum.DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));

                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                int count = Features2DToolbox.VoteForSizeAndOrientation(templateKeyPoints, sceneKeyPoints, matches, mask, 1.5, 20);

                if (count >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(templateKeyPoints,
                                                                                          sceneKeyPoints, matches, mask, 5);
                }

                if (homography != null)
                {
                    System.Drawing.Rectangle rect = new System.Drawing.Rectangle(System.Drawing.Point.Empty, template.Size);
                    PointF[] pts = new PointF[]
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };

                    pts = CvInvoke.PerspectiveTransform(pts, homography);
                    System.Drawing.Point[] points = Array.ConvertAll <PointF, System.Drawing.Point>(pts, System.Drawing.Point.Round);
                    finalPoints = new VectorOfPoint(points);
                }

                return(finalPoints);
            }
            catch (Exception ex)
            {
                throw new Exception(ex.Message, ex);
            }
        }
Example #26
        /// <summary>
        /// The method used to discover similarities amongst the images, and populating arrays.
        /// </summary>
        /// <param name="m_modelImage"> The model image (library basic). </param>
        /// <param name="m_observedImage"> The observed image (test).  </param>
        /// <param name="d_matchTime"> The output total time for computing the homography matrix. </param>
        /// <param name="v_modelKeyPoints"></param>
        /// <param name="v_observedKeyPoints"></param>
        /// <param name="v_matches"></param>
        /// <param name="m_mask"></param>
        /// <param name="m_homography"></param>
        /// <param name="l_score"> Field contains the score of matching. </param>
        public static void FindMatch(Mat m_modelImage, Mat m_observedImage, out double d_matchTime, out VectorOfKeyPoint v_modelKeyPoints,
                                     out VectorOfKeyPoint v_observedKeyPoints, VectorOfVectorOfDMatch v_matches, out Mat m_mask,
                                     out Mat m_homography, out long l_score)
        {
            ErrInfLogger.LockInstance.InfoLog("Start of the FindMatch");

            TimerAbstraction _tim = new TimerRefinedAbstraction();

            _tim._iTimer = new TimerFractional();

            m_homography = null;

            v_modelKeyPoints    = new VectorOfKeyPoint();
            v_observedKeyPoints = new VectorOfKeyPoint();

            KAZE featureDetector = new KAZE();

            Mat modelDescriptors = new Mat();

            featureDetector.DetectAndCompute(m_modelImage, null, v_modelKeyPoints, modelDescriptors, false);

            _tim.MeasureStart();

            Mat observedDescriptors = new Mat();

            featureDetector.DetectAndCompute(m_observedImage, null, v_observedKeyPoints, observedDescriptors, false);

            // KdTree for faster results / less accuracy
            using (KdTreeIndexParams ip = new KdTreeIndexParams())
                using (SearchParams sp = new SearchParams())
                    using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                    {
                        matcher.Add(modelDescriptors);

                        matcher.KnnMatch(observedDescriptors, v_matches, SettingsContainer.Instance.i_K, null);
                        m_mask = new Mat(v_matches.Size, 1, DepthType.Cv8U, 1);
                        m_mask.SetTo(new MCvScalar(255));
                        Features2DToolbox.VoteForUniqueness(v_matches, SettingsContainer.Instance.d_UniquenessThreshold, m_mask);

                        // Calculate score based on matches size
                        // ---------------------------------------------->
                        l_score = 0;
                        for (int i = 0; i < v_matches.Size; i++)
                        {
                            if (m_mask.GetData(i)[0] == 0)
                            {
                                continue;
                            }
                            foreach (var e in v_matches[i].ToArray())
                            {
                                ++l_score;
                            }
                        }
                        // <----------------------------------------------

                        int nonZeroCount = CvInvoke.CountNonZero(m_mask);
                        if (nonZeroCount >= 4)
                        {
                            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(v_modelKeyPoints, v_observedKeyPoints, v_matches,
                                                                                       m_mask, 1.5, 20);
                            if (nonZeroCount >= 4)
                            {
                                m_homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(v_modelKeyPoints, v_observedKeyPoints,
                                                                                                        v_matches, m_mask, 2);
                            }
                        }
                    }
            _tim.MeasureStop();
            d_matchTime = Math.Round(_tim.MeasureResult().TotalMilliseconds, 2);
            _tim.MeasureRestart();

            ErrInfLogger.LockInstance.InfoLog("End of the FindMatch");
        }
Example #27
        //public static long Classify(VectorOfKeyPoint modelKeyPoints, Mat modelDescriptors, Mat observedImage, double uniquenessThreshold, int k, int detectionType)
        public static long Classify(Mat modelDescriptors, Mat observedImage, double uniquenessThreshold, int k, int detectionType)
        {
            var score = 0L;

            using (var matches = new VectorOfVectorOfDMatch())
            {
                Mat mask = null;
                //Mat homography = null;
                var observedKeyPoints = new VectorOfKeyPoint();
                var obsImage          = new Mat();
                CvInvoke.Threshold(observedImage, obsImage, 127.0, 255.0, ThresholdType.BinaryInv);
                using (UMat uObservedImage = obsImage.GetUMat(AccessType.Read))
                {
                    switch (detectionType)
                    {
                    default:
                        using (var featureDetector = new SIFT(0, 3, 0.04, 10.0, 1.6))
                        {
                            var observedDescriptors = new Mat();
                            featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                            using (var ip = new KdTreeIndexParams())
                                using (var sp = new SearchParams())
                                    using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                                    {
                                        matcher.Add(modelDescriptors);
                                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                        mask.SetTo(new MCvScalar(255));
                                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                                        score = 0;
                                        for (int i = 0; i < matches.Size; i++)
                                        {
                                            if (mask.GetData(i)[0] == 0)
                                            {
                                                continue;
                                            }
                                            foreach (var e in matches[i].ToArray())
                                            {
                                                ++score;
                                            }
                                        }
                                        //var nonZeroCount = CvInvoke.CountNonZero(mask);
                                        //if (nonZeroCount >= 4)
                                        //{
                                        //    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                                        //    if (nonZeroCount >= 4)
                                        //        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                        //}
                                    }
                        }
                        break;

                    case 1:
                        using (var featureDetector = new KAZE())
                        {
                            var observedDescriptors = new Mat();
                            featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                            using (var ip = new KdTreeIndexParams())
                                using (var sp = new SearchParams())
                                    using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                                    {
                                        matcher.Add(modelDescriptors);
                                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                        mask.SetTo(new MCvScalar(255));
                                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                                        score = 0;
                                        for (int i = 0; i < matches.Size; i++)
                                        {
                                            if (mask.GetData(i)[0] == 0)
                                            {
                                                continue;
                                            }
                                            foreach (var e in matches[i].ToArray())
                                            {
                                                ++score;
                                            }
                                        }
                                        //var nonZeroCount = CvInvoke.CountNonZero(mask);
                                        //if (nonZeroCount >= 4)
                                        //{
                                        //    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                                        //    if (nonZeroCount >= 4)
                                        //        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                        //}
                                    }
                        }
                        break;
                    }
                }
            }
            return(score);
        }
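A hypothetical call: detectionType 1 selects the KAZE branch, and the returned score is simply the number of KNN matches that survive the uniqueness vote, so higher scores suggest a more plausible match:

            long score = Classify(modelDescriptors, observedImage, uniquenessThreshold: 0.80, k: 2, detectionType: 1);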
Example #28
        // img1: test image; img2: reference image
        public float MatchTemplate(Mat img1, Mat img2, bool ishowImageMatchTemplate, string s = "Match")
        {
            float matchRate = 0.0f;

            using (var descriptors1 = new Mat())
                using (var descriptors2 = new Mat())
                    using (var matcher = new BFMatcher(NormTypes.L2SQR))
                        using (var kaze = KAZE.Create())
                        {
                            KeyPoint[] keypoints1, keypoints2;
                            kaze.DetectAndCompute(img1, null, out keypoints1, descriptors1);
                            kaze.DetectAndCompute(img2, null, out keypoints2, descriptors2);

                            DMatch[][] matches = matcher.KnnMatch(descriptors1, descriptors2, 2);
                            using (Mat mask = new Mat(matches.Length, 1, MatType.CV_8U))
                            {
                                mask.SetTo(new Scalar(255));
                                int nonZero = Cv2.CountNonZero(mask);
                                VoteForUniqueness(matches, mask);
                                nonZero = Cv2.CountNonZero(mask);
                                nonZero = VoteForSizeAndOrientation(keypoints2, keypoints1, matches, mask, 1.5f, 20);

                                List <Point2f> obj             = new List <Point2f>();
                                List <Point2f> scene           = new List <Point2f>();
                                List <DMatch>  goodMatchesList = new List <DMatch>();
                                //iterate through the mask only pulling out nonzero items because they're matches
                                for (int i = 0; i < mask.Rows; i++)
                                {
                                    MatIndexer <byte> maskIndexer = mask.GetGenericIndexer <byte>();
                                    if (maskIndexer[i] > 0)
                                    {
                                        obj.Add(keypoints1[matches[i][0].QueryIdx].Pt);
                                        scene.Add(keypoints2[matches[i][0].TrainIdx].Pt);
                                        goodMatchesList.Add(matches[i][0]);
                                    }
                                }

                                List <Point2d> objPts   = obj.ConvertAll(Point2fToPoint2d);
                                List <Point2d> scenePts = scene.ConvertAll(Point2fToPoint2d);
                                if (nonZero >= 4)
                                {
                                    Mat homography = Cv2.FindHomography(objPts, scenePts, HomographyMethods.Ransac, 1.5, mask);
                                    nonZero = Cv2.CountNonZero(mask);

                                    //calculate match rate by how many match points exist
                                    //matchRate = (float)nonZero / keypoints2.Count();
                                    matchRate = 1 - (float)(keypoints2.Count() - nonZero) / (keypoints2.Count() + nonZero);

                                    if (homography != null && ishowImageMatchTemplate == true)
                                    {
                                        Point2f[] objCorners = { new Point2f(0,                 0),
                                                                 new Point2f(img1.Cols,         0),
                                                                 new Point2f(img1.Cols, img1.Rows),
                                                                 new Point2f(0,         img1.Rows) };

                                        Point2d[] sceneCorners = MyPerspectiveTransform3(objCorners, homography);

                                        //This is a good concat horizontal
                                        using (Mat img3 = new Mat(Math.Max(img1.Height, img2.Height), img2.Width + img1.Width, MatType.CV_8UC3))
                                            using (Mat left = new Mat(img3, new Rect(0, 0, img1.Width, img1.Height)))
                                                using (Mat right = new Mat(img3, new Rect(img1.Width, 0, img2.Width, img2.Height)))
                                                {
                                                    img1.CopyTo(left);
                                                    img2.CopyTo(right);

                                                    byte[] maskBytes = new byte[mask.Rows * mask.Cols];
                                                    mask.GetArray(0, 0, maskBytes);
                                                    Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, goodMatchesList, img3, Scalar.All(-1), Scalar.All(-1), maskBytes, DrawMatchesFlags.NotDrawSinglePoints);


                                                    //List<List<Point>> listOfListOfPoint2D = new List<List<Point>>();
                                                    //List<Point> listOfPoint2D = new List<Point>();
                                                    //listOfPoint2D.Add(new Point(sceneCorners[0].X + img1.Cols, sceneCorners[0].Y));
                                                    //listOfPoint2D.Add(new Point(sceneCorners[1].X + img1.Cols, sceneCorners[1].Y));
                                                    //listOfPoint2D.Add(new Point(sceneCorners[2].X + img1.Cols, sceneCorners[2].Y));
                                                    //listOfPoint2D.Add(new Point(sceneCorners[3].X + img1.Cols, sceneCorners[3].Y));
                                                    //listOfListOfPoint2D.Add(listOfPoint2D);
                                                    //img3.Polylines(listOfListOfPoint2D, true, Scalar.LimeGreen, 2);


                                                    // Size takes (width, height), i.e. (Cols, Rows)
                                                    Cv2.ImShow(s, img3.Resize(new Size(img3.Cols / 2, img3.Rows / 2)));
                                                    Cv2.WaitKey(0);
                                                    Cv2.DestroyWindow(s);

                                                    //Window.ShowImages(img3.Resize(new Size(img3.Rows / 2, img3.Cols / 2)));
                                                    //Window.WaitKey(0);
                                                    //Window.DestroyAllWindows();
                                                }
                                    }
                                }
                            }
                        }

            return(matchRate);
        }
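A usage sketch, assuming two OpenCvSharp Mats loaded elsewhere; the returned rate approaches 1 when most reference key points find a surviving match:

            float rate = MatchTemplate(testImage, referenceImage, ishowImageMatchTemplate: false);
            Console.WriteLine($"match rate = {rate:F3}");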