Exemple #1
0
        private static void BForceMatcherSample()
        {
            // Brute-force matching demo: FAST keypoints + BRIEF descriptors on two images.
            using (var src1 = new Mat("data/match1.png"))
            using (var src2 = new Mat("data/match2.png"))
            using (var gray1 = new Mat())
            using (var gray2 = new Mat())
            using (var descriptors1 = new Mat())
            using (var descriptors2 = new Mat())
            using (var view = new Mat())
            {
                Cv2.CvtColor(src1, gray1, ColorConversion.BgrToGray);
                Cv2.CvtColor(src2, gray2, ColorConversion.BgrToGray);

                var fast = new FastFeatureDetector(10);
                var descriptorExtractor = new BriefDescriptorExtractor(32);

                // Detect keypoints, then compute a BRIEF descriptor per keypoint
                // (Compute may drop keypoints, hence the ref parameter).
                KeyPoint[] keypoints1 = fast.Run(gray1, null);
                descriptorExtractor.Compute(gray1, ref keypoints1, descriptors1);

                KeyPoint[] keypoints2 = fast.Run(gray2, null);
                descriptorExtractor.Compute(gray2, ref keypoints2, descriptors2);

                // BRIEF descriptors are binary, so match with the Hamming norm;
                // the original used NormType.L2, which is invalid for binary descriptors.
                var bfMatcher = new BFMatcher(NormType.Hamming, false);

                // k-NN matching: up to 3 candidate matches per query descriptor.
                DMatch[][] bfMatches = bfMatcher.KnnMatch(descriptors1, descriptors2, 3, null, false);

                Cv2.DrawMatches(src1, keypoints1, src2, keypoints2, bfMatches, view);
                Window.ShowImages(view);
            }
        }
Exemple #2
0
        public V2()
        {
            // Wires up the processing pipeline. ProcessFunction receives a payload
            // object, matches the precomputed descriptors with two matchers, and
            // returns both rendered match views as an anonymous object.
            ProcessFunction = (object obj) =>
            {
                // NOTE(review): the payload shape (descriptors1/2, keypoints1/2,
                // gray1/2) is inferred from the dynamic property reads below; it is
                // defined by whichever caller invokes ProcessFunction — confirm there.
                dynamic prt          = obj as dynamic;
                var     descriptors1 = prt.descriptors1;
                var     descriptors2 = prt.descriptors2;
                var     keypoints1   = prt.keypoints1;
                var     keypoints2   = prt.keypoints2;
                Mat     gray1        = prt.gray1;
                Mat     gray2        = prt.gray2;
                // Match descriptor vectors with both a brute-force (L2) matcher
                // and a FLANN-based matcher over the same inputs.
                var      bfMatcher    = new BFMatcher(NormTypes.L2, false);
                var      flannMatcher = new FlannBasedMatcher();
                DMatch[] bfMatches    = bfMatcher.Match(descriptors1, descriptors2);
                DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);

                // Draw each match set into its own view Mat.
                var bfView = new Mat();
                Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, bfView);
                var flannView = new Mat();
                Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, flannView);
                return(new { bfView, flannView });
            };

            // Runs after processing finishes; logs the elapsed time (message is
            // Portuguese: "V2 finished processing!! Time spent: {0}").
            PostProcessAction = () =>
            {
                Console.WriteLine("V2 terminou de processar!! Tempo gasto: {0}" + Environment.NewLine, ElapsedTime);
            };
        }
Exemple #3
0
        private void MatchBySift(Mat src1, Mat src2)
        {
            // Work on grayscale copies; the detector expects single-channel input.
            using var graySrc = new Mat();
            using var grayDst = new Mat();
            Cv2.CvtColor(src1, graySrc, ColorConversionCodes.BGR2GRAY);
            Cv2.CvtColor(src2, grayDst, ColorConversionCodes.BGR2GRAY);

            // Detect keypoints and compute SIFT descriptors in a single pass per image.
            using var sift = SIFT.Create();
            using var descSrc = new Mat<float>();
            using var descDst = new Mat<float>();
            sift.DetectAndCompute(graySrc, null, out var keySrc, descSrc);
            sift.DetectAndCompute(grayDst, null, out var keyDst, descDst);

            // Compare brute-force (L2) matching against FLANN on the same descriptors.
            using var bruteForce = new BFMatcher(NormTypes.L2, false);
            using var flann      = new FlannBasedMatcher();
            var bruteMatches = bruteForce.Match(descSrc, descDst);
            var flannMatches = flann.Match(descSrc, descDst);

            // Render each match set into its own view.
            using var bruteView = new Mat();
            using var flannView = new Mat();
            Cv2.DrawMatches(graySrc, keySrc, grayDst, keyDst, bruteMatches, bruteView);
            Cv2.DrawMatches(graySrc, keySrc, grayDst, keyDst, flannMatches, flannView);

            // Show both windows side by side until a key is pressed.
            using (new Window("SIFT matching (by BFMather)", bruteView))
            using (new Window("SIFT matching (by FlannBasedMatcher)", flannView))
            {
                Cv2.WaitKey();
            }
        }
        private void MatchBySurf(Mat src1, Mat src2)
        {
            // Grayscale working copies for the detector.
            var grayLeft  = new Mat();
            var grayRight = new Mat();
            Cv2.CvtColor(src1, grayLeft, ColorConversion.BgrToGray);
            Cv2.CvtColor(src2, grayRight, ColorConversion.BgrToGray);

            // SURF: hessian threshold 500, 4 octaves, 2 layers per octave, extended descriptors.
            var detector = new SURF(500, 4, 2, true);

            // Detect keypoints and compute float descriptors for each image.
            var descLeft  = new MatOfFloat();
            var descRight = new MatOfFloat();
            detector.Run(grayLeft, null, out KeyPoint[] keyLeft, descLeft);
            detector.Run(grayRight, null, out KeyPoint[] keyRight, descRight);

            // Brute-force matching with the L2 norm (SURF descriptors are float vectors).
            var matcher = new BFMatcher(NormType.L2, false);
            var matches = matcher.Match(descLeft, descRight);

            // Render the correspondences and show them until a key is pressed.
            var view = new Mat();
            Cv2.DrawMatches(grayLeft, keyLeft, grayRight, keyRight, matches, view);

            using (new Window("SURF matching", WindowMode.AutoSize, view))
            {
                Cv2.WaitKey();
            }
        }
Exemple #5
0
        private Mat MatchBySurf(Mat src1, Mat src2)
        {
            // Grayscale copies for feature detection.
            using var gray1 = new Mat();
            using var gray2 = new Mat();

            Cv2.CvtColor(src1, gray1, ColorConversionCodes.BGR2GRAY);
            Cv2.CvtColor(src2, gray2, ColorConversionCodes.BGR2GRAY);

            //using var surf = SURF.Create(200, 4, 2, true);
            using var detector = AKAZE.Create();

            // AKAZE's default MLDB descriptors are binary (CV_8U), so store them in
            // plain Mats and match with the Hamming norm; the original used
            // Mat<float> + NormTypes.L2, which is invalid for binary descriptors.
            using var descriptors1 = new Mat();
            using var descriptors2 = new Mat();
            detector.DetectAndCompute(gray1, null, out var keypoints1, descriptors1);
            detector.DetectAndCompute(gray2, null, out var keypoints2, descriptors2);

            // Match descriptor vectors.
            using var bfMatcher = new BFMatcher(NormTypes.Hamming, false);
            DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);

            // Draw matches, suppressing unmatched keypoints.
            // The caller owns (and must dispose) the returned view.
            var bfView = new Mat();

            Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, bfView, flags: DrawMatchesFlags.NotDrawSinglePoints);

            return bfView;
        }
Exemple #6
0
        /// <summary>
        /// Matches the features of SrcMat against TargetMat, keeps the best
        /// UseRate fraction of the matches, and draws them into MatchingResultMat.
        /// </summary>
        public void RunMutching()
        {
            // AKAZE keypoint detection + description.
            var akaze            = AKAZE.Create();
            var descriptorSrc    = new Mat();
            var descriptorTarget = new Mat();

            akaze.DetectAndCompute(SrcMat, null, out KeyPtsSrc, descriptorSrc);
            akaze.DetectAndCompute(TargetMat, null, out KeyPtsTarget, descriptorTarget);

            // AKAZE emits binary (MLDB) descriptors by default, so use the
            // Hamming brute-force matcher; plain "BruteForce" uses the L2 norm,
            // which is invalid for binary descriptors.
            var matcher = DescriptorMatcher.Create("BruteForce-Hamming");
            var matches = matcher.Match(descriptorSrc, descriptorTarget);

            // Sort ascending by distance and keep only the top UseRate fraction.
            // Materialized once so later enumerations don't re-sort the matches.
            SelectedMatched = matches
                              .OrderBy(p => p.Distance)
                              .Take((int)(matches.Length * UseRate))
                              .ToArray();

            // Draw the corresponding feature points of Src and Target.
            Cv2.DrawMatches(
                SrcMat, KeyPtsSrc,
                TargetMat, KeyPtsTarget,
                SelectedMatched, MatchingResultMat);
        }
Exemple #7
0
        private void MatchBySurf(Mat src1, Mat src2)
        {
            // Grayscale copies; the detector wants single-channel input.
            using var grayA = new Mat();
            using var grayB = new Mat();
            Cv2.CvtColor(src1, grayA, ColorConversionCodes.BGR2GRAY);
            Cv2.CvtColor(src2, grayB, ColorConversionCodes.BGR2GRAY);

            // SURF: hessian threshold 200, 4 octaves, 2 layers, extended descriptors.
            using var surf = SURF.Create(200, 4, 2, true);

            // One-pass keypoint detection + descriptor computation per image.
            using var descA = new Mat<float>();
            using var descB = new Mat<float>();
            surf.DetectAndCompute(grayA, null, out KeyPoint[] keysA, descA);
            surf.DetectAndCompute(grayB, null, out KeyPoint[] keysB, descB);

            // Compare brute-force (L2) and FLANN matching on the same descriptors.
            using var bruteForce = new BFMatcher(NormTypes.L2, false);
            using var flann      = new FlannBasedMatcher();
            DMatch[] bruteMatches = bruteForce.Match(descA, descB);
            DMatch[] flannMatches = flann.Match(descA, descB);

            // Render each match set into its own view.
            using var bruteView = new Mat();
            Cv2.DrawMatches(grayA, keysA, grayB, keysB, bruteMatches, bruteView);
            using var flannView = new Mat();
            Cv2.DrawMatches(grayA, keysA, grayB, keysB, flannMatches, flannView);

            // Show both windows until a key is pressed.
            using (new Window("SURF matching (by BFMather)", WindowMode.AutoSize, bruteView))
            using (new Window("SURF matching (by FlannBasedMatcher)", WindowMode.AutoSize, flannView))
            {
                Cv2.WaitKey();
            }
        }
Exemple #8
0
        public void Run(Mat gray1, Mat gray2, Mat dst2, bool useBFMatcher, int pointsToMatch)
        {
            // SIFT detection + matching demo: draws the matches into dst2 and
            // stores the detected keypoints in the kp1/kp2 fields.
            // All locally created native resources are now disposed (the original
            // leaked the detector, descriptors, and matchers).
            using (var sift = SIFT.Create(pointsToMatch))
            using (var descriptors1 = new Mat())
            using (var descriptors2 = new Mat())
            {
                KeyPoint[] keypoints1, keypoints2;
                sift.DetectAndCompute(gray1, null, out keypoints1, descriptors1);
                sift.DetectAndCompute(gray2, null, out keypoints2, descriptors2);

                if (useBFMatcher)
                {
                    // SIFT descriptors are float vectors, so L2 is the correct norm.
                    using (var bfMatcher = new BFMatcher(NormTypes.L2, false))
                    {
                        DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);
                        Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, dst2);
                    }
                }
                else
                {
                    using (var flannMatcher = new FlannBasedMatcher())
                    {
                        DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);
                        Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, dst2);
                    }
                }

                // Expose the detected keypoints to the caller via fields.
                kp1 = keypoints1;
                kp2 = keypoints2;
            }
        }
 private void FindAndDrawMatches()
 {
     // Detect SURF features in the template image.
     using (var template = new Mat("Images\\Circle_Template.bmp", ImreadModes.GrayScale))
     using (var surf = SURF.Create(1000))
     using (var templateDescriptors = new Mat())
     {
         surf.DetectAndCompute(template, null, out KeyPoint[] templateKeyPoints, templateDescriptors);

         // Detect SURF features in the search image and match against the template.
         using (var image = new Mat("Images\\Circle.bmp", ImreadModes.GrayScale))
         using (var imageDescriptors = new Mat())
         using (var matcher = new BFMatcher())
         {
             surf.DetectAndCompute(image, null, out KeyPoint[] imageKeyPoints, imageDescriptors);
             var matches = matcher.Match(imageDescriptors, templateDescriptors);

             // Render the matches onto an overlay and publish it to the view.
             using (var overLay = image.Overlay())
             {
                 Cv2.DrawMatches(image, imageKeyPoints, template, templateKeyPoints, matches, overLay);
                 this.Result.Source = overLay.ToBitmapSource();
             }
         }
     }
 }
Exemple #10
0
        public void Run(Mat gray1, Mat gray2, Mat dst, int hessianThreshold, bool useBFMatcher)
        {
            // SURF detection + matching into dst; hessianThreshold controls keypoint
            // sensitivity. Local native resources are disposed (original leaked them).
            using (var surf = SURF.Create(hessianThreshold, 4, 2, true))
            using (var descriptors1 = new Mat())
            using (var descriptors2 = new Mat())
            {
                KeyPoint[] keypoints1, keypoints2;
                surf.DetectAndCompute(gray1, null, out keypoints1, descriptors1);
                surf.DetectAndCompute(gray2, null, out keypoints2, descriptors2);

                // Occasionally an image yields no descriptors and Match would throw.
                // The original guarded only one branch (and inconsistently via Width);
                // guard both branches the same way here.
                if (descriptors1.Rows == 0 || descriptors2.Rows == 0)
                {
                    return;
                }

                if (useBFMatcher)
                {
                    using (var bfMatcher = new BFMatcher(NormTypes.L2, false))
                    {
                        DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);
                        Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, dst);
                    }
                }
                else
                {
                    using (var flannMatcher = new FlannBasedMatcher())
                    {
                        DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);
                        Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, dst);
                    }
                }
            }
        }
        void OnFast()
        {
            // Load the two scene images from StreamingAssets.
            Mat image01 = Cv2.ImRead(Application.streamingAssetsPath + "/bryce_01.jpg");
            Mat image02 = Cv2.ImRead(Application.streamingAssetsPath + "/bryce_02.jpg");

            // Grayscale conversion for the detectors.
            Mat image1 = new Mat(), image2 = new Mat();

            Cv2.CvtColor(image01, image1, ColorConversionCodes.RGB2GRAY);
            Cv2.CvtColor(image02, image2, ColorConversionCodes.RGB2GRAY);

            // FAST supplies the corners; ORB then computes binary descriptors for them.
            KeyPoint[] keyPoint1 = Cv2.FAST(image1, 50, true);
            KeyPoint[] keyPoint2 = Cv2.FAST(image2, 50, true);
            using (Mat descriptor1 = new Mat())
            using (Mat descriptor2 = new Mat())
            using (var orb = ORB.Create(50))
            // ORB descriptors are binary, so use the Hamming norm; the original
            // parameterless BFMatcher() defaults to L2, which is invalid here.
            using (var matcher = new BFMatcher(NormTypes.Hamming))
            {
                orb.Compute(image1, ref keyPoint1, descriptor1);
                orb.Compute(image2, ref keyPoint2, descriptor2);
                Debug.Log(string.Format("keyPoints has {0},{1} items.", keyPoint1.Length, keyPoint2.Length));
                Debug.Log(string.Format("descriptor has {0},{1} items.", descriptor1.Rows, descriptor2.Rows));

                List <DMatch> goodMatchePoints = new List <DMatch>();
                var           dm = matcher.KnnMatch(descriptor1, descriptor2, 2);

                #region matched 175
                // Lowe-style ratio test (0.6). Guard added: KnnMatch can return
                // fewer than two neighbours per query, which previously threw.
                for (int i = 0; i < dm.Length; i++)
                {
                    if (dm[i].Length < 2)
                    {
                        continue;
                    }
                    if (dm[i][0].Distance < 0.6 * dm[i][1].Distance)
                    {
                        goodMatchePoints.Add(dm[i][0]);
                    }
                }
                #endregion

                #region matched 90
                // Second, looser ratio test (1/1.5). NOTE: matches that already
                // passed the stricter 0.6 test above are added a second time;
                // kept as-is to preserve the sample's original output.
                float minRatio = 1.0f / 1.5f;
                for (int i = 0; i < dm.Length; i++)
                {
                    if (dm[i].Length < 2)
                    {
                        continue;
                    }
                    DMatch bestMatch     = dm[i][0];
                    DMatch betterMatch   = dm[i][1];
                    float  distanceRatio = bestMatch.Distance / betterMatch.Distance;
                    if (distanceRatio < minRatio)
                    {
                        goodMatchePoints.Add(bestMatch);
                    }
                }
                #endregion

                // Draw the retained matches and convert to a Unity texture.
                var dstMat = new Mat();
                Debug.Log(string.Format("matchePoints has {0} items", goodMatchePoints.Count));
                Cv2.DrawMatches(image01, keyPoint1, image02, keyPoint2, goodMatchePoints, dstMat);
                t2d = Utils.MatToTexture2D(dstMat);
            }

            Sprite dst_sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

            SrcSprite.sprite = dst_sp;
        }
        static Mat MatcheWindow(Mat template,
                                double gammaPower,
                                double distanceThreshold)
        {
            // Captures one camera frame, feature-matches it against the template,
            // outlines the detected object, draws the match lines, and returns a
            // half-size view (caller owns the returned Mat).
            using var source = new Mat();
            // Grab one frame from the (class-level) camera.
            camera.Read(source);
            //Cv2.Resize(source, source, new Size(2592, 1944));
            Cv2.Rotate(source, source, RotateFlags.Rotate90Clockwise);
            // NOTE(review): with the crop commented out, trim aliases source, so
            // both using declarations dispose the same Mat — confirm Dispose is
            // safe to call twice here, or restore the SubMat crop.
            using var trim = source;//.SubMat(new Rect(20, 195, 1880, 873));

            // Gamma correction of the camera frame.
            using var gammaDst = AdjustGamma(trim, gammaPower);

            // Feature extraction for the template image.
            (var tempKey, var tempDesc) = FeatureCommand(template);
            // Feature extraction for the camera image.
            (var srcKey, var srcDesc) = FeatureCommand(gammaDst);

            // Brute-force descriptor matcher.
            DescriptorMatcher matcher = DescriptorMatcher.Create("BruteForce");

            // k-NN matching, keeping the best two candidates per descriptor.
            DMatch[][] matches = matcher.KnnMatch(tempDesc, srcDesc, 2);

            // Filter the correspondences by the distance threshold.
            List <DMatch>  goodMatches;
            List <Point2f> goodTemplateKeyPoints, goodSourceKeyPoints;

            (goodMatches, goodTemplateKeyPoints, goodSourceKeyPoints) =
                FilterMatchGoodScore(tempKey, srcKey, matches, distanceThreshold);
            Console.WriteLine("matches: {0},goodMatches: {1}", matches.Length, goodMatches.Count);

            // Robustly estimate the template-to-scene homography.
            Mat homoGraphy = LookupHomoGraphy(goodTemplateKeyPoints, goodSourceKeyPoints);

            // Project the template corners into the scene image.
            var cornerPoints = LookupCornerFromTargetObjectImage(template, homoGraphy);
            var rect         = new Rect(cornerPoints[0].X,
                                        cornerPoints[0].Y,
                                        template.Width / 2,
                                        template.Height / 2);

            // Draw the detection rectangle onto the gamma-corrected frame.
            gammaDst.Rectangle(rect, Scalar.Pink, 3);


            // Connect the matched feature pairs with lines.
            using var output = new Mat();
            Cv2.DrawMatches(template, tempKey, gammaDst, srcKey, goodMatches, output);
            var output2 = new Mat();

            Cv2.Resize(output, output2, new Size(), 0.5, 0.5);

            return(output2);
        }
Exemple #13
0
        public FeatureMatching(Mat src, Mat target)
        {
            // Clone the inputs so the caller's Mats are never modified.
            SrcMat    = src.Clone();
            TargetMat = target.Clone();
            ResultMat = new Mat();

            // Centroids of the matched keypoints, accumulated below.
            PtSrc    = new System.Drawing.PointF(0.0f, 0.0f);
            PtTarget = new System.Drawing.PointF(0.0f, 0.0f);

            // AKAZE keypoint detection + description for both images.
            var akaze            = AKAZE.Create();
            var descriptorSrc    = new Mat();
            var descriptorTarget = new Mat();

            akaze.DetectAndCompute(SrcMat, null, out KeyPtsSrc, descriptorSrc);
            akaze.DetectAndCompute(TargetMat, null, out KeyPtsTarget, descriptorTarget);

            // Brute-force matching.
            var matcher = DescriptorMatcher.Create("BruteForce");
            var matches = matcher.Match(descriptorSrc, descriptorTarget);

            // Sort ascending by distance and keep only the single best match.
            // Materialized once: the original lazy IEnumerable re-sorted the
            // matches on every enumeration (DrawMatches + 2 loops + 4 Count calls).
            var selectedMatches = matches
                                  .OrderBy(p => p.Distance)
                                  //.Take(matches.Length / 2);
                                  .Take(1)
                                  .ToArray();

            // Draw the Src - Target correspondences.
            Cv2.DrawMatches(SrcMat, KeyPtsSrc, TargetMat, KeyPtsTarget, selectedMatches, ResultMat);

            // Centroid of the matched keypoints (Src side).
            foreach (var item in selectedMatches)
            {
                int idx = item.QueryIdx;
                PtSrc.X += KeyPtsSrc[idx].Pt.X;
                PtSrc.Y += KeyPtsSrc[idx].Pt.Y;
            }
            PtSrc.X /= (float)selectedMatches.Length;
            PtSrc.Y /= (float)selectedMatches.Length;

            // Centroid of the matched keypoints (Target side).
            foreach (var item in selectedMatches)
            {
                int idx = item.TrainIdx;
                PtTarget.X += KeyPtsTarget[idx].Pt.X;
                PtTarget.Y += KeyPtsTarget[idx].Pt.Y;
            }
            PtTarget.X /= (float)selectedMatches.Length;
            PtTarget.Y /= (float)selectedMatches.Length;
        }
Exemple #14
0
        public static List <System.Drawing.Point> func(Bitmap bitmap1, Bitmap bitmap2)
        {
            // Matches SIFT features between two bitmaps, previews the matches, and
            // returns the (dx, dy) keypoint offset for every brute-force match.
            // All locally created native resources are now disposed (the original
            // leaked every Mat and the detector).
            using (Mat img1 = BitmapToMat(bitmap1))
            using (Mat img2 = BitmapToMat(bitmap2))
            using (SIFT sift = SIFT.Create(20))
            using (var descriptors1 = new Mat <float>())
            using (var descriptors2 = new Mat <float>())
            {
                // Detect the keypoints and generate their descriptors using SIFT.
                KeyPoint[] keypoints1, keypoints2;
                sift.DetectAndCompute(img1, null, out keypoints1, descriptors1);
                sift.DetectAndCompute(img2, null, out keypoints2, descriptors2);

                // Match descriptor vectors (SIFT descriptors are float, L2 is correct).
                var bfMatcher    = new BFMatcher(NormTypes.L2, false);
                var flannMatcher = new FlannBasedMatcher();

                DMatch[] bfMatches    = bfMatcher.Match(descriptors1, descriptors2);
                DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);

                // Draw both match sets and preview them until a key is pressed.
                using (var bfView = new Mat())
                using (var flannView = new Mat())
                {
                    Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, bfMatches, bfView);
                    Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, flannMatches, flannView);

                    using (new Window("SIFT matching (by BFMather)", bfView))
                    using (new Window("SIFT matching (by FlannBasedMatcher)", flannView))
                    {
                        Cv2.WaitKey();
                    }
                }

                // Offset between each pair of brute-force-matched keypoints.
                List <System.Drawing.Point> points = new List <System.Drawing.Point>();

                foreach (DMatch match in bfMatches)
                {
                    System.Drawing.Point p = new System.Drawing.Point();
                    p.X = (int)(keypoints1[match.QueryIdx].Pt.X - keypoints2[match.TrainIdx].Pt.X);
                    p.Y = (int)(keypoints1[match.QueryIdx].Pt.Y - keypoints2[match.TrainIdx].Pt.Y);
                    points.Add(p);
                }

                return points;
            }
        }
Exemple #15
0
        static void Main(string[] args)
        {
            // Load and display the two grayscale input images.
            var img1 = new Mat(@"..\..\Images\left.png", ImreadModes.GrayScale);

            Cv2.ImShow("Left", img1);
            Cv2.WaitKey(1); // do events

            var img2 = new Mat(@"..\..\Images\right.png", ImreadModes.GrayScale);

            Cv2.ImShow("Right", img2);
            Cv2.WaitKey(1); // do events


            // Detect keypoints with SURF (Speeded Up Robust Features).
            // A good hessian threshold is 300-500 depending on image contrast.
            var detector   = SURF.Create(hessianThreshold: 400);
            var keypoints1 = detector.Detect(img1);
            var keypoints2 = detector.Detect(img2);

            // Compute BRIEF (Binary Robust Independent Elementary Features) descriptors.
            var extractor    = BriefDescriptorExtractor.Create();
            var descriptors1 = new Mat();
            var descriptors2 = new Mat();

            extractor.Compute(img1, ref keypoints1, descriptors1);
            extractor.Compute(img2, ref keypoints2, descriptors2);

            // BRIEF descriptors are binary, so match with the Hamming norm;
            // the original parameterless BFMatcher() defaults to L2, which is
            // invalid for binary descriptors.
            var matcher = new BFMatcher(NormTypes.Hamming);
            var matches = matcher.Match(descriptors1, descriptors2);

            // Draw and display the matching result.
            var imgMatches = new Mat();

            Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, matches, imgMatches);
            Cv2.ImShow("Matches", imgMatches);
            Cv2.WaitKey(1); // do events


            // Block until a key is pressed, then clean up.
            Cv2.WaitKey(0);

            Cv2.DestroyAllWindows();
            img1.Dispose();
            img2.Dispose();
        }
Exemple #16
0
        private void image_maatching(Mat img1, Mat img2)
        {
            // Show the two images being compared.
            Cv2.ImShow("Matches1", img1);
            Cv2.ImShow("Matches2", img2);

            // SURF detector; 300-500 is a good threshold range depending on contrast.
            var detector = SURF.Create(hessianThreshold: 300, 4, 2, true, false);

            KeyPoint[] keypoints1 = null;
            KeyPoint[] keypoints2 = null;

            Mat descriptors1 = new Mat();
            Mat descriptors2 = new Mat();

            detector.DetectAndCompute(img1, null, out keypoints1, descriptors1);
            detector.DetectAndCompute(img2, null, out keypoints2, descriptors2);

            // Brute-force matching (SURF descriptors are float, default L2 is correct).
            var matcher = new BFMatcher();
            var matches = matcher.Match(descriptors1, descriptors2);

            // Count matches whose scaled distance is below the threshold.
            float max_dist        = 50;
            int   cntSuccessPoint = 0;

            for (int i = 0; i < matches.Length; i++)
            {
                log_write("matches[i].Distance:" + Convert.ToString(max_dist) + "--" + Convert.ToString(matches[i].Distance));
                if ((matches[i].Distance * 100) < max_dist)
                {
                    cntSuccessPoint = cntSuccessPoint + 1;
                }
            }// end for

            // Similarity percentage. The original divided two ints, truncating the
            // fraction before the double assignment; 100.0 forces real division.
            // Also guard against division by zero when there are no matches.
            double rate = matches.Length > 0 ? (cntSuccessPoint * 100.0) / matches.Length : 0.0;

            log_write("유사율:" + Convert.ToString(rate) + "---" + Convert.ToString(cntSuccessPoint) + "/" + Convert.ToString(matches.Length));

            // Draw and show the matching result.
            var imgMatches = new Mat();

            Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, matches, imgMatches);
            Cv2.ImShow("Matches3", imgMatches);
        }
Exemple #17
0
        private void MatchBySurf(Mat src1, Mat src2)
        {
            // Grayscale working copies for the detector.
            var grayLeft  = new Mat();
            var grayRight = new Mat();
            Cv2.CvtColor(src1, grayLeft, ColorConversion.BgrToGray);
            Cv2.CvtColor(src2, grayRight, ColorConversion.BgrToGray);

            // SURF: hessian threshold 500, 4 octaves, 2 layers, extended descriptors.
            var detector = new SURF(500, 4, 2, true);

            // Detect keypoints and compute float descriptors per image.
            var descLeft  = new MatOfFloat();
            var descRight = new MatOfFloat();
            detector.Run(grayLeft, null, out KeyPoint[] keyLeft, descLeft);
            detector.Run(grayRight, null, out KeyPoint[] keyRight, descRight);

            // Compare brute-force (L2) matching against FLANN on the same descriptors.
            var bruteForce = new BFMatcher(NormType.L2, false);
            var flann      = new FlannBasedMatcher();
            DMatch[] bruteMatches = bruteForce.Match(descLeft, descRight);
            DMatch[] flannMatches = flann.Match(descLeft, descRight);

            // Render each match set into its own view.
            var bruteView = new Mat();
            Cv2.DrawMatches(grayLeft, keyLeft, grayRight, keyRight, bruteMatches, bruteView);
            var flannView = new Mat();
            Cv2.DrawMatches(grayLeft, keyLeft, grayRight, keyRight, flannMatches, flannView);

            // Show both windows until a key is pressed.
            using (new Window("SURF matching (by BFMather)", WindowMode.AutoSize, bruteView))
            using (new Window("SURF matching (by FlannBasedMatcher)", WindowMode.AutoSize, flannView))
            {
                Cv2.WaitKey();
            }
        }
Exemple #18
0
        public void siftcharacterors(Mat src1, Mat src2)
        {
            // Grayscale conversion ahead of feature extraction.
            var graySrc = new Mat();
            var grayDst = new Mat();
            Cv2.CvtColor(src1, graySrc, ColorConversionCodes.BGR2GRAY);
            Cv2.CvtColor(src2, grayDst, ColorConversionCodes.BGR2GRAY);

            // Detect keypoints and compute SIFT descriptors for both images.
            var sift = SIFT.Create();
            KeyPoint[] keysSrc, keysDst;
            var descSrc = new MatOfFloat();
            var descDst = new MatOfFloat();
            sift.DetectAndCompute(graySrc, null, out keysSrc, descSrc);
            sift.DetectAndCompute(grayDst, null, out keysDst, descDst);

            // Match the same descriptors with both a brute-force and a FLANN matcher.
            var bruteForce = new BFMatcher(NormTypes.L2, false);
            var flann      = new FlannBasedMatcher();
            DMatch[] bruteMatches = bruteForce.Match(descSrc, descDst);
            DMatch[] flannMatches = flann.Match(descSrc, descDst);

            // Render both result sets.
            var bruteView = new Mat();
            Cv2.DrawMatches(graySrc, keysSrc, grayDst, keysDst, bruteMatches, bruteView);
            var flannView = new Mat();
            Cv2.DrawMatches(graySrc, keysSrc, grayDst, keysDst, flannMatches, flannView);

            // Only the brute-force window is shown; the FLANN view is computed but
            // not displayed (its window was disabled in the original sample).
            using (new Window("SIFT matching (by BFMather)", WindowMode.AutoSize, bruteView))
            {
                Cv2.WaitKey();
            }
        }
Exemple #19
0
    /// <summary>
    /// ORB feature extraction and brute-force matching demo; renders the matches
    /// into a sprite shown by m_srcImage.
    /// </summary>
    void OnOrb()
    {
        Mat image01 = Cv2.ImRead(Application.streamingAssetsPath + "/Textures/p1.jpg");
        Mat image02 = Cv2.ImRead(Application.streamingAssetsPath + "/Textures/p2.jpg");

        // Grayscale conversion.
        Mat image1 = new Mat(), image2 = new Mat();

        Cv2.CvtColor(image01, image1, ColorConversionCodes.RGB2GRAY);
        Cv2.CvtColor(image02, image2, ColorConversionCodes.RGB2GRAY);

        KeyPoint[] keyPoint1 = null;
        KeyPoint[] keyPoint2 = null;
        using (ORB orb = ORB.Create(500))
        using (Mat descriptor1 = new Mat())
        using (Mat descriptor2 = new Mat())
        // ORB descriptors are binary, so use the Hamming norm; the original
        // parameterless BFMatcher() defaults to L2, which is invalid for
        // binary descriptors.
        using (var matcher = new BFMatcher(NormTypes.Hamming))
        {
            // Detect keypoints and compute descriptors (null = no mask; the
            // original allocated throwaway Mat masks that were never disposed).
            orb.DetectAndCompute(image1, null, out keyPoint1, descriptor1);
            orb.DetectAndCompute(image2, null, out keyPoint2, descriptor2);
            Debug.Log($"keyPoints has {keyPoint1.Length},{keyPoint2.Length} items.");
            Debug.Log($"descriptor has {descriptor1.Rows},{descriptor2.Rows} items.");

            // Brute-force matching and visualization.
            DMatch[] matchePoints = matcher.Match(descriptor1, descriptor2);

            dstMat = new Mat();
            Cv2.DrawMatches(image01, keyPoint1, image02, keyPoint2, matchePoints, dstMat);
            t2d = Utils.MatToTexture2D(dstMat);
        }

        Sprite dst_sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

        m_srcImage.sprite         = dst_sp;
        m_srcImage.preserveAspect = true;
    }
Exemple #20
0
        private static void Surf(IplImage img1, IplImage img2)
        {
            // Wrap the legacy IplImages as Mats (deep copies).
            Mat src  = new Mat(img1, true);
            Mat src2 = new Mat(img2, true);

            // Detect the keypoints and generate their descriptors using SURF.
            SURF surf = new SURF(500, 4, 2, true);

            KeyPoint[] keypoints1, keypoints2;
            MatOfFloat descriptors1 = new MatOfFloat();
            MatOfFloat descriptors2 = new MatOfFloat();

            surf.Run(src, null, out keypoints1, descriptors1);
            surf.Run(src2, null, out keypoints2, descriptors2);

            // If either image produced no descriptors, Match throws — the original
            // marked the Match call as the line where an exception occurred — so
            // bail out early instead.
            if (descriptors1.Rows == 0 || descriptors2.Rows == 0)
            {
                return;
            }

            // Matching descriptor vectors with a brute force matcher (SURF
            // descriptors are float vectors, so L2 is correct).
            BFMatcher matcher = new BFMatcher(NormType.L2, false);

            DMatch[] matches = matcher.Match(descriptors1, descriptors2);
            Mat      view    = new Mat();

            Cv2.DrawMatches(src, keypoints1, src2, keypoints2, matches, view);

            Window.ShowImages(view);
        }
Exemple #21
0
        /// <summary>
        ///     Previews the feature-match result in a debug image (only when the
        ///     <see cref="MatchArgument.PreviewMatchResult"/> extension config is enabled).
        /// </summary>
        /// <param name="argument">Match options; checked for the preview flag.</param>
        /// <param name="matchResult">Result whose rectangles are outlined when successful.</param>
        /// <param name="sourceMat">Image that was searched.</param>
        /// <param name="searchMat">Image that was searched for.</param>
        /// <param name="keySourcePoints">Keypoints detected in <paramref name="sourceMat"/>.</param>
        /// <param name="keySearchPoints">Keypoints detected in <paramref name="searchMat"/>.</param>
        /// <param name="goodMatches">Accepted matches to draw between the two images.</param>
        internal static void PreviewDebugFeatureMatchResult(this MatchArgument argument, FeatureMatchResult matchResult, Mat sourceMat, Mat searchMat,
                                                            IEnumerable <KeyPoint> keySourcePoints, IEnumerable <KeyPoint> keySearchPoints,
                                                            IEnumerable <DMatch> goodMatches)
        {
            // Preview is opt-in; bail out unless explicitly enabled.
            if (!argument.IsExtensionConfigEnabled(MatchArgument.PreviewMatchResult))
            {
                return;
            }

            // NOTE(review): Mat(sourceMat, Range.All) creates a sub-header that
            // shares the underlying pixel buffer, so the rectangles below likely
            // also draw onto sourceMat — confirm whether a Clone() was intended.
            using var image = new Mat(sourceMat, OpenCvSharp.Range.All);
            if (matchResult.Success)
            {
                // Outline every matched rectangle in a random color.
                foreach (var matchItem in matchResult.MatchItems)
                {
                    var rectangle = matchItem.Rectangle;
                    Cv2.Rectangle(image, new Point(rectangle.X, rectangle.Y), new Point(rectangle.Right, rectangle.Bottom), Scalar.RandomColor(), 3);
                }
            }

            // Draw the match lines (skipping unmatched keypoints) and show the preview.
            using var imgMatch = new Mat();
            Cv2.DrawMatches(image, keySourcePoints, searchMat, keySearchPoints, goodMatches, imgMatch, flags: DrawMatchesFlags.NotDrawSinglePoints);
            PreviewMatchResultImage(imgMatch);
        }
        /// <summary>
        ///     Configures camera 1 for high-resolution capture (retrying until the settings
        ///     stick or a 30 s timeout elapses), loads a template image and its features,
        ///     then continuously matches each captured frame against the template and
        ///     displays the correspondences.
        /// </summary>
        static void Main(string[] args)
        {
            var capture = new VideoCapture(1);

            var startedAt = DateTime.Now;
            var attempts  = 0;

            // Camera settings:
            // normal capture is 1600 * 1200 @ 5 fps (processing-speed constraint);
            // template capture is 2592 * 1944 @ 2 fps (FA: 2592 * 1944 @ 6 fps).
            const int desiredWidth  = 2592;
            const int desiredHeight = 1944;
            const int desiredFps    = 6;

            // Keep rewriting the settings until the driver reports them back, or time out.
            while (true)
            {
                if (capture.Fps != desiredFps)
                {
                    capture.Fps = desiredFps;
                }
                if (capture.FrameWidth != desiredWidth)
                {
                    capture.FrameWidth = desiredWidth;
                }
                if (capture.FrameHeight != desiredHeight)
                {
                    capture.FrameHeight = desiredHeight;
                }

                attempts++;
                Console.WriteLine("設定書き込み {0}回目 {1}秒", attempts, (DateTime.Now - startedAt).TotalSeconds);

                var sizeApplied = capture.FrameWidth == desiredWidth && capture.FrameHeight == desiredHeight;
                if (sizeApplied && Math.Abs(capture.Fps - desiredFps) < 1)
                {
                    break;
                }

                if ((DateTime.Now - startedAt).TotalSeconds > 30)
                {
                    throw new TimeoutException("videoCaptureのフレームサイズの設定がタイムアウトしました");
                }
            }

            int posMsec = (int)(1000 * 1 / capture.Fps);

            capture.Set(VideoCaptureProperties.PosMsec, posMsec);

            // Feature point matcher
            DescriptorMatcher matcher = DescriptorMatcher.Create("BruteForce");

            // Template image and its precomputed features (keypoints + descriptor)
            using var template = new Mat("escalator1st_20201119115525242.bmp");
            (var templateKeyPoints, var templateDescriptor) = FeatureCommand(template);

            while (true)
            {
                using var frame = new Mat();
                capture.Read(frame);

                // Features of the live frame
                (var frameKeyPoints, var frameDescriptor) = FeatureCommand(frame);

                // Top-2 nearest-neighbour matching of the feature descriptors
                DMatch[][] knnMatches = matcher.KnnMatch(templateDescriptor, frameDescriptor, 2);

                // Keep only correspondences that pass the score filter
                (List<DMatch> goodMatches, List<Point2f> goodTemplateKeyPoints, List<Point2f> goodSourceKeyPoints) =
                    FilterMatchGoodScore(templateKeyPoints, frameKeyPoints, knnMatches);

                // Draw lines between the matched feature points
                Mat matchImage = new Mat();
                Cv2.DrawMatches(template, templateKeyPoints, frame, frameKeyPoints, goodMatches, matchImage);

                Cv2.ImShow("feature", matchImage);
                Cv2.WaitKey();
            }
        }
Exemple #23
0
        //img1:test image; img2:ref img
        /// <summary>
        ///     Computes a feature-based similarity rate between a test image (img1) and a
        ///     reference image (img2): KAZE features are KNN-matched, filtered by uniqueness
        ///     and size-and-orientation voting, then verified with a RANSAC homography.
        ///     Optionally shows the surviving matches side by side.
        /// </summary>
        /// <param name="img1">Test image.</param>
        /// <param name="img2">Reference image.</param>
        /// <param name="ishowImageMatchTemplate">When true, pops up a window visualizing the matches.</param>
        /// <param name="s">Window title used for the visualization.</param>
        /// <returns>Match rate in [0, 1]; stays 0.0 when fewer than 4 voted inliers were found.</returns>
        public float MatchTemplate(Mat img1, Mat img2, bool ishowImageMatchTemplate, string s = "Match")
        {
            float matchRate = 0.0f;

            using (var descriptors1 = new Mat())
                using (var descriptors2 = new Mat())
                    using (var matcher = new BFMatcher(NormTypes.L2SQR))
                        using (var kaze = KAZE.Create())
                        {
                            KeyPoint[] keypoints1, keypoints2;
                            kaze.DetectAndCompute(img1, null, out keypoints1, descriptors1);
                            kaze.DetectAndCompute(img2, null, out keypoints2, descriptors2);

                            // Two nearest neighbours per query descriptor, needed by the uniqueness vote below.
                            DMatch[][] matches = matcher.KnnMatch(descriptors1, descriptors2, 2);
                            using (Mat mask = new Mat(matches.Length, 1, MatType.CV_8U))
                            {
                                mask.SetTo(new Scalar(255));
                                int nonZero = Cv2.CountNonZero(mask);
                                VoteForUniqueness(matches, mask);
                                nonZero = Cv2.CountNonZero(mask);
                                nonZero = VoteForSizeAndOrientation(keypoints2, keypoints1, matches, mask, 1.5f, 20);

                                List <Point2f> obj             = new List <Point2f>();
                                List <Point2f> scene           = new List <Point2f>();
                                List <DMatch>  goodMatchesList = new List <DMatch>();
                                //iterate through the mask only pulling out nonzero items because they're matches
                                MatIndexer <byte> maskIndexer = mask.GetGenericIndexer <byte>(); // hoisted: the indexer is loop-invariant
                                for (int i = 0; i < mask.Rows; i++)
                                {
                                    if (maskIndexer[i] > 0)
                                    {
                                        obj.Add(keypoints1[matches[i][0].QueryIdx].Pt);
                                        scene.Add(keypoints2[matches[i][0].TrainIdx].Pt);
                                        goodMatchesList.Add(matches[i][0]);
                                    }
                                }

                                List <Point2d> objPts   = obj.ConvertAll(Point2fToPoint2d);
                                List <Point2d> scenePts = scene.ConvertAll(Point2fToPoint2d);
                                if (nonZero >= 4) // FindHomography needs at least 4 point pairs
                                {
                                    Mat homography = Cv2.FindHomography(objPts, scenePts, HomographyMethods.Ransac, 1.5, mask);
                                    nonZero = Cv2.CountNonZero(mask); // RANSAC rewrote the mask: count the surviving inliers

                                    // Match rate from the inlier count, normalized against the
                                    // reference keypoint count (bounded to [0, 1]).
                                    matchRate = 1 - (float)(keypoints2.Length - nonZero) / (keypoints2.Length + nonZero);

                                    if (homography != null && ishowImageMatchTemplate == true)
                                    {
                                        Point2f[] objCorners = { new Point2f(0,                 0),
                                                                 new Point2f(img1.Cols,         0),
                                                                 new Point2f(img1.Cols, img1.Rows),
                                                                 new Point2f(0,         img1.Rows) };

                                        // Corners of img1 projected into img2's frame (available for overlay drawing).
                                        Point2d[] sceneCorners = MyPerspectiveTransform3(objCorners, homography);

                                        // Horizontal concatenation: img1 on the left, img2 on the right.
                                        using (Mat img3 = new Mat(Math.Max(img1.Height, img2.Height), img2.Width + img1.Width, MatType.CV_8UC3))
                                            using (Mat left = new Mat(img3, new Rect(0, 0, img1.Width, img1.Height)))
                                                using (Mat right = new Mat(img3, new Rect(img1.Width, 0, img2.Width, img2.Height)))
                                                {
                                                    img1.CopyTo(left);
                                                    img2.CopyTo(right);

                                                    byte[] maskBytes = new byte[mask.Rows * mask.Cols];
                                                    mask.GetArray(0, 0, maskBytes);
                                                    Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, goodMatchesList, img3, Scalar.All(-1), Scalar.All(-1), maskBytes, DrawMatchesFlags.NotDrawSinglePoints);

                                                    // BUG FIX: Size is (width, height) — pass Cols as width and
                                                    // Rows as height; the original swapped them, which transposed
                                                    // the preview dimensions and distorted the displayed image.
                                                    Cv2.ImShow(s, img3.Resize(new Size(img3.Cols / 2, img3.Rows / 2)));
                                                    Cv2.WaitKey(0);
                                                    Cv2.DestroyWindow(s);
                                                }
                                    }
                                }
                            }
                        }

            return(matchRate);
        }
Exemple #24
0
        /// <summary>
        ///     Opens the matching dialog and matches the selected source image against the
        ///     selected template image, by template matching (mode 0), SIFT (mode 1) or
        ///     SURF (mode 2); logs the elapsed time and shows the result image.
        /// </summary>
        private void bn_Match_Click(object sender, RoutedEventArgs e)
        {
            if (listImage.Count > 0)
            {
                SubWindow.Win_Matching win = new SubWindow.Win_Matching(listImage);
                if (win.ShowDialog() == true)
                {
                    int    mode     = win.cb_Mode.SelectedIndex;
                    int    idxSrc   = win.cb_Src.SelectedIndex;
                    int    idxTmpl  = win.cb_Tmpl.SelectedIndex;
                    string strTitle = listImage[_nSelWin].Title;
                    Mat    matSrc   = listImage[idxSrc].fn_GetImage();
                    Mat    matTmpl  = listImage[idxTmpl].fn_GetImage();
                    Mat    matDst   = new Mat();
                    timeStart = DateTime.Now;

                    if (mode == 0)// Template
                    {
                        Mat matResult = new Mat();
                        Cv2.MatchTemplate(matSrc, matTmpl, matResult, TemplateMatchModes.SqDiffNormed);

                        // SqDiffNormed: the best match is the global minimum of the result
                        // map. MinMaxLoc replaces the previous hand-rolled unsafe scan.
                        OpenCvSharp.Point matchLoc;
                        OpenCvSharp.Point maxLoc;
                        Cv2.MinMaxLoc(matResult, out matchLoc, out maxLoc);

                        matDst = matSrc.Clone();
                        Cv2.CvtColor(matDst, matDst, ColorConversionCodes.GRAY2BGR);

                        Cv2.Rectangle(matDst, new OpenCvSharp.Rect(matchLoc.X, matchLoc.Y, matTmpl.Cols, matTmpl.Rows), new Scalar(0, 255, 0));
                    }
                    else if (mode == 1)// SIFT
                    {
                        fn_DrawFeatureMatches(OpenCvSharp.Features2D.SIFT.Create(), matTmpl, matSrc, matDst);
                    }
                    else if (mode == 2)// SURF
                    {
                        fn_DrawFeatureMatches(OpenCvSharp.XFeatures2D.SURF.Create(800), matTmpl, matSrc, matDst);
                    }

                    fn_WriteLog($"[Matching] {strTitle} ({(DateTime.Now - timeStart).TotalMilliseconds} ms)");
                    fn_NewImage(matDst, $"Matching {mode}");
                }
            }
        }

        /// <summary>
        ///     Shared SIFT/SURF branch: detects and describes features in both images,
        ///     brute-force matches them and draws the best (lowest-distance) matches into matDst.
        /// </summary>
        /// <param name="detector">Feature detector/descriptor (SIFT or SURF).</param>
        /// <param name="matTmpl">Template (query) image.</param>
        /// <param name="matSrc">Source (train) image.</param>
        /// <param name="matDst">Output image receiving the drawn matches.</param>
        private static void fn_DrawFeatureMatches(OpenCvSharp.Features2D.Feature2D detector, Mat matTmpl, Mat matSrc, Mat matDst)
        {
            KeyPoint[] keypoint1, keypoint2;
            Mat matDescriptor1 = new Mat();
            Mat matDescriptor2 = new Mat();
            detector.DetectAndCompute(matTmpl, new Mat(), out keypoint1, matDescriptor1);
            detector.DetectAndCompute(matSrc, new Mat(), out keypoint2, matDescriptor2);

            BFMatcher matcher  = new BFMatcher();
            DMatch[]  dMatches = matcher.Match(matDescriptor1, matDescriptor2);
            if (dMatches.Length > 0)
            {
                // BUG FIX: Match() returns results in query order, not quality order;
                // sort by distance so the "good" subset really is the best subset
                // (the original simply took the first N matches).
                Array.Sort(dMatches, (a, b) => a.Distance.CompareTo(b.Distance));

                int      GOOD  = Math.Min(50, (int)(dMatches.Length * 0.1));
                DMatch[] dGood = new DMatch[GOOD];
                Array.Copy(dMatches, dGood, GOOD);

                Cv2.DrawMatches(matTmpl, keypoint1, matSrc, keypoint2, dGood, matDst, Scalar.All(-1), Scalar.All(-1), new List <byte>(), DrawMatchesFlags.NotDrawSinglePoints);
            }
        }
Exemple #25
0
        /// <summary>
        ///     Aligns the left image onto the right image via AKAZE feature matching and a
        ///     RANSAC homography, then highlights per-channel absolute differences between
        ///     the warped left image and the right image. Intermediate and final results
        ///     are saved under "result\" and shown in picture boxes 3-8.
        ///     NOTE(review): despite the name, this method never calls Cv2.FindContours.
        /// </summary>
        /// <param name="sLeftPictureFile">Path of the left (source) image.</param>
        /// <param name="sRightPictureFile">Path of the right (target) image.</param>
        public void FindContours(string sLeftPictureFile, string sRightPictureFile)
        {
            Mat tokuLeft  = new Mat();
            Mat tokuRight = new Mat();
            Mat output    = new Mat();

            AKAZE akaze = AKAZE.Create();

            KeyPoint[] keyPointsLeft;
            KeyPoint[] keyPointsRight;

            Mat descriptorLeft  = new Mat();
            Mat descriptorRight = new Mat();

            DescriptorMatcher matcher; // matching strategy

            DMatch[] matches;          // array holding the match results between the two descriptor sets

            // Load the left image (in color; the original comment mentioned
            // grayscale/smoothing, but no conversion or blur is applied here)
            Mat Lsrc = new Mat(sLeftPictureFile, ImreadModes.Color);

            // Load the right image (in color, same note as above)
            Mat Rsrc = new Mat(sRightPictureFile, ImreadModes.Color);

            // Detect keypoints and compute their feature descriptors
            akaze.DetectAndCompute(Lsrc, null, out keyPointsLeft, descriptorLeft);
            akaze.DetectAndCompute(Rsrc, null, out keyPointsRight, descriptorRight);


            // Render image 1's keypoints and show/save them
            Cv2.DrawKeypoints(Lsrc, keyPointsLeft, tokuLeft);
            Image imageLeftToku = BitmapConverter.ToBitmap(tokuLeft);

            pictureBox3.SizeMode = PictureBoxSizeMode.Zoom;
            pictureBox3.Image    = imageLeftToku;
            tokuLeft.SaveImage("result/LeftToku.jpg");



            // Render image 2's keypoints and show/save them
            Cv2.DrawKeypoints(Rsrc, keyPointsRight, tokuRight);
            Image imageRightToku = BitmapConverter.ToBitmap(tokuRight);

            pictureBox4.SizeMode = PictureBoxSizeMode.Zoom;
            pictureBox4.Image    = imageRightToku;
            tokuRight.SaveImage("result/RightToku.jpg");

            // Brute-force matching of the two descriptor sets
            matcher = DescriptorMatcher.Create("BruteForce");
            matches = matcher.Match(descriptorLeft, descriptorRight);

            Cv2.DrawMatches(Lsrc, keyPointsLeft, Rsrc, keyPointsRight, matches, output);
            output.SaveImage(@"result\output.jpg");

            // Collect the matched point pairs as Vec2f arrays for FindHomography
            int size         = matches.Count();
            var getPtsSrc    = new Vec2f[size];
            var getPtsTarget = new Vec2f[size];

            int count = 0;

            foreach (var item in matches)
            {
                var ptSrc    = keyPointsLeft[item.QueryIdx].Pt;
                var ptTarget = keyPointsRight[item.TrainIdx].Pt;
                getPtsSrc[count][0]    = ptSrc.X;
                getPtsSrc[count][1]    = ptSrc.Y;
                getPtsTarget[count][0] = ptTarget.X;
                getPtsTarget[count][1] = ptTarget.Y;
                count++;
            }

            // Estimate the transform matrix "hom" that maps Src onto Target.
            // Robust estimation method is RANSAC.
            var hom = Cv2.FindHomography(
                InputArray.Create(getPtsSrc),
                InputArray.Create(getPtsTarget),
                HomographyMethods.Ransac);

            // Apply the perspective transform "hom" to the Src image.
            Mat WarpedSrcMat = new Mat();

            Cv2.WarpPerspective(
                Lsrc, WarpedSrcMat, hom,
                new OpenCvSharp.Size(Rsrc.Width, Rsrc.Height));

            WarpedSrcMat.SaveImage(@"result\Warap.jpg");

            // Show the warped left image
            Image imageLeftSyaei = BitmapConverter.ToBitmap(WarpedSrcMat);

            pictureBox5.SizeMode = PictureBoxSizeMode.Zoom;
            pictureBox5.Image    = imageLeftSyaei;


            // Show the (unchanged) right image for comparison
            Image imageRightSyaei = BitmapConverter.ToBitmap(Rsrc);

            pictureBox6.SizeMode = PictureBoxSizeMode.Zoom;
            pictureBox6.Image    = imageRightSyaei;


            // Convert both images to 16-bit signed and split into B/G/R planes
            Mat LmatFloat = new Mat();

            WarpedSrcMat.ConvertTo(LmatFloat, MatType.CV_16SC3);
            Mat[] LmatPlanes = LmatFloat.Split();

            Mat RmatFloat = new Mat();

            Rsrc.ConvertTo(RmatFloat, MatType.CV_16SC3);
            Mat[] RmatPlanes = RmatFloat.Split();

            Mat diff0 = new Mat();
            Mat diff1 = new Mat();
            Mat diff2 = new Mat();

            // Per-channel absolute difference between warped-left and right
            Cv2.Absdiff(LmatPlanes[0], RmatPlanes[0], diff0);
            Cv2.Absdiff(LmatPlanes[1], RmatPlanes[1], diff1);
            Cv2.Absdiff(LmatPlanes[2], RmatPlanes[2], diff2);

            // Median blur suppresses single-pixel alignment noise
            Cv2.MedianBlur(diff0, diff0, 5);
            Cv2.MedianBlur(diff1, diff1, 5);
            Cv2.MedianBlur(diff2, diff2, 5);

            diff0.SaveImage("result/diff0.jpg");
            diff1.SaveImage("result/diff1.jpg");
            diff2.SaveImage("result/diff2.jpg");

            // Merge the three channel differences into a single mask
            Mat wiseMat = new Mat();

            Cv2.BitwiseOr(diff0, diff1, wiseMat);
            Cv2.BitwiseOr(wiseMat, diff2, wiseMat);

            wiseMat.SaveImage("result/wiseMat.jpg");

            // Morphological open removes small specks, dilation grows the regions
            Mat openingMat = new Mat();

            Cv2.MorphologyEx(wiseMat, openingMat, MorphTypes.Open, new Mat());

            Mat dilationMat = new Mat();

            Cv2.Dilate(openingMat, dilationMat, new Mat());
            Cv2.Threshold(dilationMat, dilationMat, 100, 255, ThresholdTypes.Binary);
            dilationMat.SaveImage(@"result\dilationMat.jpg");

            Mat LaddMat = new Mat();
            Mat RaddMat = new Mat();

            Console.WriteLine(dilationMat.GetType());
            Console.WriteLine(Rsrc.GetType());

            // dilationMat is grayscale, so convert it to the same color space
            // as the Mats it will be blended with
            Mat dilationScaleMat = new Mat();
            Mat dilationColorMat = new Mat();

            Cv2.ConvertScaleAbs(dilationMat, dilationScaleMat);
            Cv2.CvtColor(dilationScaleMat, dilationColorMat, ColorConversionCodes.GRAY2RGB);

            // Overlay the difference mask on both images (30% image / 70% mask)
            Cv2.AddWeighted(WarpedSrcMat, 0.3, dilationColorMat, 0.7, 0, LaddMat);
            Cv2.AddWeighted(Rsrc, 0.3, dilationColorMat, 0.7, 0, RaddMat);

            Image LaddImage = BitmapConverter.ToBitmap(LaddMat);

            pictureBox7.SizeMode = PictureBoxSizeMode.Zoom;
            pictureBox7.Image    = LaddImage;

            Image RaddImage = BitmapConverter.ToBitmap(RaddMat);

            pictureBox8.SizeMode = PictureBoxSizeMode.Zoom;
            pictureBox8.Image    = RaddImage;

            RaddMat.SaveImage(@"result\Result.jpg");

            MessageBox.Show("Done!");
        }
Exemple #26
0
        /// <summary>
        ///     Locates the object image inside the scene image: SURF keypoints are detected
        ///     and described in both images, matched with a FLANN matcher, filtered by
        ///     distance relative to the best match, and a RANSAC homography projects the
        ///     object's corners into the scene. The result (match lines plus the localized
        ///     quadrilateral) is shown in a dialog; errors are reported via a message box.
        /// </summary>
        private void стартToolStripMenuItem_Click(object sender, EventArgs e)
        {
            try
            {
                labelstrip.Text = "Получение изображения";


                // Take the images from the PictureBoxes and convert Bitmap -> Mat
                Bitmap image1b = (Bitmap)objectPictureBox.Image;
                Bitmap image2b = (Bitmap)scenePictureBox.Image;
                Mat    image1  = BitmapConverter.ToMat(image1b);
                Mat    image2  = BitmapConverter.ToMat(image2b);

                // SURF detector configured from the settings form
                SURF surf = new SURF(form.getThreshold(), form.getOctaves(), form.getOctavesLayer(), form.getDecriptors(), false);

                labelstrip.Text = "Нахождение особых точек и их дескриптеров";

                // Keypoints and descriptors for each image
                Mat        descriptors1 = new Mat();
                Mat        descriptors2 = new Mat();
                KeyPoint[] points1, points2;

                // Detect the keypoints
                points1 = surf.Detect(image1);
                points2 = surf.Detect(image2);


                // Compute the keypoint descriptors
                surf.Compute(image1, ref points1, descriptors1);
                surf.Compute(image2, ref points2, descriptors2);

                // Match the two descriptor sets
                FlannBasedMatcher matcher = new FlannBasedMatcher();
                DMatch[]          matches;
                matches = matcher.Match(descriptors1, descriptors2);



                // Find the minimum and maximum distance over all matches.
                // BUG FIX: iterate over matches.Length instead of descriptors1.Rows —
                // the matcher can return fewer matches than query descriptors, and the
                // original loop could then throw IndexOutOfRangeException.
                double max_dist = 0; double min_dist = 100;

                for (int i = 0; i < matches.Length; i++)
                {
                    double dist = matches[i].Distance;
                    if (dist < min_dist)
                    {
                        min_dist = dist;
                    }
                    if (dist > max_dist)
                    {
                        max_dist = dist;
                    }
                }

                labelstrip.Text = "Отбор точек";

                // Keep only the good matches: distance below getMinDinst() * min_dist

                List <DMatch> good_matches = new List <DMatch>();

                for (int i = 0; i < matches.Length; i++)
                {
                    if (matches[i].Distance < form.getMinDinst() * min_dist)
                    {
                        good_matches.Add(matches[i]);
                    }
                }



                Mat image3 = new Mat();
                Cv2.DrawMatches(image1, points1, image2, points2, good_matches, image3, Scalar.RandomColor(), Scalar.RandomColor(), null, DrawMatchesFlags.NotDrawSinglePoints);

                labelstrip.Text = "Локализация объекта";

                // Homography-based object localization:
                // gather the matched point pairs from both images

                Point2f[] vector  = new Point2f[good_matches.Count];
                Point2d[] vector1 = new Point2d[good_matches.Count];
                Point2d[] vector2 = new Point2d[good_matches.Count];
                for (int i = 0; i < good_matches.Count; i++)
                {
                    vector[i]    = points1[good_matches[i].QueryIdx].Pt;
                    vector1[i].X = vector[i].X;
                    vector1[i].Y = vector[i].Y;
                    vector[i]    = points2[good_matches[i].TrainIdx].Pt;
                    vector2[i].X = vector[i].X;
                    vector2[i].Y = vector[i].Y;
                }

                Mat H = Cv2.FindHomography(vector1, vector2, HomographyMethod.Ransac);

                // Corners of the image containing the target object
                Point2d[] vector3 = new Point2d[4];
                vector3[0].X = 0; vector3[0].Y = 0;
                vector3[1].X = image1.Cols; vector3[1].Y = 0;
                vector3[2].X = image1.Cols; vector3[2].Y = image1.Rows;
                vector3[3].X = 0; vector3[3].Y = image1.Rows;
                Point2d pointtest; pointtest.X = 0; pointtest.Y = 0;


                List <Point2d> vector4 = new List <Point2d>()
                {
                    pointtest, pointtest, pointtest, pointtest
                };

                // Project the object's corners into the scene using the found transform
                Cv2.PerspectiveTransform(InputArray.Create(vector3), OutputArray.Create(vector4), H);

                // Draw the quadrilateral; the scene image sits at x-offset image1.Cols
                // inside the side-by-side match image, hence the shift
                Point2d point1;
                Point2d point2;
                int     k;
                for (int i = 0; i < 4; i++)
                {
                    k        = (i == 3) ? 0 : i + 1; // wrap around to close the polygon
                    point1.X = vector4[i].X + image1.Cols;
                    point1.Y = vector4[i].Y + 0;
                    point2.X = vector4[k].X + image1.Cols;
                    point2.Y = vector4[k].Y + 0;
                    Cv2.Line(image3, point1, point2, Scalar.RandomColor(), 4);
                }

                labelstrip.Text = "Объект локализован";

                Bitmap image3b = BitmapConverter.ToBitmap(image3);
                imageForm.getresultimage(image3b);

                imageForm.ShowDialog();
            }
            catch (Exception ex)
            {
                MessageBox.Show(Convert.ToString(ex));
                labelstrip.Text = "Произошла ошибка";
            }
        }
Exemple #27
0
        public Task <string> AuthenticateBio(bool?check)
        {
            int            matchesCounter = 0;
            OpenFileDialog openFileDialog = new OpenFileDialog();

            openFileDialog.Filter = "Image files (*.png;*.jpeg;*.jpg)|*.png;*.jpeg;*.jpg";
            if (openFileDialog.ShowDialog() == true)
            {
                //arquivos da pasta images
                var files = Directory.GetFiles(Path.GetDirectoryName(System.Diagnostics.Process.GetCurrentProcess().MainModule.FileName) + "\\Resources\\Images");
                //Source -- arquivo que escolhi e transformo ele em cinza
                Mat src = new Mat(Path.GetFullPath(openFileDialog.FileName), ImreadModes.Grayscale);
                //SURF - Speeded Up Robust Features
                var detector = SURF.Create(hessianThreshold: 400);
                //variaveis criadas em run-time, garbage collector cuida deles depois.
                var imgMatches = new Mat();
                //aqui é o matcher -- COMPARADOR
                var matcher = new BFMatcher();

                for (int i = 0; i < 4; i++)
                {
                    switch (i)
                    {
                    case 0:
                        Mat resSrcTermination = new Mat();
                        Mat resDstTermination = new Mat();
                        //pega o src, dá resize e joga em resSrc
                        Cv2.Resize(src, resSrcTermination, new Size(450, 450));
                        // pega a área de interesse
                        var srcTerminacao = new Mat(resSrcTermination, new Rect(75, 75, 150, 150));
                        foreach (var item in files)
                        {
                            //arquivo destinatario
                            Mat dst = new Mat(item, ImreadModes.Grayscale);
                            //pega o dst, dá resize e joga em resDst
                            Cv2.Resize(dst, resDstTermination, new Size(450, 450));
                            // grab the region of interest (terminations window)
                            var resTerminacao = new Mat(resDstTermination, new Rect(75, 75, 150, 150));
                            // detect keypoints ("the little circles") in both ROIs
                            var keypoints1 = detector.Detect(srcTerminacao);
                            var keypoints2 = detector.Detect(resTerminacao);
                            // --------------------
                            // NOTE(review): equal keypoint COUNTS are treated as a match here —
                            // descriptors are never compared; confirm this is intentional.
                            if (keypoints1.Length == keypoints2.Length)
                            {
                                firstkp = keypoints1.Length;
                                matchesCounter++;
                                if (check ?? false)
                                {
                                    // match the filtered images (visualization only)
                                    var matches = matcher.Match(srcTerminacao, resTerminacao);
                                    try
                                    {
                                        // draw lines between the matched keypoints
                                        Cv2.DrawMatches(srcTerminacao, keypoints1, resTerminacao, keypoints2, matches, imgMatches);
                                        // display the matches
                                        Cv2.ImShow("Terminação", imgMatches);
                                    }
                                    catch
                                    {
                                        // best-effort: drawing/display failures are ignored
                                    }
                                }
                                break;
                            }
                        }
                        break;

                    case 1:
                        // Bifurcation window: compare a fixed ROI of src against each candidate file.
                        Mat resSrcBifurcation = new Mat();
                        Mat resDstBifurcation = new Mat();
                        // resize src to a fixed 450x450 canvas so the ROI coordinates line up
                        Cv2.Resize(src, resSrcBifurcation, new Size(450, 450));
                        //
                        var srcBifurcacao = new Mat(resSrcBifurcation, new Rect(75, 250, 150, 150));
                        foreach (var item in files)
                        {
                            // candidate (enrolled) file
                            Mat dst = new Mat(item, ImreadModes.Grayscale);
                            // resize dst to the same fixed canvas
                            Cv2.Resize(dst, resDstBifurcation, new Size(450, 450));
                            // grab the region of interest
                            var resBifurcacao = new Mat(resDstBifurcation, new Rect(75, 250, 150, 150));
                            // detect keypoints in both ROIs
                            var keypoints1 = detector.Detect(srcBifurcacao);
                            var keypoints2 = detector.Detect(resBifurcacao);
                            // --------------------
                            // same count-equality criterion as case 0
                            if (keypoints1.Length == keypoints2.Length)
                            {
                                matchesCounter++;
                                secondkp = keypoints1.Length;
                                if (check ?? false)
                                {
                                    // match the filtered images (visualization only)
                                    var matches = matcher.Match(srcBifurcacao, resBifurcacao);
                                    try
                                    {
                                        // draw lines between the matched keypoints
                                        Cv2.DrawMatches(srcBifurcacao, keypoints1, resBifurcacao, keypoints2, matches, imgMatches);
                                        // display the matches
                                        Cv2.ImShow("Bifurcação", imgMatches);
                                    }
                                    catch
                                    {
                                        // best-effort: drawing/display failures are ignored
                                    }
                                }
                                break;
                            }
                        }
                        break;

                    case 2:
                        // Independency window: note the ROI is 150x120 here, not 150x150.
                        Mat resSrcIndependency = new Mat();
                        Mat resDstIndependency = new Mat();
                        // resize src to the fixed canvas
                        Cv2.Resize(src, resSrcIndependency, new Size(450, 450));
                        // grab the region of interest
                        var srcIndependency = new Mat(resSrcIndependency, new Rect(235, 250, 150, 120));
                        foreach (var item in files)
                        {
                            // candidate (enrolled) file
                            Mat dst = new Mat(item, ImreadModes.Grayscale);
                            // resize dst to the same fixed canvas
                            Cv2.Resize(dst, resDstIndependency, new Size(450, 450));
                            // grab the region of interest
                            var resIndependency = new Mat(resDstIndependency, new Rect(235, 250, 150, 120));
                            // detect keypoints in both ROIs
                            var keypoints1 = detector.Detect(srcIndependency);
                            var keypoints2 = detector.Detect(resIndependency);
                            // --------------------
                            // same count-equality criterion as case 0
                            if (keypoints1.Length == keypoints2.Length)
                            {
                                thirdkp = keypoints1.Length;
                                matchesCounter++;
                                if (check ?? false)
                                {
                                    // match the filtered images (visualization only)
                                    var matches = matcher.Match(srcIndependency, resIndependency);
                                    try
                                    {
                                        // draw lines between the matched keypoints
                                        Cv2.DrawMatches(srcIndependency, keypoints1, resIndependency, keypoints2, matches, imgMatches);
                                        // display the matches
                                        Cv2.ImShow("Independente", imgMatches);
                                    }
                                    catch
                                    {
                                        // best-effort: drawing/display failures are ignored
                                    }
                                }
                                break;
                            }
                        }
                        break;

                    case 3:
                        // Island window: ROI is 150x130.
                        Mat resSrcIsland = new Mat();
                        Mat resDstIsland = new Mat();
                        // resize src to the fixed canvas
                        Cv2.Resize(src, resSrcIsland, new Size(450, 450));
                        // grab the region of interest
                        var srcIlha = new Mat(resSrcIsland, new Rect(220, 220, 150, 130));
                        foreach (var item in files)
                        {
                            // candidate (enrolled) file
                            Mat dst = new Mat(item, ImreadModes.Grayscale);
                            // resize dst to the same fixed canvas
                            Cv2.Resize(dst, resDstIsland, new Size(450, 450));
                            // grab the region of interest
                            var resIlha = new Mat(resDstIsland, new Rect(220, 220, 150, 130));
                            // detect keypoints in both ROIs
                            var keypoints1 = detector.Detect(srcIlha);
                            var keypoints2 = detector.Detect(resIlha);
                            // --------------------
                            // same count-equality criterion as case 0
                            if (keypoints1.Length == keypoints2.Length)
                            {
                                fourthkp = keypoints1.Length;
                                matchesCounter++;
                                if (check ?? false)
                                {
                                    // match the filtered images (visualization only)
                                    var matches = matcher.Match(srcIlha, resIlha);
                                    try
                                    {
                                        // draw lines between the matched keypoints
                                        Cv2.DrawMatches(srcIlha, keypoints1, resIlha, keypoints2, matches, imgMatches);
                                        // display the matches
                                        Cv2.ImShow("Ilha", imgMatches);
                                    }
                                    catch
                                    {
                                        // best-effort: drawing/display failures are ignored
                                    }
                                }
                                break;
                            }
                        }
                        break;

                    default:
                        return(Task.FromResult("Canceled"));
                    }
                }
                // NOTE(review): the hard-coded keypoint counts below appear to identify
                // specific enrolled fingerprints (admin/director roles) — confirm, and
                // consider moving these magic numbers to configuration.
                if (matchesCounter == 4 && firstkp == 201 && secondkp == 169 && thirdkp == 127 && fourthkp == 143)
                {
                    return(Task.FromResult("ADMIN"));
                }
                else if (matchesCounter == 4 && firstkp == 174 && secondkp == 169 && thirdkp == 133 && fourthkp == 154)
                {
                    return(Task.FromResult("DIRETOR"));
                }
                else if (matchesCounter == 4)
                {
                    // all four windows matched, but counts don't map to a known role
                    return(Task.FromResult("OK"));
                }
                else
                {
                    return(Task.FromResult("Wrong"));
                }
            }
            else
            {
                return(Task.FromResult("Canceled"));
            }
        }
Exemple #28
0
    //--------------------------------------------------------------------------------------------------


    /// <summary>
    /// Loads two grayscale images, matches ORB features between them, filters the
    /// matches with Lowe's ratio test and RANSAC, draws the surviving matches, and
    /// computes the rotation matrix between the two views.
    /// </summary>
    static void Main()
    {
        // NOTE(review): hard-coded input/output paths; consider taking them as arguments.
        Mat srcOri = new Mat("C:/Users/Li&Ao/Desktop/Test/5.JPG", ImreadModes.Grayscale);
        Mat dstOri = new Mat("C:/Users/Li&Ao/Desktop/Test/6.JPG", ImreadModes.Grayscale);

        Mat src = new Mat();
        Mat dst = new Mat();

        RotateAndResize(srcOri, out src, true);
        RotateAndResize(dstOri, out dst, true);

        // Step1: Detect the keypoints and generate their descriptors using ORB
        ORB orb = ORB.Create();

        KeyPoint[] kp1, kp2;
        Mat desc1 = new Mat();
        Mat desc2 = new Mat();

        orb.DetectAndCompute(src, null, out kp1, desc1);
        orb.DetectAndCompute(dst, null, out kp2, desc2);

        // Step2: Matching descriptor vectors with a brute force matcher.
        // ORB descriptors are binary, so Hamming is the correct norm; the
        // parameterless BFMatcher() defaults to L2, which is meant for
        // float descriptors (SIFT/SURF) and gives poor matches for ORB.
        var bfMatcher = new BFMatcher(NormTypes.Hamming);
        var matches = bfMatcher.KnnMatch(desc1, desc2, k: 2);

        // Step3: Ratio test for outlier removal
        var betterKp1 = new List<Point2f>();
        var betterKp2 = new List<Point2f>();
        var betterMatches = new List<DMatch>();

        foreach (DMatch[] items in matches)
        {
            // Guard: KnnMatch can return fewer than 2 neighbours for a query,
            // which would make items[1] throw.
            if (items.Length < 2)
            {
                continue;
            }
            if (items[0].Distance < 0.8 * items[1].Distance)
            {
                betterKp1.Add(kp1[items[0].QueryIdx].Pt);
                betterKp2.Add(kp2[items[0].TrainIdx].Pt);
                betterMatches.Add(items[0]);
            }
        }

        // Step4: RANSAC for outlier removal
        Point2d Point2fToPoint2d(Point2f pf) => new Point2d((double)pf.X, (double)pf.Y);

        var betterKp1_tmp = betterKp1.ConvertAll(Point2fToPoint2d);
        var betterKp2_tmp = betterKp2.ConvertAll(Point2fToPoint2d);

        var bestTuple = RansacMethod(betterKp1_tmp, betterKp2_tmp, src.Cols, src.Rows);
        var bestKp1 = bestTuple.Item1;
        var bestKp2 = bestTuple.Item2;

        // Step5: draw only the matches whose endpoints survived RANSAC
        var plotMatches = new List<DMatch>();

        foreach (DMatch[] items in matches)
        {
            if (items.Length == 0)
            {
                continue;
            }
            var p1 = Point2fToPoint2d(kp1[items[0].QueryIdx].Pt);
            var p2 = Point2fToPoint2d(kp2[items[0].TrainIdx].Pt);
            if (bestKp1.Contains(p1) && bestKp2.Contains(p2))
            {
                plotMatches.Add(items[0]);
            }
        }

        Mat outImg = new Mat();

        Cv2.DrawMatches(src, kp1, dst, kp2, plotMatches, outImg);
        Cv2.ImShow("outImg", outImg);

        // Size takes (width, height): width = Cols, height = Rows.
        // The previous code passed (Rows, Cols), which swapped the dimensions
        // and distorted the saved image.
        Cv2.Resize(outImg, outImg, new Size(outImg.Cols / 2, outImg.Rows / 2));
        Cv2.ImWrite("C:/Users/Li&Ao/Desktop/Test/output.JPG", outImg);

        // Calculate R matrix: back-project each inlier pair to 3D rays and solve
        // for the rotation aligning the two ray bundles.
        Matrix<double> A = Matrix<double>.Build.Dense(3, bestKp1.Count);
        Matrix<double> B = Matrix<double>.Build.Dense(3, bestKp2.Count);

        for (int i = 0; i < bestKp1.Count; i++)
        {
            Vector<double> p1 = From2dTo3d(bestKp1[i], src.Cols, src.Rows);
            Vector<double> p2 = From2dTo3d(bestKp2[i], src.Cols, src.Rows);
            A.SetColumn(i, p1);
            B.SetColumn(i, p2);
        }
        var R = CalcRotation(A, B);

        Console.WriteLine("R matrix is:" + R);
        Cv2.WaitKey();
    }
Exemple #29
0
        /// <summary>
        /// Matches KAZE features between two images, estimates a homography from the
        /// filtered matches, and returns a side-by-side visualization of the two
        /// images with the good matches drawn and the projected outline of img1
        /// plotted onto img2.
        /// </summary>
        /// <param name="img1">Object (query) image.</param>
        /// <param name="img2">Scene (train) image.</param>
        /// <returns>Side-by-side match visualization; black canvas if matching failed.</returns>
        public Mat Run(Mat img1, Mat img2)
        {
            // Output canvas wide enough to hold both images side by side.
            Mat img3 = new Mat(Math.Max(img1.Height, img2.Height), img2.Width + img1.Width, MatType.CV_8UC3).SetTo(0);

            using (var descriptors1 = new Mat())
                using (var descriptors2 = new Mat())
                    using (var matcher = new BFMatcher(NormTypes.L2SQR))
                        using (var kaze = KAZE.Create())
                        {
                            // NOTE(review): keypoints1/keypoints2 appear to be fields of the
                            // enclosing class (assigned here via out) — confirm.
                            kaze.DetectAndCompute(img1, null, out keypoints1, descriptors1);
                            kaze.DetectAndCompute(img2, null, out keypoints2, descriptors2);

                            if (descriptors1.Width > 0 && descriptors2.Width > 0)
                            {
                                DMatch[][] matches = matcher.KnnMatch(descriptors1, descriptors2, 2);
                                // mask has one row per match; voting passes below progressively
                                // zero out rows for matches judged to be outliers.
                                using (Mat mask = new Mat(matches.Length, 1, MatType.CV_8U))
                                {
                                    mask.SetTo(Scalar.White);
                                    // CountNonZero calls are debug probes tracking how many
                                    // matches survive each filtering stage.
                                    int nonZero = Cv2.CountNonZero(mask);
                                    VoteForUniqueness(matches, mask);
                                    nonZero = Cv2.CountNonZero(mask);
                                    nonZero = VoteForSizeAndOrientation(keypoints2, keypoints1, matches, mask, 1.5f, 10);

                                    List <Point2f> obj             = new List <Point2f>();
                                    List <Point2f> scene           = new List <Point2f>();
                                    List <DMatch>  goodMatchesList = new List <DMatch>();
                                    //iterate through the mask only pulling out nonzero items because they're matches
                                    MatIndexer <byte> maskIndexer = mask.GetGenericIndexer <byte>();
                                    for (int i = 0; i < mask.Rows; i++)
                                    {
                                        if (maskIndexer[i] > 0)
                                        {
                                            obj.Add(keypoints1[matches[i][0].QueryIdx].Pt);
                                            scene.Add(keypoints2[matches[i][0].TrainIdx].Pt);
                                            goodMatchesList.Add(matches[i][0]);
                                        }
                                    }

                                    List <Point2d> objPts   = obj.ConvertAll(Point2fToPoint2d);
                                    List <Point2d> scenePts = scene.ConvertAll(Point2fToPoint2d);
                                    // a homography needs at least 4 point correspondences
                                    if (nonZero >= 4)
                                    {
                                        // RANSAC refines the mask further: it marks inliers in-place.
                                        Mat homography = Cv2.FindHomography(objPts, scenePts, HomographyMethods.Ransac, 1.5, mask);
                                        nonZero = Cv2.CountNonZero(mask);

                                        if (homography != null && homography.Width > 0)
                                        {
                                            // corners of img1, to be projected into img2's frame
                                            Point2f[] objCorners = { new Point2f(0,                 0),
                                                                     new Point2f(img1.Cols,         0),
                                                                     new Point2f(img1.Cols, img1.Rows),
                                                                     new Point2f(0,         img1.Rows) };

                                            Point2d[] sceneCorners = MyPerspectiveTransform3(objCorners, homography);

                                            //This is a good concat horizontal
                                            using (Mat left = new Mat(img3, new Rect(0, 0, img1.Width, img1.Height)))
                                                using (Mat right = new Mat(img3, new Rect(img1.Width, 0, img2.Width, img2.Height)))
                                                {
                                                    img1.CopyTo(left);
                                                    img2.CopyTo(right);

                                                    // DrawMatches takes the (RANSAC-refined) mask as a byte array
                                                    byte[] maskBytes = new byte[mask.Rows * mask.Cols];
                                                    mask.GetArray(out maskBytes);
                                                    Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, goodMatchesList, img3, Scalar.All(-1), Scalar.All(-1), maskBytes, DrawMatchesFlags.NotDrawSinglePoints);

                                                    // outline of img1 projected into img2, shifted right by img1's
                                                    // width because img2 sits on the right half of the canvas
                                                    List <List <Point> > listOfListOfPoint2D = new List <List <Point> >();
                                                    List <Point>         listOfPoint2D       = new List <Point>();
                                                    listOfPoint2D.Add(new Point(sceneCorners[0].X + img1.Cols, sceneCorners[0].Y));
                                                    listOfPoint2D.Add(new Point(sceneCorners[1].X + img1.Cols, sceneCorners[1].Y));
                                                    listOfPoint2D.Add(new Point(sceneCorners[2].X + img1.Cols, sceneCorners[2].Y));
                                                    listOfPoint2D.Add(new Point(sceneCorners[3].X + img1.Cols, sceneCorners[3].Y));
                                                    listOfListOfPoint2D.Add(listOfPoint2D);
                                                    img3.Polylines(listOfListOfPoint2D, true, Scalar.LimeGreen, 2);

                                                    //This works too
                                                    //Cv2.Line(img3, scene_corners[0] + new Point2d(img1.Cols, 0), scene_corners[1] + new Point2d(img1.Cols, 0), Scalar.LimeGreen);
                                                    //Cv2.Line(img3, scene_corners[1] + new Point2d(img1.Cols, 0), scene_corners[2] + new Point2d(img1.Cols, 0), Scalar.LimeGreen);
                                                    //Cv2.Line(img3, scene_corners[2] + new Point2d(img1.Cols, 0), scene_corners[3] + new Point2d(img1.Cols, 0), Scalar.LimeGreen);
                                                    //Cv2.Line(img3, scene_corners[3] + new Point2d(img1.Cols, 0), scene_corners[0] + new Point2d(img1.Cols, 0), Scalar.LimeGreen);
                                                }
                                        }
                                    }
                                }
                            }
                            return(img3);
                        }
        }
Exemple #30
0
        // http://docs.opencv.org/3.0-beta/modules/features2d/doc/features2d.html
        // http://docs.opencv.org/3.0-beta/modules/features2d/doc/feature_detection_and_description.html
        // http://docs.opencv.org/3.0-beta/doc/tutorials/features2d/akaze_matching/akaze_matching.html
        /// <summary>
        ///  Compare images with a feature detection algorithm (AKAZE + brute-force
        ///  Hamming matching with a nearest-neighbour ratio test).
        /// </summary>
        /// <param name="mat_image1"> 1st image (OpenCv Mat)</param>
        /// <param name="mat_image2"> 2nd image (OpenCv Mat)</param>
        /// <param name="feature_count">number of feature keypoints found</param>
        /// <param name="match_count">number of matches founds</param>
        /// <param name="view">image of the feature and good matches</param>
        /// <returns>Similarity % (#good matches/ # matches)</returns>
        private static double CompareFeatures(Mat mat_image1, Mat mat_image2, out double feature_count, out double match_count, out Bitmap view)
        {
            match_count   = 0;
            feature_count = 0;

            int nmatch  = 0;
            int ngmatch = 0;

            view = new Bitmap(1, 1);

            // stop here if one of the images does not seem to be valid
            if (mat_image1 == null || mat_image1.Empty() ||
                mat_image2 == null || mat_image2.Empty())
            {
                return 0;
            }

            try
            {
                // Detect the keypoints and generate their descriptors.
                // AKAZE produces binary (MLDB) descriptors by default, which is why
                // the matcher below uses the Hamming norm.
                // All OpenCV objects wrap native memory, so dispose them deterministically.
                using (var detector = AKAZE.Create())
                using (var descriptors1 = new MatOfFloat())
                using (var descriptors2 = new MatOfFloat())
                using (var matcher = new BFMatcher(NormTypes.Hamming))
                {
                    //var detector = BRISK.Create();
                    //var detector = ORB.Create(); // require grayscale

                    var keypoints1 = new KeyPoint[1];
                    var keypoints2 = new KeyPoint[1];
                    try
                    {
                        keypoints1 = detector.Detect(mat_image1);
                        keypoints2 = detector.Detect(mat_image2);
                        if (keypoints1 != null)
                        {
                            detector.Compute(mat_image1, ref keypoints1, descriptors1);
                            if (descriptors1 == null)
                            {
                                return 0;
                            }
                        }
                        if (keypoints2 != null)
                        {
                            detector.Compute(mat_image2, ref keypoints2, descriptors2);
                            if (descriptors2 == null)
                            {
                                return 0;
                            }
                        }
                    }
                    // best-effort: native detection failures leave the defaults in place
                    catch (System.AccessViolationException) { }
                    catch (Exception) { }

                    // Find good matches (Nearest neighbor matching ratio)
                    float nn_match_ratio = 0.95f;

                    var nn_matches = new DMatch[1][];
                    try
                    {
                        // k = 2 so the ratio test below can compare best vs second-best
                        nn_matches = matcher.KnnMatch(descriptors1, descriptors2, 2);
                    }
                    catch (System.AccessViolationException) { }
                    catch (Exception) { }

                    var good_matches = new List <DMatch>();

                    if (nn_matches != null && nn_matches.Length > 0)
                    {
                        for (int i = 0; i < nn_matches.GetLength(0); i++)
                        {
                            // queries with fewer than 2 neighbours cannot be ratio-tested
                            if (nn_matches[i].Length >= 2)
                            {
                                DMatch first = nn_matches[i][0];
                                float  dist1 = nn_matches[i][0].Distance;
                                float  dist2 = nn_matches[i][1].Distance;

                                // keep a match only when the best neighbour is clearly
                                // better than the runner-up (Lowe's ratio test)
                                if (dist1 < nn_match_ratio * dist2)
                                {
                                    good_matches.Add(first);
                                }
                            }
                        }
                    }

                    // Count matches & features
                    feature_count = keypoints1.Length + keypoints2.Length;
                    nmatch        = nn_matches.Length;
                    match_count   = nmatch;
                    ngmatch       = good_matches.Count;

                    // show images + good matches
                    if (keypoints1.Length > 0 && keypoints2.Length > 0)
                    {
                        using (var mview = new Mat())
                        {
                            Cv2.DrawMatches(mat_image1, keypoints1, mat_image2, keypoints2, good_matches.ToArray(), mview);
                            view = BitmapConverter.ToBitmap(mview);
                        }
                    }
                    else
                    {
                        // no matches to draw
                        view = new Bitmap(1, 1);
                    }
                }
            }
            catch (System.AccessViolationException e)
            {
                Console.Error.WriteLine("Access Error  => CompareFeatures : \n{0}", e.Message);
            }
            catch (Exception)
            {
                // best-effort: any failure falls through to the guards below
                // Console.Error.WriteLine("Error  => CompareFeatures : \n{0}", e.Message);
            }

            // similarity = 0 when there was no feature or no match
            if (feature_count <= 0)
            {
                return 0;
            }
            if (nmatch <= 0)
            {
                return 0;
            }

            // similarity = ratio of good matches / # matches
            var similarity = 100.0 * ngmatch / nmatch;

            return similarity;
        }