Example No. 1
        static (KeyPoint[], Mat) FeatureCommand(Mat source)
        {
            // Feature detection algorithm
            var feature = KAZE.Create();

            var magnification = 2;

            using var _ex = source.Resize(new Size(source.Width * magnification, source.Height * magnification));
            // Gamma correction
            var gamma = 1.8;

            byte[] _gammaLut = new byte[256];
            for (int i = 0; i < _gammaLut.Length; i++)
            {
                _gammaLut[i] = (byte)(255d * Math.Pow(i / 255d, 1d / gamma));
            }
            using Mat _temp_gammaImage = new Mat();
            Cv2.LUT(_ex, _gammaLut, _temp_gammaImage);
            // Compute key points and descriptors
            KeyPoint[] keyPoints;              // key points
            Mat        descriptor = new Mat(); // descriptors

            feature.DetectAndCompute(_temp_gammaImage, null, out keyPoints, descriptor);
            //var _featureImage = new Mat();
            //Cv2.DrawKeypoints(_temp_gammaImage, _keypoint, _featureImage);

            return(keyPoints, descriptor);
        }
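A minimal usage sketch for FeatureCommand (not part of the original example): it loads two images, computes KAZE features with the method above, and matches the descriptors with a brute-force matcher. The file names are placeholders.

        static void MatchExample()
        {
            using var img1 = Cv2.ImRead("scene1.png", ImreadModes.Grayscale);
            using var img2 = Cv2.ImRead("scene2.png", ImreadModes.Grayscale);

            var (keyPoints1, descriptors1) = FeatureCommand(img1);
            var (keyPoints2, descriptors2) = FeatureCommand(img2);

            // KAZE produces floating-point descriptors, so an L2-based matcher is appropriate.
            using var matcher = new BFMatcher(NormTypes.L2);
            DMatch[] matches = matcher.Match(descriptors1, descriptors2);

            using var result = new Mat();
            Cv2.DrawMatches(img1, keyPoints1, img2, keyPoints2, matches, result);
            Cv2.ImShow("Matches", result);
            Cv2.WaitKey();
        }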
Example No. 2
        public override void RunTest()
        {
            var gray  = new Mat(ImagePath.Lenna, ImreadModes.Grayscale);
            var kaze  = KAZE.Create();
            var akaze = AKAZE.Create();

            var kazeDescriptors  = new Mat();
            var akazeDescriptors = new Mat();

            KeyPoint[] kazeKeyPoints = null, akazeKeyPoints = null;
            var        kazeTime      = MeasureTime(() =>
                                                   kaze.DetectAndCompute(gray, null, out kazeKeyPoints, kazeDescriptors));
            var akazeTime = MeasureTime(() =>
                                        akaze.DetectAndCompute(gray, null, out akazeKeyPoints, akazeDescriptors));

            var dstKaze  = new Mat();
            var dstAkaze = new Mat();

            Cv2.DrawKeypoints(gray, kazeKeyPoints, dstKaze);
            Cv2.DrawKeypoints(gray, akazeKeyPoints, dstAkaze);

            using (new Window(String.Format("KAZE [{0:F2}ms]", kazeTime.TotalMilliseconds), dstKaze))
                using (new Window(String.Format("AKAZE [{0:F2}ms]", akazeTime.TotalMilliseconds), dstAkaze))
                {
                    Cv2.WaitKey();
                }
        }
Example No. 3
        public void New3()
        {
            var descriptorExtractor = KAZE.Create();
            var descriptorMatcher   = new BFMatcher();

            new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher);
        }
Example No. 4
        public void New5()
        {
            using var ip = new LinearIndexParams();
            using var sp = new SearchParams();
            using (var descriptorExtractor = KAZE.Create())
                using (var descriptorMatcher = new FlannBasedMatcher(ip, sp))
                    using (new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher)) { }
        }
Example No. 5
        public void New5()
        {
            var descriptorExtractor = KAZE.Create();
            LinearIndexParams ip    = new LinearIndexParams();
            SearchParams      sp    = new SearchParams();
            var descriptorMatcher   = new FlannBasedMatcher(ip, sp);

            new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher);
        }
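The construction-only examples above stop at creating the BOWImgDescriptorExtractor. As a rough sketch of how it would typically be wired up, the vocabulary size of 100 and the BOWKMeansTrainer usage below are illustrative assumptions, not part of the original examples.

        public void BuildAndAttachVocabulary(Mat trainingDescriptors)
        {
            // Cluster KAZE descriptors (CV_32F) into a visual-word vocabulary.
            using var bowTrainer = new BOWKMeansTrainer(100);
            bowTrainer.Add(trainingDescriptors);
            using var vocabulary = bowTrainer.Cluster();

            using var ip = new LinearIndexParams();
            using var sp = new SearchParams();
            using var descriptorExtractor = KAZE.Create();
            using var descriptorMatcher   = new FlannBasedMatcher(ip, sp);
            using var bowExtractor        = new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher);

            // The extractor can now quantize per-image descriptors against the vocabulary.
            bowExtractor.SetVocabulary(vocabulary);
        }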
Example No. 6
        public void GetKeypoints(Mat gray)
        {
            var kaze            = KAZE.Create();
            var kazeDescriptors = new Mat();

            kaze.DetectAndCompute(gray, null, out var kazeKeyPoints, kazeDescriptors);

            var dstKaze = new Mat();

            Cv2.DrawKeypoints(gray, kazeKeyPoints, dstKaze);
        }
Example No. 7
        private static void Feature()
        {
            Mat  img  = new Mat("data/lenna.png", ImreadModes.Grayscale);
            KAZE kaze = KAZE.Create();


            KeyPoint[] keyPoints;
            Mat        descriptors = new Mat();

            kaze.DetectAndCompute(img, null, out keyPoints, descriptors);

            Mat dst = new Mat();

            Cv2.DrawKeypoints(img, keyPoints, dst);
            Window.ShowImages(dst);
        }
Example No. 8
        public Mat Run(Mat img1, Mat img2)
        {
            Mat img3 = new Mat(Math.Max(img1.Height, img2.Height), img2.Width + img1.Width, MatType.CV_8UC3).SetTo(0);

            using (var descriptors1 = new Mat())
                using (var descriptors2 = new Mat())
                    using (var matcher = new BFMatcher(NormTypes.L2SQR))
                        using (var kaze = KAZE.Create())
                        {
                            kaze.DetectAndCompute(img1, null, out var keypoints1, descriptors1);
                            kaze.DetectAndCompute(img2, null, out var keypoints2, descriptors2);

                            if (descriptors1.Width > 0 && descriptors2.Width > 0)
                            {
                                DMatch[][] matches = matcher.KnnMatch(descriptors1, descriptors2, 2);
                                using (Mat mask = new Mat(matches.Length, 1, MatType.CV_8U))
                                {
                                    mask.SetTo(Scalar.White);
                                    int nonZero = Cv2.CountNonZero(mask);
                                    VoteForUniqueness(matches, mask);
                                    nonZero = Cv2.CountNonZero(mask);
                                    nonZero = VoteForSizeAndOrientation(keypoints2, keypoints1, matches, mask, 1.5f, 10);

                                    List <Point2f> obj             = new List <Point2f>();
                                    List <Point2f> scene           = new List <Point2f>();
                                    List <DMatch>  goodMatchesList = new List <DMatch>();
                                    //iterate through the mask only pulling out nonzero items because they're matches
                                    MatIndexer <byte> maskIndexer = mask.GetGenericIndexer <byte>();
                                    for (int i = 0; i < mask.Rows; i++)
                                    {
                                        if (maskIndexer[i] > 0)
                                        {
                                            obj.Add(keypoints1[matches[i][0].QueryIdx].Pt);
                                            scene.Add(keypoints2[matches[i][0].TrainIdx].Pt);
                                            goodMatchesList.Add(matches[i][0]);
                                        }
                                    }

                                    List <Point2d> objPts   = obj.ConvertAll(Point2fToPoint2d);
                                    List <Point2d> scenePts = scene.ConvertAll(Point2fToPoint2d);
                                    if (nonZero >= 4)
                                    {
                                        Mat homography = Cv2.FindHomography(objPts, scenePts, HomographyMethods.Ransac, 1.5, mask);
                                        nonZero = Cv2.CountNonZero(mask);

                                        if (homography != null && homography.Width > 0)
                                        {
                                            Point2f[] objCorners = { new Point2f(0,                 0),
                                                                     new Point2f(img1.Cols,         0),
                                                                     new Point2f(img1.Cols, img1.Rows),
                                                                     new Point2f(0,         img1.Rows) };

                                            Point2d[] sceneCorners = MyPerspectiveTransform3(objCorners, homography);

                                            //This is a good concat horizontal
                                            using (Mat left = new Mat(img3, new Rect(0, 0, img1.Width, img1.Height)))
                                                using (Mat right = new Mat(img3, new Rect(img1.Width, 0, img2.Width, img2.Height)))
                                                {
                                                    img1.CopyTo(left);
                                                    img2.CopyTo(right);

                                                    byte[] maskBytes = new byte[mask.Rows * mask.Cols];
                                                    mask.GetArray(out maskBytes);
                                                    Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, goodMatchesList, img3, Scalar.All(-1), Scalar.All(-1), maskBytes, DrawMatchesFlags.NotDrawSinglePoints);

                                                    List <List <Point> > listOfListOfPoint2D = new List <List <Point> >();
                                                    List <Point>         listOfPoint2D       = new List <Point>();
                                                    listOfPoint2D.Add(new Point(sceneCorners[0].X + img1.Cols, sceneCorners[0].Y));
                                                    listOfPoint2D.Add(new Point(sceneCorners[1].X + img1.Cols, sceneCorners[1].Y));
                                                    listOfPoint2D.Add(new Point(sceneCorners[2].X + img1.Cols, sceneCorners[2].Y));
                                                    listOfPoint2D.Add(new Point(sceneCorners[3].X + img1.Cols, sceneCorners[3].Y));
                                                    listOfListOfPoint2D.Add(listOfPoint2D);
                                                    img3.Polylines(listOfListOfPoint2D, true, Scalar.LimeGreen, 2);

                                                    //This works too
                                                    //Cv2.Line(img3, scene_corners[0] + new Point2d(img1.Cols, 0), scene_corners[1] + new Point2d(img1.Cols, 0), Scalar.LimeGreen);
                                                    //Cv2.Line(img3, scene_corners[1] + new Point2d(img1.Cols, 0), scene_corners[2] + new Point2d(img1.Cols, 0), Scalar.LimeGreen);
                                                    //Cv2.Line(img3, scene_corners[2] + new Point2d(img1.Cols, 0), scene_corners[3] + new Point2d(img1.Cols, 0), Scalar.LimeGreen);
                                                    //Cv2.Line(img3, scene_corners[3] + new Point2d(img1.Cols, 0), scene_corners[0] + new Point2d(img1.Cols, 0), Scalar.LimeGreen);
                                                }
                                        }
                                    }
                                }
                            }
                            return(img3);
                        }
        }
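Run also calls several helpers that are not shown in this snippet (VoteForUniqueness, VoteForSizeAndOrientation, MyPerspectiveTransform3, Point2fToPoint2d). Minimal sketches of three of them, reconstructed from their call sites; these are assumptions, and the original implementations may differ.

        // Hypothetical reconstructions, not the original implementations.
        static Point2d Point2fToPoint2d(Point2f p) => new Point2d(p.X, p.Y);

        static Point2d[] MyPerspectiveTransform3(Point2f[] corners, Mat homography)
        {
            // Project the object corners into the scene using the estimated homography.
            Point2f[] projected = Cv2.PerspectiveTransform(corners, homography);
            return Array.ConvertAll(projected, p => new Point2d(p.X, p.Y));
        }

        static void VoteForUniqueness(DMatch[][] matches, Mat mask, float uniquenessThreshold = 0.80f)
        {
            // Lowe-style ratio test: clear the mask row when the best match is not
            // clearly better than the second-best match.
            var indexer = mask.GetGenericIndexer<byte>();
            for (int i = 0; i < matches.Length; i++)
            {
                if (matches[i].Length < 2 ||
                    matches[i][0].Distance > uniquenessThreshold * matches[i][1].Distance)
                {
                    indexer[i] = 0;
                }
            }
        }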
Example No. 9
        public void New3()
        {
            using (var descriptorExtractor = KAZE.Create())
                using (var descriptorMatcher = new BFMatcher())
                    using (new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher)) { }
        }
Example No. 10
        // img1: test image; img2: reference image
        public float MatchTemplate(Mat img1, Mat img2, bool ishowImageMatchTemplate, string s = "Match")
        {
            float matchRate = 0.0f;

            using (var descriptors1 = new Mat())
                using (var descriptors2 = new Mat())
                    using (var matcher = new BFMatcher(NormTypes.L2SQR))
                        using (var kaze = KAZE.Create())
                        {
                            KeyPoint[] keypoints1, keypoints2;
                            kaze.DetectAndCompute(img1, null, out keypoints1, descriptors1);
                            kaze.DetectAndCompute(img2, null, out keypoints2, descriptors2);

                            DMatch[][] matches = matcher.KnnMatch(descriptors1, descriptors2, 2);
                            using (Mat mask = new Mat(matches.Length, 1, MatType.CV_8U))
                            {
                                mask.SetTo(new Scalar(255));
                                int nonZero = Cv2.CountNonZero(mask);
                                VoteForUniqueness(matches, mask);
                                nonZero = Cv2.CountNonZero(mask);
                                nonZero = VoteForSizeAndOrientation(keypoints2, keypoints1, matches, mask, 1.5f, 20);

                                List <Point2f> obj             = new List <Point2f>();
                                List <Point2f> scene           = new List <Point2f>();
                                List <DMatch>  goodMatchesList = new List <DMatch>();
                                //iterate through the mask only pulling out nonzero items because they're matches
                                for (int i = 0; i < mask.Rows; i++)
                                {
                                    MatIndexer <byte> maskIndexer = mask.GetGenericIndexer <byte>();
                                    if (maskIndexer[i] > 0)
                                    {
                                        obj.Add(keypoints1[matches[i][0].QueryIdx].Pt);
                                        scene.Add(keypoints2[matches[i][0].TrainIdx].Pt);
                                        goodMatchesList.Add(matches[i][0]);
                                    }
                                }

                                List <Point2d> objPts   = obj.ConvertAll(Point2fToPoint2d);
                                List <Point2d> scenePts = scene.ConvertAll(Point2fToPoint2d);
                                if (nonZero >= 4)
                                {
                                    Mat homography = Cv2.FindHomography(objPts, scenePts, HomographyMethods.Ransac, 1.5, mask);
                                    nonZero = Cv2.CountNonZero(mask);

                                    // Calculate the match rate from the number of surviving (inlier) matches;
                                    // 1 - (n - k)/(n + k) simplifies to 2k/(n + k), where n = keypoints2.Count() and k = nonZero.
                                    //matchRate = (float)nonZero / keypoints2.Count();
                                    matchRate = 1 - (float)(keypoints2.Count() - nonZero) / (keypoints2.Count() + nonZero);

                                    if (homography != null && ishowImageMatchTemplate == true)
                                    {
                                        Point2f[] objCorners = { new Point2f(0,                 0),
                                                                 new Point2f(img1.Cols,         0),
                                                                 new Point2f(img1.Cols, img1.Rows),
                                                                 new Point2f(0,         img1.Rows) };

                                        Point2d[] sceneCorners = MyPerspectiveTransform3(objCorners, homography);

                                        //This is a good concat horizontal
                                        using (Mat img3 = new Mat(Math.Max(img1.Height, img2.Height), img2.Width + img1.Width, MatType.CV_8UC3))
                                            using (Mat left = new Mat(img3, new Rect(0, 0, img1.Width, img1.Height)))
                                                using (Mat right = new Mat(img3, new Rect(img1.Width, 0, img2.Width, img2.Height)))
                                                {
                                                    img1.CopyTo(left);
                                                    img2.CopyTo(right);

                                                    byte[] maskBytes = new byte[mask.Rows * mask.Cols];
                                                    mask.GetArray(0, 0, maskBytes);
                                                    Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, goodMatchesList, img3, Scalar.All(-1), Scalar.All(-1), maskBytes, DrawMatchesFlags.NotDrawSinglePoints);


                                                    //List<List<Point>> listOfListOfPoint2D = new List<List<Point>>();
                                                    //List<Point> listOfPoint2D = new List<Point>();
                                                    //listOfPoint2D.Add(new Point(sceneCorners[0].X + img1.Cols, sceneCorners[0].Y));
                                                    //listOfPoint2D.Add(new Point(sceneCorners[1].X + img1.Cols, sceneCorners[1].Y));
                                                    //listOfPoint2D.Add(new Point(sceneCorners[2].X + img1.Cols, sceneCorners[2].Y));
                                                    //listOfPoint2D.Add(new Point(sceneCorners[3].X + img1.Cols, sceneCorners[3].Y));
                                                    //listOfListOfPoint2D.Add(listOfPoint2D);
                                                    //img3.Polylines(listOfListOfPoint2D, true, Scalar.LimeGreen, 2);


                                                    // Size takes (width, height); show the composite at half size.
                                                    Cv2.ImShow(s, img3.Resize(new Size(img3.Cols / 2, img3.Rows / 2)));
                                                    Cv2.WaitKey(0);
                                                    Cv2.DestroyWindow(s);

                                                    //Window.ShowImages(img3.Resize(new Size(img3.Rows / 2, img3.Cols / 2)));
                                                    //Window.WaitKey(0);
                                                    //Window.DestroyAllWindows();
                                                }
                                    }
                                }
                            }
                        }

            return(matchRate);
        }
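A possible call site for MatchTemplate; the containing class name FeatureMatcher and the file names are placeholders, since the snippet does not show them.

        var matcher = new FeatureMatcher();   // placeholder for the class that declares MatchTemplate
        using var testImage = Cv2.ImRead("test.png");
        using var refImage  = Cv2.ImRead("ref.png");
        float rate = matcher.MatchTemplate(testImage, refImage, ishowImageMatchTemplate: false);
        Console.WriteLine($"Match rate: {rate:F3}");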