public void MathingWithLshIndexParams()
        {
            // ORB yields binary (CV_8UC1) descriptors, which is exactly what an
            // LSH-backed FLANN index expects — so no CV_32F conversion is wanted here.
            using var left  = Image("tsukuba_left.png", ImreadModes.Grayscale);
            using var right = Image("tsukuba_right.png", ImreadModes.Grayscale);

            using var orb   = ORB.Create(500);
            using var desc1 = new Mat();
            using var desc2 = new Mat();
            orb.DetectAndCompute(left, null, out _, desc1);
            orb.DetectAndCompute(right, null, out _, desc2);

            // Sanity-check the descriptor type before handing it to LSH.
            Assert.Equal(MatType.CV_8UC1, desc1.Type());
            Assert.Equal(MatType.CV_8UC1, desc2.Type());

            // LshIndexParams(tableNumber: 12, keySize: 20, multiProbeLevel: 2)
            using var indexParams = new LshIndexParams(12, 20, 2);
            using var matcher     = new FlannBasedMatcher(indexParams);

            DMatch[] matches = matcher.Match(desc1, desc2);

            Assert.NotEmpty(matches);
        }
Ejemplo n.º 2
0
        public V2()
        {
            // ProcessFunction expects an anonymous payload carrying two
            // descriptor Mats, two keypoint arrays and two grayscale Mats,
            // and returns the two rendered match views.
            ProcessFunction = (object obj) =>
            {
                dynamic prt          = obj;  // fixed: `obj as dynamic` was redundant
                var     descriptors1 = prt.descriptors1;
                var     descriptors2 = prt.descriptors2;
                var     keypoints1   = prt.keypoints1;
                var     keypoints2   = prt.keypoints2;
                Mat     gray1        = prt.gray1;
                Mat     gray2        = prt.gray2;

                // Match descriptor vectors.
                // Fixed: both matchers wrap native handles and were never disposed.
                using var bfMatcher    = new BFMatcher(NormTypes.L2, false);
                using var flannMatcher = new FlannBasedMatcher();
                DMatch[] bfMatches    = bfMatcher.Match(descriptors1, descriptors2);
                DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);

                // Draw matches; the views are returned, so the caller owns
                // (and must dispose) them.
                var bfView = new Mat();
                Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, bfView);
                var flannView = new Mat();
                Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, flannView);
                return(new { bfView, flannView });
            };

            PostProcessAction = () =>
            {
                Console.WriteLine("V2 terminou de processar!! Tempo gasto: {0}" + Environment.NewLine, ElapsedTime);
            };
        }
Ejemplo n.º 3
0
 /// <summary>
 /// Estimates the homography mapping mSecondary onto mMain by matching
 /// SIFT features with a FLANN-based matcher and running RANSAC.
 /// </summary>
 /// <param name="mMain">Reference image.</param>
 /// <param name="mSecondary">Image to be warped onto the reference.</param>
 /// <returns>The 3x3 homography matrix (caller owns the Mat).</returns>
 public static Mat GetHomography(Mat mMain, Mat mSecondary)
 {
     KeyPoint[] keypoints  = null;
     KeyPoint[] keypoints2 = null;
     using (SIFT sIFT = SIFT.Create(1000))
     using (Mat mat = new Mat())
     using (Mat mat2 = new Mat())
     using (FlannBasedMatcher flannBasedMatcher = new FlannBasedMatcher())  // fixed: was never disposed
     {
         // Fixed: the `new Mat()` masks previously passed here leaked on every
         // call; a null mask means "no mask" and allocates nothing.
         sIFT.DetectAndCompute(mMain, null, out keypoints, mat);
         sIFT.DetectAndCompute(mSecondary, null, out keypoints2, mat2);

         // Fixed: removed the dead `new DMatch[0]` initializer that was
         // immediately overwritten by Match().
         DMatch[] array = flannBasedMatcher.Match(mat, mat2);

         // Collect the matched point pairs from both images.
         List <Point2f> list  = new List <Point2f>();
         List <Point2f> list2 = new List <Point2f>();
         for (int i = 0; i < array.Length; i++)
         {
             list.Add(keypoints[array[i].QueryIdx].Pt);
             list2.Add(keypoints2[array[i].TrainIdx].Pt);
         }

         // Secondary -> main, robust to outlier matches via RANSAC.
         return(Cv2.FindHomography(InputArray.Create(list2), InputArray.Create(list), HomographyMethods.Ransac));
     }
 }
Ejemplo n.º 4
0
        private void MatchBySift(Mat src1, Mat src2)
        {
            // Work on grayscale copies of both inputs.
            using var grayA = new Mat();
            using var grayB = new Mat();
            Cv2.CvtColor(src1, grayA, ColorConversionCodes.BGR2GRAY);
            Cv2.CvtColor(src2, grayB, ColorConversionCodes.BGR2GRAY);

            // SIFT keypoints and float descriptors for each image.
            using var detector = SIFT.Create();
            using var descA = new Mat<float>();
            using var descB = new Mat<float>();
            detector.DetectAndCompute(grayA, null, out var kpA, descA);
            detector.DetectAndCompute(grayB, null, out var kpB, descB);

            // Run both a brute-force and a FLANN-based matcher for comparison.
            using var brute = new BFMatcher(NormTypes.L2, false);
            using var flann = new FlannBasedMatcher();
            DMatch[] bruteMatches = brute.Match(descA, descB);
            DMatch[] flannMatches = flann.Match(descA, descB);

            // Render each match set into its own view.
            using var bruteView = new Mat();
            Cv2.DrawMatches(grayA, kpA, grayB, kpB, bruteMatches, bruteView);
            using var flannView = new Mat();
            Cv2.DrawMatches(grayA, kpA, grayB, kpB, flannMatches, flannView);

            // Display both views until a key is pressed.
            using (new Window("SIFT matching (by BFMather)", bruteView))
                using (new Window("SIFT matching (by FlannBasedMatcher)", flannView))
                {
                    Cv2.WaitKey();
                }
        }
Ejemplo n.º 5
0
        /// <summary>
        /// Detects blobs in a sample image and displays them with rich keypoints.
        /// </summary>
        public static void Run()
        {
            string imagePath = "./images/geo.jpg";

            // Configure the blob detector's thresholds and shape filters.
            SimpleBlobDetector.Params blobDetectorParams = new SimpleBlobDetector.Params();
            blobDetectorParams.MinThreshold = 10;
            blobDetectorParams.MaxThreshold = 100;

            // Fixed: a FlannBasedMatcher was created here but never used (blob
            // detection involves no descriptor matching) and never disposed;
            // it has been removed.

            blobDetectorParams.FilterByArea = true;
            blobDetectorParams.MinArea      = 150;

            blobDetectorParams.FilterByCircularity = true;
            blobDetectorParams.MinCircularity      = 0.3f;

            blobDetectorParams.FilterByConvexity = true;
            blobDetectorParams.MinConvexity      = 0.2f;

            blobDetectorParams.FilterByInertia = true;
            blobDetectorParams.MinInertiaRatio = 0.1f;

            // Fixed: img, detector and output wrap native memory and were
            // never disposed.
            using Mat img      = Cv2.ImRead(imagePath);
            using var detector = SimpleBlobDetector.Create(blobDetectorParams);
            using Mat output   = new Mat();

            var keypoints = detector.Detect(img);

            Cv2.DrawKeypoints(img, keypoints, output, new Scalar(0, 0, 255), DrawMatchesFlags.DrawRichKeypoints);

            Cv2.ImShow("Output", output);
            Cv2.WaitKey(0);
        }
Ejemplo n.º 6
0
        public void Mathing()
        {
            using (var img1 = Image("tsukuba_left.png", ImreadModes.GrayScale))
                using (var img2 = Image("tsukuba_right.png", ImreadModes.GrayScale))
                    using (var orb = ORB.Create(500))
                        using (var descriptor1 = new Mat())
                            using (var descriptor2 = new Mat())
                            {
                                KeyPoint[] keyPoints1, keyPoints2;
                                orb.DetectAndCompute(img1, null, out keyPoints1, descriptor1);
                                orb.DetectAndCompute(img2, null, out keyPoints2, descriptor2);

                                // ORB emits binary descriptors, but the default
                                // FLANN (KD-tree) index needs CV_32F, so convert.
                                Assert.AreEqual(MatType.CV_8UC1, descriptor1.Type());
                                Assert.AreEqual(MatType.CV_8UC1, descriptor2.Type());
                                descriptor1.ConvertTo(descriptor1, MatType.CV_32F);
                                descriptor2.ConvertTo(descriptor2, MatType.CV_32F);

                                // Fixed: the matcher wraps a native handle and was
                                // never disposed.
                                using (var matcher = new FlannBasedMatcher())
                                {
                                    DMatch[] matches = matcher.Match(descriptor1, descriptor2);
                                }
                            }
        }
Ejemplo n.º 7
0
        private void MatchBySurf(Mat src1, Mat src2)
        {
            // Convert both inputs to grayscale working copies.
            using var grayA = new Mat();
            using var grayB = new Mat();
            Cv2.CvtColor(src1, grayA, ColorConversionCodes.BGR2GRAY);
            Cv2.CvtColor(src2, grayB, ColorConversionCodes.BGR2GRAY);

            // SURF with hessian threshold 200, 4 octaves, 2 layers, extended descriptors.
            using var detector = SURF.Create(200, 4, 2, true);

            // Keypoints and float descriptors for each image.
            using var descA = new Mat<float>();
            using var descB = new Mat<float>();
            detector.DetectAndCompute(grayA, null, out var kpA, descA);
            detector.DetectAndCompute(grayB, null, out var kpB, descB);

            // Run both a brute-force and a FLANN-based matcher for comparison.
            using var brute = new BFMatcher(NormTypes.L2, false);
            using var flann = new FlannBasedMatcher();
            DMatch[] bruteMatches = brute.Match(descA, descB);
            DMatch[] flannMatches = flann.Match(descA, descB);

            // Render each match set into its own view.
            using var bruteView = new Mat();
            Cv2.DrawMatches(grayA, kpA, grayB, kpB, bruteMatches, bruteView);
            using var flannView = new Mat();
            Cv2.DrawMatches(grayA, kpA, grayB, kpB, flannMatches, flannView);

            // Display both views until a key is pressed.
            using (new Window("SURF matching (by BFMather)", WindowMode.AutoSize, bruteView))
                using (new Window("SURF matching (by FlannBasedMatcher)", WindowMode.AutoSize, flannView))
                {
                    Cv2.WaitKey();
                }
        }
Ejemplo n.º 8
0
        /// <summary>
        /// Extracts KAZE features from both images, matches them with a
        /// FLANN (linear index) matcher, filters by uniqueness / size /
        /// orientation and, when enough matches survive, estimates a homography.
        /// </summary>
        /// <param name="modelImage">Image of the object to find.</param>
        /// <param name="observedImage">Scene image to search in.</param>
        /// <param name="modelKeyPoints">Receives the model keypoints.</param>
        /// <param name="observedKeyPoints">Receives the scene keypoints.</param>
        /// <param name="matches">Receives the k-NN matches.</param>
        /// <param name="mask">Receives the per-match inlier mask.</param>
        /// <param name="homography">Receives the homography, or null if none found.</param>
        public static void FindMatch(
            Mat modelImage,
            Mat observedImage,
            out VectorOfKeyPoint modelKeyPoints,
            out VectorOfKeyPoint observedKeyPoints,
            VectorOfVectorOfDMatch matches,
            out Mat mask,
            out Mat homography)
        {
            // NOTE(review): k = 9 neighbours is unusual — VoteForUniqueness is
            // normally used with k = 2; confirm this is intentional.
            int    k = 9;
            double uniquenessThreshold = 0.80;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
            using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
            using (KAZE featureDetector = new KAZE())        // fixed: was never disposed
            using (Mat modelDescriptors = new Mat())         // fixed: was never disposed
            using (Mat observedDescriptors = new Mat())      // fixed: was never disposed
            {
                // Extract features from the object (model) image.
                featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                // Extract features from the observed image.
                // (Fixed: removed a Stopwatch that was started/stopped but whose
                // elapsed time was never read.)
                featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                {
                    matcher.Add(modelDescriptors);

                    // k-NN match, then keep only sufficiently unique matches.
                    matcher.KnnMatch(observedDescriptors, matches, k, null);
                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                    mask.SetTo(new MCvScalar(255));
                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                    // A homography needs at least 4 point correspondences.
                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                    if (nonZeroCount >= 4)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                   matches, mask, 1.5, 20);
                        if (nonZeroCount >= 4)
                        {
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                  observedKeyPoints, matches, mask, 2);
                        }
                    }
                }
            }
        }
Ejemplo n.º 9
0
        /// <summary>
        /// SIFT-matches two grayscale images, draws the matches into dst2 and
        /// stores the keypoints in the kp1/kp2 fields.
        /// </summary>
        /// <param name="gray1">First grayscale image.</param>
        /// <param name="gray2">Second grayscale image.</param>
        /// <param name="dst2">Destination image the matches are drawn into.</param>
        /// <param name="useBFMatcher">True for brute-force, false for FLANN.</param>
        /// <param name="pointsToMatch">Max number of SIFT features to retain.</param>
        public void Run(Mat gray1, Mat gray2, Mat dst2, bool useBFMatcher, int pointsToMatch)
        {
            // Fixed: the SIFT detector, descriptor Mats and matchers wrap
            // native memory and were never disposed.
            using var sift = SIFT.Create(pointsToMatch);

            using var descriptors1 = new Mat();
            using var descriptors2 = new Mat();
            sift.DetectAndCompute(gray1, null, out var keypoints1, descriptors1);
            sift.DetectAndCompute(gray2, null, out var keypoints2, descriptors2);

            if (useBFMatcher)
            {
                using var bfMatcher = new BFMatcher(NormTypes.L2, false);
                DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);
                Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, dst2);
            }
            else
            {
                using var flannMatcher = new FlannBasedMatcher();
                DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);
                Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, dst2);
            }

            // Expose the detected keypoints to the rest of the class.
            kp1 = keypoints1;
            kp2 = keypoints2;
        }
Ejemplo n.º 10
0
        private void MatchBySurf(Mat src1, Mat src2)
        {
            // Fixed: none of the Mats, the SURF detector or the matchers were
            // disposed; all of them wrap native memory.
            using (var gray1 = new Mat())
            using (var gray2 = new Mat())
            using (var surf = new SURF(500, 4, 2, true))
            using (var descriptors1 = new MatOfFloat())
            using (var descriptors2 = new MatOfFloat())
            using (var bfMatcher = new BFMatcher(NormType.L2, false))
            using (var flannMatcher = new FlannBasedMatcher())
            using (var bfView = new Mat())
            using (var flannView = new Mat())
            {
                Cv2.CvtColor(src1, gray1, ColorConversion.BgrToGray);
                Cv2.CvtColor(src2, gray2, ColorConversion.BgrToGray);

                // Detect the keypoints and generate their descriptors using SURF
                KeyPoint[] keypoints1, keypoints2;
                surf.Run(gray1, null, out keypoints1, descriptors1);
                surf.Run(gray2, null, out keypoints2, descriptors2);

                // Match descriptor vectors with both matchers for comparison
                DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);
                DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);

                // Draw matches
                Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, bfView);
                Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, flannView);

                // Display both views until a key is pressed
                using (new Window("SURF matching (by BFMather)", WindowMode.AutoSize, bfView))
                using (new Window("SURF matching (by FlannBasedMatcher)", WindowMode.AutoSize, flannView))
                {
                    Cv2.WaitKey();
                }
            }
        }
        // Initialises the matcher field with a FLANN matcher backed by a
        // linear (brute-force) index and default search parameters.
        private void InitMatcher()
        {
            matcher = new FlannBasedMatcher(new LinearIndexParams(), new SearchParams());
        }
Ejemplo n.º 12
0
        /// <summary>
        /// SURF-matches two grayscale images and draws the matches into dst.
        /// </summary>
        /// <param name="gray1">First grayscale image.</param>
        /// <param name="gray2">Second grayscale image.</param>
        /// <param name="dst">Destination image the matches are drawn into.</param>
        /// <param name="hessianThreshold">SURF hessian keypoint threshold.</param>
        /// <param name="useBFMatcher">True for brute-force, false for FLANN.</param>
        public void Run(Mat gray1, Mat gray2, Mat dst, int hessianThreshold, bool useBFMatcher)
        {
            // Fixed: the detector, descriptor Mats and matchers were never disposed.
            using var surf = SURF.Create(hessianThreshold, 4, 2, true);

            using var descriptors1 = new Mat();
            using var descriptors2 = new Mat();
            surf.DetectAndCompute(gray1, null, out var keypoints1, descriptors1);
            surf.DetectAndCompute(gray2, null, out var keypoints2, descriptors2);

            // Occasionally there is nothing to match! Fixed: the two branches
            // used different emptiness checks (Rows vs Width); an empty
            // descriptor Mat has both equal to 0, so test Rows uniformly.
            if (descriptors1.Rows <= 0 || descriptors2.Rows <= 0)
                return;

            if (useBFMatcher)
            {
                using var bfMatcher = new BFMatcher(NormTypes.L2, false);
                DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);
                Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, dst);
            }
            else
            {
                using var flannMatcher = new FlannBasedMatcher();
                DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);
                Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, dst);
            }
        }
Ejemplo n.º 13
0
 public void New5()
 {
     // Build a BOW extractor from a KAZE descriptor extractor and a FLANN
     // matcher backed by a linear index; everything is disposed on exit
     // (using-var declarations dispose in reverse declaration order).
     using var ip = new LinearIndexParams();
     using var sp = new SearchParams();
     using var descriptorExtractor = KAZE.Create();
     using var descriptorMatcher = new FlannBasedMatcher(ip, sp);
     using var bowExtractor = new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher);
 }
        public void New5()
        {
            // Fixed: every object below wraps a native resource and none of
            // them was disposed.
            using var descriptorExtractor = KAZE.Create();
            using var ip = new LinearIndexParams();
            using var sp = new SearchParams();
            using var descriptorMatcher = new FlannBasedMatcher(ip, sp);

            using var bowExtractor = new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher);
        }
 /// <summary>
 /// Creates a traffic sign from the supplied image.
 /// </summary>
 /// <param name="Image"> image of the sign </param>
 public TrafficSign(Image <Bgr, byte> Image)
 {
     ImageOriginal = Image;

     // Area interpolation when shrinking, cubic when enlarging.
     Emgu.CV.CvEnum.Inter inter;
     if (Image.Height > SignHeight)
     {
         inter = Emgu.CV.CvEnum.Inter.Area;
     }
     else
     {
         inter = Emgu.CV.CvEnum.Inter.Cubic;
     }

     // Grayscale copy, resized to SignHeight while preserving aspect ratio.
     ImageGray = Image.Convert <Gray, byte>().Resize(int.MaxValue, SignHeight, inter, true);

     // The sign starts out unclassified.
     Name        = null;
     matcher     = null;
     IsKnownSign = false;
 }
Ejemplo n.º 16
0
 public void New2Flann()
 {
     // A single extractor/matcher pair can back several BOW extractors in turn.
     using var descriptorExtractor = SURF.Create(100);
     using var descriptorMatcher = new FlannBasedMatcher();

     using (new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher)) { }
     using (new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher)) { }
 }
        public void New4()
        {
            // Fixed: ip and sp wrap native FLANN handles and were never disposed.
            using var ip = new LinearIndexParams();
            using var sp = new SearchParams();

            using (var descriptorExtractor = SURF.Create(100))
                using (var descriptorMatcher = new FlannBasedMatcher(ip, sp))
                    using (new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher)) { }
        }
Ejemplo n.º 18
0
        /// <summary>
        /// Matches a model image against a pre-computed template (which already
        /// carries SURF keypoints/descriptors) and, when enough consistent
        /// matches survive filtering, estimates a homography.
        /// NOTE(review): this method reads class-level state not visible here
        /// (featureDetector, observedDescriptors, k, uniquenessThreshold,
        /// nonZeroCount) — verify their initialisation before calling.
        /// </summary>
        /// <param name="modelImage">Image of the object to find.</param>
        /// <param name="template">Pre-computed template keypoints/descriptors.</param>
        /// <param name="matchTime">Receives the matching time in milliseconds.</param>
        /// <param name="modelKeyPoints">Receives the model keypoints.</param>
        /// <param name="observedKeyPoints">Receives the template keypoints.</param>
        /// <param name="matches">Receives the k-NN matches.</param>
        /// <param name="mask">Receives the per-match inlier mask.</param>
        /// <param name="homography">Receives the homography, or null if none found.</param>
        /// <returns>The number of matches surviving the filtering votes.</returns>
        public static int FindMatch2(Mat modelImage, TemplateContainer.ImageData template, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            Stopwatch watch;

            homography = null;
            // NOTE(review): this BRIEF extractor is created but never used below.
            BriefDescriptorExtractor descriptor = new BriefDescriptorExtractor();

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
            //using (UMat uObservedImage = template.image.Mat.GetUMat(AccessType.Read))
            {
                //extract features from the object image
                Mat modelDescriptors = new Mat();
                featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                // Only the matching phase is timed; model extraction is excluded.
                watch = Stopwatch.StartNew();

                // extract features from the observed image

                //featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                // The template already carries its (SURF) keypoints/descriptors.
                observedKeyPoints   = template.keyPointsSurf;
                observedDescriptors = template.descriptorSurf;
                // Bruteforce, slower but more accurate
                // You can use KDTree for faster matching with slight loss in accuracy
                using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                    using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                        using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                        {
                            matcher.Add(modelDescriptors);

                            // k-NN match, then keep only sufficiently unique matches.
                            matcher.KnnMatch(observedDescriptors, matches, k, null);
                            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                            mask.SetTo(new MCvScalar(255));
                            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                            // Require enough surviving matches before estimating a homography.
                            nonZeroCount = CvInvoke.CountNonZero(mask);
                            if (nonZeroCount >= 10)
                            {
                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                           matches, mask, 1.8, 18);
                                if (nonZeroCount >= 12)
                                {
                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                          observedKeyPoints, matches, mask, 2);
                                }
                            }
                        }
                watch.Stop();
            }
            matchTime = watch.ElapsedMilliseconds;
            return(nonZeroCount);
        }
Ejemplo n.º 19
0
        /// <summary>
        /// Detects SIFT keypoints inside a fixed crop of every image found in
        /// two hard-coded folders, collects the descriptors into
        /// lstModelDescriptors and the rendered keypoint images into lstMat.
        /// NOTE(review): reads class-level state not visible here (lstMat,
        /// lstModelDescriptors) and the FLANN matcher built below is only used
        /// by the commented-out BOW code — confirm whether it is still needed.
        /// </summary>
        public void ShowKeyPoints()
        {
            lstMat.Clear();
            lstModelDescriptors.Clear();
            var featureDetector = new SIFT();

            Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams();
            Emgu.CV.Flann.SearchParams      sp = new SearchParams();
            DescriptorMatcher matcher          = new FlannBasedMatcher(ip, sp);
            // Fixed-position 70x70 crop; presumably the party-window region — TODO confirm.
            Rectangle         cropRect         = new Rectangle(842, 646, 70, 70);
            Mat mask = new Mat(new Size(70, 70), DepthType.Cv8U, 1);

            // Mask: keep the full square but exclude a central circle.
            CvInvoke.Rectangle(mask, new Rectangle(0, 0, 70, 70), new MCvScalar(255, 255, 255), -1);
            CvInvoke.Circle(mask, new Point(35, 37), 22, new MCvScalar(0, 0, 0), -1);


            lstMat.Add(mask);
            String[] folders = { @"Linage2\Main\PartyAuto", @"Linage2\Main\PartyManual" };
            foreach (String folder in folders)
            {
                DirectoryInfo imageFolder = new DirectoryInfo(folder);
                FileInfo[]    files       = Utils.GetFilesByExtensions(imageFolder, ".jpg", ".png").ToArray();
                foreach (FileInfo finfo in files)
                {
                    // Crop each screenshot, then detect masked SIFT features in it.
                    Mat img  = CvInvoke.Imread(finfo.FullName, ImreadModes.Color);
                    Mat crop = CVUtil.crop_color_frame(img, cropRect);
                    //lstMat.Add(crop);
                    VectorOfKeyPoint modelKeyPoints = new VectorOfKeyPoint();
                    Mat modelDescriptors            = new Mat();
                    featureDetector.DetectAndCompute(crop, mask, modelKeyPoints, modelDescriptors, false);
                    lstModelDescriptors.Add(modelDescriptors);
                    // Render the keypoints in red for visual inspection.
                    Mat result = new Mat();
                    Features2DToolbox.DrawKeypoints(crop, modelKeyPoints, result, new Bgr(Color.Red));

                    lstMat.Add(result);
                    //BOWImgDescriptorExtractor bow = new BOWImgDescriptorExtractor(featureDetector, matcher);
                }
            }


            /*BOWKMeansTrainer bowtrainer = new BOWKMeansTrainer(1000, new MCvTermCriteria(10, 0.001), 1, Emgu.CV.CvEnum.KMeansInitType.PPCenters);
             * foreach (Mat m in lstModelDescriptors) {
             *  bowtrainer.Add(m);
             * }
             * Mat dict = new Mat();
             * bowtrainer.Cluster();
             * StringBuilder sb = new StringBuilder();
             * Image<Bgr, Byte> imgsave = dict.ToImage<Bgr, Byte>();
             *
             * (new XmlSerializer(typeof(Image<Bgr, Byte>))).Serialize(new StringWriter(sb), imgsave);
             * Console.WriteLine(sb.ToString());*/
        }
Ejemplo n.º 20
0
        /// <summary>
        /// Locates the model inside the observed image by FLANN-matching the
        /// two descriptor sets and projecting the model rectangle through the
        /// resulting homography.
        /// </summary>
        /// <param name="observedKeyPoints">Keypoints of the observed image.</param>
        /// <param name="modelKeyPoints">Keypoints of the model image.</param>
        /// <param name="observedDescriptors">Descriptors of the observed image.</param>
        /// <param name="modelDescriptors">Descriptors of the model image.</param>
        /// <param name="modelSize">Pixel size of the model image.</param>
        /// <returns>The projected model corners, or an empty vector when no
        /// reliable homography was found.</returns>
        public static VectorOfPoint Matching(VectorOfKeyPoint observedKeyPoints, VectorOfKeyPoint modelKeyPoints, Mat observedDescriptors, Mat modelDescriptors, Size modelSize)
        {
            // Fixed: a KAZE feature detector was instantiated here but never
            // used or disposed; it has been removed.
            using (var ip = new LinearIndexParams())
                using (var sp = new SearchParams())
                    using (var matcher = new FlannBasedMatcher(ip, sp))
                        using (var matches = new VectorOfVectorOfDMatch())
                        {
                            // Fixed: homography previously started as `new Mat()`,
                            // which leaked when reassigned below; starting at null
                            // preserves the same control flow without the leak.
                            Mat homography = null;
                            matcher.Add(modelDescriptors);

                            // 2-NN match, then keep only sufficiently unique matches.
                            matcher.KnnMatch(observedDescriptors, matches, 2, null);
                            var mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                            mask.SetTo(new MCvScalar(255));
                            Features2DToolbox.VoteForUniqueness(matches, 0.80, mask);

                            // A homography needs at least 4 point correspondences.
                            int nonZeroCount = CvInvoke.CountNonZero(mask);
                            if (nonZeroCount >= 4)
                            {
                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                                if (nonZeroCount >= 4)
                                {
                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                }
                            }
                            mask.Dispose();  // fixed: was never disposed

                            if (homography != null && !homography.Size.IsEmpty)
                            {
                                // Project the model rectangle's corners into the observed image.
                                Rectangle rect = new Rectangle(Point.Empty, modelSize);
                                PointF[]  pts  = new PointF[]
                                {
                                    new PointF(rect.Left, rect.Bottom),
                                    new PointF(rect.Right, rect.Bottom),
                                    new PointF(rect.Right, rect.Top),
                                    new PointF(rect.Left, rect.Top)
                                };

                                pts = CvInvoke.PerspectiveTransform(pts, homography);

                                Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
                                homography.Dispose();  // fixed: was never disposed

                                return(new VectorOfPoint(points));
                            }
                            else
                            {
                                return(new VectorOfPoint());
                            }
                        }
        }
Ejemplo n.º 21
0
        /// <summary>
        /// SIFT-matches two bitmaps, shows both match renderings, and returns
        /// the pixel offsets between the brute-force-matched keypoint pairs.
        /// </summary>
        /// <param name="bitmap1">First image.</param>
        /// <param name="bitmap2">Second image.</param>
        /// <returns>One (dx, dy) offset per brute-force match.</returns>
        public static List <System.Drawing.Point> func(Bitmap bitmap1, Bitmap bitmap2)
        {
            // Fixed: every Mat, the SIFT detector and both matchers wrap native
            // memory and were never disposed.
            using Mat  img1 = BitmapToMat(bitmap1);
            using Mat  img2 = BitmapToMat(bitmap2);
            using SIFT sift = SIFT.Create(20);

            // Detect the keypoints and generate their descriptors using SIFT
            using var descriptors1 = new Mat <float>();
            using var descriptors2 = new Mat <float>();
            sift.DetectAndCompute(img1, null, out var keypoints1, descriptors1);
            sift.DetectAndCompute(img2, null, out var keypoints2, descriptors2);

            // Match descriptor vectors with both matchers for comparison
            using var bfMatcher    = new BFMatcher(NormTypes.L2, false);
            using var flannMatcher = new FlannBasedMatcher();

            DMatch[] bfMatches    = bfMatcher.Match(descriptors1, descriptors2);
            DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);

            // Draw matches
            using var bfView = new Mat();
            Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, bfMatches, bfView);
            using var flannView = new Mat();
            Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, flannMatches, flannView);

            using (new Window("SIFT matching (by BFMather)", bfView))
                using (new Window("SIFT matching (by FlannBasedMatcher)", flannView))
                {
                    Cv2.WaitKey();
                }

            // Offsets between matched keypoint positions (brute-force matches only).
            List <System.Drawing.Point> points = new List <System.Drawing.Point>();

            foreach (DMatch match in bfMatches)
            {
                System.Drawing.Point p = new System.Drawing.Point();
                p.X = (int)(keypoints1[match.QueryIdx].Pt.X - keypoints2[match.TrainIdx].Pt.X);
                p.Y = (int)(keypoints1[match.QueryIdx].Pt.Y - keypoints2[match.TrainIdx].Pt.Y);
                points.Add(p);
            }

            return(points);
        }
Ejemplo n.º 22
0
        public TestSURF()
        {
            // Constructor: declare the arrays and detector state needed later.
            // (comment translated from Korean: "declare the arrays needed in the constructor")

            // Pre-sized keypoint buffer; presumably overwritten by later detection — TODO confirm.
            t_keypoints  = new KeyPoint[10000];
            t_descriptor = new Mat();

            // SURF detector with the commonly-used Hessian threshold of 400.
            surfobj = new SURF();
            surfobj.HessianThreshold = 400;


            fm          = new FlannBasedMatcher();
            isComputing = false;
        }
Ejemplo n.º 23
0
        /// <summary>
        /// Initialize camera input, frame window and other image objects required.
        /// This is done after getting the settings of the tracker object of this class.
        /// NOTE(review): assigns many class-level fields not visible here
        /// (capture, frames, detectors, matchers, windows) — verify they exist.
        /// </summary>
        private void InitializeCamera()
        {
            // Intialize camera; a failure here is fatal and kills the process.
            try
            {
                //videoInput = new VideoInput();
                capture           = new CvCapture(CaptureDevice.Any, deviceID);
                DefaultBrightness = (int)capture.Brightness;
            }
            catch (Exception exception)
            {
                System.Windows.MessageBox.Show("Failed to initialize the camera, the program will be closed." +
                                               "\n\nThis is the internal error:\n" + exception.Message, "Notify", System.Windows.MessageBoxButton.OK, System.Windows.MessageBoxImage.Information);
                System.Diagnostics.Process.GetCurrentProcess().Kill();
            }

            // small frame to decrease computational complexity
            size = new CvSize(320, 240);

            capture.SetCaptureProperty(CaptureProperty.FrameHeight, size.Height);
            capture.SetCaptureProperty(CaptureProperty.FrameWidth, size.Width);
            capture.SetCaptureProperty(CaptureProperty.FrameCount, 15);

            // Working images: two colour frames, their grayscale versions,
            // and a buffer for the transformed result.
            frame1            = new IplImage(size, BitDepth.U8, 3);
            frame2            = new IplImage(size, BitDepth.U8, 3);
            grayFrame1        = new IplImage(size, BitDepth.U8, 1);
            grayFrame2        = new IplImage(size, BitDepth.U8, 1);
            transformedFrame  = new IplImage(size, BitDepth.U8, 1);
            // Feature detectors and matchers used by the tracker.
            sift              = new SIFT();
            surf              = new SURF(500, 4, 2, true);
            bruteForceMatcher = new BFMatcher(NormType.L2, false);
            flannBasedMatcher = new FlannBasedMatcher();

            // windows to view what's going on; positioned along the top-right
            // of the screen, with the result window below the two camera views.
            window1 = new CvWindow("Camera 1", WindowMode.KeepRatio);
            window1.Resize(size.Width, size.Height);
            window1.Move(screenWidth - 17 - 2 * size.Width, 20);

            window2 = new CvWindow("Camera 2", WindowMode.KeepRatio);
            window2.Resize(size.Width, size.Height);
            window2.Move(screenWidth - 20 - 1 * size.Width, 20);

            window3 = new CvWindow("Result", WindowMode.KeepRatio);
            window3.Resize(size.Width * 2, size.Height);
            window3.Move(screenWidth - 20 - 2 * size.Width, 20 + size.Height);
        }
Ejemplo n.º 24
0
        /// <summary>
        /// Builds a FLANN-based matcher (KD-tree index, 4 trees, 50 search checks)
        /// and adds the descriptors of every item in the indexed dataset to it.
        /// Side effect: assigns a sequential <c>index</c> to each dataset item.
        /// </summary>
        /// <returns>The populated matcher, ready for KNN queries.</returns>
        private DescriptorMatcher InitMatcher()
        {
            FlannBasedMatcher matcher = new FlannBasedMatcher(new OpenCvSharp.Flann.KDTreeIndexParams(4), new OpenCvSharp.Flann.SearchParams(50));

            int current = 0;

            foreach (var item in _indexedDatset)
            {
                // Rehydrate the stored feature array into a Mat of the original shape/type.
                Mat itemFeatures = new Mat(item.Rows, item.Cols, item.MType);
                itemFeatures.SetArray(0, 0, item.Features);

                // FLANN KD-tree indices require floating-point descriptors.
                // (Removed dead local `tempItem` that aliased itemFeatures and was never used.)
                itemFeatures.ConvertTo(itemFeatures, MatType.CV_32F);
                matcher.Add(new Mat[] { itemFeatures });
                item.index = current++;
            }

            return(matcher);
        }
Ejemplo n.º 25
0
        /// <summary>
        /// Detects SIFT features on <paramref name="SubMap"/>, KNN-matches the map
        /// descriptors (<c>MapDiscriptors</c> field) against them with a FLANN linear
        /// index, filters matches by uniqueness / size / orientation voting and, when
        /// at least 4 inliers remain, estimates the map-to-submap homography.
        /// </summary>
        /// <param name="SubMap">Image region to locate within the map.</param>
        /// <param name="VectorSubMapKeyPoint">Detected sub-map keypoints.</param>
        /// <param name="SubMapDiscriptors">Computed sub-map SIFT descriptors.</param>
        /// <param name="matches">KNN match groups (map queried against sub-map).</param>
        /// <param name="mask">Per-match inlier mask (255 = kept) after voting.</param>
        /// <param name="zone">Always returned empty; kept for interface compatibility.</param>
        /// <param name="homography">Estimated homography, or null if too few inliers.</param>
        /// <param name="k">Number of nearest neighbours per query descriptor.</param>
        /// <param name="uniquenessThreshold">Ratio-test threshold for VoteForUniqueness.</param>
        /// <param name="parametrs">SIFT detector parameters.</param>
        public void FindMatches(Image <Rgb, byte> SubMap, out VectorOfKeyPoint VectorSubMapKeyPoint,
                                out Mat SubMapDiscriptors, out VectorOfVectorOfDMatch matches,
                                out Mat mask, out System.Drawing.Rectangle zone, out Mat homography, int k, double uniquenessThreshold, SIFTParametrs parametrs)
        {
            VectorSubMapKeyPoint = new VectorOfKeyPoint();
            SubMapDiscriptors    = new Mat();
            // Fix: `matches` was allocated twice (the first instance leaked unmanaged
            // memory); allocate it exactly once.
            matches = new VectorOfVectorOfDMatch();
            zone    = new System.Drawing.Rectangle();

            // Detect keypoints and compute SIFT descriptors for the sub-map.
            using (SIFT siftCPU = new SIFT(parametrs.nFeatures, parametrs.nOctaveLayers,
                                           parametrs.contrastThreshold, parametrs.edgeThreshold, parametrs.sigma))
            {
                siftCPU.DetectAndCompute(SubMap, null, VectorSubMapKeyPoint, SubMapDiscriptors, false);
            }

            // Linear (brute-force) FLANN index: exact matching, no approximation.
            using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                    using (Emgu.CV.Features2D.DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                    {
                        matcher.Add(SubMapDiscriptors);
                        matcher.KnnMatch(MapDiscriptors, matches, k, null);
                    }

            // Start with every match accepted, then let the voting stages clear bits.
            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

            homography = null;

            int nonZeroCount = CvInvoke.CountNonZero(mask);

            // A homography needs at least 4 point correspondences.
            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(VectorSubMapKeyPoint, VectorMapKeyPoint,
                                                                           matches, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(
                        VectorSubMapKeyPoint, VectorMapKeyPoint, matches, mask, 2);
                }
            }
        }
Ejemplo n.º 26
0
        /// <summary>
        /// KNN-matches the scene descriptors against the model descriptors and
        /// returns at most _matchLimit match groups, ordered by their best distance.
        /// </summary>
        /// <param name="scene">Descriptors of the observed scene (query side).</param>
        /// <param name="model">Descriptors of the model to search for (train side).</param>
        /// <returns>The trimmed, distance-ordered match groups; caller owns disposal.</returns>
        public VectorOfVectorOfDMatch GetMatchesForModel(SurfData scene, SurfData model)
        {
            using (FlannBasedMatcher matcher =
                       new FlannBasedMatcher(_hierarchicalParams, _searchParams))
            {
                matcher.Add(model.Descriptors);

                // Raw k-nearest-neighbour match groups, one group per scene descriptor.
                VectorOfVectorOfDMatch knnResult = new VectorOfVectorOfDMatch();
                matcher.KnnMatch(scene.Descriptors, knnResult, _kConstant, null);

                // Keep only the _matchLimit best groups, ranked by each group's top match.
                MDMatch[][] best = knnResult
                                   .ToArrayOfArray()
                                   .OrderBy(group => group[0].Distance)
                                   .Take(_matchLimit)
                                   .ToArray();

                VectorOfVectorOfDMatch trimmed = new VectorOfVectorOfDMatch(best);
                knnResult.Dispose();
                return trimmed;
            }
        }
Ejemplo n.º 27
0
        /// <summary>
        /// Matches SURF descriptors between two BGR images with both a brute-force
        /// matcher and a FLANN matcher, then shows the two match visualizations
        /// side by side. Blocks until a key is pressed.
        /// </summary>
        /// <param name="src1">First input image (BGR).</param>
        /// <param name="src2">Second input image (BGR).</param>
        private void MatchBySurf(Mat src1, Mat src2)
        {
            // Fix: the original never disposed any of the native Mats, the detector or
            // the matchers; wrap them in using blocks so the unmanaged memory is
            // released deterministically.
            using (var gray1 = new Mat())
            using (var gray2 = new Mat())
            using (var surf = new SURF(500, 4, 2, true))
            using (var descriptors1 = new MatOfFloat())
            using (var descriptors2 = new MatOfFloat())
            using (var bfMatcher = new BFMatcher(NormType.L2, false))
            using (var flannMatcher = new FlannBasedMatcher())
            using (var bfView = new Mat())
            using (var flannView = new Mat())
            {
                Cv2.CvtColor(src1, gray1, ColorConversion.BgrToGray);
                Cv2.CvtColor(src2, gray2, ColorConversion.BgrToGray);

                // Detect the keypoints and generate their descriptors using SURF
                KeyPoint[] keypoints1, keypoints2;
                surf.Run(gray1, null, out keypoints1, descriptors1);
                surf.Run(gray2, null, out keypoints2, descriptors2);

                // Match descriptor vectors with both matchers for comparison.
                DMatch[] bfMatches    = bfMatcher.Match(descriptors1, descriptors2);
                DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);

                // Draw matches
                Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, bfView);
                Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, flannView);

                using (new Window("SURF matching (by BFMather)", WindowMode.AutoSize, bfView))
                    using (new Window("SURF matching (by FlannBasedMatcher)", WindowMode.AutoSize, flannView))
                    {
                        Cv2.WaitKey();
                    }
            }
        }
Ejemplo n.º 28
0
        /// <summary>
        /// Detects features on both the <c>current</c> field image and
        /// <paramref name="modelImage"/> using the class detector, KNN-matches them
        /// with a FLANN linear index, applies uniqueness / size / orientation voting,
        /// and estimates the homography when at least 4 inliers survive.
        /// </summary>
        /// <param name="modelImage">Model image to locate in the current frame.</param>
        /// <param name="modelKeyPoints">Detected model keypoints.</param>
        /// <param name="matches">Receives the KNN match groups (caller-allocated).</param>
        /// <param name="mask">Per-match inlier mask (255 = kept) after voting.</param>
        /// <param name="homography">Estimated homography, or null if too few inliers.</param>
        private void find(Mat modelImage, out VectorOfKeyPoints modelKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            int    k = 2;
            double uniquenessThreshold = 0.70;

            homography = null;

            VectorOfKeyPoint currentKeyPoints = new VectorOfKeyPoint();
            Mat currentDescriptors            = new Mat();

            detector.DetectAndCompute(current, null, currentKeyPoints, currentDescriptors, false);

            modelKeyPoints = new VectorOfKeyPoint();
            Mat modelDescriptors = new Mat();

            detector.DetectAndCompute(modelImage, null, modelKeyPoints, modelDescriptors, false);

            // Fix: the FLANN index/search params and the matcher wrap native resources
            // and were never disposed; use using blocks, matching the sibling matching
            // routines in this codebase.
            using (LinearIndexParams ip = new LinearIndexParams())
                using (SearchParams sp = new SearchParams())
                    using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                    {
                        matcher.Add(modelDescriptors);
                        matcher.KnnMatch(currentDescriptors, matches, k, null);
                    }

            // Accept all matches initially, then let the voting stages clear bits.
            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

            int nonZeroCount = CvInvoke.CountNonZero(mask);

            // A homography needs at least 4 point correspondences.
            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, currentKeyPoints, matches, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, currentKeyPoints, matches, mask, 2);
                }
            }
        }
Ejemplo n.º 29
0
        /// <summary>
        /// KNN-matches (k = 1) the scene descriptors against the model descriptors
        /// using a hierarchical-clustering FLANN index and returns the 8 best match
        /// groups, ordered by distance.
        /// </summary>
        /// <param name="sceneData">Descriptors of the observed scene (query side).</param>
        /// <param name="modelData">Descriptors of the model (train side).</param>
        /// <returns>The trimmed, distance-ordered match groups; caller owns disposal.</returns>
        private static VectorOfVectorOfDMatch GetSceneMatchesForModel(SURFData sceneData, SURFData modelData)
        {
            // Fix: the matcher and its index/search parameters wrap native FLANN
            // resources and were never disposed; use using blocks, consistent with
            // GetMatchesForModel.
            using (HierarchicalClusteringIndexParams indexParams = new HierarchicalClusteringIndexParams())
                using (SearchParams searchParams = new SearchParams())
                    using (FlannBasedMatcher matcher = new FlannBasedMatcher(indexParams, searchParams))
                    {
                        matcher.Add(modelData.Descriptors);

                        VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

                        matcher.KnnMatch(sceneData.Descriptors, matches, 1, null);

                        // Keep the 8 best groups, ranked by each group's top match distance.
                        MDMatch[][] newMatches = matches
                                                 .ToArrayOfArray()
                                                 .OrderBy(m => m[0].Distance)
                                                 .Take(8)
                                                 .ToArray();

                        VectorOfVectorOfDMatch limitMatches = new VectorOfVectorOfDMatch(newMatches);

                        matches.Dispose();
                        return(limitMatches);
                    }
        }
Ejemplo n.º 30
0
        /// <summary>
        /// Detects SIFT keypoints/descriptors on two BGR images, matches them with
        /// both a brute-force matcher and a FLANN matcher, draws both match sets and
        /// displays the brute-force result. Blocks until a key is pressed.
        /// </summary>
        /// <param name="src1">First input image (BGR).</param>
        /// <param name="src2">Second input image (BGR).</param>
        public void siftcharacterors(Mat src1, Mat src2)
        {
            // Fix: the original leaked every native Mat, the detector and both
            // matchers; wrap them in using blocks for deterministic release.
            using (Mat gray1 = new Mat())
            using (Mat gray2 = new Mat())
            using (var siftdemo = SIFT.Create())
            using (var descriptors1 = new MatOfFloat())
            using (var descriptors2 = new MatOfFloat())
            using (var bfMatcher = new BFMatcher(NormTypes.L2, false))
            using (var flannMatcher = new FlannBasedMatcher())
            using (var bfView = new Mat())
            using (var flannView = new Mat())
            {
                Cv2.CvtColor(src1, gray1, ColorConversionCodes.BGR2GRAY);
                Cv2.CvtColor(src2, gray2, ColorConversionCodes.BGR2GRAY);

                // Detect keypoints and compute their SIFT descriptors.
                KeyPoint[] keypoints1, keypoints2;
                siftdemo.DetectAndCompute(gray1, null, out keypoints1, descriptors1);
                siftdemo.DetectAndCompute(gray2, null, out keypoints2, descriptors2);

                DMatch[] bfMatches    = bfMatcher.Match(descriptors1, descriptors2);
                DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);

                // Draw matches (the FLANN view is rendered even though its window is
                // currently commented out below, preserving original behavior).
                Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, bfView);
                Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, flannView);

                using (new Window("SIFT matching (by BFMather)", WindowMode.AutoSize, bfView))
                //using (new Window("SIFT matching (by FlannBasedMatcher)", WindowMode.AutoSize, flannView))
                {
                    Cv2.WaitKey();
                }
            }
        }
Ejemplo n.º 31
0
        /// <summary>
        /// Test menu handler: crops a 70x70 model patch from a screenshot, detects
        /// SIFT features on patch and full image, FLANN-matches them, and when a
        /// homography is found draws its projected outline plus debug shapes on the
        /// test image and queues it for display.
        /// </summary>
        private void testToolStripMenuItem_Click(object sender, EventArgs e)
        {
            lstMat.Clear();
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
            Mat testImage  = CvInvoke.Imread(@"Linage2\Main\PartyAuto\2e35av2fwbk.png", ImreadModes.Color);
            Mat modelImage = CVUtil.crop_color_frame(testImage, new Rectangle(842, 646, 70, 70));

            log(modelImage.ToString());
            Image <Bgr, Byte> img = modelImage.ToImage <Bgr, Byte>();

            // NOTE(review): the ROI is set on `img` but `img` is never used afterwards —
            // looks like leftover experimentation; confirm before removing.
            CvInvoke.cvSetImageROI(img, new Rectangle(0, 0, 35, 35));

            //UMat uModelImage = modelImage.GetUMat(AccessType.Read);
            var featureDetector             = new SIFT();
            Mat modelDescriptors            = new Mat();
            VectorOfKeyPoint modelKeyPoints = new VectorOfKeyPoint();

            VectorOfKeyPoint observedKeyPoints = new VectorOfKeyPoint();

            // Features of the cropped model patch.
            featureDetector.DetectAndCompute(modelImage, null, modelKeyPoints, modelDescriptors, false);
            log("model size = " + modelKeyPoints.Size);
            Mat observedDescriptors = new Mat();

            // Features of the full test image (the "observed" scene).
            featureDetector.DetectAndCompute(testImage, null, observedKeyPoints, observedDescriptors, false);

            int    k = 2;
            double uniquenessThreshold = 0.80;
            Mat    mask;
            Mat    homography = null;

            // Bruteforce, slower but more accurate
            // You can use KDTree for faster matching with slight loss in accuracy
            using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                    using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                    {
                        matcher.Add(modelDescriptors);

                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                        // Accept all matches initially; voting stages clear mask bits.
                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                        mask.SetTo(new MCvScalar(255));
                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                        int nonZeroCount = CvInvoke.CountNonZero(mask);
                        // A homography needs at least 4 surviving correspondences.
                        if (nonZeroCount >= 4)
                        {
                            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                       matches, mask, 1.5, 20);
                            if (nonZeroCount >= 4)
                            {
                                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                // Project the model patch's corners into the test image
                                // and outline where the patch was found.
                                PointF[] src =
                                {
                                    new PointF(0, 0), new PointF(0, modelImage.Height - 1), new PointF(modelImage.Width - 1, modelImage.Height - 1), new PointF(modelImage.Width - 1, 0)
                                };
                                PointF[] points = CvInvoke.PerspectiveTransform(src, homography);
                                foreach (var p in points)
                                {
                                    Console.WriteLine(p.ToString());
                                }
                                Point[] ap = Array.ConvertAll(points,
                                                              new Converter <PointF, Point>(CVUtil.PointFToPoint));

                                // Outline plus fixed rectangle/circle as visual debug markers.
                                CvInvoke.Polylines(testImage, ap, true, new MCvScalar(255, 0, 0));
                                CvInvoke.Rectangle(testImage, new Rectangle(0, 0, 100, 100), new MCvScalar(255, 255, 0));
                                CvInvoke.Circle(testImage, new Point(100, 100), 50, new MCvScalar(255, 255, 0), -1);
                                lstMat.Add(testImage);
                            }
                            //Mat modelMatches = new Mat();
                            //Features2DToolbox.DrawKeypoints(modelImage, modelKeyPoints, result, new Bgr(Color.Red));
                            //Features2DToolbox.DrawKeypoints(testImage, observedKeyPoints, result, new Bgr(Color.Red));
                            //Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, testImage, observedKeyPoints, matches, modelMatches,
                            //    new MCvScalar(255, 0, 0), new MCvScalar(0, 255, 0));
                            //lstMat.Add(modelMatches);

                            //Mat model1 = new Mat();
                            //Features2DToolbox.DrawKeypoints(modelImage, modelKeyPoints, model1, new Bgr(Color.Red));
                            //lstMat.Add(model1);
                            //modelMatches = crop_color_frame(testImage,new Rectangle(842,646,70,70));
                        }
                    }
            log("Done " + mask.Size);

            Refresh();
        }