Code Example #1
        static void Main(string[] args)
        {
            var img1 = new Mat(@"..\..\Images\left.png", LoadMode.GrayScale);
            Cv2.ImShow("Left", img1);
            Cv2.WaitKey(1); // do events

            var img2 = new Mat(@"..\..\Images\right.png", LoadMode.GrayScale);
            Cv2.ImShow("Right", img2);
            Cv2.WaitKey(1); // do events

            // detecting keypoints
            // FastFeatureDetector, StarDetector, SIFT, SURF, ORB, BRISK, MSER, GFTTDetector, DenseFeatureDetector, SimpleBlobDetector
            // SURF = Speeded Up Robust Features
            var detector = new SURF(hessianThreshold: 400); // A good default is roughly 300 to 500, depending on the image contrast.
            var keypoints1 = detector.Detect(img1);
            var keypoints2 = detector.Detect(img2);

            // computing descriptors, BRIEF, FREAK
            // BRIEF = Binary Robust Independent Elementary Features
            var extractor = new BriefDescriptorExtractor();
            var descriptors1 = new Mat();
            var descriptors2 = new Mat();
            extractor.Compute(img1, ref keypoints1, descriptors1);
            extractor.Compute(img2, ref keypoints2, descriptors2);

            // matching descriptors
            // BRIEF produces binary descriptors, so Hamming distance is the appropriate metric
            var matcher = new BFMatcher(NormType.Hamming);
            var matches = matcher.Match(descriptors1, descriptors2);

            // drawing the results
            var imgMatches = new Mat();
            Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, matches, imgMatches);
            Cv2.ImShow("Matches", imgMatches);
            Cv2.WaitKey(1); // do events

            Cv2.WaitKey(0);

            Cv2.DestroyAllWindows();
            img1.Dispose();
            img2.Dispose();
        }
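
The snippet above draws every brute-force match, including poor ones. A minimal follow-up sketch (not part of the original, assuming the same OpenCvSharp API plus `using System.Linq`) that keeps only distinctive matches via Lowe's ratio test before drawing:

            // Ask for the two nearest neighbours per descriptor and keep matches
            // whose best distance is clearly smaller than the second-best one.
            var knnMatches = matcher.KnnMatch(descriptors1, descriptors2, k: 2);
            var goodMatches = knnMatches
                .Where(m => m.Length == 2 && m[0].Distance < 0.75f * m[1].Distance)
                .Select(m => m[0])
                .ToArray();
            Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, goodMatches, imgMatches);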
Code Example #2
        private void FrmMain_Shown(object sender, EventArgs e)
        {
            this.list = Ptma.LoadFromPath("config.xml");
            for (int i = 0; i < list.Count; i++)
            {
                TrainedTemplate trainedTemplate = new TrainedTemplate();
                trainedTemplate.templateImage = Cv2.ImRead(list[i].imageSrc, OpenCvSharp.LoadMode.Color);

                SURF featureDetector = new SURF();
                // Detect the key points of the template image
                KeyPoint[] templateKeyPoints = featureDetector.Detect(trainedTemplate.templateImage);
                // Compute the descriptors of the template image (Compute allocates the output Mat itself)
                Mat templateDescriptors = new Mat();
                featureDetector.Compute(trainedTemplate.templateImage, ref templateKeyPoints, templateDescriptors);
                trainedTemplate.templateDescriptors = templateDescriptors;
                trainedTemplate.templateKeyPoints   = templateKeyPoints;
                this.toolList.Add(trainedTemplate);
            }
            this.dgvMain.DataSource = this.list;

            Thread bgThread = new Thread(CaptureAndAnalyse);

            bgThread.IsBackground = true;
            bgThread.Start();
        }
Code Example #3
        private void MatchBySurf(Mat src1, Mat src2)
        {
            Mat gray1 = new Mat();
            Mat gray2 = new Mat();

            Cv2.CvtColor(src1, gray1, ColorConversion.BgrToGray);
            Cv2.CvtColor(src2, gray2, ColorConversion.BgrToGray);

            SURF surf = new SURF(500, 4, 2, true);

            // Detect the keypoints and generate their descriptors using SURF
            KeyPoint[] keypoints1, keypoints2;
            MatOfFloat descriptors1 = new MatOfFloat();
            MatOfFloat descriptors2 = new MatOfFloat();
            surf.Run(gray1, null, out keypoints1, descriptors1);
            surf.Run(gray2, null, out keypoints2, descriptors2);

            // Matching descriptor vectors with a brute force matcher
            BFMatcher matcher = new BFMatcher(NormType.L2, false);
            DMatch[] matches = matcher.Match(descriptors1, descriptors2);

            // Draw matches
            Mat view = new Mat();
            Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, matches, view);

            using (new Window("SURF matching", WindowMode.AutoSize, view))
            {
                Cv2.WaitKey();
            }
        }
Code Example #4
        private VectorOfPoint _octagon;             // The region being searched for

        /// <summary>
        /// Constructor.
        /// </summary>
        /// <param name="brickSingModel">Image to process. An Image<Bgr, Byte> is expected.</param>
        public SingDetectorMethodCanny(IInputArray brickSingModel)
        {
            _detector = new SURF(500);

            using (Mat redMask = new Mat())
            {
                GetRedPixelMask(brickSingModel, redMask);
                _modelKeypoints   = new VectorOfKeyPoint();
                _modelDescriptors = new Mat();
                _detector.DetectAndCompute(redMask, null, _modelKeypoints, _modelDescriptors, false);
                if (_modelKeypoints.Size == 0)
                {
                    //throw new Exception("The image to be processed has not been loaded");
                }
            }

            _modelDescriptorMatcher = new BFMatcher(DistanceType.L2);
            _modelDescriptorMatcher.Add(_modelDescriptors);

            _octagon = new VectorOfPoint(
                new Point[] {
                new Point(1, 0),
                new Point(2, 0),
                new Point(3, 1),
                new Point(3, 2),
                new Point(2, 3),
                new Point(1, 3),
                new Point(0, 2),
                new Point(0, 1)
            });
        }
Code Example #5
        /// <summary>
        /// Generates a SignMatcher with a custom path
        /// </summary>
        /// <param name="knownSignsPath"> Folder with known signs </param>
        public SignMatcher(string knownSignsPath)
        {
            KnownSignsPath = knownSignsPath;

            // Upright SURF to speed up the process, with extended descriptors
            detector = new SURF(500, 4, 3, true, true);
        }
Code Example #6
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            int    k = 2;
            double uniquenessThreshold = 0.80;
            double hessianThresh       = 100;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    SURF surfCPU = new SURF(hessianThresh);
                    SIFT siftCPU = new SIFT();


                    //extract features from the object image
                    UMat modelDescriptors = new UMat();

                    //surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
                    siftCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                    watch = Stopwatch.StartNew();

                    // extract features from the observed image
                    UMat observedDescriptors = new UMat();

                    //surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                    siftCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                    BFMatcher matcher = new BFMatcher(DistanceType.L2);
                    matcher.Add(modelDescriptors);

                    matcher.KnnMatch(observedDescriptors, matches, k, null);
                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                    mask.SetTo(new MCvScalar(255));

                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                    //Features2DToolbox.VoteForUniqueness(matches, 1, mask);

                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                    if (nonZeroCount >= 4)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                   matches, mask, 1.5, 20);
                        if (nonZeroCount >= 4)
                        {
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                  observedKeyPoints, matches, mask, 2);
                        }
                    }

                    watch.Stop();
                }
            matchTime = watch.ElapsedMilliseconds;
        }
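
A sketch of how the homography returned by FindMatch is typically consumed, following the pattern of Emgu CV's own FeatureMatching sample; the DrawResult method name, the colours, and the use of a fresh `result` Mat are assumptions, not part of the snippet above:

        public static Mat DrawResult(Mat modelImage, Mat observedImage)
        {
            long matchTime;
            Mat mask, homography;
            VectorOfKeyPoint modelKeyPoints, observedKeyPoints;
            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches, out mask, out homography);

                // Draw the surviving matches
                Mat result = new Mat();
                Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                              matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);

                // If a homography was found, project the model outline into the observed image
                if (homography != null)
                {
                    Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                    PointF[] pts =
                    {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    pts = CvInvoke.PerspectiveTransform(pts, homography);
                    Point[] points = Array.ConvertAll(pts, Point.Round);
                    using (VectorOfPoint vp = new VectorOfPoint(points))
                    {
                        CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                    }
                }
                return result;
            }
        }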
Code Example #7
File: SiftSurfSample.cs  Project: 0sv/opencvsharp
        private void MatchBySurf(Mat src1, Mat src2)
        {
            var gray1 = new Mat();
            var gray2 = new Mat();

            Cv2.CvtColor(src1, gray1, ColorConversion.BgrToGray);
            Cv2.CvtColor(src2, gray2, ColorConversion.BgrToGray);

            var surf = new SURF(500, 4, 2, true);

            // Detect the keypoints and generate their descriptors using SURF
            KeyPoint[] keypoints1, keypoints2;
            var descriptors1 = new MatOfFloat();
            var descriptors2 = new MatOfFloat();
            surf.Run(gray1, null, out keypoints1, descriptors1);
            surf.Run(gray2, null, out keypoints2, descriptors2);

            // Match descriptor vectors 
            var bfMatcher = new BFMatcher(NormType.L2, false);
            var flannMatcher = new FlannBasedMatcher();
            DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);
            DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);

            // Draw matches
            var bfView = new Mat();
            Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, bfView);
            var flannView = new Mat();
            Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, flannView);

            using (new Window("SURF matching (by BFMather)", WindowMode.AutoSize, bfView))
            using (new Window("SURF matching (by FlannBasedMatcher)", WindowMode.AutoSize, flannView))
            {
                Cv2.WaitKey();
            }
        }
Code Example #8
        private void MatchBySurf(Mat src1, Mat src2)
        {
            Mat gray1 = new Mat();
            Mat gray2 = new Mat();

            Cv2.CvtColor(src1, gray1, ColorConversion.BgrToGray);
            Cv2.CvtColor(src2, gray2, ColorConversion.BgrToGray);

            SURF surf = new SURF(500, 4, 2, true);

            // Detect the keypoints and generate their descriptors using SURF
            KeyPoint[] keypoints1, keypoints2;
            MatOfFloat descriptors1 = new MatOfFloat();
            MatOfFloat descriptors2 = new MatOfFloat();

            surf.Run(gray1, null, out keypoints1, descriptors1);
            surf.Run(gray2, null, out keypoints2, descriptors2);

            // Matching descriptor vectors with a brute force matcher
            BFMatcher matcher = new BFMatcher(NormType.L2, false);

            DMatch[] matches = matcher.Match(descriptors1, descriptors2);

            // Draw matches
            Mat view = new Mat();

            Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, matches, view);

            using (new Window("SURF matching", WindowMode.AutoSize, view))
            {
                Cv2.WaitKey();
            }
        }
Code Example #9
        public void TestSURF()
        {
            SURF detector = new SURF(500);

            //ParamDef[] parameters = detector.GetParams();
            EmguAssert.IsTrue(TestFeature2DTracker(detector, detector), "Unable to find homography matrix");
        }
Code Example #10
File: SurfAndSift.cs  Project: pathi/OpenCVB
        public void Run(Mat gray1, Mat gray2, Mat dst, int hessianThreshold, bool useBFMatcher)
        {
            var surf = SURF.Create(hessianThreshold, 4, 2, true);

            KeyPoint[] keypoints1, keypoints2;
            var        descriptors1 = new Mat();
            var        descriptors2 = new Mat();

            surf.DetectAndCompute(gray1, null, out keypoints1, descriptors1);
            surf.DetectAndCompute(gray2, null, out keypoints2, descriptors2);

            if (useBFMatcher)
            {
                if (descriptors1.Rows > 0 && descriptors2.Rows > 0) // occasionally there is nothing to match!
                {
                    var      bfMatcher = new BFMatcher(NormTypes.L2, false);
                    DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);
                    Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, dst);
                }
            }
            else
            {
                var flannMatcher = new FlannBasedMatcher();
                if (descriptors1.Width > 0 && descriptors2.Width > 0)
                {
                    DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);
                    Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, dst);
                }
            }
        }
Code Example #11
        public void TestLATCH()
        {
            SURF  surf  = new SURF(300);
            LATCH latch = new LATCH();

            EmguAssert.IsTrue(TestFeature2DTracker(surf, latch), "Unable to find homography matrix");
        }
Code Example #12
        public StopSignDetector(IInputArray stopSignModel)
        {
            _detector = new SURF(500);
            using (Mat redMask = new Mat())
            {
                GetRedPixelMask(stopSignModel, redMask);
                _modelKeypoints   = new VectorOfKeyPoint();
                _modelDescriptors = new Mat();
                _detector.DetectAndCompute(redMask, null, _modelKeypoints, _modelDescriptors, false);
                if (_modelKeypoints.Size == 0)
                {
                    throw new Exception("No image feature has been found in the stop sign model");
                }
            }

            _modelDescriptorMatcher = new BFMatcher(DistanceType.L2);
            _modelDescriptorMatcher.Add(_modelDescriptors);

            _octagon = new VectorOfPoint(
                new Point[]
            {
                new Point(1, 0),
                new Point(2, 0),
                new Point(3, 1),
                new Point(3, 2),
                new Point(2, 3),
                new Point(1, 3),
                new Point(0, 2),
                new Point(0, 1)
            });
        }
Code Example #13
        public void TestDAISY()
        {
            SURF  surf  = new SURF(300);
            DAISY daisy = new DAISY();

            EmguAssert.IsTrue(TestFeature2DTracker(surf, daisy), "Unable to find homography matrix");
        }
Code Example #14
        private void MatchBySurf(Mat src1, Mat src2)
        {
            using var gray1 = new Mat();
            using var gray2 = new Mat();

            Cv2.CvtColor(src1, gray1, ColorConversionCodes.BGR2GRAY);
            Cv2.CvtColor(src2, gray2, ColorConversionCodes.BGR2GRAY);

            using var surf = SURF.Create(200, 4, 2, true);

            // Detect the keypoints and generate their descriptors using SURF
            KeyPoint[] keypoints1, keypoints2;
            using var descriptors1 = new Mat<float>();
            using var descriptors2 = new Mat<float>();
            surf.DetectAndCompute(gray1, null, out keypoints1, descriptors1);
            surf.DetectAndCompute(gray2, null, out keypoints2, descriptors2);

            // Match descriptor vectors
            using var bfMatcher    = new BFMatcher(NormTypes.L2, false);
            using var flannMatcher = new FlannBasedMatcher();
            DMatch[] bfMatches    = bfMatcher.Match(descriptors1, descriptors2);
            DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);

            // Draw matches
            using var bfView = new Mat();
            Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, bfView);
            using var flannView = new Mat();
            Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, flannView);

            using (new Window("SURF matching (by BFMather)", WindowMode.AutoSize, bfView))
                using (new Window("SURF matching (by FlannBasedMatcher)", WindowMode.AutoSize, flannView))
                {
                    Cv2.WaitKey();
                }
        }
Code Example #15
File: AutoTestFeatures2d.cs  Project: xklg309/emgucv
        public void TestBOWKmeansTrainer()
        {
            Image <Gray, byte> box    = EmguAssert.LoadImage <Gray, byte>("box.png");
            SURF             detector = new SURF(500);
            VectorOfKeyPoint kpts     = new VectorOfKeyPoint();
            Mat descriptors           = new Mat();

            detector.DetectAndCompute(box, null, kpts, descriptors, false);

            BOWKMeansTrainer trainer = new BOWKMeansTrainer(100, new MCvTermCriteria(), 3, CvEnum.KMeansInitType.PPCenters);

            trainer.Add(descriptors);
            Mat vocabulary = new Mat();

            trainer.Cluster(vocabulary);

            BFMatcher matcher = new BFMatcher(DistanceType.L2);

            BOWImgDescriptorExtractor extractor = new BOWImgDescriptorExtractor(detector, matcher);

            extractor.SetVocabulary(vocabulary);

            Mat descriptors2 = new Mat();

            extractor.Compute(box, kpts, descriptors2);
        }
Code Example #16
 private void FindAndDrawMatches()
 {
     using (var template = new Mat("Images\\Circle_Template.bmp", ImreadModes.GrayScale))
     {
         using (var surf = SURF.Create(1000))
         {
             using (var templateDescriptors = new Mat())
             {
                 surf.DetectAndCompute(template, null, out KeyPoint[] templateKeyPoints, templateDescriptors);
                 using (var image = new Mat("Images\\Circle.bmp", ImreadModes.GrayScale))
                 {
                     using (var imageDescriptors = new Mat())
                     {
                         surf.DetectAndCompute(image, null, out KeyPoint[] imageKeyPoints, imageDescriptors);
                         using (var matcher = new BFMatcher())
                         {
                             var matches = matcher.Match(imageDescriptors, templateDescriptors);
                             using (var overLay = image.Overlay())
                             {
                                 Cv2.DrawMatches(image, imageKeyPoints, template, templateKeyPoints, matches, overLay);
                                 this.Result.Source = overLay.ToBitmapSource();
                             }
                         }
                     }
                 }
             }
         }
     }
 }
Code Example #17
        private void FindAndDrawHomo()
        {
            using (var template = new Mat("Images\\Circle_Template.bmp", ImreadModes.Color))
            {
                using (var surf = SURF.Create(1000))
                {
                    using (var templateDescriptors = new Mat())
                    {
                        surf.DetectAndCompute(template, null, out KeyPoint[] templateKeyPoints, templateDescriptors);
                        using (var image = new Mat("Images\\Circle.bmp", ImreadModes.Color))
                        {
                            using (var imageDescriptors = new Mat())
                            {
                                surf.DetectAndCompute(image, null, out KeyPoint[] imageKeyPoints, imageDescriptors);
                                using (var matcher = new BFMatcher())
                                {
                                    var matches     = matcher.Match(imageDescriptors, templateDescriptors);
                                    var goodMatches = matches;//.Where(m => m.Distance < 0.2).ToArray();
                                    using (var srcPoints = InputArray.Create(goodMatches.Select(m => templateKeyPoints[m.TrainIdx].Pt)))
                                    {
                                        using (var dstPoints = InputArray.Create(goodMatches.Select(m => imageKeyPoints[m.QueryIdx].Pt)))
                                        {
                                            using (var homography = Cv2.FindHomography(srcPoints, dstPoints, HomographyMethods.Rho))
                                            {
                                                ////using (var overlay = image.Overlay())
                                                ////{
                                                ////    DrawBox(template, homography, overlay);
                                                ////    this.Result.Source = overlay.ToBitmapSource();
                                                ////}

                                                using (var tmp = image.Overlay())
                                                {
                                                    Cv2.BitwiseNot(template, template);
                                                    Cv2.WarpPerspective(template, tmp, homography, tmp.Size());
                                                    using (var overlay = tmp.Overlay())
                                                    {
                                                        for (var r = 0; r < tmp.Rows; r++)
                                                        {
                                                            for (var c = 0; c < tmp.Cols; c++)
                                                            {
                                                                overlay.Set(r, c,
                                                                            tmp.At <int>(r, c) == 0
                                                                        ? new Vec4b(0, 0, 0, 0)
                                                                        : new Vec4b(0, 0, 255, 150));
                                                            }
                                                        }

                                                        this.Result.Source = overlay.ToBitmapSource();
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
Code Example #18
File: test.cs  Project: chencen2000/testMQ
        static void test_svm()
        {
            FileStorage fs   = new FileStorage("test.yaml", FileStorage.Mode.Read);
            FileNode    n    = fs["voca"];
            Mat         voca = new Mat();

            n.ReadMat(voca);

            SURF      surf    = new SURF(400);
            BFMatcher matcher = new BFMatcher(DistanceType.L2);
            BOWImgDescriptorExtractor bowDex = new BOWImgDescriptorExtractor(surf, matcher);

            bowDex.SetVocabulary(voca);

            SVM svm = new SVM();

            //FileStorage fs1 = new FileStorage("svm.yaml", FileStorage.Mode.Read);
            svm.Read(fs.GetRoot());

            foreach (string s in System.IO.Directory.GetFiles(@"C:\projects\local\testMQ\testMQ\bin\Debug\icons"))
            {
                Image <Bgr, Byte> test_img = new Image <Bgr, byte>(s);
                //Image<Bgr, Byte> test_img = new Image<Bgr, byte>(@"C:\projects\local\testMQ\testMQ\bin\Debug\mail_samples\email_icon_t.jpg");
                //Image<Bgr, Byte> test_img = new Image<Bgr, byte>(@"C:\projects\local\testMQ\testMQ\bin\Debug\phone_icons\icon_2.jpg");
                //Image<Bgr, Byte> test_img = new Image<Bgr, byte>(@"C:\test\35928233-email-icon-on-blue-background-clean-vector.jpg");
                Mat ii = new Mat();
                CvInvoke.CvtColor(test_img, ii, ColorConversion.Bgr2Gray);
                MKeyPoint[] kp   = surf.Detect(ii);
                Mat         desc = new Mat();
                bowDex.Compute(ii, new VectorOfKeyPoint(kp), desc);
                float r = svm.Predict(desc);
                Program.logIt(string.Format("{0}={1}", s, r));
            }
        }
Code Example #19
        public void New2()
        {
            var descriptorExtractor = SURF.Create(100);
            var descriptorMatcher   = new BFMatcher();

            new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher);
        }
Code Example #20
        /**
         * Extracts all the key points from a Mat image
         **/
        public static VectorOfKeyPoint extractKeyPoints(Mat image, int minHessian = 400)
        {
            VectorOfKeyPoint res      = new VectorOfKeyPoint();
            SURF             detector = new SURF(minHessian);

            detector.DetectRaw(image, res);
            return(res);
        }
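
A hypothetical companion helper, assuming the same Emgu CV API (Feature2D.Compute), that turns the extracted key points into SURF descriptors for later matching; the method name and defaults are illustrative, not from the source project:

        /**
         * Computes SURF descriptors for previously extracted key points (hypothetical helper)
         **/
        public static Mat computeDescriptors(Mat image, VectorOfKeyPoint keyPoints, int minHessian = 400)
        {
            Mat  descriptors = new Mat();
            SURF detector    = new SURF(minHessian);

            // Compute descriptors in place for the supplied key points
            detector.Compute(image, keyPoints, descriptors);
            return(descriptors);
        }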
Code Example #21
 public void New4()
 {
     using (var ip = new LinearIndexParams())
         using (var sp = new SearchParams())
             using (var descriptorExtractor = SURF.Create(100))
                 using (var descriptorMatcher = new FlannBasedMatcher(ip, sp))
                     using (new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher)) { }
 }
Code Example #22
        public void SURFDraw(Mat image, Mat testImage)
        {
            VectorOfKeyPoint keyPoint = new VectorOfKeyPoint();
            SURF             surfCPU  = new SURF(500, 4, 2, true, false);

            surfCPU.DetectRaw(image, keyPoint);
            Features2DToolbox.DrawKeypoints(image, keyPoint, testImage, new Bgr(Color.Red), Features2DToolbox.KeypointDrawType.Default);
        }
Code Example #23
        public static Tuple <UMat, VectorOfKeyPoint> DetectAndCompute
            (SURF surf, Image <Bgr, byte> image, bool b, IInputArray inputArray = null)
        {
            var keypoints = new VectorOfKeyPoint();
            var desc      = new UMat();

            surf.DetectAndCompute(image, inputArray, keypoints, desc, b);
            return(new Tuple <UMat, VectorOfKeyPoint>(desc, keypoints));
        }
Code Example #24
 public void New2Flann()
 {
     using (var descriptorExtractor = SURF.Create(100))
         using (var descriptorMatcher = new FlannBasedMatcher())
         {
             using (new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher)) { }
             using (new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher)) { }
         }
 }
Code Example #25
        public void Detect()
        {
            // These parameters should produce the same result as http://opencv.jp/wordpress/wp-content/uploads/lenna_SURF-150x150.png
            using var gray = Image("lenna.png", 0);
            using var surf = SURF.Create(500, 4, 2, true);
            var keyPoints = surf.Detect(gray);

            Console.WriteLine($"KeyPoint has {keyPoints.Length} items.");
        }
Code Example #26
        public void New4()
        {
            var descriptorExtractor = SURF.Create(100);
            LinearIndexParams ip    = new LinearIndexParams();
            SearchParams      sp    = new SearchParams();
            var descriptorMatcher   = new FlannBasedMatcher(ip, sp);

            new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher);
        }
Code Example #27
        public void TestSURFBlankImage()
        {
            SURF detector          = new SURF(500);
            Image <Gray, Byte> img = new Image <Gray, byte>(1024, 900);
            VectorOfKeyPoint   vp  = new VectorOfKeyPoint();
            Mat descriptors        = new Mat();

            detector.DetectAndCompute(img, null, vp, descriptors, false);
        }
Code Example #28
        /// <summary>
        /// Generates a SignMatcher with default path
        /// </summary>
        public SignMatcher()
        {
            if (Directory.Exists(@"KnownSigns/"))
            {
                KnownSignsPath = @"KnownSigns/";
            }

            // Upright SURF to speed up the process, with extended descriptors
            detector = new SURF(500, 4, 3, true, true);
        }
Code Example #29
        public void DetectAndCompute()
        {
            using (var gray = Image("lenna.png", ImreadModes.Grayscale))
                using (var surf = SURF.Create(500))
                    using (Mat descriptor = new Mat())
                    {
                        surf.DetectAndCompute(gray, null, out var keyPoints, descriptor);

                        Console.WriteLine($"keyPoints has {keyPoints.Length} items.");
                        Console.WriteLine($"descriptor has {descriptor.Rows} items.");
                    }
        }
Code Example #30
 public void Compute()
 {
     using (var color = Image("lenna.png", ImreadModes.Color))
         using (var gray = Image("lenna.png", ImreadModes.GrayScale))
             using (var descriptors = new Mat())
                 using (var latch = LATCH.Create())
                     using (var surf = SURF.Create(500))
                     {
                         var keypoints = surf.Detect(gray);
                         latch.Compute(color, ref keypoints, descriptors);
                     }
 }
Code Example #31
        static void Main(string[] args)
        {
            var image        = new Image <Bgr, byte>("RGB.jpg").Resize(0.4, Inter.Area);
            var image_gray   = image.Convert <Gray, byte>();
            var surfDetector = new SURF(1000);
            var keyPoints    = surfDetector.Detect(image_gray);

            foreach (var point in keyPoints)
            {
                CvInvoke.Circle(image, new Point((int)point.Point.X, (int)point.Point.Y), 1, new MCvScalar(0, 0, 255, 255), 2);
            }
            CvInvoke.Imshow("result", image);
            CvInvoke.WaitKey();
        }
Code Example #32
File: SURFDrawer.cs  Project: anlEl/EMGUCV
 public void Clear()
 {
     k = 2;
     uniquenessThreshold = 0.8;
     hessianThresh       = 300;
     CPU                 = new SURF(hessianThresh);
     matcher             = new BFMatcher(DistanceType.L2);
     descriptor          = new BriefDescriptorExtractor();
     homography          = null;
     matches             = new VectorOfVectorOfDMatch();
     modelDescriptors    = new Mat();
     observedDescriptors = new Mat();
     result              = new Mat();
 }
Code Example #33
        private void FillImageSet(List <ImageData> set, string prefix)
        {
            UtilityHelper.refreshDirectory(prefix);
            if (dialog.ShowDialog() == DialogResult.OK)
            {
                var files = Directory.GetFiles(dialog.SelectedPath, "*.dcm");
                foreach (var file in files)
                {
                    var ds      = new DicomImage(file);
                    var dsBones = new DicomImage(file)
                    {
                        WindowWidth  = 100,
                        WindowCenter = 500
                    };
                    var    image        = ds.RenderImage().AsBitmap();
                    var    imageBones   = dsBones.RenderImage().AsBitmap();
                    string newName      = prefix + "/" + Path.GetFileName(file).Replace(".dcm", ".jpg");
                    string newBonesName = prefix + "/" + Path.GetFileName(file).Replace(".dcm", "_bones.jpg");
                    image.Save(newName);
                    imageBones.Save(newBonesName);
                    Feature2D s;
                    switch (algorithm)
                    {
                    case Algo.ORB:
                        s = new ORBDetector();
                        break;

                    case Algo.SURF:
                        s = new SURF(0.8);
                        break;

                    default:
                        s = new SIFT();
                        break;
                    }
                    Mat mat              = CvInvoke.Imread(newBonesName, ImreadModes.Grayscale);
                    Mat matOrig          = CvInvoke.Imread(newName, ImreadModes.Unchanged);
                    var vec              = new VectorOfKeyPoint();
                    Mat modelDescriptors = new Mat();
                    s.DetectAndCompute(mat, null, vec, modelDescriptors, false);
                    ImageData id = new ImageData(matOrig, mat)
                    {
                        KeyPoints   = vec,
                        Descriptors = modelDescriptors
                    };
                    set.Add(id);
                }
            }
        }