Example #1
        private void MatchBySift(Mat src1, Mat src2)
        {
            using var gray1 = new Mat();
            using var gray2 = new Mat();

            Cv2.CvtColor(src1, gray1, ColorConversionCodes.BGR2GRAY);
            Cv2.CvtColor(src2, gray2, ColorConversionCodes.BGR2GRAY);

            using var sift = SIFT.Create();

            // Detect the keypoints and generate their descriptors using SIFT
            KeyPoint[] keypoints1, keypoints2;
            using var descriptors1 = new Mat<float>();
            using var descriptors2 = new Mat<float>();
            sift.DetectAndCompute(gray1, null, out keypoints1, descriptors1);
            sift.DetectAndCompute(gray2, null, out keypoints2, descriptors2);

            // Match descriptor vectors
            using var bfMatcher    = new BFMatcher(NormTypes.L2, false);
            using var flannMatcher = new FlannBasedMatcher();
            DMatch[] bfMatches    = bfMatcher.Match(descriptors1, descriptors2);
            DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);

            // Draw matches
            using var bfView = new Mat();
            Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, bfView);
            using var flannView = new Mat();
            Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, flannView);

            using (new Window("SIFT matching (by BFMather)", WindowMode.AutoSize, bfView))
                using (new Window("SIFT matching (by FlannBasedMatcher)", WindowMode.AutoSize, flannView))
                {
                    Cv2.WaitKey();
                }
        }
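Both matchers above keep every nearest-neighbour pair, outliers included. Below is a minimal sketch of Lowe's ratio test on the same SIFT descriptors; FilterByRatio and the 0.75 threshold are illustrative additions, not part of the original snippet.

        // Hypothetical helper (not in the original): keep only matches whose best
        // distance is clearly below the second-best distance (Lowe's ratio test).
        // Requires using System.Linq;
        private static DMatch[] FilterByRatio(Mat descriptors1, Mat descriptors2, float ratio = 0.75f)
        {
            using var matcher = new BFMatcher(NormTypes.L2, false);
            // k = 2: the two nearest train descriptors for every query descriptor
            DMatch[][] knn = matcher.KnnMatch(descriptors1, descriptors2, 2);
            return knn
                .Where(m => m.Length == 2 && m[0].Distance < ratio * m[1].Distance)
                .Select(m => m[0])
                .ToArray();
        }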
Example #2
        public bool ConfigRecognitionImageTrain(Mat imageTrain, Mat roiTrain, bool useGlobalMatch)
        {
            _trainsImage.Push(imageTrain);

            _keypointsImageTrain.Add(new VectorOfKeyPoint());
            _descriptorsImageTrain.Push(new Mat());

            _LODIndex = _trainsImage.Size - 1;

            SIFT sift = new SIFT();

            // Insert the target image's keypoints into the keypoint list
            _keypointsImageTrain.Insert(_LODIndex, new VectorOfKeyPoint(sift.Detect(_trainsImage[_LODIndex], roiTrain)));
            if (_keypointsImageTrain[_LODIndex] != null && _keypointsImageTrain[_LODIndex].Size < 4)
            {
                return(false);
            }

            // Compute descriptors for the extracted keypoints; if too few descriptors were extracted, return false = not recognized
            sift.Compute(_trainsImage[_LODIndex], _keypointsImageTrain[_LODIndex], _descriptorsImageTrain[_LODIndex]);
            if (_descriptorsImageTrain[_LODIndex].Rows < 4)
            {
                return(false);
            }

            if (useGlobalMatch)
            {
                return(true);
            }
            else
            {
                return(ConfigureImageTrainROI(_keypointsImageTrain[_LODIndex], roiTrain));
            }
        }
Example #3
        public static IDictionary <string, MatOfFloat> CreateHashes(IEnumerable <string> pathes, int thumbSize)
        {
            var hashesDict = new ConcurrentDictionary <string, MatOfFloat>();
            var tasks      = new List <Task>();

            foreach (var path in pathes)
            {
                var task = new Task(() =>
                {
                    var sourceMat = new Mat(path);

                    var scale = (double)thumbSize / Max(sourceMat.Width, sourceMat.Height);
                    sourceMat = sourceMat.Resize(new Size(0, 0), scale, scale, InterpolationFlags.Nearest);
                    var gray  = new Mat();


                    Cv2.CvtColor(sourceMat, gray, ColorConversionCodes.BGR2GRAY);

                    var sift = SIFT.Create();

                    var descriptors = new MatOfFloat();

                    Console.WriteLine("Creating hash for " + path);
                    //var keypoints = sift.Detect(gray).Take(KEYPOINTS_NUMBER).ToArray();
                    //sift.Compute(gray, ref keypoints, descriptors);
                    sift.DetectAndCompute(gray, null, out KeyPoint[] keypoints, descriptors);
                    hashesDict.TryAdd(path, descriptors);
                });
                tasks.Add(task);

                task.Start();
            }
            Task.WaitAll(tasks.ToArray());
            return(hashesDict);
        }
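The descriptors stored by CreateHashes are not literal hashes, so comparing two entries means matching them against each other. The helper below is one possible similarity score over the returned dictionary; CompareHashes and the distance-based scoring are assumptions, not part of the original code.

        // Hypothetical scoring helper: lower average L2 distance = more similar images.
        // Requires using System.Linq;
        public static double CompareHashes(MatOfFloat descriptorsA, MatOfFloat descriptorsB)
        {
            if (descriptorsA.Empty() || descriptorsB.Empty())
                return double.MaxValue;

            using var matcher = new BFMatcher(NormTypes.L2, false);
            DMatch[] matches = matcher.Match(descriptorsA, descriptorsB);
            return matches.Length == 0 ? double.MaxValue : matches.Average(m => m.Distance);
        }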
Example #4
        public static void FindMatch(string modelFileName, string observedFileName, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();
            {
                using (UMat uModelImage = CvInvoke.Imread(modelFileName, ImreadModes.Color).GetUMat(AccessType.Read))
                    using (UMat uObservedImage = CvInvoke.Imread(observedFileName, ImreadModes.Color).GetUMat(AccessType.Read))
                    {
                        SIFT sift             = new SIFT();
                        UMat modelDescriptors = new UMat();
                        sift.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                        UMat observedDescriptors = new UMat();
                        sift.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                        BFMatcher matcher = new BFMatcher(DistanceType.L2);
                        matcher.Add(modelDescriptors);

                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                        mask.SetTo(new MCvScalar(255));
                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                    }
            }
        }
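A possible caller for the Emgu FindMatch above, counting how many matches survive the uniqueness vote; CountUniqueMatches and its file-name parameters are hypothetical.

        // Hypothetical usage: run FindMatch and count the matches that passed VoteForUniqueness.
        public static int CountUniqueMatches(string modelFileName, string observedFileName)
        {
            var matches = new VectorOfVectorOfDMatch();
            FindMatch(modelFileName, observedFileName,
                      out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints,
                      matches, out Mat mask);
            return CvInvoke.CountNonZero(mask);
        }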
Example #5
        void Application_Idle(object sender, EventArgs e)
        {
            if (vc != null && !pause)
            {
                SIFT detector = new SIFT();

                Emgu.CV.Util.VectorOfKeyPoint keypoints = new Emgu.CV.Util.VectorOfKeyPoint();

                vc.Read(frame);
                System.Threading.Thread.Sleep((int)(1000.0 / rate - 5));
                //imageBox1.Image = frame;

                frLbl.Text = rate.ToString();
                cfLbl.Text = currentFrame.ToString();
                fcLbl.Text = frameCount.ToString();

                vc.Read(frame);
                imageBox1.Image = frame;
                //detector.Detect(frame);
                detector.DetectRaw(frame, keypoints);
                numOfKeyPoints = keypoints.Size;
                kpLbl.Text     = numOfKeyPoints.ToString();
                Features2DToolbox.DrawKeypoints(frame, keypoints, siftFrame, new Bgr(Color.Blue));
                imageBox2.Image = siftFrame;
                GC.Collect();

                currentFrame++;

                if (currentFrame >= frameCount)
                {
                    pause           = true;
                    button4.Enabled = false;
                }
            }
        }
Example #6
        public void TestAgast()
        {
            AgastFeatureDetector agast = new AgastFeatureDetector();
            SIFT sift = new SIFT();

            TestFeature2DTracker(agast, sift);
        }
Example #7
 public static Mat GetHomography(Mat mMain, Mat mSecondary)
 {
     KeyPoint[] keypoints  = null;
     KeyPoint[] keypoints2 = null;
     using (SIFT sIFT = SIFT.Create(1000))
     {
         using (Mat mat = new Mat())
         {
             using (Mat mat2 = new Mat())
             {
                 sIFT.DetectAndCompute(mMain, new Mat(), out keypoints, mat);
                 sIFT.DetectAndCompute(mSecondary, new Mat(), out keypoints2, mat2);
                 FlannBasedMatcher flannBasedMatcher = new FlannBasedMatcher();
                 DMatch[]          array             = new DMatch[0];
                 array = flannBasedMatcher.Match(mat, mat2);
                 List <Point2f> list  = new List <Point2f>();
                 List <Point2f> list2 = new List <Point2f>();
                 for (int i = 0; i < array.Length; i++)
                 {
                     list.Add(keypoints[array[i].QueryIdx].Pt);
                     list2.Add(keypoints2[array[i].TrainIdx].Pt);
                 }
                 return(Cv2.FindHomography(InputArray.Create(list2), InputArray.Create(list), HomographyMethods.Ransac));
             }
         }
     }
 }
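The homography returned above maps points from mSecondary into mMain's coordinate frame, so it can be passed straight to a perspective warp. A minimal sketch under that assumption; AlignToMain is not part of the original code.

 // Hypothetical usage: warp mSecondary into mMain's frame with the homography above.
 public static Mat AlignToMain(Mat mMain, Mat mSecondary)
 {
     using Mat homography = GetHomography(mMain, mSecondary);
     var warped = new Mat();
     Cv2.WarpPerspective(mSecondary, warped, homography, new Size(mMain.Width, mMain.Height));
     return warped;
 }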
Example #8
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            int    k = 2;
            double uniquenessThreshold = 0.80;
            double hessianThresh       = 100;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    SURF surfCPU = new SURF(hessianThresh);
                    SIFT siftCPU = new SIFT();


                    //extract features from the object image
                    UMat modelDescriptors = new UMat();

                    //surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
                    siftCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                    watch = Stopwatch.StartNew();

                    // extract features from the observed image
                    UMat observedDescriptors = new UMat();

                    //surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                    siftCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                    BFMatcher matcher = new BFMatcher(DistanceType.L2);
                    matcher.Add(modelDescriptors);

                    matcher.KnnMatch(observedDescriptors, matches, k, null);
                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                    mask.SetTo(new MCvScalar(255));

                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                    //Features2DToolbox.VoteForUniqueness(matches, 1, mask);

                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                    if (nonZeroCount >= 4)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                   matches, mask, 1.5, 20);
                        if (nonZeroCount >= 4)
                        {
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                  observedKeyPoints, matches, mask, 2);
                        }
                    }

                    watch.Stop();
                }
            matchTime = watch.ElapsedMilliseconds;
        }
Example #9
        static void Main(string[] args)
        {
            for (int i = 0; i < args.Length; i++)
            {
                System.Console.Out.WriteLine(args[i]);
            }
            using (StreamWriter sw = new StreamWriter(tmp_path, false))
            {
                sift = new SIFT();
                GenSIFT(args[0], sw);
                sw.Close();
            }

            Client client = new Client("10.141.211.159", 10021);

            client.SendFile(tmp_path);
            string ans = client.Receive();

            System.Console.Out.WriteLine(ans);
            string[] anss = ans.Split(' ');
            for (int i = 0; i < anss.Length; i++)
            {
                string[] t = anss[i].Split(':');
                System.Console.Out.WriteLine(t[0]);
                TryShowImage(@"LibX\" + t[0]);
            }
            client.Close();
        }
Example #10
        public static List <CriteriaImageModel> CreateCriteriaArrays(FileInfo[] criteriaFiles)
        {
            var criteriaImages = new List <CriteriaImageModel>();

            foreach (var o in criteriaFiles)
            {
                using (var image = CvInvoke.Imread(o.FullName, ImreadModes.Grayscale))
                {
                    var mdlImage = new Mat();
                    CvInvoke.Threshold(image, mdlImage, 127.0, 255.0, ThresholdType.BinaryInv);
                    var uModelImage      = mdlImage.GetUMat(AccessType.Read);
                    var modelDescriptors = new Mat();
                    var modelKeyPoints   = new VectorOfKeyPoint();
                    using (var featureDetector = new SIFT(0, 3, 0.04, 10.0, 1.6))
                    {
                        featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
                    }
                    criteriaImages.Add(new CriteriaImageModel
                    {
                        Info             = o,
                        Image            = uModelImage,
                        ModelDescriptors = modelDescriptors,
                        ModelKeyPoints   = modelKeyPoints
                    });
                }
            }
            return(criteriaImages);
        }
Example #11
    public KeyPoint[] getKeyPoints(Mat camMat, int nKeyPoints)
    {
        orb = SIFT.Create(nKeyPoints);
        KeyPoint[] keyPoints = orb.Detect(camMat);

        return(keyPoints);
    }
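A quick way to sanity-check the detector output of getKeyPoints is to draw the keypoints back onto the frame. A sketch using the same OpenCvSharp types; DrawKeyPoints is a hypothetical helper.

    // Hypothetical usage: render the detected keypoints for visual inspection.
    public Mat DrawKeyPoints(Mat camMat, int nKeyPoints)
    {
        KeyPoint[] keyPoints = getKeyPoints(camMat, nKeyPoints);
        var output = new Mat();
        Cv2.DrawKeypoints(camMat, keyPoints, output, new Scalar(0, 255, 0));
        return output;
    }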
Example #12
        public void Run(Mat gray1, Mat gray2, Mat dst2, bool useBFMatcher, int pointsToMatch)
        {
            var sift = SIFT.Create(pointsToMatch);

            KeyPoint[] keypoints1, keypoints2;
            var        descriptors1 = new Mat();
            var        descriptors2 = new Mat();

            sift.DetectAndCompute(gray1, null, out keypoints1, descriptors1);
            sift.DetectAndCompute(gray2, null, out keypoints2, descriptors2);

            if (useBFMatcher)
            {
                var      bfMatcher = new BFMatcher(NormTypes.L2, false);
                DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);
                Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, dst2);
            }
            else
            {
                var      flannMatcher = new FlannBasedMatcher();
                DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);
                Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, dst2);
            }
            kp1 = keypoints1;
            kp2 = keypoints2;
        }
Example #13
        private void MatchBySift(Mat src1, Mat src2)
        {
            Mat gray1 = new Mat();
            Mat gray2 = new Mat();

            Cv2.CvtColor(src1, gray1, ColorConversion.BgrToGray);
            Cv2.CvtColor(src2, gray2, ColorConversion.BgrToGray);

            SIFT sift = new SIFT();

            // Detect the keypoints and generate their descriptors using SIFT
            KeyPoint[] keypoints1, keypoints2;
            MatOfFloat descriptors1 = new MatOfFloat();
            MatOfFloat descriptors2 = new MatOfFloat();

            sift.Run(gray1, null, out keypoints1, descriptors1);
            sift.Run(gray2, null, out keypoints2, descriptors2);

            // Matching descriptor vectors with a brute force matcher
            BFMatcher matcher = new BFMatcher(NormType.L2, false);

            DMatch[] matches = matcher.Match(descriptors1, descriptors2);

            // Draw matches
            Mat view = new Mat();

            Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, matches, view);

            using (new Window("SIFT matching", WindowMode.AutoSize, view))
            {
                Cv2.WaitKey();
            }
        }
Example #14
        public void TestGFTTDetector()
        {
            GFTTDetector keyPointDetector    = new GFTTDetector(1000, 0.01, 1, 3, false, 0.04);
            SIFT         descriptorGenerator = new SIFT();

            //ParamDef[] parameters = keyPointDetector.GetParams();
            TestFeature2DTracker(keyPointDetector, descriptorGenerator);
        }
Example #15
 public void DefaultNorm()
 {
     using (var alg = SIFT.Create())
     {
         var defnorm = alg.DefaultNorm;
         Assert.Equal(4, defnorm);
     }
 }
Example #16
 public void DescriptorType()
 {
     using (var alg = SIFT.Create())
     {
         var dtype = alg.DescriptorType;
         Assert.Equal(MatType.CV_32F, dtype);
     }
 }
Example #17
 public void DescriptorSize()
 {
     using (var alg = SIFT.Create())
     {
         var sz = alg.DescriptorSize;
         Assert.Equal(128, sz);
     }
 }
Example #18
        public void TestMSER()
        {
            MSERDetector keyPointDetector    = new MSERDetector();
            SIFT         descriptorGenerator = new SIFT();

            //ParamDef[] parameters = keyPointDetector.GetParams();
            TestFeature2DTracker(keyPointDetector, descriptorGenerator);
        }
Example #19
        public void SIFTDraw(Mat image, Mat testImage)
        {
            SIFT             siftCPU  = new SIFT();
            VectorOfKeyPoint keyPoint = new VectorOfKeyPoint();

            siftCPU.DetectRaw(image, keyPoint);

            Features2DToolbox.DrawKeypoints(image, keyPoint, testImage, new Bgr(Color.GreenYellow), Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);
        }
Example #20
        public void Detect()
        {
            KeyPoint[] keyPoints = null;
            using (var gray = Image("lenna.png", 0))
                using (var surf = SIFT.Create(500))
                    keyPoints = surf.Detect(gray);

            Console.WriteLine($"KeyPoint has {keyPoints.Length} items.");
        }
Example #21
        public void Detect()
        {
            KeyPoint[] keyPoints;
            using (var gray = Image("lenna.png", 0))
                using (var alg = SIFT.Create(500))
                    keyPoints = alg.Detect(gray);

            testOutputHelper.WriteLine($"KeyPoint has {keyPoints.Length} items.");
        }
Example #22
        public static Mat Draw(Mat modelImage, Mat observedImage)
        {
            var sift = new SIFT();

            var modelKeyPoints    = new VectorOfKeyPoint();
            var observedKeyPoints = new VectorOfKeyPoint();

            UMat modelDescriptors    = new UMat();
            UMat observedDescriptors = new UMat();

            sift.DetectAndCompute(modelImage, null, modelKeyPoints, modelDescriptors, false);
            sift.DetectAndCompute(observedImage, null, observedKeyPoints, observedDescriptors, false);

            BFMatcher matcher = new BFMatcher(DistanceType.L2);

            matcher.Add(modelDescriptors);

            var matches = new VectorOfVectorOfDMatch();

            matcher.KnnMatch(observedDescriptors, matches, 2, null);

            var mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
            Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);

            var homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 10);

            var result = new Mat();

            Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints, matches, result,
                                          new MCvScalar(255, 255, 255),
                                          new MCvScalar(0, 0, 0),
                                          mask,
                                          Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

            Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);

            PointF[] pts =
            {
                new PointF(rect.Left,  rect.Bottom),
                new PointF(rect.Right, rect.Bottom),
                new PointF(rect.Right, rect.Top),
                new PointF(rect.Left,  rect.Top)
            };
            pts = CvInvoke.PerspectiveTransform(pts, homography);

            Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);
            using (VectorOfPoint vp = new VectorOfPoint(points))
            {
                CvInvoke.Polylines(result, vp, true, new MCvScalar(0, 255, 0, 55), 2);
            }

            return(result);
        }
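A possible caller for Draw, assuming the model and scene images are on disk and using Emgu's CvInvoke display helpers; ShowMatches and its parameters are placeholders.

        // Hypothetical usage: load a model and a scene image, draw the matches and display them.
        public static void ShowMatches(string modelPath, string observedPath)
        {
            using (Mat model = CvInvoke.Imread(modelPath, ImreadModes.Color))
            using (Mat observed = CvInvoke.Imread(observedPath, ImreadModes.Color))
            using (Mat result = Draw(model, observed))
            {
                CvInvoke.Imshow("SIFT matches", result);
                CvInvoke.WaitKey();
            }
        }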
Example #23
        public void TestStar()
        {
            StarDetector keyPointDetector = new StarDetector();

            //SURF descriptorGenerator = new SURF(500, false);
            SIFT descriptorGenerator = new SIFT();

            //ParamDef[] parameters = keyPointDetector.GetParams();
            TestFeature2DTracker(keyPointDetector, descriptorGenerator);
        }
Example #24
        private static void Run()
        {
            var dm = DescriptorMatcher.Create("BruteForce");

            dm.Clear();

            Console.WriteLine(Cv2.GetCudaEnabledDeviceCount());

            string[] algoNames = Algorithm.GetList();
            Console.WriteLine(String.Join("\n", algoNames));

            SIFT al1 = Algorithm.Create <SIFT>("Feature2D.SIFT");

            string[] ppp = al1.GetParams();
            Console.WriteLine(ppp);
            var    t = al1.ParamType("contrastThreshold");
            double d = al1.GetDouble("contrastThreshold");

            t.ToString();
            d.ToString();

            var src    = new Mat("img/lenna.png");
            var rand   = new Random();
            var memory = new List <long>(100);

            var a1 = new Mat(src, Rect.FromLTRB(0, 0, 30, 40));
            var a2 = new Mat(src, Rect.FromLTRB(0, 0, 30, 40));
            var a3 = new Mat(src, Rect.FromLTRB(0, 0, 30, 40));

            a3.ToString();

            for (long i = 0;; i++)
            {
                SIFT a = Algorithm.Create <SIFT>("Feature2D.SIFT");
                a.ToString();

                for (int j = 0; j < 200; j++)
                {
                    int c1   = rand.Next(100, 400);
                    int c2   = rand.Next(100, 400);
                    Mat temp = src.Row[c1].Clone(); // clone: Row[] returns a view, so swapping without a copy would alias the data
                    src.Row[c1] = src.Row[c2];
                    src.Row[c2] = temp;
                }

                memory.Add(MyProcess.WorkingSet64);
                if (memory.Count >= 100)
                {
                    double average = memory.Average();
                    Console.WriteLine("{0:F3}MB", average / 1024.0 / 1024.0);
                    memory.Clear();
                    GC.Collect();
                }
            }
        }
Example #25
 public OriantatioOnMap(Image <Rgb, byte> Map, SIFTParametrs parametrs, double Compression = 4, double Radius = 20)
 {
     this.Map = Map;
     using (SIFT siftCPU = new SIFT(parametrs.nFeatures, parametrs.nOctaveLayers,
                                    parametrs.contrastThreshold, parametrs.edgeThreshold, parametrs.sigma))
     {
         VectorMapKeyPoint = new VectorOfKeyPoint(siftCPU.Detect(Map));
         VectorMapKeyPoint = FilterKeyPoint(VectorMapKeyPoint, Map, Compression, Radius, parametrs);
         siftCPU.Compute(Map, VectorMapKeyPoint, MapDiscriptors);
     }
 }
Example #26
        public void GetData()
        {
            var imPath = "D:\\PATHTOIMAGE\\Original.jpg";
            var image  = Cv2.ImRead(imPath);
            var sift   = new SIFT(100);

            KeyPoint[] keypoints;
            MatOfFloat descriptors = new MatOfFloat();

            sift.Run(image, null, out keypoints, descriptors);
        }
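This example is written against the older OpenCvSharp API (new SIFT(count), sift.Run, MatOfFloat). Below is a sketch of the same steps using the SIFT.Create / DetectAndCompute style seen in the other examples; GetDataNewApi is illustrative only.

        // Hypothetical newer-API equivalent of GetData above.
        public void GetDataNewApi()
        {
            var imPath = "D:\\PATHTOIMAGE\\Original.jpg";
            using var image = Cv2.ImRead(imPath);
            using var sift  = SIFT.Create(100);

            using var descriptors = new Mat();
            sift.DetectAndCompute(image, null, out KeyPoint[] keypoints, descriptors);
            Console.WriteLine(keypoints.Length + " keypoints");
        }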
Example #27
        public void ShowKeyPoints()
        {
            lstMat.Clear();
            lstModelDescriptors.Clear();
            var featureDetector = new SIFT();

            Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams();
            Emgu.CV.Flann.SearchParams      sp = new SearchParams();
            DescriptorMatcher matcher          = new FlannBasedMatcher(ip, sp);
            Rectangle         cropRect         = new Rectangle(842, 646, 70, 70);
            Mat mask = new Mat(new Size(70, 70), DepthType.Cv8U, 1);

            CvInvoke.Rectangle(mask, new Rectangle(0, 0, 70, 70), new MCvScalar(255, 255, 255), -1);
            CvInvoke.Circle(mask, new Point(35, 37), 22, new MCvScalar(0, 0, 0), -1);


            lstMat.Add(mask);
            String[] folders = { @"Linage2\Main\PartyAuto", @"Linage2\Main\PartyManual" };
            foreach (String folder in folders)
            {
                DirectoryInfo imageFolder = new DirectoryInfo(folder);
                FileInfo[]    files       = Utils.GetFilesByExtensions(imageFolder, ".jpg", ".png").ToArray();
                foreach (FileInfo finfo in files)
                {
                    Mat img  = CvInvoke.Imread(finfo.FullName, ImreadModes.Color);
                    Mat crop = CVUtil.crop_color_frame(img, cropRect);
                    //lstMat.Add(crop);
                    VectorOfKeyPoint modelKeyPoints = new VectorOfKeyPoint();
                    Mat modelDescriptors            = new Mat();
                    featureDetector.DetectAndCompute(crop, mask, modelKeyPoints, modelDescriptors, false);
                    lstModelDescriptors.Add(modelDescriptors);
                    Mat result = new Mat();
                    Features2DToolbox.DrawKeypoints(crop, modelKeyPoints, result, new Bgr(Color.Red));

                    lstMat.Add(result);
                    //BOWImgDescriptorExtractor bow = new BOWImgDescriptorExtractor(featureDetector, matcher);
                }
            }


            /*BOWKMeansTrainer bowtrainer = new BOWKMeansTrainer(1000, new MCvTermCriteria(10, 0.001), 1, Emgu.CV.CvEnum.KMeansInitType.PPCenters);
             * foreach (Mat m in lstModelDescriptors) {
             *  bowtrainer.Add(m);
             * }
             * Mat dict = new Mat();
             * bowtrainer.Cluster();
             * StringBuilder sb = new StringBuilder();
             * Image<Bgr, Byte> imgsave = dict.ToImage<Bgr, Byte>();
             *
             * (new XmlSerializer(typeof(Image<Bgr, Byte>))).Serialize(new StringWriter(sb), imgsave);
             * Console.WriteLine(sb.ToString());*/
        }
Example #28
        private VectorOfKeyPoint FilterKeyPoint(VectorOfKeyPoint InputVecor, Image <Rgb, byte> SourceImage, double Compression, double Diameter, SIFTParametrs parametrs)
        {
            VectorOfKeyPoint OutputVector = null;

            SourceImage = SourceImage.Resize(1.0 / Compression, Emgu.CV.CvEnum.Inter.Area);
            using (SIFT siftCPU = new SIFT(parametrs.nFeatures, parametrs.nOctaveLayers,
                                           parametrs.contrastThreshold, parametrs.edgeThreshold, parametrs.sigma))
            {
                VectorOfKeyPoint MainVecor = new VectorOfKeyPoint(siftCPU.Detect(SourceImage, null));
                OutputVector = new VectorOfKeyPoint(RemoveFakeKeyPoint(MainVecor, InputVecor, Compression, Diameter));
            }
            return(OutputVector);
        }
Example #29
        public static List <System.Drawing.Point> func(Bitmap bitmap1, Bitmap bitmap2)
        {
            //Mat img1 = new Mat(@"roll/0.png", ImreadModes.Unchanged);
            //Mat img2 = new Mat(@"roll/1.png", ImreadModes.Unchanged);
            Mat  img1 = BitmapToMat(bitmap1);
            Mat  img2 = BitmapToMat(bitmap2);
            SIFT sift = SIFT.Create(20);

            //KeyPoint[] k = sift.Detect(img1);
            // Detect the keypoints and generate their descriptors using SIFT
            KeyPoint[] keypoints1, keypoints2;
            var        descriptors1 = new Mat <float>();
            var        descriptors2 = new Mat <float>();

            sift.DetectAndCompute(img1, null, out keypoints1, descriptors1);
            sift.DetectAndCompute(img2, null, out keypoints2, descriptors2);

            // Match descriptor vectors
            var bfMatcher    = new BFMatcher(NormTypes.L2, false);
            var flannMatcher = new FlannBasedMatcher();

            DMatch[] bfMatches    = bfMatcher.Match(descriptors1, descriptors2);
            DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);

            // Draw matches
            var bfView = new Mat();

            Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, bfMatches, bfView);
            var flannView = new Mat();

            Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, flannMatches, flannView);

            using (new Window("SIFT matching (by BFMatcher)", bfView))
                using (new Window("SIFT matching (by FlannBasedMatcher)", flannView))
                {
                    Cv2.WaitKey();
                }
            List <System.Drawing.Point> points = new List <System.Drawing.Point>();

            foreach (DMatch match in bfMatches)
            {
                System.Drawing.Point p = new System.Drawing.Point();
                p.X = (int)(keypoints1[match.QueryIdx].Pt.X - keypoints2[match.TrainIdx].Pt.X);
                p.Y = (int)(keypoints1[match.QueryIdx].Pt.Y - keypoints2[match.TrainIdx].Pt.Y);
                points.Add(p);
            }

            return(points);
        }
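The point list returned by func is the per-match pixel offset between the two bitmaps; a typical next step is to collapse it into a single estimated shift. The sketch below uses a plain average; EstimateShift is an assumption (a median would resist outliers better).

        // Hypothetical usage: reduce the per-match offsets to one estimated shift.
        // Requires using System.Linq;
        public static System.Drawing.Point EstimateShift(Bitmap bitmap1, Bitmap bitmap2)
        {
            List <System.Drawing.Point> offsets = func(bitmap1, bitmap2);
            if (offsets.Count == 0)
            {
                return System.Drawing.Point.Empty;
            }

            return new System.Drawing.Point(
                (int)offsets.Average(p => p.X),
                (int)offsets.Average(p => p.Y));
        }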
Example #30
        private void FillImageSet(List <ImageData> set, string prefix)
        {
            UtilityHelper.refreshDirectory(prefix);
            if (dialog.ShowDialog() == DialogResult.OK)
            {
                var files = Directory.GetFiles(dialog.SelectedPath, "*.dcm");
                foreach (var file in files)
                {
                    var ds      = new DicomImage(file);
                    var dsBones = new DicomImage(file)
                    {
                        WindowWidth  = 100,
                        WindowCenter = 500
                    };
                    var    image        = ds.RenderImage().AsBitmap();
                    var    imageBones   = dsBones.RenderImage().AsBitmap();
                    string newName      = prefix + "/" + Path.GetFileName(file).Replace(".dcm", ".jpg");
                    string newBonesName = prefix + "/" + Path.GetFileName(file).Replace(".dcm", "_bones.jpg");
                    image.Save(newName);
                    imageBones.Save(newBonesName);
                    Feature2D s;
                    switch (algorithm)
                    {
                    case Algo.ORB:
                        s = new ORBDetector();
                        break;

                    case Algo.SURF:
                        s = new SURF(0.8);
                        break;

                    default:
                        s = new SIFT();
                        break;
                    }
                    Mat mat              = CvInvoke.Imread(newBonesName, ImreadModes.Grayscale);
                    Mat matOrig          = CvInvoke.Imread(newName, ImreadModes.Unchanged);
                    var vec              = new VectorOfKeyPoint();
                    Mat modelDescriptors = new Mat();
                    s.DetectAndCompute(mat, null, vec, modelDescriptors, false);
                    ImageData id = new ImageData(matOrig, mat)
                    {
                        KeyPoints   = vec,
                        Descriptors = modelDescriptors
                    };
                    set.Add(id);
                }
            }
        }