Example No. 1
        public void TestORB()
        {
            ORBDetector orb = new ORBDetector(700);

            //String[] parameters = orb.GetParamNames();
            EmguAssert.IsTrue(TestFeature2DTracker(orb, orb), "Unable to find homography matrix");
        }
Example No. 2
        public void FindFeaturePointsBetweenTwoImages()
        {
            var filename            = "";
            var filename2           = "";
            var orb                 = new ORBDetector(2000);
            Image <Bgr, byte> left  = new Image <Bgr, byte>(filename);
            Image <Bgr, byte> right = new Image <Bgr, byte>(filename2);
            var vectorLeft          = new VectorOfKeyPoint();
            var vectorRight         = new VectorOfKeyPoint();
            var matLeft             = new Mat();
            var matRight            = new Mat();

            orb.DetectAndCompute(left, null, vectorLeft, matLeft, false);
            orb.DetectAndCompute(right, null, vectorRight, matRight, false);

            var matcher = new BFMatcher(DistanceType.Hamming2, true);
            var matches = new VectorOfVectorOfDMatch();

            matcher.Add(matLeft);
            matcher.KnnMatch(matRight, matches, 1, null);


            CalculateEssentialMatrix(vectorLeft, vectorRight, camera.CameraMatrix);
            CalculateFundamentalMatrix(vectorLeft, vectorRight);
        }
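The CalculateEssentialMatrix and CalculateFundamentalMatrix helpers (and the camera field) are not shown above, and they are handed the raw, unmatched keypoint vectors. Epipolar-geometry estimation works on matched point pairs, so a pairing step along the following lines would normally sit between KnnMatch and those calls. This is a hedged sketch: the variable names come from the method above, everything else is an assumption.

            // Hypothetical pairing step: turn the KnnMatch result into corresponding point sets.
            // TrainIdx indexes the descriptors added to the matcher (left image),
            // QueryIdx indexes the query descriptors (right image).
            var leftPoints  = new List <PointF>();
            var rightPoints = new List <PointF>();

            for (int i = 0; i < matches.Size; i++)
            {
                if (matches[i].Size == 0)
                {
                    continue;
                }
                MDMatch m = matches[i][0];
                leftPoints.Add(vectorLeft[m.TrainIdx].Point);
                rightPoints.Add(vectorRight[m.QueryIdx].Point);
            }

            // Wrapped as point vectors, these pairs are what the fundamental/essential
            // matrix helpers would actually consume (e.g. via CvInvoke.FindFundamentalMat).
            var pairedLeft  = new VectorOfPointF(leftPoints.ToArray());
            var pairedRight = new VectorOfPointF(rightPoints.ToArray());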
        public UMat ORBDescriptor()
        {
            //ORB Feature Descriptor
            ORBDetector      orbDetector       = null;
            VectorOfKeyPoint modelKeyPointsOrb = null;

            try
            {
                orbDetector       = new ORBDetector(500, 1, 8, 30, 0, 3, ORBDetector.ScoreType.Harris, 31, 20);
                modelKeyPointsOrb = new VectorOfKeyPoint();
                MKeyPoint[] mKeyPointsOrb = orbDetector.Detect(preProcessedImageInGrayScale);
                modelKeyPointsOrb.Push(mKeyPointsOrb);
                UMat ORBDescriptor = new UMat();
                // Compute descriptors for the keypoints detected above
                // (useProvidedKeyPoints = true, so detection is not repeated on an empty vector).
                orbDetector.DetectAndCompute(preProcessedImageInGrayScale, null, modelKeyPointsOrb, ORBDescriptor, true);
                return(ORBDescriptor);
            }
            finally
            {
                // Null-conditional dispose guards against a failed construction.
                orbDetector?.Dispose();
                modelKeyPointsOrb?.Dispose();
            }
            }
        }
Example No. 4
        private void Button1_Click(object sender, EventArgs e)
        {
            Image <Rgb, Byte> image;

            ORBDetector detector = new ORBDetector();
            BFMatcher   matcher  = new BFMatcher(DistanceType.Hamming2);

            OpenFileDialog open = new OpenFileDialog();

            open.Filter = "Image Files (*.tif; *.dcm; *.jpg; *.jpeg; *.bmp)|*.tif; *.dcm; *.jpg; *.jpeg; *.bmp";

            if (open.ShowDialog() == DialogResult.OK)
            {
                image = new Image <Rgb, Byte>(open.FileName);
                (Image <Rgb, byte> Image, VectorOfKeyPoint Keypoints, Mat Descriptors)imgModel = (image.Resize(0.2, Emgu.CV.CvEnum.Inter.Area), new VectorOfKeyPoint(), new Mat());

                imageBox1.Image = image;
                //detector.DetectAndCompute(imgModel.Image, null, imgModel.Keypoints, imgModel.Descriptors, false);
                //matcher.Add(imgModel.Descriptors);
                //matcher.KnnMatch(imgTest.Descriptors, matches, 1, null);
                //imageBox2.Image = imgModel.Image;

                var ext = new List <string> {
                    ".jpg", ".gif", ".png"
                };
                var myFiles = Directory.GetFiles(@"\\psta.ru\EDU\Students\s43880\Desktop\!Примеры фото и скриптов для живых рисунков\TEmplates", "*.*", SearchOption.AllDirectories)
                              .Where(s => ext.Contains(Path.GetExtension(s)));
                int    Max     = 0;
                string MaxPath = "000";
                var    scene   = new Mat(open.FileName);
                foreach (var a in myFiles)
                {
                    var model = new Mat(a);

                    VectorOfKeyPoint       modelKeyPoints;
                    VectorOfKeyPoint       observedKeyPoints;
                    VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
                    Mat mask;
                    Mat homography;
                    int Match = FindMatch1(model, scene, out modelKeyPoints, out observedKeyPoints, matches, out mask, out homography);
                    if (Match > Max)
                    {
                        Max     = Match;
                        MaxPath = a;
                    }
                }
                //var model = new Mat(@"\\psta.ru\EDU\Students\s43880\Desktop\!Примеры фото и скриптов для живых рисунков\TEmplates\traktor.jpg");
                //var scene = new Mat(open.FileName);
                label1.Text = Path.GetFileName(MaxPath);
                var result = Draw(new Mat(MaxPath), scene, 1);
                imageBox2.Image = result;
                var result2 = Draw(new Mat(MaxPath), scene, 2);
                imageBox3.Image = result2;
            }
        }
Example No. 5
        private void FillImageSet(List <ImageData> set, string prefix)
        {
            UtilityHelper.refreshDirectory(prefix);
            if (dialog.ShowDialog() == DialogResult.OK)
            {
                var files = Directory.GetFiles(dialog.SelectedPath, "*.dcm");
                foreach (var file in files)
                {
                    var ds      = new DicomImage(file);
                    var dsBones = new DicomImage(file)
                    {
                        WindowWidth  = 100,
                        WindowCenter = 500
                    };
                    var    image        = ds.RenderImage().AsBitmap();
                    var    imageBones   = dsBones.RenderImage().AsBitmap();
                    string newName      = prefix + "/" + Path.GetFileName(file).Replace(".dcm", ".jpg");
                    string newBonesName = prefix + "/" + Path.GetFileName(file).Replace(".dcm", "_bones.jpg");
                    image.Save(newName);
                    imageBones.Save(newBonesName);
                    Feature2D s;
                    switch (algorithm)
                    {
                    case Algo.ORB:
                        s = new ORBDetector();
                        break;

                    case Algo.SURF:
                        s = new SURF(0.8);
                        break;

                    default:
                        s = new SIFT();
                        break;
                    }
                    Mat mat              = CvInvoke.Imread(newBonesName, ImreadModes.Grayscale);
                    Mat matOrig          = CvInvoke.Imread(newName, ImreadModes.Unchanged);
                    var vec              = new VectorOfKeyPoint();
                    Mat modelDescriptors = new Mat();
                    s.DetectAndCompute(mat, null, vec, modelDescriptors, false);
                    ImageData id = new ImageData(matOrig, mat)
                    {
                        KeyPoints   = vec,
                        Descriptors = modelDescriptors
                    };
                    set.Add(id);
                }
            }
        }
        public static void FindMatch(string pageFile, string templateFile)
        {
            Image <Rgb, byte> page     = getPreprocessedImage(pageFile);
            Image <Rgb, byte> template = getPreprocessedImage(templateFile);

            var detector = new ORBDetector();
            VectorOfKeyPoint templateKeyPoints = new VectorOfKeyPoint();
            Mat templateDescriptors            = new Mat();

            detector.DetectAndCompute(template, null, templateKeyPoints, templateDescriptors, false);

            VectorOfKeyPoint pageKeyPoints = new VectorOfKeyPoint();
            Mat pageDescriptors            = new Mat();

            detector.DetectAndCompute(page, null, pageKeyPoints, pageDescriptors, false);
            using (var matcher = new BFMatcher(DistanceType.Hamming)) // ORB descriptors are binary, so Hamming distance is appropriate
            {
                matcher.Add(templateDescriptors);
                VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

                //VectorOfDMatch matches2 = new VectorOfDMatch();
                //matcher.Match(pageDescriptors, matches2);


                matcher.KnnMatch(pageDescriptors, matches, 2, null);

                Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));
                Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
                Mat homography   = new Mat();
                int nonZeroCount = CvInvoke.CountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(templateKeyPoints, pageKeyPoints, matches, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(templateKeyPoints, pageKeyPoints, matches, mask, 2);
                    }
                }

                Mat result = new Mat();
                Features2DToolbox.DrawMatches(template, templateKeyPoints, page, pageKeyPoints, matches, result, new MCvScalar(0, 255, 0), new MCvScalar(255, 0, 0), mask, Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

                //Features2DToolbox.DrawMatches(template, templateKeyPoints, page, pageKeyPoints, matches2, result, new MCvScalar(0, 255, 0), new MCvScalar(255, 0, 0), null, Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

                MainForm.This.PageBox.Image = result.ToBitmap();
            }
        }
        public SimpleAdHocTracker(CameraCalibrationInfo calibrationInfo)
        {
            _calibrationInfo = calibrationInfo;

            _detector = new ORBDetector();

            _prevGray = new Mat();
            _currGray = new Mat();

            _raux = new Mat();
            _taux = new Mat();

            _bootstrapKp       = new VectorOfKeyPoint();
            _trackedFeatures   = new VectorOfKeyPoint();
            _trackedFeatures3D = new VectorOfPoint3D32F();
        }
Example No. 8
        public ORBDetector CreateDetector()
        {
            var _orb = new ORBDetector(
                this.model.NumberOfFeatures,
                this.model.ScaleFactor,
                this.model.NLevels,
                this.model.EdgeThreshold,
                this.model.firstLevel,
                this.model.WTK_A,
                this.model.ScoreType,
                this.model.PatchSize,
                this.model.FastThreshold
                );

            return(_orb);
        }
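For reference, the ORBDetector constructor's defaults mirror OpenCV's ORB defaults; the model fields above simply override them. A minimal sketch of the equivalent explicit construction:

            // Emgu.CV's ORBDetector defaults (same as OpenCV's ORB):
            // 500 features, 1.2 scale factor, 8 pyramid levels, edge threshold 31,
            // first level 0, WTA_K of 2, Harris scoring, patch size 31, FAST threshold 20.
            var orbWithDefaults = new ORBDetector(500, 1.2f, 8, 31, 0, 2, ORBDetector.ScoreType.Harris, 31, 20);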
Example No. 9
        public static UMat Run(Mat img)
        {
            var modelKeyPoints = new VectorOfKeyPoint();

            var result = new UMat();

            using (UMat uModelImage = img.ToUMat(AccessType.Read))
            {
                ORBDetector orbCPU = new ORBDetector();
                // DetectRaw only locates keypoints; no descriptors are needed for drawing.
                orbCPU.DetectRaw(uModelImage, modelKeyPoints);

                Features2DToolbox.DrawKeypoints(img, modelKeyPoints, result, new Bgr(Color.Red), Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);
            }

            return(result);
        }
Example No. 10
        public PtamLikeAlgorithm(CameraCalibrationInfo calibrationInfo)
        {
            _calibrationInfo = calibrationInfo;

            _detector = new ORBDetector();

            _prevGray = new Mat();
            _currGray = new Mat();

            _raux = new VectorOfFloat();
            _taux = new VectorOfFloat();

            _bootstrapKp       = new VectorOfKeyPoint();
            _trackedFeatures   = new VectorOfKeyPoint();
            _trackedFeatures3D = new VectorOfPoint3D32F();

            InitialP1 = new Matrix <double>(3, 4);
            InitialP1.SetIdentity();
        }
Example No. 11
        private void detectImgFeatures()
        {
            ORBDetector detector = new ORBDetector(100, 1.2f, 8);

            MKeyPoint[]      img0_keyPoints        = detector.Detect(imgs[0]);
            VectorOfKeyPoint img0_vector_keypoints = new VectorOfKeyPoint(img0_keyPoints);
            Matrix <Byte>    img0_descriptors      = new Matrix <Byte>(img0_vector_keypoints.Size, detector.DescriptorSize);

            MKeyPoint[]      img1_keyPoints        = detector.Detect(imgs[1]);
            VectorOfKeyPoint img1_vector_keypoints = new VectorOfKeyPoint(img1_keyPoints);
            Matrix <Byte>    img1_descriptors      = new Matrix <Byte>(img1_vector_keypoints.Size, detector.DescriptorSize);

            detector.Compute(imgs[0], img0_vector_keypoints, img0_descriptors);
            // Compute descriptors for the second image as well; without this the
            // matcher below would be given an uninitialized descriptor matrix.
            detector.Compute(imgs[1], img1_vector_keypoints, img1_descriptors);

            // display keypoints in red
            Image <Bgr, Byte> newImg = new Image <Bgr, Byte>(imgs[0].Width, imgs[0].Height);

            Features2DToolbox.DrawKeypoints(imgs[0], img0_vector_keypoints, newImg, new Bgr(255, 0, 255),
                                            Features2DToolbox.KeypointDrawType.DrawRichKeypoints);
            imgbox_original.Image = newImg;

            Image <Bgr, Byte> newImg2 = new Image <Bgr, Byte>(imgs[1].Width, imgs[1].Height);

            Features2DToolbox.DrawKeypoints(imgs[1], img1_vector_keypoints, newImg2, new Bgr(255, 0, 255),
                                            Features2DToolbox.KeypointDrawType.DrawRichKeypoints);
            imgbox_second.Image = newImg2;

            // apply BFMatcher to find matches in two images
            BFMatcher bfMatcher            = new BFMatcher(DistanceType.Hamming, true);
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            bfMatcher.Add(img0_descriptors);
            bfMatcher.KnnMatch(img1_descriptors, matches, 1, null);
            numberFoundPairs = matches.Size; // count the matches after KnnMatch has run

            // display final image as two merged images with keypoints
            Mat matched_image = new Mat();

            Features2DToolbox.DrawMatches(imgs[0], img0_vector_keypoints, imgs[1], img1_vector_keypoints,
                                          matches, matched_image, new MCvScalar(255, 0, 255), new MCvScalar(0, 255, 0));
            img_final = matched_image.ToImage <Bgr, Byte>();
        }
Example No. 12
        public Mat FingerprintDescriptor(Mat input)
        {
            var harris_normalised = PrepareImage(input);

            float            threshold  = 125.0f;
            List <MKeyPoint> mKeyPoints = new List <MKeyPoint>();
            Mat rescaled = new Mat();
            VectorOfKeyPoint keypoints = new VectorOfKeyPoint();
            double           scale = 1.0, shift = 0.0;

            CvInvoke.ConvertScaleAbs(harris_normalised, rescaled, scale, shift);
            Mat[]       mat         = new Mat[] { rescaled, rescaled, rescaled };
            VectorOfMat vectorOfMat = new VectorOfMat(mat);

            int[] from_to  = { 0, 0, 1, 1, 2, 2 };
            Mat   harris_c = new Mat(rescaled.Size, DepthType.Cv8U, 3);

            CvInvoke.MixChannels(vectorOfMat, harris_c, from_to);
            for (int x = 0; x < harris_c.Width; x++)
            {
                for (int y = 0; y < harris_c.Height; y++)
                {
                    if (GetFloatValue(harris_c, x, y) > threshold)
                    {
                        MKeyPoint m = new MKeyPoint
                        {
                            Size  = 1,
                            Point = new PointF(x, y)
                        };
                        mKeyPoints.Add(m);
                    }
                }
            }

            keypoints.Push(mKeyPoints.ToArray());
            Mat         descriptors = new Mat();
            ORBDetector ORBCPU      = new ORBDetector();

            ORBCPU.Compute(_input_thinned, keypoints, descriptors);

            return(descriptors);
        }
Example No. 13
        private void button1_Click_1(object sender, EventArgs e)
        {
            if (button1.Text == "Load")
            {
                trackings = new Point[11];
                for (int i = 0; i < trackings.Length; i++)
                {
                    trackings[i].X = -1;
                    trackings[i].Y = -1;
                }

                button1.Text = "UnLoad";
                ORBCPU       = new ORBDetector(400);
                objImage     = uObservedImage.ToImage <Bgr, Byte>();
                objImage.ROI = roi;
                uObjImage    = objImage.Mat.GetUMat(AccessType.ReadWrite);
                ORBCPU.DetectAndCompute(uObjImage, null, objKeyPoints, objDescriptors, false);
                shape  = 20;
                ORBCPU = new ORBDetector(8000);
            }
            else if (button1.Text == "UnLoad")
            {
                trackings = new Point[11];
                for (int i = 0; i < trackings.Length; i++)
                {
                    trackings[i].X = -1;
                    trackings[i].Y = -1;
                }
                objKeyPoints   = new VectorOfKeyPoint();
                objDescriptors = new UMat();
                objheight      = 0;
                objwidth       = 0;
                shape          = 20;
                button1.Text   = "Object";
            }
            else if (button1.Text == "Object")
            {
                button1.Text = "Load";
                shape        = 5;
            }
        }
Example No. 14
        public int  FindMatch1(Mat modelImage, Mat observedImage, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            int    k                   = 2;
            int    nonZeroCount        = 0;
            double uniquenessThreshold = 0.80;

            homography        = null;
            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();
            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))

                {
                    var featureDetector  = new ORBDetector(9000);
                    Mat modelDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
                    Mat observedDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                    using (var matcher = new BFMatcher(DistanceType.Hamming, false))
                    {
                        matcher.Add(modelDescriptors);

                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                        mask.SetTo(new MCvScalar(255));
                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                        nonZeroCount = CvInvoke.CountNonZero(mask);
                        if (nonZeroCount >= 4)
                        {
                            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                       matches, mask, 1.5, 20);
                            //if (nonZeroCount >= 4)
                            //    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                            //        observedKeyPoints, matches, mask, 2);
                        }
                    }
                }
            return(nonZeroCount);
        }
Example No. 15
        private void btnORB_Click(object sender, EventArgs e)
        {
            var temproot = RootImg.Clone();
            var tempimg1 = WorkingImg.Clone();
            Image <Bgr, byte> colorimg   = tempimg1.Convert <Bgr, byte>();
            Image <Bgr, byte> tempOriImg = temproot.Convert <Bgr, byte>();
            var f2d = new ORBDetector();

            var keypoint = f2d.Detect(WorkingImg);

            foreach (var point in keypoint)
            {
                System.Drawing.Rectangle rect = new Rectangle();
                rect.X      = (int)point.Point.X;
                rect.Y      = (int)point.Point.Y;
                rect.Width  = (int)point.Size;
                rect.Height = (int)point.Size;
                tempOriImg.Draw(rect, new Bgr(60, 200, 10), 2);
            }

            rtxLog.AppendText("btnORB_Click" + Environment.NewLine);
            RegistHisroty(tempOriImg);
        }
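The loop above approximates each keypoint with an axis-aligned rectangle. Emgu can render the same keypoints directly; a minimal alternative sketch reusing the variables from btnORB_Click (the output Mat name is an assumption):

            // Alternative: let Features2DToolbox draw the detected keypoints
            // (circle size reflects keypoint size when DrawRichKeypoints is used).
            var keypointVector = new VectorOfKeyPoint(keypoint);
            var rendered       = new Mat();
            Features2DToolbox.DrawKeypoints(tempOriImg, keypointVector, rendered,
                                            new Bgr(60, 200, 10),
                                            Features2DToolbox.KeypointDrawType.DrawRichKeypoints);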
Example No. 16
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography, out long score)
        {
            int       k = 2;
            double    uniquenessThreshold = 0.80;
            Stopwatch watch;

            homography        = null;
            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    //   KAZE featureDetector = new KAZE();
                    //     SURF featureDetector = new SURF(100);
                    //    SIFT featureDetector = new SIFT();
                    ORBDetector featureDetector  = new ORBDetector();
                    Mat         modelDescriptors = new Mat();
                    //Detect and compute in a single call; OpenCV exposes these as two separate steps, which can also be used individually
                    featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
                    watch = Stopwatch.StartNew();

                    Mat observedDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                    // KdTree for faster results / less accuracy
                    using (var ip = new Emgu.CV.Flann.KdTreeIndexParams())
                        using (var sp = new SearchParams())
                            //  using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp)) // start matching
                            using (BFMatcher matcher = new BFMatcher(DistanceType.Hamming)) // ORB descriptors are binary, so Hamming distance is used
                            {
                                matcher.Add(modelDescriptors);
                                matcher.KnnMatch(observedDescriptors, matches, k, null);
                                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                mask.SetTo(new MCvScalar(255));
                                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);//filter out non-unique matches

                                // Calculate score based on matches size
                                // ---------------------------------------------->
                                score = 0;
                                for (int i = 0; i < matches.Size; i++)
                                {
                                    if (mask.GetData(i)[0] == 0)
                                    {
                                        continue;
                                    }
                                    foreach (var e in matches[i].ToArray())
                                    {
                                        ++score;
                                    }
                                }
                                // <----------------------------------------------

                                int nonZeroCount = CvInvoke.CountNonZero(mask);//used to locate the template within the scene
                                if (nonZeroCount >= 4)
                                {
                                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                                    if (nonZeroCount >= 4)
                                    {
                                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                    }
                                }
                            }
                    watch.Stop();
                }
            matchTime = watch.ElapsedMilliseconds;
        }
Example No. 17
 public ImageFingerPrintVendor(DescriptorExtractor extractor)
 {
     this.hashAlgo          = new PHash();
     this.extractor         = extractor;
     this.featureSearchAlgo = new ORBDetector(numberOfFeatures: 100, scoreType: ORBDetector.ScoreType.Fast);
 }
Example No. 18
        public void MatchFeatures()
        {
            string PathToImage1 = "C:\\Users\\Image1.jpg";
            string PathToImage2 = "C:\\Users\\Image2.jpg";

            Mat Image1 = CvInvoke.Imread(PathToImage1);

            /* Emgu.CV.Mat is a class which can store the pixel values.
             * Emgu.CV.CvInvoke is the library to invoke OpenCV functions.
             * Imread loads an image from the specified path.
             * Image1 now have the details of first image */

            Mat Image2 = CvInvoke.Imread(PathToImage2);           // Image2 now have the details of second image

            ORBDetector ORB = new ORBDetector();                  // Emgu.CV.Features2D.ORBDetector class. Now, ORB is an instance of the class.

            VectorOfKeyPoint KeyPoints1 = new VectorOfKeyPoint(); // KeyPoints1 - for storing the keypoints of Image1

            VectorOfKeyPoint KeyPoints2 = new VectorOfKeyPoint(); // KeyPoints2 - for storing the keypoints of Image2

            Mat Descriptors1 = new Mat();                         // Descriptors1 - for storing the descriptors of Image1

            Mat Descriptors2 = new Mat();                         // Descriptors2 - for storing the descriptors of Image2


            //Feature Extraction from Image1
            ORB.DetectAndCompute(Image1, null, KeyPoints1, Descriptors1, false);

            /* Detects Keypoints in Image1 and then computes descriptors on the image from the keypoints.
             * Keypoints will be stored into - KeyPoints1 and Descriptors will be stored into - Descriptors1*/


            //Feature Extraction from Image2
            ORB.DetectAndCompute(Image2, null, KeyPoints2, Descriptors2, false);

            int k = 2;

            /* k is the number of best matches returned per descriptor,
             * or fewer if a descriptor has fewer than k possible matches in total. */

            BFMatcher matcher = new BFMatcher(DistanceType.Hamming);       // BruteForceMatcher to perform descriptor matching.

            matcher.Add(Descriptors1);                                     // Descriptors of Image1 is added.

            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch(); // For storing the output of matching operation.

            matcher.KnnMatch(Descriptors2, matches, k, null);              // matches will now have the result of matching operation.


            /* After the matching operation we get a 2D array (as k = 2).
             * To check whether two images are similar, we look at the distance values in this array
             * (matches[0][0].Distance, matches[0][1].Distance, matches[1][0].Distance, ...).
             * If Image1 and Image2 are identical, all distance values will be 0.
             * If they are similar, the distance values will be small; otherwise they will be large. */


            /* That is, for two images to be similar, the distance values in the matches array
             * should be small. */


            List <float> matchList = new List <float>();


            /* The matching operation may in some situations produce false-positive results.
             * To filter them out, David Lowe proposed a ratio test
             * https://www.cs.ubc.ca/~lowe/papers/ijcv04.pdf#page=20
             * The test compares the best match distance to the second-best match distance;
             * if their ratio exceeds the threshold, the match is discarded as low-quality. */

            for (int b = 0; b < matches.Size; ++b)
            {
                const double ratio = 0.8; // As in Lowe's paper; can be tuned accordingly.
                if (matches[b][0].Distance < ratio * matches[b][1].Distance)
                {
                    matchList.Add(matches[b][0].Distance);
                }
            }


            matchList.Sort();
            matchList = matchList.Take(40).ToList();

            /* matchList now contains the 40 smallest match distances.
             * Based on my tests, an image qualifies as similar when at least 10 of these
             * distances are less than or equal to 45.
             * Tune these thresholds to your particular situation. */


            int distanceThreshold = 45;
            int FilterThreshold   = 10;
            int FilterCount       = 0;

            for (int j = 0; j < matchList.Count; j++)
            {
                if ((matchList[j]) <= (distanceThreshold))
                {
                    FilterCount++;
                }
            }


            if (FilterCount >= FilterThreshold)
            {
                // Images are similar, perform the operation you want.
                FilterCount = 0;
            }
            else
            {
                FilterCount = 0;
            }
        }
Example No. 19
        void NewORBDetector()
        {
            // Note: a ratio of 100 effectively disables the ratio test below;
            // typical Lowe-ratio values are in the 0.7-0.8 range.
            float ms_MIN_RATIO = 100;
            float ms_MAX_DIST  = 100;

            (Image <Bgr, byte> Image, VectorOfKeyPoint Keypoints, Mat Descriptors)_imgModel = (new Image <Bgr, byte>(@"C:\Images\ImgModel.jpg").Resize(0.2, Inter.Area), new VectorOfKeyPoint(), new Mat());
            (Image <Bgr, byte> Image, VectorOfKeyPoint Keypoints, Mat Descriptors)_imgTest  = (new Image <Bgr, byte>(@"C:\Images\ImgTest.jpg").Resize(0.2, Inter.Area), new VectorOfKeyPoint(), new Mat());
            Mat imgKeypointsModel                      = new Mat();
            Mat imgKeypointsTest                       = new Mat();
            Mat imgMatches                             = new Mat();
            Mat imgWarped                              = new Mat();
            VectorOfVectorOfDMatch matches             = new VectorOfVectorOfDMatch();
            VectorOfVectorOfDMatch filteredMatches     = new VectorOfVectorOfDMatch();
            List <MDMatch[]>       filteredMatchesList = new List <MDMatch[]>();

            ORBDetector _ORB       = new ORBDetector();
            BFMatcher   _BFMatcher = new BFMatcher(DistanceType.Hamming2);

            _ORB.DetectAndCompute(_imgModel.Image, null, _imgModel.Keypoints, _imgModel.Descriptors, false);
            _ORB.DetectAndCompute(_imgTest.Image, null, _imgTest.Keypoints, _imgTest.Descriptors, false);

            _BFMatcher.Add(_imgModel.Descriptors);
            _BFMatcher.KnnMatch(_imgTest.Descriptors, matches, k: 2, mask: null);

            MDMatch[][] matchesArray = matches.ToArrayOfArray();

            //Apply ratio test
            for (int i = 0; i < matchesArray.Length; i++)
            {
                MDMatch first = matchesArray[i][0];
                float   dist1 = matchesArray[i][0].Distance;
                float   dist2 = matchesArray[i][1].Distance;

                if (dist1 < ms_MIN_RATIO * dist2)
                {
                    filteredMatchesList.Add(matchesArray[i]);
                }
            }

            //Filter by threshold
            MDMatch[][] defCopy = new MDMatch[filteredMatchesList.Count][];
            filteredMatchesList.CopyTo(defCopy);
            filteredMatchesList = new List <MDMatch[]>();

            foreach (var item in defCopy)
            {
                if (item[0].Distance < ms_MAX_DIST)
                {
                    filteredMatchesList.Add(item);
                }
            }

            filteredMatches = new VectorOfVectorOfDMatch(filteredMatchesList.ToArray());


            Mat homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(_imgModel.Keypoints, _imgTest.Keypoints, filteredMatches, null, 10);

            CvInvoke.WarpPerspective(_imgTest.Image, imgWarped, homography, _imgTest.Image.Size);

            Features2DToolbox.DrawKeypoints(_imgModel.Image, _imgModel.Keypoints, imgKeypointsModel, new Bgr(0, 0, 255));
            Features2DToolbox.DrawKeypoints(_imgTest.Image, _imgTest.Keypoints, imgKeypointsTest, new Bgr(0, 0, 255));
            Features2DToolbox.DrawMatches(_imgModel.Image, _imgModel.Keypoints, _imgTest.Image, _imgTest.Keypoints, filteredMatches, imgMatches, new MCvScalar(0, 255, 0), new MCvScalar(0, 0, 255), null, Features2DToolbox.KeypointDrawType.Default);

            Task.Factory.StartNew(() => Emgu.CV.UI.ImageViewer.Show(imgKeypointsModel, "Keypoints Model"));
            Task.Factory.StartNew(() => Emgu.CV.UI.ImageViewer.Show(imgKeypointsTest, "Keypoints Test"));
            Task.Factory.StartNew(() => Emgu.CV.UI.ImageViewer.Show(imgMatches, "Matches"));
            Task.Factory.StartNew(() => Emgu.CV.UI.ImageViewer.Show(imgWarped, "Warp"));
        }
Example No. 20
        public static void FindMatches(Mat modelImage, Mat observedImage, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography, MatchingTechnique matchingTechnique, float keyPointFilter = 1, double detectorParameter = -1)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;

            homography        = null;
            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();


            Feature2D    detector;
            Feature2D    descriptor;
            DistanceType distanceType;

            if (matchingTechnique == MatchingTechnique.FAST)
            {
                if (detectorParameter <= 0)
                {
                    detectorParameter = 20;
                }

                detector     = new FastDetector((int)detectorParameter);
                descriptor   = new BriefDescriptorExtractor();
                distanceType = DistanceType.Hamming;
            }
            else if (matchingTechnique == MatchingTechnique.ORB)
            {
                if (detectorParameter <= 0)
                {
                    detectorParameter = 100000;
                }

                detector     = new ORBDetector((int)detectorParameter);
                descriptor   = detector;
                distanceType = DistanceType.Hamming;
            }
            else if (matchingTechnique == MatchingTechnique.SURF)
            {
                if (detectorParameter <= 0)
                {
                    detectorParameter = 300;
                }

                detector     = new SURF(detectorParameter);
                descriptor   = detector;
                distanceType = DistanceType.L2;
            }
            else
            {
                throw new NotImplementedException($"{matchingTechnique} not supported.");
            }

            // Extract features from model image.
            UMat modelDescriptors = new UMat();

            detector.DetectRaw(modelImage, modelKeyPoints, null);
            Console.WriteLine($"modelKeyPoints: {modelKeyPoints.Size}");
            if (keyPointFilter < 2)
            {
                modelKeyPoints = GetBestKeypointsPercent(modelKeyPoints, keyPointFilter);
            }
            else
            {
                modelKeyPoints = GetBestKeypointsCount(modelKeyPoints, (int)keyPointFilter);
            }
            descriptor.Compute(modelImage, modelKeyPoints, modelDescriptors);

            // Extract features from observed image.
            UMat observedDescriptors = new UMat();

            detector.DetectRaw(observedImage, observedKeyPoints, null);
            Console.WriteLine($"observedKeyPoints: {observedKeyPoints.Size}");
            if (keyPointFilter < 2)
            {
                observedKeyPoints = GetBestKeypointsPercent(observedKeyPoints, keyPointFilter);
            }
            else
            {
                observedKeyPoints = GetBestKeypointsCount(observedKeyPoints, (int)keyPointFilter);
            }
            descriptor.Compute(observedImage, observedKeyPoints, observedDescriptors);

            // Match keypoints.
            BFMatcher matcher = new BFMatcher(distanceType);

            matcher.Add(modelDescriptors);
            matcher.KnnMatch(observedDescriptors, matches, k, null);

            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

            int nonZeroCount = CvInvoke.CountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                }
            }
        }
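A hedged usage sketch of FindMatches above; the file paths are placeholders and the drawing step mirrors the other examples on this page.

            // Illustrative call of FindMatches; paths and output handling are placeholders.
            Mat model    = CvInvoke.Imread("model.jpg", ImreadModes.Grayscale);
            Mat observed = CvInvoke.Imread("scene.jpg", ImreadModes.Grayscale);
            var matches  = new VectorOfVectorOfDMatch();

            VectorOfKeyPoint modelKp, observedKp;
            Mat mask, homography;
            FindMatches(model, observed, out modelKp, out observedKp, matches, out mask, out homography, MatchingTechnique.ORB);

            // Visualize the surviving matches (mask marks the ones kept by the voting steps).
            var result = new Mat();
            Features2DToolbox.DrawMatches(model, modelKp, observed, observedKp, matches, result,
                                          new MCvScalar(0, 255, 0), new MCvScalar(255, 0, 0), mask,
                                          Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);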
Example No. 21
        public AlignedResult CreateAlignedSecondImageKeypoints(SKBitmap firstImage, SKBitmap secondImage,
                                                               bool discardTransX, AlignmentSettings settings, bool keystoneRightOnFirst)
        {
#if __NO_EMGU__
            return(null);
#endif
            var result = new AlignedResult();

            var detector = new ORBDetector();
            const ImreadModes READ_MODE = ImreadModes.Color;

            var mat1                = new Mat();
            var descriptors1        = new Mat();
            var allKeyPointsVector1 = new VectorOfKeyPoint();
            CvInvoke.Imdecode(GetBytes(firstImage, 1), READ_MODE, mat1);
            detector.DetectAndCompute(mat1, null, allKeyPointsVector1, descriptors1, false);

            var mat2                = new Mat();
            var descriptors2        = new Mat();
            var allKeyPointsVector2 = new VectorOfKeyPoint();
            CvInvoke.Imdecode(GetBytes(secondImage, 1), READ_MODE, mat2);
            detector.DetectAndCompute(mat2, null, allKeyPointsVector2, descriptors2, false);

            const double THRESHOLD_PROPORTION = 1 / 4d;
            var          thresholdDistance    = Math.Sqrt(Math.Pow(firstImage.Width, 2) + Math.Pow(firstImage.Height, 2)) * THRESHOLD_PROPORTION;

            var distanceThresholdMask = new Mat(allKeyPointsVector2.Size, allKeyPointsVector1.Size, DepthType.Cv8U, 1);
            if (!settings.UseCrossCheck)
            {
                unsafe
                {
                    var maskPtr = (byte *)distanceThresholdMask.DataPointer.ToPointer();
                    for (var i = 0; i < allKeyPointsVector2.Size; i++)
                    {
                        var keyPoint2 = allKeyPointsVector2[i];
                        for (var j = 0; j < allKeyPointsVector1.Size; j++)
                        {
                            var keyPoint1        = allKeyPointsVector1[j];
                            var physicalDistance = CalculatePhysicalDistanceBetweenPoints(keyPoint2.Point, keyPoint1.Point);
                            if (physicalDistance < thresholdDistance)
                            {
                                *maskPtr = 255;
                            }
                            else
                            {
                                *maskPtr = 0;
                            }

                            maskPtr++;
                        }
                    }
                }
            }

            var vectorOfMatches = new VectorOfVectorOfDMatch();
            var matcher         = new BFMatcher(DistanceType.Hamming, settings.UseCrossCheck);
            matcher.Add(descriptors1);
            matcher.KnnMatch(descriptors2, vectorOfMatches, settings.UseCrossCheck ? 1 : 2, settings.UseCrossCheck ? new VectorOfMat() : new VectorOfMat(distanceThresholdMask));

            var goodMatches = new List <MDMatch>();
            for (var i = 0; i < vectorOfMatches.Size; i++)
            {
                if (vectorOfMatches[i].Size == 0)
                {
                    continue;
                }

                if (vectorOfMatches[i].Size == 1 ||
                    (vectorOfMatches[i][0].Distance < 0.75 * vectorOfMatches[i][1].Distance)) //make sure matches are unique
                {
                    goodMatches.Add(vectorOfMatches[i][0]);
                }
            }

            if (goodMatches.Count < settings.MinimumKeypoints)
            {
                return(null);
            }

            var pairedPoints = new List <PointForCleaning>();
            for (var ii = 0; ii < goodMatches.Count; ii++)
            {
                var keyPoint1 = allKeyPointsVector1[goodMatches[ii].TrainIdx];
                var keyPoint2 = allKeyPointsVector2[goodMatches[ii].QueryIdx];
                pairedPoints.Add(new PointForCleaning
                {
                    KeyPoint1 = keyPoint1,
                    KeyPoint2 = keyPoint2,
                    Data      = new KeyPointOutlierDetectorData
                    {
                        Distance = (float)CalculatePhysicalDistanceBetweenPoints(keyPoint1.Point, keyPoint2.Point),
                        Slope    = (keyPoint2.Point.Y - keyPoint1.Point.Y) / (keyPoint2.Point.X - keyPoint1.Point.X)
                    },
                    Match = new MDMatch
                    {
                        Distance = goodMatches[ii].Distance,
                        ImgIdx   = goodMatches[ii].ImgIdx,
                        QueryIdx = ii,
                        TrainIdx = ii
                    }
                });
            }

            if (settings.DrawKeypointMatches)
            {
                result.DirtyMatchesCount = pairedPoints.Count;
                result.DrawnDirtyMatches = DrawMatches(firstImage, secondImage, pairedPoints);
            }

            if (settings.DiscardOutliersByDistance || settings.DiscardOutliersBySlope)
            {
                //Debug.WriteLine("DIRTY POINTS START (ham,dist,slope,ydiff), count: " + pairedPoints.Count);
                //foreach (var pointForCleaning in pairedPoints)
                //{
                //    Debug.WriteLine(pointForCleaning.Match.Distance  + "," + pointForCleaning.Data.Distance + "," + pointForCleaning.Data.Slope + "," + Math.Abs(pointForCleaning.KeyPoint1.Point.Y - pointForCleaning.KeyPoint2.Point.Y));
                //}

                //Debug.WriteLine("DIRTY PAIRS:");
                //PrintPairs(pairedPoints);

                if (settings.DiscardOutliersByDistance)
                {
                    // reject distances and slopes more than some number of standard deviations from the median
                    var medianDistance = pairedPoints.OrderBy(p => p.Data.Distance).ElementAt(pairedPoints.Count / 2).Data.Distance;
                    var distanceStdDev = CalcStandardDeviation(pairedPoints.Select(p => p.Data.Distance).ToArray());
                    pairedPoints = pairedPoints.Where(p => Math.Abs(p.Data.Distance - medianDistance) < Math.Abs(distanceStdDev * (settings.KeypointOutlierThresholdTenths / 10d))).ToList();
                    //Debug.WriteLine("Median Distance: " + medianDistance);
                    //Debug.WriteLine("Distance Cleaned Points count: " + pairedPoints.Count);
                }

                if (settings.DiscardOutliersBySlope)
                {
                    var validSlopes = pairedPoints.Where(p => !float.IsNaN(p.Data.Slope) && float.IsFinite(p.Data.Slope)).ToArray();
                    var medianSlope = validSlopes.OrderBy(p => p.Data.Slope).ElementAt(validSlopes.Length / 2).Data.Slope;
                    var slopeStdDev = CalcStandardDeviation(validSlopes.Select(p => p.Data.Slope).ToArray());
                    pairedPoints = validSlopes.Where(p => Math.Abs(p.Data.Slope - medianSlope) < Math.Abs(slopeStdDev * (settings.KeypointOutlierThresholdTenths / 10d))).ToList();
                    //Debug.WriteLine("Median Slope: " + medianSlope);
                    //Debug.WriteLine("Slope Cleaned Points count: " + pairedPoints.Count);
                }

                //Debug.WriteLine("CLEAN POINTS START (ham,dist,slope,ydiff), count: " + pairedPoints.Count);
                //foreach (var pointForCleaning in pairedPoints)
                //{
                //    Debug.WriteLine(pointForCleaning.Match.Distance + "," + pointForCleaning.Data.Distance + "," + pointForCleaning.Data.Slope + "," + Math.Abs(pointForCleaning.KeyPoint1.Point.Y - pointForCleaning.KeyPoint2.Point.Y));
                //}

                //Debug.WriteLine("CLEANED PAIRS:");
                //PrintPairs(pairedPoints);

                for (var ii = 0; ii < pairedPoints.Count; ii++)
                {
                    var oldMatch = pairedPoints[ii].Match;
                    pairedPoints[ii].Match = new MDMatch
                    {
                        Distance = oldMatch.Distance,
                        ImgIdx   = oldMatch.ImgIdx,
                        QueryIdx = ii,
                        TrainIdx = ii
                    };
                }

                if (settings.DrawKeypointMatches)
                {
                    result.CleanMatchesCount = pairedPoints.Count;
                    result.DrawnCleanMatches = DrawMatches(firstImage, secondImage, pairedPoints);
                }
            }

            var points1 = pairedPoints.Select(p => new SKPoint(p.KeyPoint1.Point.X, p.KeyPoint1.Point.Y)).ToArray();
            var points2 = pairedPoints.Select(p => new SKPoint(p.KeyPoint2.Point.X, p.KeyPoint2.Point.Y)).ToArray();


            var translation1 = FindVerticalTranslation(points1, points2, secondImage);
            var translated1  = SKMatrix.MakeTranslation(0, translation1);
            points2 = translated1.MapPoints(points2);

            var rotation1 = FindRotation(points1, points2, secondImage);
            var rotated1  = SKMatrix.MakeRotation(rotation1, secondImage.Width / 2f, secondImage.Height / 2f);
            points2 = rotated1.MapPoints(points2);

            var zoom1   = FindZoom(points1, points2, secondImage);
            var zoomed1 = SKMatrix.MakeScale(zoom1, zoom1, secondImage.Width / 2f, secondImage.Height / 2f);
            points2 = zoomed1.MapPoints(points2);



            var translation2 = FindVerticalTranslation(points1, points2, secondImage);
            var translated2  = SKMatrix.MakeTranslation(0, translation2);
            points2 = translated2.MapPoints(points2);

            var rotation2 = FindRotation(points1, points2, secondImage);
            var rotated2  = SKMatrix.MakeRotation(rotation2, secondImage.Width / 2f, secondImage.Height / 2f);
            points2 = rotated2.MapPoints(points2);

            var zoom2   = FindZoom(points1, points2, secondImage);
            var zoomed2 = SKMatrix.MakeScale(zoom2, zoom2, secondImage.Width / 2f, secondImage.Height / 2f);
            points2 = zoomed2.MapPoints(points2);



            var translation3 = FindVerticalTranslation(points1, points2, secondImage);
            var translated3  = SKMatrix.MakeTranslation(0, translation3);
            points2 = translated3.MapPoints(points2);

            var rotation3 = FindRotation(points1, points2, secondImage);
            var rotated3  = SKMatrix.MakeRotation(rotation3, secondImage.Width / 2f, secondImage.Height / 2f);
            points2 = rotated3.MapPoints(points2);

            var zoom3   = FindZoom(points1, points2, secondImage);
            var zoomed3 = SKMatrix.MakeScale(zoom3, zoom3, secondImage.Width / 2f, secondImage.Height / 2f);
            points2 = zoomed3.MapPoints(points2);


            var keystoned1 = SKMatrix.MakeIdentity();
            var keystoned2 = SKMatrix.MakeIdentity();
            if (settings.DoKeystoneCorrection)
            {
                keystoned1 = FindTaper(points2, points1, secondImage, keystoneRightOnFirst);
                points1    = keystoned1.MapPoints(points1);
                keystoned2 = FindTaper(points1, points2, secondImage, !keystoneRightOnFirst);
                points2    = keystoned2.MapPoints(points2);
            }


            var horizontaled = SKMatrix.MakeIdentity();
            if (!discardTransX)
            {
                var horizontalAdj = FindHorizontalTranslation(points1, points2, secondImage);
                horizontaled = SKMatrix.MakeTranslation(horizontalAdj, 0);
                points2      = horizontaled.MapPoints(points2);
            }



            var tempMatrix1 = new SKMatrix();
            SKMatrix.Concat(ref tempMatrix1, translated1, rotated1);
            var tempMatrix2 = new SKMatrix();
            SKMatrix.Concat(ref tempMatrix2, tempMatrix1, zoomed1);

            var tempMatrix3 = new SKMatrix();
            SKMatrix.Concat(ref tempMatrix3, tempMatrix2, translated2);
            var tempMatrix4 = new SKMatrix();
            SKMatrix.Concat(ref tempMatrix4, tempMatrix3, rotated2);
            var tempMatrix5 = new SKMatrix();
            SKMatrix.Concat(ref tempMatrix5, tempMatrix4, zoomed2);

            var tempMatrix6 = new SKMatrix();
            SKMatrix.Concat(ref tempMatrix6, tempMatrix5, translated3);
            var tempMatrix7 = new SKMatrix();
            SKMatrix.Concat(ref tempMatrix7, tempMatrix6, rotated3);
            var tempMatrix8 = new SKMatrix();
            SKMatrix.Concat(ref tempMatrix8, tempMatrix7, zoomed3);


            var tempMatrix9 = new SKMatrix();
            SKMatrix.Concat(ref tempMatrix9, tempMatrix8, keystoned2);

            var tempMatrix10 = new SKMatrix();
            SKMatrix.Concat(ref tempMatrix10, tempMatrix9, horizontaled);

            var finalMatrix = tempMatrix10;
            result.TransformMatrix2 = finalMatrix;
            var alignedImage2 = new SKBitmap(secondImage.Width, secondImage.Height);
            using (var canvas = new SKCanvas(alignedImage2))
            {
                canvas.SetMatrix(finalMatrix);
                canvas.DrawBitmap(secondImage, 0, 0);
            }
            result.AlignedBitmap2 = alignedImage2;


            result.TransformMatrix1 = keystoned1;
            var alignedImage1 = new SKBitmap(firstImage.Width, firstImage.Height);
            using (var canvas = new SKCanvas(alignedImage1))
            {
                canvas.SetMatrix(keystoned1);
                canvas.DrawBitmap(firstImage, 0, 0);
            }
            result.AlignedBitmap1 = alignedImage1;


            return(result);
        }