Example #1
        static void test()
        {
            System.Collections.Generic.Dictionary <string, object> data = new Dictionary <string, object>();
            Bitmap b1          = ImageDecoder.DecodeFromFile(@"C:\Users\qa\Desktop\picture\iphone_icon\scoll_down_selected_icon.jpg");
            var    surf        = new SpeededUpRobustFeaturesDetector(threshold: 0.0002f, octaves: 5, initial: 2);
            var    descriptors = surf.Transform(b1);
            //List<SpeededUpRobustFeaturePoint> descriptors = new List<SpeededUpRobustFeaturePoint>(surf.Transform(b1));
            //double[][] features = descriptors.Apply(d => d.Descriptor);
            List <double[]> features = new List <double[]>();

            foreach (var d in descriptors)
            {
                features.Add(d.Descriptor);
            }
            data.Add("pos", features.ToArray());

            System.Web.Script.Serialization.JavaScriptSerializer jss = new System.Web.Script.Serialization.JavaScriptSerializer();
            string s = jss.Serialize(data);

            /*
             * foreach(string s in System.IO.Directory.GetFiles(@"C:\Users\qa\Desktop\picture\iphone_icon"))
             * {
             *  Bitmap b = new Bitmap(s);
             *  var surf = new SpeededUpRobustFeaturesDetector(threshold: 0.0002f, octaves: 5, initial: 2);
             *  var descriptors = surf.Transform(b);
             *  //double[][] features = descriptors.Apply(d => d.Descriptor);
             *
             *
             * }
             */
        }
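
The commented-out loop at the end of this example only sketches batch extraction over a directory. A minimal completion of that idea, reusing only the calls already shown above (keying the dictionary by file name is an assumption, not part of the original code), could look like this:

        static void testBatch()
        {
            // Sketch only: batch version of the code above; the file-name key is an assumption.
            var data = new System.Collections.Generic.Dictionary<string, object>();
            var surf = new SpeededUpRobustFeaturesDetector(threshold: 0.0002f, octaves: 5, initial: 2);

            foreach (string file in System.IO.Directory.GetFiles(@"C:\Users\qa\Desktop\picture\iphone_icon"))
            {
                using (Bitmap b = ImageDecoder.DecodeFromFile(file))
                {
                    // Collect one double[] descriptor per detected SURF point
                    var features = new List<double[]>();
                    foreach (var d in surf.Transform(b))
                    {
                        features.Add(d.Descriptor);
                    }
                    data.Add(System.IO.Path.GetFileName(file), features.ToArray());
                }
            }

            // Serialize all per-file descriptors to JSON, as in the single-image code above
            var jss = new System.Web.Script.Serialization.JavaScriptSerializer();
            string json = jss.Serialize(data);
        }
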
Example #2
        public override IEnumerable <SURFFeature> ExtractFeatures(Bitmap bitmap, string TagSet)
        {
            SpeededUpRobustFeaturesDetector           Surf          = new SpeededUpRobustFeaturesDetector(Threshold, Octaves, Initial);
            IEnumerable <SpeededUpRobustFeaturePoint> FeaturePoints = Surf.Transform(bitmap);

            //Check laplacian
            if (ExtractPositiveOnly ?? false)
            {
                FeaturePoints = FeaturePoints.Where(x => x.Laplacian < 0);
            }
            if (ExtractNegativeOnly ?? false)
            {
                FeaturePoints = FeaturePoints.Where(x => x.Laplacian > 0);
            }

            //Check scale
            if (MinimumScale != null)
            {
                FeaturePoints = FeaturePoints.Where(x => x.Scale > MinimumScale);
            }
            if (MaximumScale != null)
            {
                FeaturePoints = FeaturePoints.Where(x => x.Scale < MaximumScale);
            }

            return(FeaturePoints.Select(x => new SURFFeature(x, TagSet)));
        }
        public void doc_test()
        {
            string localPath = TestContext.CurrentContext.TestDirectory;

            #region doc_apply
            // Let's load an example image, such as Lena,
            // from a standard dataset of example images:
            var    images = new TestImages(path: localPath);
            Bitmap lena   = images["lena.bmp"];

            // Create a new SURF with the default parameter values:
            var surf = new SpeededUpRobustFeaturesDetector(threshold: 0.0002f, octaves: 5, initial: 2);

            // Use it to extract the SURF point descriptors from the Lena image:
            List <SpeededUpRobustFeaturePoint> descriptors = surf.ProcessImage(lena);

            // We can obtain the actual double[] descriptors using
            double[][] features = descriptors.Apply(d => d.Descriptor);

            // Now those descriptors can be used to represent the image itself, such
            // as for example, in the Bag-of-Visual-Words approach for classification.
            #endregion

            Assert.AreEqual(523, descriptors.Count);
            double sum = features.Sum(x => x.Sum());
            Assert.AreEqual(2340.9402310500964, sum, 1e-10);
        }
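
The closing comment of the region above points to the Bag-of-Visual-Words approach. The snippet below is only a hedged sketch of that next step, assuming the Accord.NET BagOfVisualWords class (which clusters SURF descriptors by default); the numberOfWords value and the trainingImages array are placeholders, not values from the original test:

        public void doc_bow_sketch(Bitmap[] trainingImages)
        {
            // Assumption: Accord.NET's BagOfVisualWords, which uses SURF descriptors by default.
            // Learn a codebook of "visual words" from the training images:
            var bow = new BagOfVisualWords(10);
            bow.Learn(trainingImages);

            // Each image becomes a fixed-length histogram of visual words,
            // which can then be fed to any classifier:
            double[][] histograms = bow.Transform(trainingImages);
        }
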
Example #4
    protected void surfRansacBlendStraight(List <Bitmap> imgs)
    {
        MatrixH homography;

        List <SpeededUpRobustFeaturePoint[]> surfPoints = new List <SpeededUpRobustFeaturePoint[]>();
        //Calculate all the Surf Points
        SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector();
        double lastAngle = 0;

        for (int i = 0; i < imgs.Count; i++)
        {
            //Grayscale to find the edges and adjust the normal to point up
            AForge.Imaging.Filters.GrayscaleBT709 grayscale = new AForge.Imaging.Filters.GrayscaleBT709();
            AForge.Imaging.DocumentSkewChecker    skew      = new AForge.Imaging.DocumentSkewChecker();

            double angle = skew.GetSkewAngle(grayscale.Apply(imgs[i]));

            //Less than 5 deg change in angle to account for wobble, ignore big shifts
            if (Math.Abs(angle - lastAngle) < 5)
            {
                AForge.Imaging.Filters.RotateBilinear rotate = new AForge.Imaging.Filters.RotateBilinear(angle);
                rotate.FillColor = Color.FromArgb(0, 255, 255, 255);
                imgs[i]          = rotate.Apply(imgs[i]);
                lastAngle        = angle;
            }
            showImage(imgs[i]);
            surfPoints.Add(surf.ProcessImage(imgs[i]).ToArray());
        }


        Bitmap final = imgs[0];

        for (int i = 1; i < imgs.Count; i++)
        {
            SpeededUpRobustFeaturePoint[] surfFinal = surf.ProcessImage(final).ToArray();

            //Correlate the SURF points between images
            KNearestNeighborMatching matcher = new KNearestNeighborMatching(5);
            matcher.Threshold = 0.05;

            IntPoint[][] matches = matcher.Match(surfFinal, surfPoints[i]);

            //Create the homography matrix using RANSAC
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.015, 1);
            homography = ransac.Estimate(matches[0], matches[1]);

            Blend blend = new Blend(homography, final);
            blend.Gradient = true;
            final          = blend.Apply(imgs[i]);
        }

        //Smooth/Sharpen if I wanted to
        AForge.Imaging.Filters.Sharpen filter = new AForge.Imaging.Filters.Sharpen();
        //AForge.Imaging.Filters.GaussianBlur filter = new AForge.Imaging.Filters.GaussianBlur(5);
        //filter.ApplyInPlace(final);

        showImage(final);
    }
Example #5
        public void Panorama_Example1()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // Let's start with two pictures that have been
            // taken from slightly different points of view:
            //
            Bitmap img1 = Resources.dc_left;
            Bitmap img2 = Resources.dc_right;

            // Those pictures are shown below:
            // ImageBox.Show(img1, PictureBoxSizeMode.Zoom, 640, 480);
            // ImageBox.Show(img2, PictureBoxSizeMode.Zoom, 640, 480);


            // Step 1: Detect feature points using Surf Corners Detector
            var surf = new SpeededUpRobustFeaturesDetector();

            var points1 = surf.ProcessImage(img1);
            var points2 = surf.ProcessImage(img2);

            // Step 2: Match feature points using a k-NN
            var matcher = new KNearestNeighborMatching(5);
            var matches = matcher.Match(points1, points2);

            // Step 3: Create the matrix using a robust estimator
            var ransac = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homographyMatrix = ransac.Estimate(matches);

            Assert.AreEqual(1.13583624f, homographyMatrix.Elements[0], 1e-5);
            Assert.AreEqual(-0.0229569562f, homographyMatrix.Elements[1], 1e-5);
            Assert.AreEqual(-255.243988f, homographyMatrix.Elements[2], 1e-2);
            Assert.AreEqual(0.080111593f, homographyMatrix.Elements[3], 1e-5);
            Assert.AreEqual(1.11404252f, homographyMatrix.Elements[4], 1e-5);
            Assert.AreEqual(-167.362167f, homographyMatrix.Elements[5], 1e-2);
            Assert.AreEqual(0.00011207442f, homographyMatrix.Elements[6], 1e-5);
            Assert.AreEqual(0.0000529394056f, homographyMatrix.Elements[7], 1e-5);
            Assert.AreEqual(8, homographyMatrix.Elements.Length);


            // Step 4: Project and blend using the homography
            Blend blend = new Blend(homographyMatrix, img1);


            // Compute the blending algorithm
            Bitmap result = blend.Apply(img2);

            // Show on screen
            // ImageBox.Show(result, PictureBoxSizeMode.Zoom, 640, 480);

#pragma warning disable 618
            double[,] expected = Properties.Resources.blend_result.ToDoubleMatrix(0);
            double[,] actual = result.ToDoubleMatrix(0);
            Assert.IsTrue(Matrix.IsEqual(expected, actual, 0.1));
#pragma warning restore 618
        }
Example #6
        public void ProcessImageTest4()
        {
            Bitmap[] bitmaps =
            {
                Resources.flower01,
                Resources.flower03,
                Resources.flower06,
                Resources.flower07,
                Resources.flower09,
                Resources.flower10,
            };

            var surf = new SpeededUpRobustFeaturesDetector();

            int current = 0;

            foreach (Bitmap img in bitmaps)
            {
                List <SpeededUpRobustFeaturePoint> expected;
                List <SpeededUpRobustFeaturePoint> actual;

                // Create OpenSURF detector by Chris Evans
                {
                    // Create Integral Image
                    OpenSURFcs.IntegralImage iimg = OpenSURFcs.IntegralImage.FromImage(img);

                    // Extract the interest points
                    var pts = OpenSURFcs.FastHessian.getIpoints(0.0002f, 5, 2, iimg);

                    // Describe the interest points
                    OpenSURFcs.SurfDescriptor.DecribeInterestPoints(pts, false, false, iimg);

                    expected = new List <SpeededUpRobustFeaturePoint>();
                    foreach (var p in pts)
                    {
                        expected.Add(new SpeededUpRobustFeaturePoint(p.x, p.y, p.scale,
                                                                     p.laplacian, p.orientation, p.response));
                    }
                }

                // Create Accord.NET SURF detector (based on OpenSURF by Chris Evans)
                {
                    actual = surf.ProcessImage(img);
                }

                var img1 = new FeaturesMarker(actual).Apply(img);
                var img2 = new FeaturesMarker(expected).Apply(img);

                ImageBox.Show(new Concatenate(img1).Apply(img2), PictureBoxSizeMode.Zoom);


                current++;
            }
        }
Example #7
        public void ProcessImageTest2()
        {
            // Load an Image
            Bitmap img = Accord.Imaging.Image.Clone(Resources.sample_trans);

            // Extract the interest points
            var surf = new SpeededUpRobustFeaturesDetector(0.0002f, 5, 2);

            surf.ComputeDescriptors = SpeededUpRobustFeatureDescriptorType.None;
            surf.ComputeOrientation = true;

            List <SpeededUpRobustFeaturePoint> points = surf.ProcessImage(img);

            Assert.AreEqual(8, points.Count);

            SpeededUpRobustFeaturePoint p;

            p = points[0];
            Assert.AreEqual(0, p.Laplacian);
            Assert.AreEqual(25.3803387, p.X, 1e-2);
            Assert.AreEqual(14.7987738, p.Y, 1e-2);
            Assert.AreEqual(1.98713827, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(4.78528404, p.Orientation, 1e-2);
            Assert.IsNull(p.Descriptor);

            p = points[1];
            Assert.AreEqual(1, p.Laplacian, 1e-2);
            Assert.AreEqual(20.4856224, p.X, 1e-2);
            Assert.AreEqual(20.4817181, p.Y, 1e-2);
            Assert.AreEqual(1.90549147, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(4.89748764, p.Orientation, 1e-2);
            Assert.IsNull(p.Descriptor);

            p = points[2];
            Assert.AreEqual(0, p.Laplacian, 1e-2);
            Assert.AreEqual(14.7991896, p.X, 1e-2);
            Assert.AreEqual(25.3776169, p.Y, 1e-2);
            Assert.AreEqual(1.9869982, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(3.07735944, p.Orientation, 1e-2);
            Assert.IsNull(p.Descriptor);

            p = points[6];
            Assert.AreEqual(1, p.Laplacian, 1e-2);
            Assert.AreEqual(22.4346638, p.X, 1e-2);
            Assert.AreEqual(41.4026527, p.Y, 1e-2);
            Assert.AreEqual(2.83586049, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(3.13142157, p.Orientation, 1e-2);
            Assert.IsNull(p.Descriptor);
        }
        public void ProcessImageTest4()
        {
            var bitmaps = BagOfVisualWordsTest.GetImages();
            var surf    = new SpeededUpRobustFeaturesDetector();

            int current = 0;

            foreach (Bitmap img in bitmaps)
            {
                List <SpeededUpRobustFeaturePoint> expected;
                List <SpeededUpRobustFeaturePoint> actual;

                // Create OpenSURF detector by Chris Evans
                {
                    // Create Integral Image
                    OpenSURFcs.IntegralImage iimg = OpenSURFcs.IntegralImage.FromImage(img);

                    // Extract the interest points
                    var pts = OpenSURFcs.FastHessian.getIpoints(0.0002f, 5, 2, iimg);

                    // Describe the interest points
                    OpenSURFcs.SurfDescriptor.DecribeInterestPoints(pts, false, false, iimg);

                    expected = new List <SpeededUpRobustFeaturePoint>();
                    foreach (var p in pts)
                    {
                        expected.Add(new SpeededUpRobustFeaturePoint(p.x, p.y, p.scale,
                                                                     p.laplacian, p.orientation, p.response));
                    }
                }

                // Create Accord.NET SURF detector (based on OpenSURF by Chris Evans)
                {
                    actual = surf.ProcessImage(img);
                }

                var img1 = new FeaturesMarker(actual).Apply(img);
                var img2 = new FeaturesMarker(expected).Apply(img);

                // ImageBox.Show(new Concatenate(img1).Apply(img2), PictureBoxSizeMode.Zoom);


                current++;

                for (int i = 0; i < expected.Count; i++)
                {
                    var e = expected[i];
                    var a = actual[i];
                    Assert.AreEqual(e, a);
                }
            }
        }
Example #9
        private void btnSurf_Click(object sender, EventArgs e)
        {
            // Step 1: Detect feature points using Surf Corners Detector
            SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector();

            surfPoints1 = surf.ProcessImage(img1).ToArray();
            surfPoints2 = surf.ProcessImage(img2).ToArray();

            // Show the marked points in the original images
            Bitmap img1mark = new FeaturesMarker(surfPoints1).Apply(img1);
            Bitmap img2mark = new FeaturesMarker(surfPoints2).Apply(img2);

            // Concatenate the two images together in a single image (just to show on screen)
            Concatenate concatenate = new Concatenate(img1mark);
            pictureBox.Image = concatenate.Apply(img2mark);
        }
Example #10
        public void ZeroWidthTest()
        {
            Bitmap img = Accord.Imaging.Image.Clone(Resources.surf_bug_1);

            var iimg     = OpenSURFcs.IntegralImage.FromImage(img);
            var expected = OpenSURFcs.FastHessian.getIpoints(0.0002f, 5, 2, iimg);

            OpenSURFcs.SurfDescriptor.DecribeInterestPoints(expected, false, false, iimg);


            var surf   = new SpeededUpRobustFeaturesDetector();
            var actual = surf.ProcessImage(img);

            Assert.AreEqual(0, expected.Count);
            Assert.AreEqual(0, actual.Count);
        }
Example #11
        private void btnSurf_Click(object sender, EventArgs e)
        {
            // Step 1: Detect feature points using Surf Corners Detector
            SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector();

            surfPoints1 = surf.Transform(img1);
            surfPoints2 = surf.Transform(img2);

            // Show the marked points in the original images
            Bitmap img1mark = new FeaturesMarker(surfPoints1).Apply(img1);
            Bitmap img2mark = new FeaturesMarker(surfPoints2).Apply(img2);

            // Concatenate the two images together in a single image (just to show on screen)
            Concatenate concatenate = new Concatenate(img1mark);

            pictureBox.Image = concatenate.Apply(img2mark);
        }
Example #12
        private void DetectButton_OnClick(object sender, RoutedEventArgs e)
        {
            var threshold = (float)Math.Pow(10.0, this.LogThresholdSlider.Value);
            var octaves   = (int)this.OctaveSlider.Value;
            var initial   = (int)this.InitialSlider.Value;

            // Create a new SURF Features Detector using the given parameters
            var surf = new SpeededUpRobustFeaturesDetector(threshold, octaves, initial);

            var points = surf.ProcessImage(this.lenna);

            // Create a new AForge Corner Marker Filter
            var features = new FeaturesMarker(points);

            // Apply the filter and display it on a picturebox
            this.LenaImage.Source = (BitmapSource)features.Apply(this.lenna);
        }
Example #13
    protected void drawSurfFeatures(List <Bitmap> imgs)
    {
        List <SpeededUpRobustFeaturePoint[]> surfPoints = new List <SpeededUpRobustFeaturePoint[]>();
        //Calculate all the Surf Points
        SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector();

        foreach (Bitmap img in imgs)
        {
            surfPoints.Add(surf.ProcessImage(img).ToArray());
        }

        //Draw and show all the SURF points
        for (int i = 0; i < imgs.Count; i++)
        {
            showImage(new PointsMarker(surfPoints[i]).Apply(imgs[i]));
        }
    }
Example #14
    protected void surfRansacBlend(List <Bitmap> imgs)
    {
        MatrixH homography;

        List <SpeededUpRobustFeaturePoint[]> surfPoints = new List <SpeededUpRobustFeaturePoint[]>();
        //Calculate all the Surf Points
        SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector();

        for (int i = 0; i < imgs.Count; i++)
        {
            surfPoints.Add(surf.ProcessImage(imgs[i]).ToArray());
        }


        Bitmap final = imgs[0];

        for (int i = 1; i < imgs.Count; i++)
        {
            SpeededUpRobustFeaturePoint[] surfFinal = surf.ProcessImage(final).ToArray();

            //Correlate the SURF points between images
            KNearestNeighborMatching matcher = new KNearestNeighborMatching(5);
            matcher.Threshold = 0.05;

            IntPoint[][] matches = matcher.Match(surfFinal, surfPoints[i]);

            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.015, 1);
            homography = ransac.Estimate(matches[0], matches[1]);

            Blend blend = new Blend(homography, final);
            blend.Gradient = true;
            final          = blend.Apply(imgs[i]);
        }

        //Smooth/Sharpen if I wanted to
        AForge.Imaging.Filters.Sharpen filter = new AForge.Imaging.Filters.Sharpen();
        //AForge.Imaging.Filters.GaussianBlur filter = new AForge.Imaging.Filters.GaussianBlur(5);
        //filter.ApplyInPlace(final);

        showImage(final);
    }
Example #15
        private void button1_Click(object sender, EventArgs e)
        {
            // Open an image
            Bitmap lenna = Properties.Resources.lena512;

            float threshold = (float)numThreshold.Value;
            int   octaves   = (int)numOctaves.Value;
            int   initial   = (int)numInitial.Value;

            // Create a new SURF Features Detector using the given parameters
            SpeededUpRobustFeaturesDetector surf =
                new SpeededUpRobustFeaturesDetector(threshold, octaves, initial);

            List <SpeededUpRobustFeaturePoint> points = surf.ProcessImage(lenna);

            // Create a new AForge Corner Marker Filter
            FeaturesMarker features = new FeaturesMarker(points);

            // Apply the filter and display it on a picturebox
            pictureBox1.Image = features.Apply(lenna);
        }
Example #16
        private void button1_Click(object sender, EventArgs e)
        {
            // Open an image
            Bitmap lenna = Properties.Resources.lena512;

            float threshold = (float)numThreshold.Value;
            int octaves = (int)numOctaves.Value;
            int initial = (int)numInitial.Value;

            // Create a new SURF Features Detector using the given parameters
            SpeededUpRobustFeaturesDetector surf =
                new SpeededUpRobustFeaturesDetector(threshold, octaves, initial);

            List<SpeededUpRobustFeaturePoint> points = surf.ProcessImage(lenna);

            // Create a new AForge Corner Marker Filter
            FeaturesMarker features = new FeaturesMarker(points);

            // Apply the filter and display it on a picturebox
            pictureBox1.Image = features.Apply(lenna);
        }
Example #17
        private void button1_Click(object sender, EventArgs e)
        {
            // Open an image
            Bitmap lenna = Surf.Properties.Resources.lena512;

            // Create a new SURF Features Detector using the given parameters
            SpeededUpRobustFeaturesDetector surf =
                new SpeededUpRobustFeaturesDetector(0.0002f, 5, 2);

            var points = surf.ProcessImage(lenna);

            // Get the SURF Features Descriptor from the detector
            SurfDescriptor descriptor = surf.GetDescriptor();
            descriptor.Describe(points);

            // Create a new AForge Corner Marker Filter
            FeaturesMarker features = new FeaturesMarker(points.ToArray());

            // Apply the filter and display it on a picturebox
            pictureBox1.Image = features.Apply(lenna);
        }
Example #18
        private void button1_Click(object sender, EventArgs e)
        {
            // Open an image
            Bitmap lenna = Surf.Properties.Resources.lena512;

            // Create a new SURF Features Detector using the given parameters
            SpeededUpRobustFeaturesDetector surf =
                new SpeededUpRobustFeaturesDetector(0.0002f, 5, 2);

            var points = surf.ProcessImage(lenna);

            // Get the SURF Features Descriptor from the detector
            SurfDescriptor descriptor = surf.GetDescriptor();

            descriptor.Describe(points);

            // Create a new AForge Corner Marker Filter
            FeaturesMarker features = new FeaturesMarker(points.ToArray());

            // Apply the filter and display it on a picturebox
            pictureBox1.Image = features.Apply(lenna);
        }
Example #19
    protected void drawSurfFeaturesCorrelations(List <Bitmap> imgs)
    {
        List <SpeededUpRobustFeaturePoint[]> surfPoints = new List <SpeededUpRobustFeaturePoint[]>();
        //Calculate all the Surf Points
        SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector();

        foreach (Bitmap img in imgs)
        {
            surfPoints.Add(surf.ProcessImage(img).ToArray());
        }

        //Map them and draw them!
        Bitmap surfImg = imgs[0];

        for (int i = 0; i < imgs.Count - 1; i++)
        {
            KNearestNeighborMatching matcher = new KNearestNeighborMatching(5);
            matcher.Threshold = 0.005;
            IntPoint[][] matches = matcher.Match(surfPoints[i], surfPoints[i + 1]);

            Concatenate concat = new Concatenate(surfImg);
            Bitmap      img    = concat.Apply(imgs[i + 1]);

            Color color = Color.White;
            if (i % 3 == 1)
            {
                color = Color.OrangeRed;
            }
            if (i % 3 == 2)
            {
                color = Color.Blue;
            }
            PairsMarker pairs = new PairsMarker(matches[0].Apply(p => new IntPoint(p.X + surfImg.Width - imgs[0].Width, p.Y)), matches[1].Apply(p => new IntPoint(p.X + surfImg.Width, p.Y)), color);
            surfImg = pairs.Apply(img);
        }

        showImage(surfImg);
    }
Example #20
        public void Example1()
        {
            // Let's start with two pictures that have been
            // taken from slightly different points of view:
            //
            Bitmap img1 = Resources.dc_left;
            Bitmap img2 = Resources.dc_right;

            // Those pictures are shown below:
            ImageBox.Show(img1, PictureBoxSizeMode.Zoom, 640, 480);
            ImageBox.Show(img2, PictureBoxSizeMode.Zoom, 640, 480);


            // Step 1: Detect feature points using Surf Corners Detector
            var surf = new SpeededUpRobustFeaturesDetector();

            var points1 = surf.ProcessImage(img1);
            var points2 = surf.ProcessImage(img2);

            // Step 2: Match feature points using a k-NN
            var matcher = new KNearestNeighborMatching(5);
            var matches = matcher.Match(points1, points2);

            // Step 3: Create the matrix using a robust estimator
            var     ransac           = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homographyMatrix = ransac.Estimate(matches);

            // Step 4: Project and blend using the homography
            Blend blend = new Blend(homographyMatrix, img1);


            // Compute the blending algorithm
            Bitmap result = blend.Apply(img2);

            // Show on screen
            ImageBox.Show(result, PictureBoxSizeMode.Zoom, 640, 480);
        }
Example #21
        public void Example1()
        {
            // Let's start with two pictures that have been
            // taken from slightly different points of view:
            //
            Bitmap img1 = Resources.dc_left;
            Bitmap img2 = Resources.dc_right;

            // Those pictures are shown below:
            ImageBox.Show(img1, PictureBoxSizeMode.Zoom, 640, 480);
            ImageBox.Show(img2, PictureBoxSizeMode.Zoom, 640, 480);


            // Step 1: Detect feature points using Surf Corners Detector
            var surf = new SpeededUpRobustFeaturesDetector();

            var points1 = surf.ProcessImage(img1);
            var points2 = surf.ProcessImage(img2);

            // Step 2: Match feature points using a k-NN
            var matcher = new KNearestNeighborMatching(5);
            var matches = matcher.Match(points1, points2);

            // Step 3: Create the matrix using a robust estimator
            var ransac = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homographyMatrix = ransac.Estimate(matches);

            // Step 4: Project and blend using the homography
            Blend blend = new Blend(homographyMatrix, img1);


            // Compute the blending algorithm
            Bitmap result = blend.Apply(img2);

            // Show on screen
            ImageBox.Show(result, PictureBoxSizeMode.Zoom, 640, 480);
        }
Example #22
        public void IndexFiles(FileInfo[] imageFiles, System.ComponentModel.BackgroundWorker IndexBgWorker,
                               Action <string> logWriter,
                               SurfSettings surfSetting = null)
        {
            //For time profiling
            long readingTime, indexingTime = 0, saveingTime = 0;

            #region Surf Detector Region
            double hessianThresh       = 500;
            double uniquenessThreshold = 0.8;

            if (surfSetting != null)
            {
                hessianThresh       = surfSetting.HessianThresh.Value;
                uniquenessThreshold = surfSetting.UniquenessThreshold.Value;
            }
            float hessianThreshold2 = (float)hessianThresh / 1000000;
            SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector(hessianThreshold2);
            #endregion

            int rows = 0;

            Stopwatch sw1;

            sw1 = Stopwatch.StartNew();
            logWriter("Index started...");

            string fullFileName = Path.Combine(DirectoryHelper.SaveDirectoryPath, "SurfAccordLinear.bin");
            if (File.Exists(fullFileName))
            {
                File.Delete(fullFileName);
            }
            using (FileStream fs = new FileStream(fullFileName, FileMode.Create, FileAccess.Write, FileShare.None))
            {
                System.Runtime.Serialization.Formatters.Binary.BinaryFormatter bf
                    = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
                int totalFileCount = imageFiles.Length;
                for (int i = 0; i < totalFileCount; i++)
                {
                    var fi = imageFiles[i];

                    using (Bitmap observerImage = (Bitmap)Image.FromFile(fi.FullName))
                    {
                        List <SpeededUpRobustFeaturePoint> observerImageSurfPoints = surf.ProcessImage(observerImage);

                        if (observerImageSurfPoints.Count > 4)
                        {
                            SURFAccordRecord3 record = new SURFAccordRecord3
                            {
                                Id              = i,
                                ImageName       = fi.Name,
                                ImagePath       = fi.FullName,
                                SurfDescriptors = observerImageSurfPoints
                            };
                            bf.Serialize(fs, record);
                        }
                        else
                        {
                            Debug.WriteLine(fi.Name + " skipped from index because it didn't have significant features");
                        }
                    }
                    IndexBgWorker.ReportProgress(i);
                }
                fs.Close();
            }

            sw1.Stop();
            readingTime = sw1.ElapsedMilliseconds;
            logWriter(string.Format("Reading Surb Complete, it tooked {0} ms. Saving Repository...", readingTime));


            logWriter(string.Format("Reading: {0} ms, Indexing: {1} ms, Saving Indexed data {2}", readingTime, indexingTime, saveingTime));
        }
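
The index written by the method above is just a sequence of records appended with BinaryFormatter. Reading it back is therefore a loop that deserializes until the end of the stream; the following is a sketch only, assuming the same SURFAccordRecord3 type and file location used when indexing:

        public List<SURFAccordRecord3> LoadIndex()
        {
            // Sketch only: reads back the records written sequentially by IndexFiles above.
            var records = new List<SURFAccordRecord3>();
            string fullFileName = Path.Combine(DirectoryHelper.SaveDirectoryPath, "SurfAccordLinear.bin");

            using (FileStream fs = new FileStream(fullFileName, FileMode.Open, FileAccess.Read, FileShare.Read))
            {
                System.Runtime.Serialization.Formatters.Binary.BinaryFormatter bf
                    = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();

                // Records were appended one after another, so keep deserializing
                // until the whole stream has been consumed.
                while (fs.Position < fs.Length)
                {
                    records.Add((SURFAccordRecord3)bf.Deserialize(fs));
                }
            }
            return records;
        }
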
Example #23
        public void ProcessImageTest3()
        {
            // Load an Image
            Bitmap img = Accord.Imaging.Image.Clone(Resources.sample_trans);

            // Extract the interest points
            var surf = new SpeededUpRobustFeaturesDetector(0.0002f, 5, 2);

            surf.ComputeDescriptors = SpeededUpRobustFeatureDescriptorType.Extended;
            surf.ComputeOrientation = false;

            List <SpeededUpRobustFeaturePoint> points = surf.ProcessImage(img);

            Assert.AreEqual(8, points.Count);

            SpeededUpRobustFeaturePoint p;

            p = points[0];
            Assert.AreEqual(0, p.Laplacian);
            Assert.AreEqual(25.3803387, p.X, 1e-2);
            Assert.AreEqual(14.7987738, p.Y, 1e-2);
            Assert.AreEqual(1.98713827, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(0.0, p.Orientation, 1e-2);
            Assert.AreEqual(128, p.Descriptor.Length);
            Assert.AreEqual(0.026510688411631893, p.Descriptor[23], 1e-10);
            Assert.AreEqual(0.28209917003131696, p.Descriptor[42], 1e-10);

            p = points[1];
            Assert.AreEqual(1, p.Laplacian, 1e-2);
            Assert.AreEqual(20.4856224, p.X, 1e-2);
            Assert.AreEqual(20.4817181, p.Y, 1e-2);
            Assert.AreEqual(1.90549147, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(0, p.Orientation, 1e-2);
            Assert.AreEqual(128, p.Descriptor.Length);
            Assert.AreEqual(0.0017327366915338997, p.Descriptor[23], 1e-10);
            Assert.AreEqual(0.011412382779922381, p.Descriptor[54], 1e-10);

            p = points[2];
            Assert.AreEqual(0, p.Laplacian, 1e-2);
            Assert.AreEqual(14.7991896, p.X, 1e-2);
            Assert.AreEqual(25.3776169, p.Y, 1e-2);
            Assert.AreEqual(1.9869982, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(0.0, p.Orientation, 1e-2);
            Assert.AreEqual(128, p.Descriptor.Length);
            Assert.AreEqual(0.013389417853018544, p.Descriptor[23], 1e-10);
            Assert.AreEqual(0.0000054046300690336785, p.Descriptor[12], 1e-10);

            p = points[6];
            Assert.AreEqual(1, p.Laplacian, 1e-2);
            Assert.AreEqual(22.4346638, p.X, 1e-2);
            Assert.AreEqual(41.4026527, p.Y, 1e-2);
            Assert.AreEqual(2.83586049, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(0.0, p.Orientation, 1e-2);
            Assert.AreEqual(128, p.Descriptor.Length);
            Assert.AreEqual(0.059752032324088523, p.Descriptor[23], 1e-10);
            Assert.AreEqual(-0.0000056593837766382935, p.Descriptor[12], 1e-10);
        }
Example #24
        public void IndexFiles(FileInfo[] imageFiles, System.ComponentModel.BackgroundWorker IndexBgWorker,
            Action<string> logWriter,
            SurfSettings surfSetting = null)
        {
            //For time profiling
            long readingTime, indexingTime = 0 , saveingTime = 0;

            #region Surf Detector Region
            double hessianThresh = 500;
            double uniquenessThreshold = 0.8;

            if (surfSetting != null)
            {
                hessianThresh = surfSetting.HessianThresh.Value;
                uniquenessThreshold = surfSetting.UniquenessThreshold.Value;
            }
            float hessianThreshold2 = (float)hessianThresh / 1000000;
            SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector(hessianThreshold2);
            #endregion

            int rows = 0;

            Stopwatch sw1;

            sw1 = Stopwatch.StartNew();
            logWriter("Index started...");

            string fullFileName = Path.Combine(DirectoryHelper.SaveDirectoryPath, "SurfAccordLinear.bin");
            if (File.Exists(fullFileName))
                File.Delete(fullFileName);
            using (FileStream fs = new FileStream(fullFileName, FileMode.Create, FileAccess.Write, FileShare.None))
            {
                System.Runtime.Serialization.Formatters.Binary.BinaryFormatter bf
                    = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
                int totalFileCount = imageFiles.Length;
                for (int i = 0; i < totalFileCount; i++)
                {
                    var fi = imageFiles[i];

                    using (Bitmap observerImage = (Bitmap)Image.FromFile(fi.FullName))
                    {
                        List<SpeededUpRobustFeaturePoint> observerImageSurfPoints = surf.ProcessImage(observerImage);

                        if (observerImageSurfPoints.Count > 4)
                        {
                            SURFAccordRecord3 record = new SURFAccordRecord3
                            {
                                Id = i,
                                ImageName = fi.Name,
                                ImagePath = fi.FullName,
                                SurfDescriptors = observerImageSurfPoints
                            };
                            bf.Serialize(fs, record);
                        }
                        else
                        {
                            Debug.WriteLine(fi.Name + " skipped from index because it didn't have significant features");
                        }
                    }
                    IndexBgWorker.ReportProgress(i);
                }
                fs.Close();
            }

            sw1.Stop();
            readingTime = sw1.ElapsedMilliseconds;
            logWriter(string.Format("Reading Surb Complete, it tooked {0} ms. Saving Repository...", readingTime));

            logWriter(string.Format("Reading: {0} ms, Indexing: {1} ms, Saving Indexed data {2}", readingTime, indexingTime, saveingTime));
        }
        public void ProcessImageTest3()
        {
            // Load an Image
            Bitmap img = Accord.Imaging.Image.Clone(Properties.Resources.sample_trans);

            // Extract the interest points
            var surf = new SpeededUpRobustFeaturesDetector(0.0002f, 5, 2);

            surf.ComputeDescriptors = SpeededUpRobustFeatureDescriptorType.Extended;
            surf.ComputeOrientation = false;

            List<SpeededUpRobustFeaturePoint> points = surf.ProcessImage(img);

            Assert.AreEqual(8, points.Count);

            SpeededUpRobustFeaturePoint p;

            p = points[0];
            Assert.AreEqual(0, p.Laplacian);
            Assert.AreEqual(25.3803387, p.X, 1e-2);
            Assert.AreEqual(14.7987738, p.Y, 1e-2);
            Assert.AreEqual(1.98713827, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(0.0, p.Orientation, 1e-2);
            Assert.AreEqual(128, p.Descriptor.Length);
            Assert.AreEqual(0.026510688411631893, p.Descriptor[23], 1e-10);
            Assert.AreEqual(0.28209917003131696, p.Descriptor[42], 1e-10);

            p = points[1];
            Assert.AreEqual(1, p.Laplacian, 1e-2);
            Assert.AreEqual(20.4856224, p.X, 1e-2);
            Assert.AreEqual(20.4817181, p.Y, 1e-2);
            Assert.AreEqual(1.90549147, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(0, p.Orientation, 1e-2);
            Assert.AreEqual(128, p.Descriptor.Length);
            Assert.AreEqual(0.0017327366915338997, p.Descriptor[23], 1e-10);
            Assert.AreEqual(0.011412382779922381, p.Descriptor[54], 1e-10);

            p = points[2];
            Assert.AreEqual(0, p.Laplacian, 1e-2);
            Assert.AreEqual(14.7991896, p.X, 1e-2);
            Assert.AreEqual(25.3776169, p.Y, 1e-2);
            Assert.AreEqual(1.9869982, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(0.0, p.Orientation, 1e-2);
            Assert.AreEqual(128, p.Descriptor.Length);
            Assert.AreEqual(0.013389417853018544, p.Descriptor[23], 1e-10);
            Assert.AreEqual(0.0000054046300690336785, p.Descriptor[12], 1e-10);

            p = points[6];
            Assert.AreEqual(1, p.Laplacian, 1e-2);
            Assert.AreEqual(22.4346638, p.X, 1e-2);
            Assert.AreEqual(41.4026527, p.Y, 1e-2);
            Assert.AreEqual(2.83586049, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(0.0, p.Orientation, 1e-2);
            Assert.AreEqual(128, p.Descriptor.Length);
            Assert.AreEqual(0.059752032324088523, p.Descriptor[23], 1e-10);
            Assert.AreEqual(-0.0000056593837766382935, p.Descriptor[12], 1e-10);
        }
 public FeaturePointMotionDetector()
 {
     _surf       = new SpeededUpRobustFeaturesDetector();
     _matcher    = new KNearestNeighborMatching(5);
     _prevPoints = null;
 }
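
The constructor above only wires up the detector, the k-NN matcher and the previous-frame buffer. A hypothetical per-frame method built from the same calls used elsewhere on this page (its name, return type and the element type of _prevPoints are assumptions, not part of the original class) might look like:

 // Hypothetical sketch, not part of the original class: matches the SURF points of the
 // current frame against those of the previous frame using the fields set up above.
 public IntPoint[][] ProcessFrame(Bitmap frame)
 {
     SpeededUpRobustFeaturePoint[] points = _surf.ProcessImage(frame).ToArray();

     IntPoint[][] matches = null;
     if (_prevPoints != null)
     {
         // matches[0] holds the matched points from the previous frame,
         // matches[1] the corresponding points from the current frame
         matches = _matcher.Match(_prevPoints, points);
     }

     _prevPoints = points;
     return matches;
 }
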
Example #27
        private void cornerDetectorToolStripMenuItem_Click(object sender, EventArgs e)
        {
            Bitmap image = new Bitmap(System.Drawing.Image.FromFile(@"C:\OSU3\UAS\Training Videos\ZoltanTestSets\test\DJI_0247.rd.JPG"));

            float threshold = 0.001f;

            /*int octaves = (int)numOctaves.Value;
            *  int initial = (int)numInitial.Value;*/

            ResizeNearestNeighbor filter = new ResizeNearestNeighbor(600, 400);

            image = filter.Apply(image);

            // Create a new SURF Features Detector using the given parameters
            SpeededUpRobustFeaturesDetector surf =
                new SpeededUpRobustFeaturesDetector(threshold);

            List <SpeededUpRobustFeaturePoint> points = surf.ProcessImage(image);

            // Create a new AForge Corner Marker Filter
            FeaturesMarker features = new FeaturesMarker(points, 5);

            // Apply the filter and display it on a picturebox
            Bitmap image_feature = features.Apply(image);


            SingleImageWnd form = new SingleImageWnd(image_feature);

            form.MdiParent = this;
            form.Show();

            /*Bitmap image = new Bitmap(System.Drawing.Image.FromFile(@"C:\OSU3\CAR\Reports\Targets\Target1\DSC00217.JPG"));
             * Bitmap image2 = new Bitmap(image.Width, image.Height);
             *
             * using (Graphics g = Graphics.FromImage(image2))
             * {
             *  g.DrawImage(image, 0, 0);
             *
             *  Grayscale filter = new Grayscale(.33, .33, .33);
             *  image = filter.Apply(image);
             *
             *  double sigma = 2;
             *  float k = 4 / 10;
             *  float threshold = 500;
             *
             *  // Create a new Harris Corners Detector using the given parameters
             *  HarrisCornersDetector harris = new HarrisCornersDetector(k)
             *  {
             *      Measure = HarrisCornerMeasure.Harris, //: HarrisCornerMeasure.Noble,
             *      Threshold = threshold,
             *      Sigma = sigma
             *  };
             *
             *  List<IntPoint> corners = harris.ProcessImage(image);
             *
             *  foreach (IntPoint corner in corners)
             *  {
             *      //A circle with Red Color and 2 Pixel wide line
             *      //gf.DrawEllipse(new Pen(Color.Red, 2), new Rectangle(0, 0, 200, 200));
             *      DrawCircle(g, new Pen(Color.Red, 1), corner.X, corner.Y, 1);
             *  }
             *
             * }
             *
             * ImageForm form = new ImageForm(image2);
             * form.MdiParent = this;
             * form.Show();*/
        }
        public void ProcessImageTest3()
        {
            // Load an Image
            Bitmap img = Properties.Resources.sample_trans;

            // Extract the interest points
            var surf = new SpeededUpRobustFeaturesDetector(0.0002f, 5, 2);

            surf.ComputeDescriptors = SpeededUpRobustFeatureDescriptorType.Extended;
            surf.ComputeOrientation = false;

            List<SpeededUpRobustFeaturePoint> points = surf.ProcessImage(img);

            Assert.AreEqual(8, points.Count);

            SpeededUpRobustFeaturePoint p;

            p = points[0];
            Assert.AreEqual(0, p.Laplacian);
            Assert.AreEqual(25.3803387, p.X, 1e-2);
            Assert.AreEqual(14.7987738, p.Y, 1e-2);
            Assert.AreEqual(1.98713827, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(0.0, p.Orientation, 1e-2);
            Assert.AreEqual(128, p.Descriptor.Length);
            Assert.AreEqual(0.026527178478172982, p.Descriptor[23], 1e-10);
            Assert.AreEqual(0.28221266818142571, p.Descriptor[42], 1e-10);

            p = points[1];
            Assert.AreEqual(1, p.Laplacian, 1e-2);
            Assert.AreEqual(20.4856224, p.X, 1e-2);
            Assert.AreEqual(20.4817181, p.Y, 1e-2);
            Assert.AreEqual(1.90549147, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(0, p.Orientation, 1e-2);
            Assert.AreEqual(128, p.Descriptor.Length);
            Assert.AreEqual(0.0017332996868934826, p.Descriptor[23], 1e-10);
            Assert.AreEqual(0.01141609085546454, p.Descriptor[54], 1e-10);

            p = points[2];
            Assert.AreEqual(0, p.Laplacian, 1e-2);
            Assert.AreEqual(14.7991896, p.X, 1e-2);
            Assert.AreEqual(25.3776169, p.Y, 1e-2);
            Assert.AreEqual(1.9869982, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(0.0, p.Orientation, 1e-2);
            Assert.AreEqual(128, p.Descriptor.Length);
            Assert.AreEqual(0.013397918304161798, p.Descriptor[23], 1e-10);
            Assert.AreEqual(0.0000054080612707747483, p.Descriptor[12], 1e-10);

            p = points[6];
            Assert.AreEqual(1, p.Laplacian, 1e-2);
            Assert.AreEqual(22.4346638, p.X, 1e-2);
            Assert.AreEqual(41.4026527, p.Y, 1e-2);
            Assert.AreEqual(2.83586049, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(0.0, p.Orientation, 1e-2);
            Assert.AreEqual(128, p.Descriptor.Length);
            Assert.AreEqual(0.059789486280406236, p.Descriptor[23], 1e-10);
            Assert.AreEqual(-0.0000056629312093282088, p.Descriptor[12], 1e-10);
        }
Example #29
        public void ProcessImageTest()
        {
            // Load an Image
            Bitmap img = Properties.Resources.sample_trans;

            // Extract the interest points
            var surf = new SpeededUpRobustFeaturesDetector(0.0002f, 5, 2);
            List <SurfPoint> points = surf.ProcessImage(img);

            // Describe the interest points
            SurfDescriptor descriptor = surf.GetDescriptor();

            descriptor.Describe(points);

            Assert.AreEqual(8, points.Count);

            SurfPoint p;

            p = points[0];
            Assert.AreEqual(0, p.Laplacian);
            Assert.AreEqual(25.3803387, p.X, 1e-2);
            Assert.AreEqual(14.7987738, p.Y, 1e-2);
            Assert.AreEqual(1.98713827, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(4.78528404, p.Orientation, 1e-2);
            Assert.AreEqual(64, p.Descriptor.Length);
            Assert.AreEqual(0.22572951, p.Descriptor[23], 1e-2);
            Assert.AreEqual(0.0962982625, points[1].Descriptor[42], 1e-2);

            p = points[1];
            Assert.AreEqual(1, p.Laplacian, 1e-2);
            Assert.AreEqual(20.4856224, p.X, 1e-2);
            Assert.AreEqual(20.4817181, p.Y, 1e-2);
            Assert.AreEqual(1.90549147, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(4.89748764, p.Orientation, 1e-2);
            Assert.AreEqual(64, p.Descriptor.Length);
            Assert.AreEqual(0.14823015, p.Descriptor[23], 1e-2);
            Assert.AreEqual(0.0861000642, p.Descriptor[54], 1e-2);

            p = points[2];
            Assert.AreEqual(0, p.Laplacian, 1e-2);
            Assert.AreEqual(14.7991896, p.X, 1e-2);
            Assert.AreEqual(25.3776169, p.Y, 1e-2);
            Assert.AreEqual(1.9869982, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(3.07735944, p.Orientation, 1e-2);
            Assert.AreEqual(64, p.Descriptor.Length);
            Assert.AreEqual(0.209485427, p.Descriptor[23], 1e-2);
            Assert.AreEqual(0.0112418151, p.Descriptor[12], 1e-2);

            p = points[6];
            Assert.AreEqual(1, p.Laplacian, 1e-2);
            Assert.AreEqual(22.4346638, p.X, 1e-2);
            Assert.AreEqual(41.4026527, p.Y, 1e-2);
            Assert.AreEqual(2.83586049, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(3.13142157, p.Orientation, 1e-2);
            Assert.AreEqual(64, p.Descriptor.Length);
            Assert.AreEqual(0.0467314087, p.Descriptor[23], 1e-2);
            Assert.AreEqual(0.0266618263, p.Descriptor[12], 1e-2);


            descriptor.Extended  = true;
            descriptor.Invariant = false;
            descriptor.Describe(points);

            p = points[5];
            Assert.AreEqual(1, p.Laplacian, 1e-3);
            Assert.AreEqual(41.4027748, p.X, 1e-3);
            Assert.AreEqual(22.4343891, p.Y, 1e-3);
            Assert.AreEqual(2.83486962, p.Scale, 1e-3);
            Assert.AreEqual(0.0, p.Response, 1e-3);
            Assert.AreEqual(4.72728586, p.Orientation, 1e-3);
            Assert.AreEqual(0.00786296651, p.Descriptor[67], 1e-3);
            Assert.AreEqual(-0.0202884115, p.Descriptor[97], 1e-2);
        }
Example #30
        private Bitmap CompareAndDrawImage(Bitmap modelImage, Bitmap observedImage, SurfSettings setting)
        {
            Stopwatch watch1 = new Stopwatch();
            Stopwatch watch2 = new Stopwatch();

            Bitmap returnBitmap;

            watch2.Start();
            watch1.Reset(); watch1.Start();
            double hessianThreshold = setting.HessianThresh.HasValue ? setting.HessianThresh.Value : 500;
            float hessianThreshold2 = (float)hessianThreshold / 1000000;

            Debug.WriteLine("hessianThreshold2: {0}", hessianThreshold2);
            SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector(hessianThreshold2);
            List<SpeededUpRobustFeaturePoint> surfPoints1 = surf.ProcessImage(modelImage);
            List<SpeededUpRobustFeaturePoint> surfPoints2 = surf.ProcessImage(observedImage);

            Debug.WriteLine("Surf points count: {0}", surfPoints1.Count);
            Debug.WriteLine("Surf points count: {0}", surfPoints2.Count);
            //long memoryFootprint = MemorySize.GetBlobSizeinKb(surfPoints2);
            //Debug.WriteLine("Surf extractor: {0} kb", memoryFootprint);

            watch1.Stop();
            Debug.WriteLine("Surf Detection tooked {0} ms", watch1.ElapsedMilliseconds);

            watch1.Reset(); watch1.Start();
            // Show the marked points in the original images
            Bitmap img1mark = new FeaturesMarker(surfPoints1, 2).Apply(modelImage);
            Bitmap img2mark = new FeaturesMarker(surfPoints2, 2).Apply(observedImage);
            // Concatenate the two images together in a single image (just to show on screen)
            Concatenate concatenate = new Concatenate(img1mark);
            returnBitmap = concatenate.Apply(img2mark);
            watch1.Stop();
            Debug.WriteLine("Surf point plotting tooked {0} ms", watch1.ElapsedMilliseconds);

            //watch1.Reset(); watch1.Start();
            //List<IntPoint>[] coretionalMatches = getMatches(surfPoints1, surfPoints2);
            //watch1.Stop();
            //Debug.WriteLine("Correctional Match tooked {0} ms", watch1.ElapsedMilliseconds);

            //// Get the two sets of points
            //IntPoint[] correlationPoints11 = coretionalMatches[0].ToArray();
            //IntPoint[] correlationPoints22 = coretionalMatches[1].ToArray();

            //Debug.WriteLine("Correclation points count: {0}", correlationPoints11.Length);
            //Debug.WriteLine("Correclation points count: {0}", correlationPoints22.Length);

            Debug.WriteLine("Threshold: {0}", setting.UniquenessThreshold.Value);
            watch1.Reset(); watch1.Start();
            // Step 2: Match feature points using a k-NN
            KNearestNeighborMatching matcher = new KNearestNeighborMatching(2);
            matcher.Threshold = setting.UniquenessThreshold.Value;
            IntPoint[][] matches = matcher.Match(surfPoints1, surfPoints2);
            watch1.Stop();
            Debug.WriteLine("Knn Match tooked {0} ms", watch1.ElapsedMilliseconds);

            // Get the two sets of points
            IntPoint[] correlationPoints1 = matches[0];
            IntPoint[] correlationPoints2 = matches[1];

            Debug.WriteLine("Knn points count: {0}", correlationPoints1.Length);
            Debug.WriteLine("Knn points count: {0}", correlationPoints2.Length);

            //watch1.Reset(); watch1.Start();
            //// Show the marked correlations in the concatenated image
            //PairsMarker pairs = new PairsMarker(
            //    correlationPoints1, // Add image1's width to the X points to show the markings correctly
            //    correlationPoints2.Apply(p => new IntPoint(p.X + modelImage.Width, p.Y)), Color.Blue);

            //returnBitmap = pairs.Apply(returnBitmap);
            //watch1.Stop();
            //Debug.WriteLine("Match pair marking tooked {0} ms", watch1.ElapsedMilliseconds);

            if (correlationPoints1.Length < 4 || correlationPoints2.Length < 4)
            {
                MessageBox.Show("Insufficient points to attempt a fit.");
                return null;
            }

            watch1.Reset(); watch1.Start();
            // Step 3: Create the homography matrix using a robust estimator
            //RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homography = ransac.Estimate(correlationPoints1, correlationPoints2);
            watch1.Stop();
            Debug.WriteLine("Ransac tooked {0} ms", watch1.ElapsedMilliseconds);

            watch1.Reset(); watch1.Start();
            // Plot RANSAC results against correlation results
            IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
            IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);
            watch1.Stop();
            Debug.WriteLine("Ransac SubMatrix {0} ms", watch1.ElapsedMilliseconds);

            Debug.WriteLine("Ransac points count: {0}", inliers1.Length);
            Debug.WriteLine("Ransac points count: {0}", inliers2.Length);

            watch1.Reset(); watch1.Start();
            PairsMarker inlierPairs = new PairsMarker(
               inliers1, // Add image1's width to the X points to show the markings correctly
               inliers2.Apply(p => new IntPoint(p.X + modelImage.Width, p.Y)), Color.Red);

            returnBitmap = inlierPairs.Apply(returnBitmap);
            watch1.Stop();
            Debug.WriteLine("Ransac plotting tooked {0} ms", watch1.ElapsedMilliseconds);

            watch2.Stop();
            return returnBitmap;
        }
Example #31
        public void IndexFiles(FileInfo[] imageFiles, System.ComponentModel.BackgroundWorker IndexBgWorker,
            Action<string> logWriter,
            SurfSettings surfSetting = null)
        {
            //For time profiling
            long readingTime, indexingTime, saveingTime;

            #region Surf Detector Region
            double hessianThresh = 500;
            double uniquenessThreshold = 0.8;

            if (surfSetting != null)
            {
                hessianThresh = surfSetting.HessianThresh.Value;
                uniquenessThreshold = surfSetting.UniquenessThreshold.Value;
            }
            float hessianThreshold2 = (float)hessianThresh / 1000000;
            SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector(hessianThreshold2);
            #endregion

            int rows = 0;

            List<SURFRecord1> observerSurfImageIndexList = new List<SURFRecord1>();
            List<SpeededUpRobustFeaturePoint> listOfAllObserverImagesSurfPoints = new List<SpeededUpRobustFeaturePoint>();
            Stopwatch sw1;

            sw1 = Stopwatch.StartNew();
            logWriter("Index started...");
            int totalFileCount = imageFiles.Length;
            for (int i = 0; i < totalFileCount; i++)
            {
                var fi = imageFiles[i];
                using (Bitmap observerImage = (Bitmap)Image.FromFile(fi.FullName))
                {
                    List<SpeededUpRobustFeaturePoint> observerImageSurfPoints = surf.ProcessImage(observerImage);

                    if (observerImageSurfPoints.Count > 4)
                    {
                        int initRow = rows; int endRows = rows + observerImageSurfPoints.Count - 1;

                        SURFRecord1 record = new SURFRecord1
                        {
                            Id = i,
                            ImageName = fi.Name,
                            ImagePath = fi.FullName,
                            IndexStart = rows,
                            IndexEnd = endRows
                        };

                        observerSurfImageIndexList.Add(record);

                        listOfAllObserverImagesSurfPoints.AddRange(observerImageSurfPoints);

                        rows = endRows + 1;
                    }
                    else
                    {
                        Debug.WriteLine(fi.Name + " skipped from index because it didn't have significant features");
                    }
                }
                IndexBgWorker.ReportProgress(i);
            }
            sw1.Stop();
            readingTime = sw1.ElapsedMilliseconds;
            logWriter(string.Format("Reading Surb Complete, it tooked {0} ms. Saving Repository...", readingTime));

            //------------Initialize Tree from Data
            sw1.Reset(); sw1.Start();
            double[][] superMatrix = listOfAllObserverImagesSurfPoints.Select(c => c.Descriptor).ToArray();
            int[] outputs = new int[superMatrix.Length];
            for (int i = 0; i < outputs.Length; i++)
                outputs[i] = i;

            Accord.MachineLearning.Structures.KDTree<int> tree =
                  Accord.MachineLearning.Structures.KDTree.FromData(superMatrix,
                          outputs,
                          Accord.Math.Distance.Euclidean, inPlace: true);
            sw1.Stop();
            indexingTime = sw1.ElapsedMilliseconds;
            logWriter(string.Format("Intializing KD Tree: {0}", indexingTime));

            //--------------Saving Indexed Records
            sw1.Reset(); sw1.Start();
            SurfAccordDataSet surfAccordDataset = new SurfAccordDataSet
            {
                SurfImageIndexRecord = observerSurfImageIndexList,
                IndexedTree = tree
            };
            string repoFileStoragePath = Path.Combine(DirectoryHelper.SaveDirectoryPath, "SurfAccordDataSet.bin");
            if (File.Exists(repoFileStoragePath))
                File.Delete(repoFileStoragePath);
            using (FileStream s = File.Create(repoFileStoragePath))
            {
                //Polenter.Serialization.SharpSerializerBinarySettings bs =
                //    new Polenter.Serialization.SharpSerializerBinarySettings(Polenter.Serialization.BinarySerializationMode.SizeOptimized);
                //Polenter.Serialization.SharpSerializer formatter = new Polenter.Serialization.SharpSerializer(bs);
                //formatter.Serialize(surfAccordDataset, s);
                System.Runtime.Serialization.Formatters.Binary.BinaryFormatter formatter
                    = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
                formatter.Serialize(s, surfAccordDataset);
                s.Close();
            }
            sw1.Stop();
            saveingTime = sw1.ElapsedMilliseconds;
            logWriter(string.Format("Saving Surf Accord Dataset: {0}", saveingTime));

            //Invalidating Cache
            CacheHelper.Remove("SurfAccordDataSet");
            CacheHelper.Remove("SurfAccordIntervalTree");

            logWriter(string.Format("Reading: {0} ms, Indexing: {1} ms, Saving Indexed data {2}", readingTime, indexingTime, saveingTime));
        }
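The IndexStart/IndexEnd fields recorded above describe which rows of the flat descriptor matrix belong to each image. The helper below is a minimal, hypothetical sketch (not part of the original example) of how a descriptor row number can be mapped back to its owning SURFRecord1:

        // Hypothetical helper: resolve the image record that owns a given
        // descriptor row, using the IndexStart/IndexEnd bookkeeping shown above.
        static SURFRecord1 FindOwningRecord(List<SURFRecord1> records, int descriptorRow)
        {
            // Records are built in ascending row order, so a linear scan
            // (or a binary search for large collections) is sufficient.
            foreach (SURFRecord1 record in records)
            {
                if (descriptorRow >= record.IndexStart && descriptorRow <= record.IndexEnd)
                    return record;
            }
            return null; // the row does not belong to any indexed image
        }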
Example #32
        public List <ImageRecord> QueryImage(string queryImagePath, out string messageToLog, SurfSettings surfSetting = null)
        {
            List <ImageRecord> rtnImageList = new List <ImageRecord>();

            #region Diagnostic Region
            Stopwatch sw = new Stopwatch();
            Stopwatch sw1 = new Stopwatch();
            long      _loadingTime = 0, _modelImageDectionlong = 0, _queryingTime = 0, _matchingTime = 0;
            #endregion Diagnostic Region

            #region SURF Detector Region
            double hessianThresh       = 500;
            double uniquenessThreshold = 0.8;
            int    minGoodMatchPercent = 0;

            if (surfSetting != null)
            {
                hessianThresh       = surfSetting.HessianThresh.Value;
                uniquenessThreshold = surfSetting.UniquenessThreshold.Value;
                minGoodMatchPercent = surfSetting.GoodMatchThreshold.Value;
            }
            float hessianThreshold2 = (float)hessianThresh / 1000000;
            SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector(hessianThreshold2);
            #endregion SURF Detector Region

            #region Get Model Detection and Validation
            sw.Reset(); sw.Start();
            List <SpeededUpRobustFeaturePoint> modelImageSurfPoints;
            using (Bitmap modelImage = (Bitmap)Image.FromFile(queryImagePath))
            {
                modelImageSurfPoints = surf.ProcessImage(modelImage);
            }

            if (modelImageSurfPoints == null ||
                modelImageSurfPoints.Count < 4)
            {
                throw new InvalidOperationException("Insuffucient interesting point in query image, try another query image");
            }
            sw.Stop();
            _modelImageDectionlong = sw.ElapsedMilliseconds;
            #endregion

            #region Search Images
            sw.Reset(); sw.Start();
            string fullFileName = Path.Combine(DirectoryHelper.SaveDirectoryPath, "SurfAccordLinear.bin");
            if (!File.Exists(fullFileName))
            {
                string exMsg = string.Format("Can't get the Surf Index at {0}, please index first", fullFileName);
                throw new FileNotFoundException(fullFileName);
            }
            using (FileStream fs = new FileStream(fullFileName, FileMode.Open, FileAccess.Read, FileShare.None))
            {
                System.Runtime.Serialization.Formatters.Binary.BinaryFormatter bf
                    = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
                long fileLength = fs.Length;
                while (fs.Position < fileLength)
                {
                    SURFAccordRecord3        record  = (SURFAccordRecord3)bf.Deserialize(fs);
                    KNearestNeighborMatching matcher = new KNearestNeighborMatching(2);
                    matcher.Threshold = uniquenessThreshold;
                    sw1.Start();
                    AForge.IntPoint[][] matches = matcher.Match(modelImageSurfPoints, record.SurfDescriptors);
                    sw1.Stop();
                    var countOfMatchPoint = matches[0].Length;
                    if (countOfMatchPoint > 0)
                    {
                        double totalnumberOfModelFeature = modelImageSurfPoints.Count;
                        double matchPercentage           = ((totalnumberOfModelFeature - (double)countOfMatchPoint) / totalnumberOfModelFeature);
                        matchPercentage = (1 - matchPercentage) * 100;
                        matchPercentage = Math.Round(matchPercentage);
                        if (matchPercentage >= minGoodMatchPercent)
                        {
                            record.Distance = matchPercentage;
                            rtnImageList.Add(record.Clone());
                        }
                    }
                    record = null;
                }
                fs.Close();
            }
            sw.Stop();
            _matchingTime = sw1.ElapsedMilliseconds;
            _queryingTime = sw.ElapsedMilliseconds;
            #endregion

            string msg = String.Format("Loading: {0}, Model detection: {1}, Querying: {2}, Matching: {3}",
                                       _loadingTime, _modelImageDectionlong, _queryingTime, _matchingTime);
            messageToLog = msg;

            if (rtnImageList.Count > 0)
            {
                rtnImageList = rtnImageList.OrderByDescending(rec => rec.Distance)
                               .ToList <ImageRecord>();
            }
            return(rtnImageList);
        }
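The match-percentage arithmetic in the loop above (subtract, invert, scale) reduces to a single expression. A small sketch of the equivalent computation, as a hypothetical helper that is not part of the original example:

        // Equivalent to: ((total - matched) / total) inverted and scaled by 100,
        // i.e. simply the matched fraction expressed as a rounded percentage.
        static double MatchPercentage(int matchedPoints, int totalModelPoints)
        {
            if (totalModelPoints == 0)
                return 0;
            return Math.Round(100.0 * matchedPoints / totalModelPoints);
        }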
        public void ProcessImageTest()
        {
            var bitmaps = BagOfVisualWordsTest.GetImages();

            foreach (Bitmap img in bitmaps)
            {

                bool upright = true;
                bool extended = false;

                List<SpeededUpRobustFeaturePoint> expected;
                List<SpeededUpRobustFeaturePoint> actual;

                // Create OpenSURF detector by Chris Evans
                {
                    // Create Integral Image
                    OpenSURFcs.IntegralImage iimg = OpenSURFcs.IntegralImage.FromImage(img);

                    // Extract the interest points
                    var pts = OpenSURFcs.FastHessian.getIpoints(0.0002f, 5, 2, iimg);

                    // Describe the interest points
                    OpenSURFcs.SurfDescriptor.DecribeInterestPoints(pts, upright, extended, iimg);

                    expected = new List<SpeededUpRobustFeaturePoint>();
                    foreach (var p in pts)
                    {
                        expected.Add(new SpeededUpRobustFeaturePoint(
                            p.x, p.y, p.scale,
                            p.laplacian, p.orientation,
                            p.response, p.descriptor.ToDouble()));
                    }
                }

                // Create the detector
                var surf = new SpeededUpRobustFeaturesDetector(0.0002f, 5, 2);

                // Extract interest points
                actual = surf.ProcessImage(img);

                // Describe the interest points
                var descriptor = surf.GetDescriptor();
                descriptor.Invariant = !upright;
                descriptor.Extended = extended;

                foreach (var expectedPoint in expected)
                {
                    var actualPoint = new SpeededUpRobustFeaturePoint(
                        expectedPoint.X,
                        expectedPoint.Y,
                        expectedPoint.Scale,
                        expectedPoint.Laplacian);

                    descriptor.Compute(actualPoint);

                    Assert.AreEqual(expectedPoint.X, actualPoint.X);
                    Assert.AreEqual(expectedPoint.Y, actualPoint.Y);
                    Assert.AreEqual(expectedPoint.Scale, actualPoint.Scale);
                    Assert.AreEqual(expectedPoint.Orientation, actualPoint.Orientation);
                    Assert.AreEqual(expectedPoint.Response, actualPoint.Response);
                    Assert.AreEqual(expectedPoint.Descriptor.Length, actualPoint.Descriptor.Length);

                    for (int i = 0; i < expectedPoint.Descriptor.Length; i++)
                    {
                        double e = expectedPoint.Descriptor[i];
                        double a = actualPoint.Descriptor[i];

                        double u = System.Math.Abs(e - a);
                        double v = System.Math.Abs(e);
                        Assert.AreEqual(e, a, 0.05);
                    }
                }
            }

        }
        public void ProcessImageTest4()
        {
            Bitmap[] bitmaps = 
            {
                Resources.flower01,
                Resources.flower03,
                Resources.flower06,
                Resources.flower07,
                Resources.flower09,
                Resources.flower10,
            };

            var surf = new SpeededUpRobustFeaturesDetector();

            int current = 0;
            foreach (Bitmap img in bitmaps)
            {
                List<SpeededUpRobustFeaturePoint> expected;
                List<SpeededUpRobustFeaturePoint> actual;

                // Create OpenSURF detector by Chris Evans
                {
                    // Create Integral Image
                    OpenSURFcs.IntegralImage iimg = OpenSURFcs.IntegralImage.FromImage(img);

                    // Extract the interest points
                    var pts = OpenSURFcs.FastHessian.getIpoints(0.0002f, 5, 2, iimg);

                    // Describe the interest points
                    OpenSURFcs.SurfDescriptor.DecribeInterestPoints(pts, false, false, iimg);

                    expected = new List<SpeededUpRobustFeaturePoint>();
                    foreach (var p in pts)
                        expected.Add(new SpeededUpRobustFeaturePoint(p.x, p.y, p.scale,
                            p.laplacian, p.orientation, p.response));
                }

                // Create Accord.NET SURF detector (based on OpenSURF by Chris Evans)
                {
                    actual = surf.ProcessImage(img);
                }

                var img1 = new FeaturesMarker(actual).Apply(img);
                var img2 = new FeaturesMarker(expected).Apply(img);

                // ImageBox.Show(new Concatenate(img1).Apply(img2), PictureBoxSizeMode.Zoom);


                current++;

                for (int i = 0; i < expected.Count; i++)
                {
                    var e = expected[i];
                    var a = actual[i];
                    Assert.AreEqual(e, a);
                }
            }
        }
Example #35
        public void IndexFiles(FileInfo[] imageFiles, System.ComponentModel.BackgroundWorker IndexBgWorker,
                               Action <string> logWriter,
                               SurfSettings surfSetting = null)
        {
            //For time profiling
            long readingTime, indexingTime, saveingTime;

            #region SURF Detector Region
            double hessianThresh       = 500;
            double uniquenessThreshold = 0.8;

            if (surfSetting != null)
            {
                hessianThresh       = surfSetting.HessianThresh.Value;
                uniquenessThreshold = surfSetting.UniquenessThreshold.Value;
            }
            float hessianThreshold2 = (float)hessianThresh / 1000000;
            SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector(hessianThreshold2);
            #endregion

            int rows = 0;

            List <SURFRecord1> observerSurfImageIndexList = new List <SURFRecord1>();
            List <SpeededUpRobustFeaturePoint> listOfAllObserverImagesSurfPoints = new List <SpeededUpRobustFeaturePoint>();
            Stopwatch sw1;

            sw1 = Stopwatch.StartNew();
            logWriter("Index started...");
            int totalFileCount = imageFiles.Length;
            for (int i = 0; i < totalFileCount; i++)
            {
                var fi = imageFiles[i];
                using (Bitmap observerImage = (Bitmap)Image.FromFile(fi.FullName))
                {
                    List <SpeededUpRobustFeaturePoint> observerImageSurfPoints = surf.ProcessImage(observerImage);

                    if (observerImageSurfPoints.Count > 4)
                    {
                        int initRow = rows; int endRows = rows + observerImageSurfPoints.Count - 1;

                        SURFRecord1 record = new SURFRecord1
                        {
                            Id         = i,
                            ImageName  = fi.Name,
                            ImagePath  = fi.FullName,
                            IndexStart = rows,
                            IndexEnd   = endRows
                        };

                        observerSurfImageIndexList.Add(record);

                        listOfAllObserverImagesSurfPoints.AddRange(observerImageSurfPoints);

                        rows = endRows + 1;
                    }
                    else
                    {
                        Debug.WriteLine(fi.Name + " skipped from index because it didn't have enough significant features");
                    }
                }
                IndexBgWorker.ReportProgress(i);
            }
            sw1.Stop();
            readingTime = sw1.ElapsedMilliseconds;
            logWriter(string.Format("Reading Surb Complete, it tooked {0} ms. Saving Repository...", readingTime));

            //------------Initialize Tree from Data
            sw1.Reset(); sw1.Start();
            double[][] superMatrix = listOfAllObserverImagesSurfPoints.Select(c => c.Descriptor).ToArray();
            int[]      outputs     = new int[superMatrix.Length];
            for (int i = 0; i < outputs.Length; i++)
            {
                outputs[i] = i;
            }

            Accord.MachineLearning.Structures.KDTree <int> tree =
                Accord.MachineLearning.Structures.KDTree.FromData(superMatrix,
                                                                  outputs,
                                                                  Accord.Math.Distance.Euclidean, inPlace: true);
            sw1.Stop();
            indexingTime = sw1.ElapsedMilliseconds;
            logWriter(string.Format("Intializing KD Tree: {0}", indexingTime));


            //--------------Saving Indexed Records
            sw1.Reset(); sw1.Start();
            SurfAccordDataSet surfAccordDataset = new SurfAccordDataSet
            {
                SurfImageIndexRecord = observerSurfImageIndexList,
                IndexedTree          = tree
            };
            string repoFileStoragePath = Path.Combine(DirectoryHelper.SaveDirectoryPath, "SurfAccordDataSet.bin");
            if (File.Exists(repoFileStoragePath))
            {
                File.Delete(repoFileStoragePath);
            }
            using (FileStream s = File.Create(repoFileStoragePath))
            {
                //Polenter.Serialization.SharpSerializerBinarySettings bs =
                //    new Polenter.Serialization.SharpSerializerBinarySettings(Polenter.Serialization.BinarySerializationMode.SizeOptimized);
                //Polenter.Serialization.SharpSerializer formatter = new Polenter.Serialization.SharpSerializer(bs);
                //formatter.Serialize(surfAccordDataset, s);
                System.Runtime.Serialization.Formatters.Binary.BinaryFormatter formatter
                    = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
                formatter.Serialize(s, surfAccordDataset);
                s.Close();
            }
            sw1.Stop();
            saveingTime = sw1.ElapsedMilliseconds;
            logWriter(string.Format("Saving Surf Accord Dataset: {0}", saveingTime));


            //Invalidating Cache
            CacheHelper.Remove("SurfAccordDataSet");
            CacheHelper.Remove("SurfAccordIntervalTree");

            logWriter(string.Format("Reading: {0} ms, Indexing: {1} ms, Saving Indexed data {2}", readingTime, indexingTime, saveingTime));
        }
        public void ProcessImageTest()
        {
            Bitmap[] bitmaps =
            {
                Resources.flower01,
                Resources.flower03,
                Resources.flower06,
                Resources.flower07,
                Resources.flower09,
                Resources.flower10,
            };

            foreach (Bitmap img in bitmaps)
            {
                bool upright  = true;
                bool extended = false;

                List <SpeededUpRobustFeaturePoint> expected;
                List <SpeededUpRobustFeaturePoint> actual;

                // Create OpenSURF detector by Chris Evans
                {
                    // Create Integral Image
                    OpenSURFcs.IntegralImage iimg = OpenSURFcs.IntegralImage.FromImage(img);

                    // Extract the interest points
                    var pts = OpenSURFcs.FastHessian.getIpoints(0.0002f, 5, 2, iimg);

                    // Describe the interest points
                    OpenSURFcs.SurfDescriptor.DecribeInterestPoints(pts, upright, extended, iimg);

                    expected = new List <SpeededUpRobustFeaturePoint>();
                    foreach (var p in pts)
                    {
                        expected.Add(new SpeededUpRobustFeaturePoint(
                                         p.x, p.y, p.scale,
                                         p.laplacian, p.orientation,
                                         p.response, p.descriptor.ToDouble()));
                    }
                }

                // Create the detector
                var surf = new SpeededUpRobustFeaturesDetector(0.0002f, 5, 2);

                // Extract interest points
                actual = surf.ProcessImage(img);

                // Describe the interest points
                var descriptor = surf.GetDescriptor();
                descriptor.Invariant = !upright;
                descriptor.Extended  = extended;

                foreach (var expectedPoint in expected)
                {
                    var actualPoint = new SpeededUpRobustFeaturePoint(
                        expectedPoint.X,
                        expectedPoint.Y,
                        expectedPoint.Scale,
                        expectedPoint.Laplacian);

                    descriptor.Compute(actualPoint);

                    Assert.AreEqual(expectedPoint.X, actualPoint.X);
                    Assert.AreEqual(expectedPoint.Y, actualPoint.Y);
                    Assert.AreEqual(expectedPoint.Scale, actualPoint.Scale);
                    Assert.AreEqual(expectedPoint.Orientation, actualPoint.Orientation);
                    Assert.AreEqual(expectedPoint.Response, actualPoint.Response);
                    Assert.AreEqual(expectedPoint.Descriptor.Length, actualPoint.Descriptor.Length);

                    for (int i = 0; i < expectedPoint.Descriptor.Length; i++)
                    {
                        double e = expectedPoint.Descriptor[i];
                        double a = actualPoint.Descriptor[i];

                        double u = System.Math.Abs(e - a);
                        double v = System.Math.Abs(e);
                        Assert.AreEqual(e, a, 0.05);
                    }
                }
            }
        }
Example #37
        public void Panorama_Example1()
        {
            Accord.Math.Random.Generator.Seed = 0;

            // Let's start with two pictures that have been
            // taken from slightly different points of view:
            //
            Bitmap img1 = Accord.Imaging.Image.Clone(Resources.dc_left);
            Bitmap img2 = Accord.Imaging.Image.Clone(Resources.dc_right);

            // Those pictures are shown below:
            // ImageBox.Show(img1, PictureBoxSizeMode.Zoom, 640, 480);
            // ImageBox.Show(img2, PictureBoxSizeMode.Zoom, 640, 480);


            // Step 1: Detect feature points using Surf Corners Detector
            var surf = new SpeededUpRobustFeaturesDetector();

            var points1 = surf.ProcessImage(img1);
            var points2 = surf.ProcessImage(img2);

            // Step 2: Match feature points using a k-NN
            var matcher = new KNearestNeighborMatching(5);
            var matches = matcher.Match(points1, points2);

            // Step 3: Create the matrix using a robust estimator
            var     ransac           = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homographyMatrix = ransac.Estimate(matches);

            Assert.AreEqual(1.15707409, homographyMatrix.Elements[0], 1e-5);
            Assert.AreEqual(-0.0233834628, homographyMatrix.Elements[1], 1e-5);
            Assert.AreEqual(-261.8217, homographyMatrix.Elements[2], 1e-2);
            Assert.AreEqual(0.08801343, homographyMatrix.Elements[3], 1e-5);
            Assert.AreEqual(1.12451434, homographyMatrix.Elements[4], 1e-5);
            Assert.AreEqual(-171.191208, homographyMatrix.Elements[5], 1e-2);
            Assert.AreEqual(0.000127789128, homographyMatrix.Elements[6], 1e-5);
            Assert.AreEqual(0.00006173445, homographyMatrix.Elements[7], 1e-5);
            Assert.AreEqual(8, homographyMatrix.Elements.Length);


            // Step 4: Project and blend using the homography
            Blend blend = new Blend(homographyMatrix, img1);


            // Compute the blending algorithm
            Bitmap result = blend.Apply(img2);

            // Show on screen
            // ImageBox.Show(result, PictureBoxSizeMode.Zoom, 640, 480);


            result = Accord.Imaging.Image.Clone(result);

#if NET35
            // result.Save(@"C:\Projects\Accord.NET\framework\Unit Tests\Accord.Tests.Imaging\Resources\blend_net35.png", ImageFormat.Png);
            Bitmap image = Accord.Imaging.Image.Clone(Resources.blend_net35);
#else
            // result.Save(@"C:\Projects\Accord.NET\framework\Unit Tests\Accord.Tests.Imaging\Resources\blend_net45.png", ImageFormat.Png);
            Bitmap image = Accord.Imaging.Image.Clone(Resources.blend_net45);
#endif

#pragma warning disable 618
            double[,] expected = image.ToDoubleMatrix(channel: 0);
            double[,] actual   = result.ToDoubleMatrix(channel: 0);
            Assert.IsTrue(Matrix.IsEqual(expected, actual, atol: 0.1));
#pragma warning restore 618
        }
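The four steps above (detect, match, estimate, blend) can be collected into one helper. The following is a minimal sketch using only the Accord.NET types already shown in this example, assuming both inputs are valid bitmaps; the method name is hypothetical:

        // Minimal sketch of the panorama pipeline demonstrated above.
        static Bitmap StitchPair(Bitmap left, Bitmap right)
        {
            // Step 1: detect SURF feature points in both images
            var surf = new SpeededUpRobustFeaturesDetector();
            var points1 = surf.ProcessImage(left);
            var points2 = surf.ProcessImage(right);

            // Step 2: match the feature points with a k-nearest-neighbor matcher
            var matcher = new KNearestNeighborMatching(5);
            var matches = matcher.Match(points1, points2);

            // Step 3: estimate the homography with a robust (RANSAC) estimator
            var ransac = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homography = ransac.Estimate(matches);

            // Step 4: project the left image and blend the right one over it
            var blend = new Blend(homography, left);
            return blend.Apply(right);
        }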
Example #38
        public void Panorama_Example1()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // Let's start with two pictures that have been
            // taken from slightly different points of view:
            //
            Bitmap img1 = Resources.dc_left;
            Bitmap img2 = Resources.dc_right;

            // Those pictures are shown below:
            // ImageBox.Show(img1, PictureBoxSizeMode.Zoom, 640, 480);
            // ImageBox.Show(img2, PictureBoxSizeMode.Zoom, 640, 480);


            // Step 1: Detect feature points using Surf Corners Detector
            var surf = new SpeededUpRobustFeaturesDetector();

            var points1 = surf.ProcessImage(img1);
            var points2 = surf.ProcessImage(img2);

            // Step 2: Match feature points using a k-NN
            var matcher = new KNearestNeighborMatching(5);
            var matches = matcher.Match(points1, points2);

            // Step 3: Create the matrix using a robust estimator
            var ransac = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homographyMatrix = ransac.Estimate(matches);

            Assert.AreEqual(1.15707409, homographyMatrix.Elements[0], 1e-5);
            Assert.AreEqual(-0.0233834628, homographyMatrix.Elements[1], 1e-5);
            Assert.AreEqual(-261.8217, homographyMatrix.Elements[2], 1e-2);
            Assert.AreEqual(0.08801343, homographyMatrix.Elements[3], 1e-5);
            Assert.AreEqual(1.12451434, homographyMatrix.Elements[4], 1e-5);
            Assert.AreEqual(-171.191208, homographyMatrix.Elements[5], 1e-2);
            Assert.AreEqual(0.000127789128, homographyMatrix.Elements[6], 1e-5);
            Assert.AreEqual(0.00006173445, homographyMatrix.Elements[7], 1e-5);
            Assert.AreEqual(8, homographyMatrix.Elements.Length);


            // Step 4: Project and blend using the homography
            Blend blend = new Blend(homographyMatrix, img1);


            // Compute the blending algorithm
            Bitmap result = blend.Apply(img2);

            // Show on screen
            // ImageBox.Show(result, PictureBoxSizeMode.Zoom, 640, 480);

            //result.Save(@"C:\Projects\Accord.NET\net35.png", ImageFormat.Png);

#if NET35
            Bitmap image = Properties.Resources.blend_net35;
#else
            Bitmap image = Properties.Resources.blend_net45;
#endif

#pragma warning disable 618
            double[,] expected = image.ToDoubleMatrix(0);
            double[,] actual = result.ToDoubleMatrix(0);
            Assert.IsTrue(Matrix.IsEqual(expected, actual, 0.1));
#pragma warning restore 618
        }
        private Bitmap CompareAndDrawImage(Bitmap modelImage, Bitmap observedImage, SurfSettings setting)
        {
            Stopwatch watch1 = new Stopwatch();
            Stopwatch watch2 = new Stopwatch();

            Bitmap returnBitmap;

            watch2.Start();
            watch1.Reset(); watch1.Start();
            double hessianThreshold  = setting.HessianThresh.HasValue ? setting.HessianThresh.Value : 500;
            float  hessianThreshold2 = (float)hessianThreshold / 1000000;

            Debug.WriteLine("hessianThreshold2: {0}", hessianThreshold2);
            SpeededUpRobustFeaturesDetector    surf        = new SpeededUpRobustFeaturesDetector(hessianThreshold2);
            List <SpeededUpRobustFeaturePoint> surfPoints1 = surf.ProcessImage(modelImage);
            List <SpeededUpRobustFeaturePoint> surfPoints2 = surf.ProcessImage(observedImage);


            Debug.WriteLine("Surf points count: {0}", surfPoints1.Count);
            Debug.WriteLine("Surf points count: {0}", surfPoints2.Count);
            //long memoryFootprint = MemorySize.GetBlobSizeinKb(surfPoints2);
            //Debug.WriteLine("Surf extractor: {0} kb", memoryFootprint);

            watch1.Stop();
            Debug.WriteLine("Surf Detection tooked {0} ms", watch1.ElapsedMilliseconds);

            watch1.Reset(); watch1.Start();
            // Show the marked points in the original images
            Bitmap img1mark = new FeaturesMarker(surfPoints1, 2).Apply(modelImage);
            Bitmap img2mark = new FeaturesMarker(surfPoints2, 2).Apply(observedImage);
            // Concatenate the two images together in a single image (just to show on screen)
            Concatenate concatenate = new Concatenate(img1mark);

            returnBitmap = concatenate.Apply(img2mark);
            watch1.Stop();
            Debug.WriteLine("Surf point plotting tooked {0} ms", watch1.ElapsedMilliseconds);


            //watch1.Reset(); watch1.Start();
            //List<IntPoint>[] coretionalMatches = getMatches(surfPoints1, surfPoints2);
            //watch1.Stop();
            //Debug.WriteLine("Correctional Match tooked {0} ms", watch1.ElapsedMilliseconds);

            //// Get the two sets of points
            //IntPoint[] correlationPoints11 = coretionalMatches[0].ToArray();
            //IntPoint[] correlationPoints22 = coretionalMatches[1].ToArray();

            //Debug.WriteLine("Correclation points count: {0}", correlationPoints11.Length);
            //Debug.WriteLine("Correclation points count: {0}", correlationPoints22.Length);

            Debug.WriteLine("Threshold: {0}", setting.UniquenessThreshold.Value);
            watch1.Reset(); watch1.Start();
            // Step 2: Match feature points using a k-NN
            KNearestNeighborMatching matcher = new KNearestNeighborMatching(2);

            matcher.Threshold = setting.UniquenessThreshold.Value;
            IntPoint[][] matches = matcher.Match(surfPoints1, surfPoints2);
            watch1.Stop();
            Debug.WriteLine("Knn Match tooked {0} ms", watch1.ElapsedMilliseconds);

            // Get the two sets of points
            IntPoint[] correlationPoints1 = matches[0];
            IntPoint[] correlationPoints2 = matches[1];

            Debug.WriteLine("Knn points count: {0}", correlationPoints1.Length);
            Debug.WriteLine("Knn points count: {0}", correlationPoints2.Length);

            //watch1.Reset(); watch1.Start();
            //// Show the marked correlations in the concatenated image
            //PairsMarker pairs = new PairsMarker(
            //    correlationPoints1, // Add image1's width to the X points to show the markings correctly
            //    correlationPoints2.Apply(p => new IntPoint(p.X + modelImage.Width, p.Y)), Color.Blue);

            //returnBitmap = pairs.Apply(returnBitmap);
            //watch1.Stop();
            //Debug.WriteLine("Match pair marking tooked {0} ms", watch1.ElapsedMilliseconds);

            if (correlationPoints1.Length < 4 || correlationPoints2.Length < 4)
            {
                MessageBox.Show("Insufficient points to attempt a fit.");
                return(null);
            }

            watch1.Reset(); watch1.Start();
            // Step 3: Create the homography matrix using a robust estimator
            //RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homography = ransac.Estimate(correlationPoints1, correlationPoints2);

            watch1.Stop();
            Debug.WriteLine("Ransac tooked {0} ms", watch1.ElapsedMilliseconds);

            watch1.Reset(); watch1.Start();
            // Plot RANSAC results against correlation results
            IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
            IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);
            watch1.Stop();
            Debug.WriteLine("Ransac SubMatrix {0} ms", watch1.ElapsedMilliseconds);

            Debug.WriteLine("Ransac points count: {0}", inliers1.Length);
            Debug.WriteLine("Ransac points count: {0}", inliers2.Length);

            watch1.Reset(); watch1.Start();
            PairsMarker inlierPairs = new PairsMarker(
                inliers1, // Add image1's width to the X points to show the markings correctly
                inliers2.Apply(p => new IntPoint(p.X + modelImage.Width, p.Y)), Color.Red);

            returnBitmap = inlierPairs.Apply(returnBitmap);
            watch1.Stop();
            Debug.WriteLine("Ransac plotting tooked {0} ms", watch1.ElapsedMilliseconds);

            watch2.Stop();
            return(returnBitmap);
        }
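Before trusting the estimated homography, it can help to check how many of the k-NN correspondences survived RANSAC. A small sketch of such a check, assuming (as the Submatrix calls above do) that ransac.Inliers holds the indices of the accepted matches; the helper is hypothetical:

        // Hypothetical sanity check: fraction of k-NN matches kept by RANSAC.
        static bool IsHomographyReliable(RansacHomographyEstimator ransac,
            IntPoint[] correlationPoints1, double minimumInlierRatio = 0.5)
        {
            if (correlationPoints1.Length == 0)
                return false;
            double inlierRatio = (double)ransac.Inliers.Length / correlationPoints1.Length;
            return inlierRatio >= minimumInlierRatio;
        }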
        public void ProcessImageTest3()
        {
            // Load an Image
            Bitmap img = Properties.Resources.sample_trans;

            // Extract the interest points
            var surf = new SpeededUpRobustFeaturesDetector(0.0002f, 5, 2);

            surf.ComputeDescriptors = SpeededUpRobustFeatureDescriptorType.Extended;
            surf.ComputeOrientation = false;

            List<SpeededUpRobustFeaturePoint> points = surf.ProcessImage(img);

            Assert.AreEqual(8, points.Count);

            SpeededUpRobustFeaturePoint p;

            p = points[0];
            Assert.AreEqual(0, p.Laplacian);
            Assert.AreEqual(25.3803387, p.X, 1e-2);
            Assert.AreEqual(14.7987738, p.Y, 1e-2);
            Assert.AreEqual(1.98713827, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(0.0, p.Orientation, 1e-2);
            Assert.AreEqual(128, p.Descriptor.Length);
            Assert.AreEqual(0.026527178478172982, p.Descriptor[23], 1e-10);
            Assert.AreEqual(0.28221266818142571, p.Descriptor[42], 1e-10);

            p = points[1];
            Assert.AreEqual(1, p.Laplacian, 1e-2);
            Assert.AreEqual(20.4856224, p.X, 1e-2);
            Assert.AreEqual(20.4817181, p.Y, 1e-2);
            Assert.AreEqual(1.90549147, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(0, p.Orientation, 1e-2);
            Assert.AreEqual(128, p.Descriptor.Length);
            Assert.AreEqual(0.0017332996868934826, p.Descriptor[23], 1e-10);
            Assert.AreEqual(0.01141609085546454, p.Descriptor[54], 1e-10);

            p = points[2];
            Assert.AreEqual(0, p.Laplacian, 1e-2);
            Assert.AreEqual(14.7991896, p.X, 1e-2);
            Assert.AreEqual(25.3776169, p.Y, 1e-2);
            Assert.AreEqual(1.9869982, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(0.0, p.Orientation, 1e-2);
            Assert.AreEqual(128, p.Descriptor.Length);
            Assert.AreEqual(0.013397918304161798, p.Descriptor[23], 1e-10);
            Assert.AreEqual(0.0000054080612707747483, p.Descriptor[12], 1e-10);

            p = points[6];
            Assert.AreEqual(1, p.Laplacian, 1e-2);
            Assert.AreEqual(22.4346638, p.X, 1e-2);
            Assert.AreEqual(41.4026527, p.Y, 1e-2);
            Assert.AreEqual(2.83586049, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(0.0, p.Orientation, 1e-2);
            Assert.AreEqual(128, p.Descriptor.Length);
            Assert.AreEqual(0.059789486280406236, p.Descriptor[23], 1e-10);
            Assert.AreEqual(-0.0000056629312093282088, p.Descriptor[12], 1e-10);
        }
        public void ZeroWidthTest()
        {
            Bitmap img = Properties.Resources.surf_bug_1;

            var iimg = OpenSURFcs.IntegralImage.FromImage(img);
            var expected = OpenSURFcs.FastHessian.getIpoints(0.0002f, 5, 2, iimg);
            OpenSURFcs.SurfDescriptor.DecribeInterestPoints(expected, false, false, iimg);


            var surf = new SpeededUpRobustFeaturesDetector();
            var actual = surf.ProcessImage(img);

            Assert.AreEqual(0, expected.Count);
            Assert.AreEqual(0, actual.Count);
        }
Example #42
        public List<ImageRecord> QueryImage(string queryImagePath, out string messageToLog, SurfSettings surfSetting = null)
        {
            List<ImageRecord> rtnImageList = new List<ImageRecord>();

            #region Diagnostic Region
            Stopwatch sw = new Stopwatch();
            Stopwatch sw1 = new Stopwatch();
            long _loadingTime = 0, _modelImageDectionlong = 0, _queryingTime = 0, _matchingTime = 0;
            #endregion Diagnostic Region

            #region SURF Detector Region
            double hessianThresh = 500;
            double uniquenessThreshold = 0.8;
            int minGoodMatchPercent = 0;

            if (surfSetting != null)
            {
                hessianThresh = surfSetting.HessianThresh.Value;
                uniquenessThreshold = surfSetting.UniquenessThreshold.Value;
                minGoodMatchPercent = surfSetting.GoodMatchThreshold.Value;
            }
            float hessianThreshold2 = (float)hessianThresh / 1000000;
            SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector(hessianThreshold2);
            #endregion SURF Detector Region

            #region Get Model Detection and Validation
            sw.Reset(); sw.Start();
            List<SpeededUpRobustFeaturePoint> modelImageSurfPoints;
            using (Bitmap modelImage = (Bitmap)Image.FromFile(queryImagePath))
            {
                modelImageSurfPoints = surf.ProcessImage(modelImage);
            }

            if (modelImageSurfPoints == null
                || modelImageSurfPoints.Count < 4)
            {
                throw new InvalidOperationException("Insuffucient interesting point in query image, try another query image");
            }
            sw.Stop();
            _modelImageDectionlong = sw.ElapsedMilliseconds;
            #endregion

            #region Search Images
            sw.Reset(); sw.Start();
            string fullFileName = Path.Combine(DirectoryHelper.SaveDirectoryPath, "SurfAccordLinear.bin");
            if (!File.Exists(fullFileName))
            {
                string exMsg = string.Format("Can't get the Surf Index at {0}, please index first", fullFileName);
                throw new FileNotFoundException(fullFileName);
            }
            using (FileStream fs = new FileStream(fullFileName, FileMode.Open, FileAccess.Read, FileShare.None))
            {
                System.Runtime.Serialization.Formatters.Binary.BinaryFormatter bf
                   = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
                long fileLength = fs.Length;
                while (fs.Position < fileLength)
                {
                    SURFAccordRecord3 record = (SURFAccordRecord3)bf.Deserialize(fs);
                    KNearestNeighborMatching matcher = new KNearestNeighborMatching(2);
                    matcher.Threshold = uniquenessThreshold;
                    sw1.Start();
                    AForge.IntPoint[][] matches = matcher.Match(modelImageSurfPoints, record.SurfDescriptors);
                    sw1.Stop();
                    var countOfMatchPoint = matches[0].Length;
                    if (countOfMatchPoint > 0)
                    {
                        double totalnumberOfModelFeature = modelImageSurfPoints.Count;
                        double matchPercentage = ((totalnumberOfModelFeature - (double)countOfMatchPoint) / totalnumberOfModelFeature);
                        matchPercentage = (1 - matchPercentage) * 100;
                        matchPercentage = Math.Round(matchPercentage);
                        if (matchPercentage >= minGoodMatchPercent)
                        {
                            record.Distance = matchPercentage;
                            rtnImageList.Add(record.Clone());
                        }
                    }
                    record = null;
                }
                fs.Close();
            }
            sw.Stop();
            _matchingTime = sw1.ElapsedMilliseconds;
            _queryingTime = sw.ElapsedMilliseconds;
            #endregion

            string msg = String.Format("Loading: {0}, Model detection: {1}, Querying: {2}, Matching: {3}",
                                                _loadingTime, _modelImageDectionlong, _queryingTime, _matchingTime);
            messageToLog = msg;

            if (rtnImageList.Count > 0)
            {
                rtnImageList = rtnImageList.OrderByDescending(rec => rec.Distance)
                                       .ToList<ImageRecord>();
            }
            return rtnImageList;
        }
        public void ProcessImageTest()
        {
            // Load an Image
            Bitmap img = Properties.Resources.sample_trans;

            // Extract the interest points
            var surf = new SpeededUpRobustFeaturesDetector(0.0002f, 5, 2);

            surf.ComputeDescriptors = SpeededUpRobustFeatureDescriptorType.None;

            List<SpeededUpRobustFeaturePoint> points = surf.ProcessImage(img);

            // Describe the interest points
            SpeededUpRobustFeaturesDescriptor descriptor = surf.GetDescriptor();
            descriptor.Compute(points);

            Assert.AreEqual(8, points.Count);

            SpeededUpRobustFeaturePoint p;

            p = points[0];
            Assert.AreEqual(0, p.Laplacian);
            Assert.AreEqual(25.3803387, p.X, 1e-2);
            Assert.AreEqual(14.7987738, p.Y, 1e-2);
            Assert.AreEqual(1.98713827, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(4.78528404, p.Orientation, 1e-2);

            p = points[1];
            Assert.AreEqual(1, p.Laplacian, 1e-2);
            Assert.AreEqual(20.4856224, p.X, 1e-2);
            Assert.AreEqual(20.4817181, p.Y, 1e-2);
            Assert.AreEqual(1.90549147, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(4.89748764, p.Orientation, 1e-2);

            p = points[2];
            Assert.AreEqual(0, p.Laplacian, 1e-2);
            Assert.AreEqual(14.7991896, p.X, 1e-2);
            Assert.AreEqual(25.3776169, p.Y, 1e-2);
            Assert.AreEqual(1.9869982, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(3.07735944, p.Orientation, 1e-2);

            p = points[6];
            Assert.AreEqual(1, p.Laplacian, 1e-2);
            Assert.AreEqual(22.4346638, p.X, 1e-2);
            Assert.AreEqual(41.4026527, p.Y, 1e-2);
            Assert.AreEqual(2.83586049, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(3.13142157, p.Orientation, 1e-2);


            descriptor.Extended = true;
            descriptor.Invariant = false;
            descriptor.Compute(points);

            p = points[5];
            Assert.AreEqual(1, p.Laplacian, 1e-3);
            Assert.AreEqual(41.4027748, p.X, 1e-3);
            Assert.AreEqual(22.4343891, p.Y, 1e-3);
            Assert.AreEqual(2.83486962, p.Scale, 1e-3);
            Assert.AreEqual(0.0, p.Response, 1e-3);
            Assert.AreEqual(4.72728586, p.Orientation, 1e-3);
        }
        public void ProcessImageTest2()
        {
            // Load an Image
            Bitmap img = Accord.Imaging.Image.Clone(Properties.Resources.sample_trans);

            // Extract the interest points
            var surf = new SpeededUpRobustFeaturesDetector(0.0002f, 5, 2);

            surf.ComputeDescriptors = SpeededUpRobustFeatureDescriptorType.None;
            surf.ComputeOrientation = true;

            List<SpeededUpRobustFeaturePoint> points = surf.ProcessImage(img);

            Assert.AreEqual(8, points.Count);

            SpeededUpRobustFeaturePoint p;

            p = points[0];
            Assert.AreEqual(0, p.Laplacian);
            Assert.AreEqual(25.3803387, p.X, 1e-2);
            Assert.AreEqual(14.7987738, p.Y, 1e-2);
            Assert.AreEqual(1.98713827, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(4.78528404, p.Orientation, 1e-2);
            Assert.IsNull(p.Descriptor);

            p = points[1];
            Assert.AreEqual(1, p.Laplacian, 1e-2);
            Assert.AreEqual(20.4856224, p.X, 1e-2);
            Assert.AreEqual(20.4817181, p.Y, 1e-2);
            Assert.AreEqual(1.90549147, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(4.89748764, p.Orientation, 1e-2);
            Assert.IsNull(p.Descriptor);

            p = points[2];
            Assert.AreEqual(0, p.Laplacian, 1e-2);
            Assert.AreEqual(14.7991896, p.X, 1e-2);
            Assert.AreEqual(25.3776169, p.Y, 1e-2);
            Assert.AreEqual(1.9869982, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(3.07735944, p.Orientation, 1e-2);
            Assert.IsNull(p.Descriptor);

            p = points[6];
            Assert.AreEqual(1, p.Laplacian, 1e-2);
            Assert.AreEqual(22.4346638, p.X, 1e-2);
            Assert.AreEqual(41.4026527, p.Y, 1e-2);
            Assert.AreEqual(2.83586049, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(3.13142157, p.Orientation, 1e-2);
            Assert.IsNull(p.Descriptor);
        }
        public void ProcessImageTest()
        {
            // Load an Image
            Bitmap img = Properties.Resources.sample_trans;

            // Extract the interest points
            var surf = new SpeededUpRobustFeaturesDetector(0.0002f, 5, 2);

            surf.ComputeDescriptors = SpeededUpRobustFeatureDescriptorType.None;

            List<SpeededUpRobustFeaturePoint> points = surf.ProcessImage(img);

            // Describe the interest points
            SpeededUpRobustFeaturesDescriptor descriptor = surf.GetDescriptor();
            descriptor.Compute(points);

            Assert.AreEqual(8, points.Count);

            SpeededUpRobustFeaturePoint p;

            p = points[0];
            Assert.AreEqual(0, p.Laplacian);
            Assert.AreEqual(25.3803387, p.X, 1e-2);
            Assert.AreEqual(14.7987738, p.Y, 1e-2);
            Assert.AreEqual(1.98713827, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(4.78528404, p.Orientation, 1e-2);

            p = points[1];
            Assert.AreEqual(1, p.Laplacian, 1e-2);
            Assert.AreEqual(20.4856224, p.X, 1e-2);
            Assert.AreEqual(20.4817181, p.Y, 1e-2);
            Assert.AreEqual(1.90549147, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(4.89748764, p.Orientation, 1e-2);

            p = points[2];
            Assert.AreEqual(0, p.Laplacian, 1e-2);
            Assert.AreEqual(14.7991896, p.X, 1e-2);
            Assert.AreEqual(25.3776169, p.Y, 1e-2);
            Assert.AreEqual(1.9869982, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(3.07735944, p.Orientation, 1e-2);

            p = points[6];
            Assert.AreEqual(1, p.Laplacian, 1e-2);
            Assert.AreEqual(22.4346638, p.X, 1e-2);
            Assert.AreEqual(41.4026527, p.Y, 1e-2);
            Assert.AreEqual(2.83586049, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(3.13142157, p.Orientation, 1e-2);


            descriptor.Extended = true;
            descriptor.Invariant = false;
            descriptor.Compute(points);

            p = points[5];
            Assert.AreEqual(1, p.Laplacian, 1e-3);
            Assert.AreEqual(41.4027748, p.X, 1e-3);
            Assert.AreEqual(22.4343891, p.Y, 1e-3);
            Assert.AreEqual(2.83486962, p.Scale, 1e-3);
            Assert.AreEqual(0.0, p.Response, 1e-3);
            Assert.AreEqual(4.72728586, p.Orientation, 1e-3);
        }
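The tests above recompute the descriptors with different options between runs. The sketch below summarizes the effect of those flags using the same API calls shown in the examples (the descriptor lengths in the comments match the assertions in this listing: 64 values for the standard descriptor, 128 for the extended one):

        // Sketch: recompute SURF descriptors with different options.
        static void DescribeWithOptions(SpeededUpRobustFeaturesDetector surf,
            List<SpeededUpRobustFeaturePoint> points)
        {
            SpeededUpRobustFeaturesDescriptor descriptor = surf.GetDescriptor();

            // Standard, rotation-invariant descriptors (64 values per point)
            descriptor.Extended = false;
            descriptor.Invariant = true;
            descriptor.Compute(points);

            // Extended, upright descriptors (128 values per point)
            descriptor.Extended = true;
            descriptor.Invariant = false;
            descriptor.Compute(points);
        }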
Example #46
        public List<ImageRecord> QueryImage(string queryImagePath, out string messageToLog, SurfSettings surfSetting = null)
        {
            List<ImageRecord> rtnImageList = new List<ImageRecord>();

            #region Diagnostic Region
            Stopwatch sw = new Stopwatch();
            long _loadingTime, _modelImageDectionlong, _queryingTime, _treeQuery, _loopTime = 0;
            #endregion Diagnostic Region

            #region Get KD-Tree Index
            sw.Reset(); sw.Start();
            //--------------Getting Indexed Records
            SurfAccordDataSet surfAccordDataset;
            bool isExist = CacheHelper.Get<SurfAccordDataSet>("SurfAccordDataSet", out surfAccordDataset);
            if (!isExist)
            {
                string repoFileStoragePath = Path.Combine(DirectoryHelper.SaveDirectoryPath, "SurfAccordDataSet.bin");
                if (!File.Exists(repoFileStoragePath))
                {
                    string exMsg = string.Format("Can't get the Surf Index at {0}, please index first", repoFileStoragePath);
                    throw new FileNotFoundException(exMsg);
                }
                using (FileStream s = File.OpenRead(repoFileStoragePath))
                {
                    //Polenter.Serialization.SharpSerializerBinarySettings bs =
                    //    new Polenter.Serialization.SharpSerializerBinarySettings(Polenter.Serialization.BinarySerializationMode.SizeOptimized);
                    //Polenter.Serialization.SharpSerializer formatter = new Polenter.Serialization.SharpSerializer(bs);
                    //formatter.Serialize(surfAccordDataset, s);
                    System.Runtime.Serialization.Formatters.Binary.BinaryFormatter formatter
                        = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
                    surfAccordDataset = (SurfAccordDataSet)formatter.Deserialize(s);
                    s.Close();
                }
                CacheHelper.Add<SurfAccordDataSet>(surfAccordDataset, "SurfAccordDataSet");
            }
            if (surfAccordDataset == null)
                throw new InvalidOperationException("Can't get the Surf Index, please index first");
            sw.Stop();
            _loadingTime = sw.ElapsedMilliseconds;
            #endregion

            #region SURF Detector Region
            double hessianThresh = 500;
            double uniquenessThreshold = 0.8;
            int goodMatchDistThreshold = 0;

            if (surfSetting != null)
            {
                hessianThresh = surfSetting.HessianThresh.Value;
                uniquenessThreshold = surfSetting.UniquenessThreshold.Value;
                goodMatchDistThreshold = surfSetting.GoodMatchThreshold.Value;
            }
            float hessianThreshold2 = (float)hessianThresh / 1000000;
            SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector(hessianThreshold2);
            #endregion SURF Detector Region

            #region Get Model Detection and Validation
            sw.Reset(); sw.Start();
            List<SpeededUpRobustFeaturePoint> modelImageSurfPoints;
            using (Bitmap modelImage = (Bitmap)Image.FromFile(queryImagePath))
            {
                modelImageSurfPoints = surf.ProcessImage(modelImage);
            }

            if (modelImageSurfPoints == null
                || modelImageSurfPoints.Count < 4)
            {
                throw new InvalidOperationException("Insuffucient interesting point in query image, try another query image");
            }
            sw.Stop();
            _modelImageDectionlong = sw.ElapsedMilliseconds;
            #endregion

            #region Search Images
            sw.Reset(); sw.Start();
            //------------Search Images
            Accord.MachineLearning.Structures.KDTree<int> tree = surfAccordDataset.IndexedTree;
            double[][] listofQueryDescriptors = modelImageSurfPoints.Select(ft => ft.Descriptor).ToArray();
            double[] myscores = new double[listofQueryDescriptors.Length];
            int[] labels = Enumerable.Repeat(-1, listofQueryDescriptors.Length).ToArray();
            for (int i = 0; i < listofQueryDescriptors.Length; i++)
            {
                KDTreeNodeCollection<int> neighbors = tree.ApproximateNearest(listofQueryDescriptors[i],2, 90d);
                //KDTreeNodeCollection<int> neighbors = tree.Nearest(listofQueryDescriptors[i], uniquenessThreshold, 2);
                Dictionary<int, double> keyValueStore = new Dictionary<int, double>();
                double similarityDist = 0;
                foreach (KDTreeNodeDistance<int> point in neighbors)
                {
                    int label = point.Node.Value;
                    double d = point.Distance;

                    // Convert to similarity measure
                    if (keyValueStore.ContainsKey(label))
                    {
                        similarityDist = keyValueStore[label];
                        similarityDist += 1.0 / (1.0 + d);
                        keyValueStore[label] = similarityDist;
                    }
                    else
                    {
                        similarityDist = 1.0 / (1.0 + d);
                        keyValueStore.Add(label, similarityDist);
                    }
                }
                if (keyValueStore.Count > 0)
                {
                    int maxIndex = keyValueStore.Aggregate((l, r) => l.Value > r.Value ? l : r).Key;
                    labels[i] = maxIndex;
                    double sumOfAllValues = keyValueStore.Values.Sum();
                    myscores[i] = keyValueStore[maxIndex] / sumOfAllValues;
                }
            }
            sw.Stop();
            _queryingTime = sw.ElapsedMilliseconds;

            sw.Reset(); sw.Start();
            List<SURFRecord1> listOfSurfImages = surfAccordDataset.SurfImageIndexRecord;
            //----------Create Interval Tree from ImageMetaData
            IntervalTreeLib.IntervalTree<SURFRecord1, int> intervalTree;
            bool isTreeExist = CacheHelper.Get<IntervalTreeLib.IntervalTree<SURFRecord1, int>>("SurfAccordIntervalTree", out intervalTree);
            if (!isTreeExist)
            {
                intervalTree = new IntervalTreeLib.IntervalTree<SURFRecord1, int>();
                foreach (var record in listOfSurfImages)
                {
                    intervalTree.AddInterval(record.IndexStart, record.IndexEnd, record);
                }
                CacheHelper.Add<IntervalTreeLib.IntervalTree<SURFRecord1, int>>(intervalTree, "SurfAccordIntervalTree");
            }

            //--------------Matching Target image similarity
            for (int i = 0; i < listofQueryDescriptors.Length; i++)
            {
                int rowNum = labels[i];
                if (rowNum == -1) continue;
                double dist = myscores[i];
                SURFRecord1 rec = intervalTree.Get(rowNum, IntervalTreeLib.StubMode.ContainsStartThenEnd).FirstOrDefault();
                if (rec != null)
                    rec.Distance++;
            }
            sw.Stop();
            _loopTime = sw.ElapsedMilliseconds;
            #endregion

            string msg = String.Format("Loading: {0}, Model detection: {1}, Querying: {2}, Looping: {3}",
                                                _loadingTime, _modelImageDectionlong, _queryingTime, _loopTime);
            messageToLog = msg;
            rtnImageList = listOfSurfImages.Where(rec => rec.Distance > goodMatchDistThreshold)
                                    .OrderByDescending(rec => rec.Distance)
                                    .ToList<ImageRecord>();

            return rtnImageList;
        }
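The voting loop above converts each neighbor distance into a similarity of 1 / (1 + d) and accumulates it per label, then keeps the label with the highest total. A compact sketch of the same aggregation as a hypothetical stand-alone helper, using only plain dictionaries and LINQ:

        // Sketch of the per-label similarity voting used above: each neighbor
        // contributes 1 / (1 + distance) to its label's score; the winning label
        // and its normalized share of the total score are returned.
        static int VoteForLabel(IEnumerable<KeyValuePair<int, double>> labelDistancePairs,
            out double normalizedScore)
        {
            var scores = new Dictionary<int, double>();
            foreach (var pair in labelDistancePairs)
            {
                double similarity = 1.0 / (1.0 + pair.Value);
                if (scores.ContainsKey(pair.Key))
                    scores[pair.Key] += similarity;
                else
                    scores[pair.Key] = similarity;
            }

            if (scores.Count == 0)
            {
                normalizedScore = 0;
                return -1; // no neighbors, no vote
            }

            int best = scores.Aggregate((l, r) => l.Value > r.Value ? l : r).Key;
            normalizedScore = scores[best] / scores.Values.Sum();
            return best;
        }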
Example #47
        public void ProcessImageTest()
        {
            // Load an Image
            Bitmap img = Properties.Resources.sample_trans;

            // Extract the interest points
            var surf = new SpeededUpRobustFeaturesDetector(0.0002f, 5, 2);
            List<SurfPoint> points = surf.ProcessImage(img);

            // Describe the interest points
            SurfDescriptor descriptor = surf.GetDescriptor();
            descriptor.Describe(points);

            Assert.AreEqual(8, points.Count);

            SurfPoint p;

            p = points[0];
            Assert.AreEqual(0, p.Laplacian);
            Assert.AreEqual(25.3803387, p.X, 1e-2);
            Assert.AreEqual(14.7987738, p.Y, 1e-2);
            Assert.AreEqual(1.98713827, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(4.78528404, p.Orientation, 1e-2);
            Assert.AreEqual(64, p.Descriptor.Length);
            Assert.AreEqual(0.22572951, p.Descriptor[23], 1e-2);
            Assert.AreEqual(0.0962982625, points[1].Descriptor[42], 1e-2);

            p = points[1];
            Assert.AreEqual(1, p.Laplacian, 1e-2);
            Assert.AreEqual(20.4856224, p.X, 1e-2);
            Assert.AreEqual(20.4817181, p.Y, 1e-2);
            Assert.AreEqual(1.90549147, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(4.89748764, p.Orientation, 1e-2);
            Assert.AreEqual(64, p.Descriptor.Length);
            Assert.AreEqual(0.14823015, p.Descriptor[23], 1e-2);
            Assert.AreEqual(0.0861000642, p.Descriptor[54], 1e-2);

            p = points[2];
            Assert.AreEqual(0, p.Laplacian, 1e-2);
            Assert.AreEqual(14.7991896, p.X, 1e-2);
            Assert.AreEqual(25.3776169, p.Y, 1e-2);
            Assert.AreEqual(1.9869982, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(3.07735944, p.Orientation, 1e-2);
            Assert.AreEqual(64, p.Descriptor.Length);
            Assert.AreEqual(0.209485427, p.Descriptor[23], 1e-2);
            Assert.AreEqual(0.0112418151, p.Descriptor[12], 1e-2);

            p = points[6];
            Assert.AreEqual(1, p.Laplacian, 1e-2);
            Assert.AreEqual(22.4346638, p.X, 1e-2);
            Assert.AreEqual(41.4026527, p.Y, 1e-2);
            Assert.AreEqual(2.83586049, p.Scale, 1e-2);
            Assert.AreEqual(0.0, p.Response, 1e-2);
            Assert.AreEqual(3.13142157, p.Orientation, 1e-2);
            Assert.AreEqual(64, p.Descriptor.Length);
            Assert.AreEqual(0.0467314087, p.Descriptor[23], 1e-2);
            Assert.AreEqual(0.0266618263, p.Descriptor[12], 1e-2);


            descriptor.Extended = true;
            descriptor.Invariant = false;
            descriptor.Describe(points);

            p = points[5];
            Assert.AreEqual(1, p.Laplacian, 1e-3);
            Assert.AreEqual(41.4027748, p.X, 1e-3);
            Assert.AreEqual(22.4343891, p.Y, 1e-3);
            Assert.AreEqual(2.83486962, p.Scale, 1e-3);
            Assert.AreEqual(0.0, p.Response, 1e-3);
            Assert.AreEqual(4.72728586, p.Orientation, 1e-3);
            Assert.AreEqual(0.00786296651, p.Descriptor[67], 1e-3);
            Assert.AreEqual(-0.0202884115, p.Descriptor[97], 1e-2);
        }
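
The second half of this test switches the descriptor into extended mode (128-element descriptors instead of 64) and turns rotation invariance off, so orientations are used as detected rather than normalized. A short sketch of that usage follows, assuming the same legacy SurfPoint/SurfDescriptor API exercised by the test above; usings and resource setup are omitted, and the 128-element length is inferred from the extended-mode assertions.

        // Sketch only: reuses the classes exercised by the test above.
        Bitmap image = Properties.Resources.sample_trans;

        var surf = new SpeededUpRobustFeaturesDetector(0.0002f, 5, 2);
        List<SurfPoint> points = surf.ProcessImage(image);

        // Standard descriptors: 64 values per point, rotation-invariant.
        SurfDescriptor descriptor = surf.GetDescriptor();
        descriptor.Describe(points);
        Console.WriteLine(points[0].Descriptor.Length);   // 64

        // Extended, upright descriptors: orientation left as detected, longer descriptor vector.
        descriptor.Extended = true;
        descriptor.Invariant = false;
        descriptor.Describe(points);
        Console.WriteLine(points[0].Descriptor.Length);   // 128 (assumed: extended SURF doubles the descriptor length)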
Example #48
0
        public List <ImageRecord> QueryImage(string queryImagePath, out string messageToLog, SurfSettings surfSetting = null)
        {
            List <ImageRecord> rtnImageList = new List <ImageRecord>();

            #region Diagnostic Region
            Stopwatch sw = new Stopwatch();
            long      _loadingTime, _modelImageDetectionTime, _queryingTime, _loopTime = 0;
            #endregion Diagnostic Region

            #region Get KD-Tree Index
            sw.Reset(); sw.Start();
            //--------------Getting Indexed Records
            SurfAccordDataSet surfAccordDataset;
            bool isExist = CacheHelper.Get <SurfAccordDataSet>("SurfAccordDataSet", out surfAccordDataset);
            if (!isExist)
            {
                string repoFileStoragePath = Path.Combine(DirectoryHelper.SaveDirectoryPath, "SurfAccordDataSet.bin");
                if (!File.Exists(repoFileStoragePath))
                {
                    string exMsg = string.Format("Can't get the Surf Index at {0}, please index first", repoFileStoragePath);
                    throw new FileNotFoundException(exMsg);
                }
                using (FileStream s = File.OpenRead(repoFileStoragePath))
                {
                    //Polenter.Serialization.SharpSerializerBinarySettings bs =
                    //    new Polenter.Serialization.SharpSerializerBinarySettings(Polenter.Serialization.BinarySerializationMode.SizeOptimized);
                    //Polenter.Serialization.SharpSerializer formatter = new Polenter.Serialization.SharpSerializer(bs);
                    //formatter.Serialize(surfAccordDataset, s);
                    System.Runtime.Serialization.Formatters.Binary.BinaryFormatter formatter
                        = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
                    surfAccordDataset = (SurfAccordDataSet)formatter.Deserialize(s);
                    s.Close();
                }
                CacheHelper.Add <SurfAccordDataSet>(surfAccordDataset, "SurfAccordDataSet");
            }
            if (surfAccordDataset == null)
            {
                throw new InvalidOperationException("Can't get the Surf Index, please index first");
            }
            sw.Stop();
            _loadingTime = sw.ElapsedMilliseconds;
            #endregion

            #region Surf Detector Region
            double hessianThresh          = 500;
            double uniquenessThreshold    = 0.8;
            int    goodMatchDistThreshold = 0;

            if (surfSetting != null)
            {
                hessianThresh          = surfSetting.HessianThresh.Value;
                uniquenessThreshold    = surfSetting.UniquenessThreshold.Value;
                goodMatchDistThreshold = surfSetting.GoodMatchThreshold.Value;
            }
            float hessianThreshold2 = (float)hessianThresh / 1000000;
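            // Scale the user-facing hessian threshold down to the small float value this detector expects (500 -> 0.0005f).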
            SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector(hessianThreshold2);
            #endregion Surf Detector Region

            #region Get Model Detection and Validation
            sw.Reset(); sw.Start();
            List <SpeededUpRobustFeaturePoint> modelImageSurfPoints;
            using (Bitmap modelImage = (Bitmap)Image.FromFile(queryImagePath))
            {
                modelImageSurfPoints = surf.ProcessImage(modelImage);
            }

            if (modelImageSurfPoints == null ||
                modelImageSurfPoints.Count < 4)
            {
                throw new InvalidOperationException("Insuffucient interesting point in query image, try another query image");
            }
            sw.Stop();
            _modelImageDetectionTime = sw.ElapsedMilliseconds;
            #endregion

            #region Search Images
            sw.Reset(); sw.Start();
            //------------Search Images
            Accord.MachineLearning.Structures.KDTree <int> tree = surfAccordDataset.IndexedTree;
            double[][] listofQueryDescriptors = modelImageSurfPoints.Select(ft => ft.Descriptor).ToArray();
            double[]   myscores = new double[listofQueryDescriptors.Length];
            int[]      labels   = Enumerable.Repeat(-1, listofQueryDescriptors.Length).ToArray();
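            // labels[i] stays -1 for query descriptors that receive no neighbours; those are skipped in the matching loop below.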
            for (int i = 0; i < listofQueryDescriptors.Length; i++)
            {
                KDTreeNodeCollection <int> neighbors = tree.ApproximateNearest(listofQueryDescriptors[i], 2, 90d);
                //KDTreeNodeCollection<int> neighbors = tree.Nearest(listofQueryDescriptors[i], uniquenessThreshold, 2);
                Dictionary <int, double> keyValueStore = new Dictionary <int, double>();
                double similarityDist = 0;
                foreach (KDTreeNodeDistance <int> point in neighbors)
                {
                    int    label = point.Node.Value;
                    double d     = point.Distance;

                    // Convert to similarity measure
                    if (keyValueStore.ContainsKey(label))
                    {
                        similarityDist       = keyValueStore[label];
                        similarityDist      += 1.0 / (1.0 + d);
                        keyValueStore[label] = similarityDist;
                    }
                    else
                    {
                        similarityDist = 1.0 / (1.0 + d);
                        keyValueStore.Add(label, similarityDist);
                    }
                }
                if (keyValueStore.Count > 0)
                {
                    int maxIndex = keyValueStore.Aggregate((l, r) => l.Value > r.Value ? l : r).Key;
                    labels[i] = maxIndex;
                    double sumOfAllValues = keyValueStore.Values.Sum();
                    myscores[i] = keyValueStore[maxIndex] / sumOfAllValues;
                }
            }
            sw.Stop();
            _queryingTime = sw.ElapsedMilliseconds;

            sw.Reset(); sw.Start();
            List <SURFRecord1> listOfSurfImages = surfAccordDataset.SurfImageIndexRecord;
            //----------Create Interval Tree from ImageMetaData
            IntervalTreeLib.IntervalTree <SURFRecord1, int> intervalTree;
            bool isTreeExist = CacheHelper.Get <IntervalTreeLib.IntervalTree <SURFRecord1, int> >("SurfAccordIntervalTree", out intervalTree);
            if (!isTreeExist)
            {
                intervalTree = new IntervalTreeLib.IntervalTree <SURFRecord1, int>();
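                    // Each SURFRecord1 spans the contiguous block of descriptor rows [IndexStart, IndexEnd] belonging to
                    // one image, so a row index returned by the KD-tree can be mapped back to its source image.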
                foreach (var record in listOfSurfImages)
                {
                    intervalTree.AddInterval(record.IndexStart, record.IndexEnd, record);
                }
                CacheHelper.Add <IntervalTreeLib.IntervalTree <SURFRecord1, int> >(intervalTree, "SurfAccordIntervalTree");
            }

            //--------------Accumulate match votes per target image
            for (int i = 0; i < listofQueryDescriptors.Length; i++)
            {
                int rowNum = labels[i];
                if (rowNum == -1)
                {
                    continue;
                }
                double      dist = myscores[i];
                SURFRecord1 rec  = intervalTree.Get(rowNum, IntervalTreeLib.StubMode.ContainsStartThenEnd).FirstOrDefault();
                if (rec != null)
                {
                    // FirstOrDefault can return null if the row index falls outside every indexed interval
                    rec.Distance++;
                }
            }
            sw.Stop();
            _loopTime = sw.ElapsedMilliseconds;
            #endregion

            string msg = String.Format("Loading: {0}, Model detection: {1}, Querying: {2}, Looping: {3}",
                                       _loadingTime, _modelImageDetectionTime, _queryingTime, _loopTime);
            messageToLog = msg;
            rtnImageList = listOfSurfImages.Where(rec => rec.Distance > goodMatchDistThreshold)
                           .OrderByDescending(rec => rec.Distance)
                           .ToList <ImageRecord>();

            return(rtnImageList);
        }
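
A possible caller for this method is sketched below. The repository instance, the query-image path, and the settings values are illustrative assumptions; the settings simply mirror the fallback defaults used inside QueryImage.

        // Hypothetical caller; 'surfRepository' stands in for whatever class hosts QueryImage.
        var settings = new SurfSettings
        {
            HessianThresh = 500,          // divided by 1,000,000 inside QueryImage -> 0.0005f
            UniquenessThreshold = 0.8,
            GoodMatchThreshold = 0
        };

        string log;
        List<ImageRecord> matches = surfRepository.QueryImage(@"C:\temp\query.jpg", out log, settings);

        Console.WriteLine(log);                        // timing breakdown built inside the method
        Console.WriteLine("Matches: " + matches.Count);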