Code Example #1
        private void timer2_Tick(object sender, EventArgs e)
        {
            try
            {
                Bitmap img1 = ImageL;
                Bitmap img2 = ImageR;
                FastRetinaKeypointDetector freak = new FastRetinaKeypointDetector();
                FastRetinaKeypoint[] keyPoints1 = freak.ProcessImage(img1).ToArray();
                FastRetinaKeypoint[] keyPoints2 = freak.ProcessImage(img2).ToArray();

                var          matcher = new KNearestNeighborMatching<byte[]>(5, new Hamming());
                IntPoint[][] matches = matcher.Match(keyPoints1, keyPoints2);

                // Get the two sets of points
                correlationPoints1 = matches[0];
                correlationPoints2 = matches[1];

                RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
                homography = ransac.Estimate(correlationPoints1, correlationPoints2);

                // Plot RANSAC results against correlation results
                //IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
                //IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);

                Blend blend = new Blend(homography, img1);
                m_stitpic_box.Image = blend.Apply(img2);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.ToString());
            }
        }
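The inlier-filtering step above is left commented out, so RANSAC's inlier set is computed but never used. A minimal sketch of how it could be restored, reusing the handler's own variables and the same Submatrix extension that the CompareAndDrawImage examples further down apply to their correlation points:

                // Keep only the correspondences that RANSAC accepted as inliers
                IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
                IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);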
Code Example #2
        public void MatchTest2()
        {
            var imgOld = Accord.Imaging.Image.Clone(Properties.Resources.old);
            var imgNew = Accord.Imaging.Image.Clone(Properties.Resources._new); 
            FastRetinaKeypointDetector freak = new FastRetinaKeypointDetector();

            var keyPoints1 = freak.ProcessImage(imgOld).ToArray();
            var keyPoints2 = freak.ProcessImage(imgNew).ToArray();

            var matcher = new KNearestNeighborMatching<byte[]>(5, new Hamming());

            { // direct
                IntPoint[][] matches = matcher.Match(keyPoints1, keyPoints2);
                Assert.AreEqual(2, matches.Length);
                Assert.AreEqual(1, matches[0].Length);
                Assert.AreEqual(1, matches[1].Length);
            }

            { // reverse
                IntPoint[][] matches = matcher.Match(keyPoints2, keyPoints1);
                Assert.AreEqual(2, matches.Length);
                Assert.AreEqual(1, matches[0].Length);
                Assert.AreEqual(1, matches[1].Length);
            }

        }
Code Example #3
File: MainForm.cs Project: natepan/framework
        private void btnCorrelation_Click(object sender, EventArgs e)
        {
            if (surfPoints1 == null)
            {
                MessageBox.Show("Please, click SURF button first! :-)");
                return;
            }

            // Step 2: Match feature points using a k-NN
            KNearestNeighborMatching matcher = new KNearestNeighborMatching(5);
            IntPoint[][] matches = matcher.Match(surfPoints1, surfPoints2);

            // Get the two sets of points
            correlationPoints1 = matches[0];
            correlationPoints2 = matches[1];

            // Concatenate the two images in a single image (just to show on screen)
            Concatenate concat = new Concatenate(img1);
            Bitmap img3 = concat.Apply(img2);

            // Show the marked correlations in the concatenated image
            PairsMarker pairs = new PairsMarker(
                correlationPoints1, // Add image1's width to the X points to show the markings correctly
                correlationPoints2.Apply(p => new IntPoint(p.X + img1.Width, p.Y)));

            pictureBox.Image = pairs.Apply(img3);
        }
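This handler assumes surfPoints1 and surfPoints2 were filled in by an earlier click on the SURF button. A minimal sketch of that missing step, following the detector usage in the other examples (the handler name btnSurf_Click is an assumption; img1, img2, surfPoints1 and surfPoints2 are the form's fields referenced above):

        private void btnSurf_Click(object sender, EventArgs e)
        {
            // Step 1: Detect feature points using the SURF detector
            SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector();

            surfPoints1 = surf.ProcessImage(img1).ToArray();
            surfPoints2 = surf.ProcessImage(img2).ToArray();
        }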
Code Example #4
        public void MatchTest2()
        {
            var imgOld = Accord.Imaging.Image.Clone(Resources.old);
            var imgNew = Accord.Imaging.Image.Clone(Resources._new);
            FastRetinaKeypointDetector freak = new FastRetinaKeypointDetector();

            var keyPoints1 = freak.ProcessImage(imgOld).ToArray();
            var keyPoints2 = freak.ProcessImage(imgNew).ToArray();

            var matcher = new KNearestNeighborMatching<byte[]>(5, new Hamming());

            { // direct
                IntPoint[][] matches = matcher.Match(keyPoints1, keyPoints2);
                Assert.AreEqual(2, matches.Length);
                Assert.AreEqual(1, matches[0].Length);
                Assert.AreEqual(1, matches[1].Length);
            }

            { // reverse
                IntPoint[][] matches = matcher.Match(keyPoints2, keyPoints1);
                Assert.AreEqual(2, matches.Length);
                Assert.AreEqual(1, matches[0].Length);
                Assert.AreEqual(1, matches[1].Length);
            }
        }
Code Example #5
File: Form1.cs Project: radtek/LogoRec
        private void knn()
        {
            KNearestNeighborMatching matcher = new KNearestNeighborMatching(5);

            IntPoint[][] matches = matcher.Match(surfPoints1, surfPoints2);

            // Get the two sets of points
            correlationPoints1 = matches[0];
            correlationPoints2 = matches[1];

            // Concatenate the two images in a single image (just to show on screen)
            Concatenate concat = new Concatenate(img1);
            Bitmap      img3   = concat.Apply(img2);

            // Show the marked correlations in the concatenated image
            PairsMarker pairs = new PairsMarker(
                correlationPoints1, // Add image1's width to the X points to show the markings correctly
                correlationPoints2.Apply(p => new IntPoint(p.X + img1.Width, p.Y)));

            img3 = img3.Clone(new Rectangle(0, 0, img3.Width, img3.Height), PixelFormat.Format24bppRgb);

            var pic = pairs.Apply(img3);

            pictureBox1.Image = pic;
        }
Code Example #6
        public void MatchTest3()
        {
            FastCornersDetector fast = new FastCornersDetector(threshold: 10);

            FastRetinaKeypointDetector freak = new FastRetinaKeypointDetector(fast);

            var keyPoints1 = freak.ProcessImage(Properties.Resources.old).ToArray();
            var keyPoints2 = freak.ProcessImage(Properties.Resources.flower01).ToArray();

            var matcher = new KNearestNeighborMatching<byte[]>(5, Distance.BitwiseHamming);

            { // direct
                IntPoint[][] matches = matcher.Match(keyPoints1, keyPoints2);
                Assert.AreEqual(2, matches.Length);
                Assert.AreEqual(138, matches[0].Length);
                Assert.AreEqual(138, matches[1].Length);
                Assert.AreEqual(532, matches[0][0].X);
                Assert.AreEqual(159, matches[0][0].Y);
                Assert.AreEqual(keyPoints2[0].ToIntPoint(), matches[1][0]);
            }

            { // reverse
                IntPoint[][] matches = matcher.Match(keyPoints2, keyPoints1);
                Assert.AreEqual(2, matches.Length);
                Assert.AreEqual(138, matches[0].Length);
                Assert.AreEqual(138, matches[1].Length);
                Assert.AreEqual(keyPoints2[0].ToIntPoint(), matches[0][0]);
                Assert.AreEqual(532, matches[1][0].X);
                Assert.AreEqual(159, matches[1][0].Y);
            }
        }
Code Example #7
    protected void surfRansacBlendStraight(List<Bitmap> imgs)
    {
        MatrixH homography;

        List<SpeededUpRobustFeaturePoint[]> surfPoints = new List<SpeededUpRobustFeaturePoint[]>();
        //Calculate all the Surf Points
        SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector();
        double lastAngle = 0;

        for (int i = 0; i < imgs.Count; i++)
        {
            //Grayscale to find the edges and adjust the normal to point up
            AForge.Imaging.Filters.GrayscaleBT709 grayscale = new AForge.Imaging.Filters.GrayscaleBT709();
            AForge.Imaging.DocumentSkewChecker    skew      = new AForge.Imaging.DocumentSkewChecker();

            double angle = skew.GetSkewAngle(grayscale.Apply(imgs[i]));

            //Less than 5 deg change in angle to account for wobble, ignore big shifts
            if (Math.Abs(angle - lastAngle) < 5)
            {
                AForge.Imaging.Filters.RotateBilinear rotate = new AForge.Imaging.Filters.RotateBilinear(angle);
                rotate.FillColor = Color.FromArgb(0, 255, 255, 255);
                imgs[i]          = rotate.Apply(imgs[i]);
                lastAngle        = angle;
            }
            showImage(imgs[i]);
            surfPoints.Add(surf.ProcessImage(imgs[i]).ToArray());
        }


        Bitmap final = imgs[0];

        for (int i = 1; i < imgs.Count; i++)
        {
            SpeededUpRobustFeaturePoint[] surfFinal = surf.ProcessImage(final).ToArray();

            //Correlate the Harris pts between imgs
            KNearestNeighborMatching matcher = new KNearestNeighborMatching(5);
            matcher.Threshold = 0.05;

            IntPoint[][] matches = matcher.Match(surfFinal, surfPoints[i]);

            //Create the homography matrix using RANSAC
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.015, 1);
            homography = ransac.Estimate(matches[0], matches[1]);

            Blend blend = new Blend(homography, final);
            blend.Gradient = true;
            final          = blend.Apply(imgs[i]);
        }

        //Smooth/Sharpen if I wanted to
        AForge.Imaging.Filters.Sharpen filter = new AForge.Imaging.Filters.Sharpen();
        //AForge.Imaging.Filters.Gaussian filter = new AForge.Imaging.Filters.Gaussian(5);
        //filter.ApplyInPlace(final);

        showImage(final);
    }
Code Example #8
File: BlendTest.cs Project: RLaumeyer/framework
        public void Panorama_Example1()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // Let's start with two pictures that have been
            // taken from slightly different points of view:
            //
            Bitmap img1 = Resources.dc_left;
            Bitmap img2 = Resources.dc_right;

            // Those pictures are shown below:
            // ImageBox.Show(img1, PictureBoxSizeMode.Zoom, 640, 480);
            // ImageBox.Show(img2, PictureBoxSizeMode.Zoom, 640, 480);


            // Step 1: Detect feature points using Surf Corners Detector
            var surf = new SpeededUpRobustFeaturesDetector();

            var points1 = surf.ProcessImage(img1);
            var points2 = surf.ProcessImage(img2);

            // Step 2: Match feature points using a k-NN
            var matcher = new KNearestNeighborMatching(5);
            var matches = matcher.Match(points1, points2);

            // Step 3: Create the matrix using a robust estimator
            var ransac = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homographyMatrix = ransac.Estimate(matches);

            Assert.AreEqual(1.13583624f, homographyMatrix.Elements[0], 1e-5);
            Assert.AreEqual(-0.0229569562f, homographyMatrix.Elements[1], 1e-5);
            Assert.AreEqual(-255.243988f, homographyMatrix.Elements[2], 1e-2);
            Assert.AreEqual(0.080111593f, homographyMatrix.Elements[3], 1e-5);
            Assert.AreEqual(1.11404252f, homographyMatrix.Elements[4], 1e-5);
            Assert.AreEqual(-167.362167f, homographyMatrix.Elements[5], 1e-2);
            Assert.AreEqual(0.00011207442f, homographyMatrix.Elements[6], 1e-5);
            Assert.AreEqual(0.0000529394056f, homographyMatrix.Elements[7], 1e-5);
            Assert.AreEqual(8, homographyMatrix.Elements.Length);


            // Step 4: Project and blend using the homography
            Blend blend = new Blend(homographyMatrix, img1);


            // Compute the blending algorithm
            Bitmap result = blend.Apply(img2);

            // Show on screen
            // ImageBox.Show(result, PictureBoxSizeMode.Zoom, 640, 480);

#pragma warning disable 618
            double[,] expected = Properties.Resources.blend_result.ToDoubleMatrix(0);
            double[,] actual = result.ToDoubleMatrix(0);
            Assert.IsTrue(Matrix.IsEqual(expected, actual, 0.1));
#pragma warning restore 618
        }
Code Example #9
    protected void surfRansacBlend(List<Bitmap> imgs)
    {
        MatrixH homography;

        List<SpeededUpRobustFeaturePoint[]> surfPoints = new List<SpeededUpRobustFeaturePoint[]>();
        //Calculate all the Surf Points
        SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector();

        for (int i = 0; i < imgs.Count; i++)
        {
            surfPoints.Add(surf.ProcessImage(imgs[i]).ToArray());
        }


        Bitmap final = imgs[0];

        for (int i = 1; i < imgs.Count; i++)
        {
            SpeededUpRobustFeaturePoint[] surfFinal = surf.ProcessImage(final).ToArray();

            //Correlate the Harris pts between imgs
            KNearestNeighborMatching matcher = new KNearestNeighborMatching(5);
            matcher.Threshold = 0.05;

            IntPoint[][] matches = matcher.Match(surfFinal, surfPoints[i]);

            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.015, 1);
            homography = ransac.Estimate(matches[0], matches[1]);

            Blend blend = new Blend(homography, final);
            blend.Gradient = true;
            final          = blend.Apply(imgs[i]);
        }

        //Smooth/Sharpen if I wanted to
        AForge.Imaging.Filters.Sharpen filter = new AForge.Imaging.Filters.Sharpen();
        //AForge.Imaging.Filters.Gaussian filter = new AForge.Imaging.Filters.Gaussian(5);
        //filter.ApplyInPlace(final);

        showImage(final);
    }
Code Example #10
        public void MatchTest()
        {
            FastRetinaKeypointDetector freak = new FastRetinaKeypointDetector();

            var keyPoints1 = freak.ProcessImage(Properties.Resources.image1).ToArray();
            var keyPoints2 = freak.ProcessImage(Properties.Resources.image2).ToArray();

            bool thrown = false;

            try
            {
                var          matcher = new KNearestNeighborMatching<byte[]>(5, Distance.BitwiseHamming);
                IntPoint[][] matches = matcher.Match(keyPoints1, keyPoints2);
            }
            catch (ArgumentException)
            {
                thrown = true;
            }

            Assert.IsTrue(thrown);
        }
Code Example #11
    protected void freakRansacBlend(List<Bitmap> imgs)
    {
        MatrixH homography;

        List<FastRetinaKeypoint[]> freakPoints = new List<FastRetinaKeypoint[]>();
        //Calculate all the FREAK Points
        FastRetinaKeypointDetector freak = new FastRetinaKeypointDetector();

        foreach (Bitmap img in imgs)
        {
            freakPoints.Add(freak.ProcessImage(img).ToArray());
        }

        //Map them and draw them!
        Bitmap final = imgs[0];

        for (int i = 1; i < imgs.Count; i++)
        {
            FastRetinaKeypoint[] freakFinal = freak.ProcessImage(final).ToArray();

            KNearestNeighborMatching matcher = new KNearestNeighborMatching(500);
            matcher.Threshold = 0.005;
            IntPoint[][] matches = matcher.Match(freakFinal, freakPoints[i]);

            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.015, 1);
            homography = ransac.Estimate(matches[0], matches[1]);

            Blend blend = new Blend(homography, final);
            blend.Gradient = true;
            final          = blend.Apply(imgs[i]);
        }

        //Smooth/Sharpen if I wanted to
        AForge.Imaging.Filters.Sharpen filter = new AForge.Imaging.Filters.Sharpen();
        //AForge.Imaging.Filters.Gaussian filter = new AForge.Imaging.Filters.Gaussian(5);
        //filter.ApplyInPlace(final);

        showImage(final);
    }
Code Example #12
    protected void drawFreakFeaturesCorrelations(List<Bitmap> imgs)
    {
        List<FastRetinaKeypoint[]> freakPoints = new List<FastRetinaKeypoint[]>();
        //Calculate all the FREAK Points
        FastRetinaKeypointDetector freak = new FastRetinaKeypointDetector();

        foreach (Bitmap img in imgs)
        {
            freakPoints.Add(freak.ProcessImage(img).ToArray());
        }

        //Map them and draw them!
        Bitmap img2 = imgs[0];

        for (int i = 0; i < imgs.Count - 1; i++)
        {
            KNearestNeighborMatching matcher = new KNearestNeighborMatching(200);
            matcher.Threshold = 0.015;
            IntPoint[][] matches = matcher.Match(freakPoints[i], freakPoints[i + 1]);

            Concatenate concat = new Concatenate(img2);
            Bitmap      img3   = concat.Apply(imgs[i + 1]);

            Color color = Color.White;
            if (i % 3 == 1)
            {
                color = Color.OrangeRed;
            }
            if (i % 3 == 2)
            {
                color = Color.Blue;
            }
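            // Shift both point sets into the coordinates of the concatenated strip;
            // note this offset arithmetic is only exact when every input image has
            // the same width.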
            PairsMarker pairs = new PairsMarker(
                matches[0].Apply(p => new IntPoint(p.X + img2.Width - imgs[0].Width, p.Y)),
                matches[1].Apply(p => new IntPoint(p.X + img2.Width, p.Y)),
                color);
            img2 = pairs.Apply(img3);
        }

        showImage(img2);
    }
Code Example #13
    protected void drawSurfFeaturesCorrelations(List<Bitmap> imgs)
    {
        List<SpeededUpRobustFeaturePoint[]> surfPoints = new List<SpeededUpRobustFeaturePoint[]>();
        //Calculate all the Surf Points
        SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector();

        foreach (Bitmap img in imgs)
        {
            surfPoints.Add(surf.ProcessImage(img).ToArray());
        }

        //Map them and draw them!
        Bitmap surfImg = imgs[0];

        for (int i = 0; i < imgs.Count - 1; i++)
        {
            KNearestNeighborMatching matcher = new KNearestNeighborMatching(5);
            matcher.Threshold = 0.005;
            IntPoint[][] matches = matcher.Match(surfPoints[i], surfPoints[i + 1]);

            Concatenate concat = new Concatenate(surfImg);
            Bitmap      img    = concat.Apply(imgs[i + 1]);

            Color color = Color.White;
            if (i % 3 == 1)
            {
                color = Color.OrangeRed;
            }
            if (i % 3 == 2)
            {
                color = Color.Blue;
            }
            PairsMarker pairs = new PairsMarker(
                matches[0].Apply(p => new IntPoint(p.X + surfImg.Width - imgs[0].Width, p.Y)),
                matches[1].Apply(p => new IntPoint(p.X + surfImg.Width, p.Y)),
                color);
            surfImg = pairs.Apply(img);
        }

        showImage(surfImg);
    }
Code Example #14
        public void MatchTest2()
        {
            FastRetinaKeypointDetector freak = new FastRetinaKeypointDetector();

            var keyPoints1 = freak.ProcessImage(Properties.Resources.old).ToArray();
            var keyPoints2 = freak.ProcessImage(Properties.Resources._new).ToArray();

            var matcher = new KNearestNeighborMatching<byte[]>(5, Distance.BitwiseHamming);

            { // direct
                IntPoint[][] matches = matcher.Match(keyPoints1, keyPoints2);
                Assert.AreEqual(2, matches.Length);
                Assert.AreEqual(1, matches[0].Length);
                Assert.AreEqual(1, matches[1].Length);
            }

            { // reverse
                IntPoint[][] matches = matcher.Match(keyPoints2, keyPoints1);
                Assert.AreEqual(2, matches.Length);
                Assert.AreEqual(1, matches[0].Length);
                Assert.AreEqual(1, matches[1].Length);
            }
        }
Code Example #15
        public void MatchTest()
        {
            var image1 = Accord.Imaging.Image.Clone(Resources.image1);
            var image2 = Accord.Imaging.Image.Clone(Resources.image2);
            FastRetinaKeypointDetector freak = new FastRetinaKeypointDetector();

            var keyPoints1 = freak.ProcessImage(image1).ToArray();
            var keyPoints2 = freak.ProcessImage(image2).ToArray();

            bool thrown = false;

            try
            {
                var          matcher = new KNearestNeighborMatching<byte[]>(5, new Hamming());
                IntPoint[][] matches = matcher.Match(keyPoints1, keyPoints2);
            }
            catch (ArgumentException)
            {
                thrown = true;
            }

            Assert.IsTrue(thrown);
        }
Code Example #16
File: BlendTest.cs Project: sami1971/framework
        public void Example1()
        {
            // Let's start with two pictures that have been
            // taken from slightly different points of view:
            //
            Bitmap img1 = Resources.dc_left;
            Bitmap img2 = Resources.dc_right;

            // Those pictures are shown below:
            ImageBox.Show(img1, PictureBoxSizeMode.Zoom, 640, 480);
            ImageBox.Show(img2, PictureBoxSizeMode.Zoom, 640, 480);


            // Step 1: Detect feature points using Surf Corners Detector
            var surf = new SpeededUpRobustFeaturesDetector();

            var points1 = surf.ProcessImage(img1);
            var points2 = surf.ProcessImage(img2);

            // Step 2: Match feature points using a k-NN
            var matcher = new KNearestNeighborMatching(5);
            var matches = matcher.Match(points1, points2);

            // Step 3: Create the matrix using a robust estimator
            var     ransac           = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homographyMatrix = ransac.Estimate(matches);

            // Step 4: Project and blend using the homography
            Blend blend = new Blend(homographyMatrix, img1);


            // Compute the blending algorithm
            Bitmap result = blend.Apply(img2);

            // Show on screen
            ImageBox.Show(result, PictureBoxSizeMode.Zoom, 640, 480);
        }
Code Example #17
        public void MatchTest3()
        {
            Accord.Math.Random.Generator.Seed = 0;

            var old      = Accord.Imaging.Image.Clone(Resources.old);
            var flower01 = Accord.Imaging.Image.Clone(Resources.flower01);

            FastCornersDetector fast = new FastCornersDetector(threshold: 10);

            FastRetinaKeypointDetector freak = new FastRetinaKeypointDetector(fast);

            var keyPoints1 = freak.ProcessImage(old).ToArray();
            var keyPoints2 = freak.ProcessImage(flower01).ToArray();

            var matcher = new KNearestNeighborMatching<byte[]>(5, new Hamming());

            { // direct
                IntPoint[][] matches = matcher.Match(keyPoints1, keyPoints2);
                Assert.AreEqual(2, matches.Length);
                Assert.AreEqual(143, matches[0].Length);
                Assert.AreEqual(143, matches[1].Length);
                Assert.AreEqual(532, matches[0][0].X);
                Assert.AreEqual(159, matches[0][0].Y);
                Assert.AreEqual(keyPoints2[0].ToIntPoint(), matches[1][0]);
            }

            { // reverse
                IntPoint[][] matches = matcher.Match(keyPoints2, keyPoints1);
                Assert.AreEqual(2, matches.Length);
                Assert.AreEqual(143, matches[0].Length);
                Assert.AreEqual(143, matches[1].Length);
                Assert.AreEqual(keyPoints2[0].ToIntPoint(), matches[0][0]);
                Assert.AreEqual(532, matches[1][0].X);
                Assert.AreEqual(159, matches[1][0].Y);
            }
        }
Code Example #18
        /// <summary>
        /// Extract keypoints from an image using the FREAK methodology.
        /// </summary>
        /// <param name="image">Input color image.</param>
        /// <param name="depth">Input depth map.</param>
        /// <param name="width">Input image width.</param>
        /// <param name="height">Input image height.</param>
        /// <param name="threshold">Selection threshold value. Higher gives less keypoints.</param>
        /// <returns>List of keypoints in measurement space.</returns>
        private List<SparseItem> ExtractKeypoints(Color[] image, float[][] depth, int width, int height, int threshold)
        {
            List<IFeaturePoint> keypointsF = ExtractRawKeyPoints(image, width, height, KeypointFilter);
            List<SparseItem>    keypoints  = new List<SparseItem>();

            List<IFeaturePoint> filtered = new List<IFeaturePoint>();

            if (KeypointFilter && keypointsF.Count > 4 && prevkeypoints.Count > 4)
            {
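                // Index the current keypoints by their integer pixel position so that
                // RANSAC inliers (returned as points) can be mapped back to them.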
                var descriptors = new Dictionary<IntPoint, IFeaturePoint>();

                foreach (var point in keypointsF)
                {
                    descriptors[new IntPoint((int)point.X, (int)point.Y)] = point;
                }

                var matcher = new KNearestNeighborMatching(3, Distance.Hamming);
                matcher.Threshold = 0.37;

                var matches = matcher.Match(prevkeypoints, keypointsF);
                var ransac  = new RansacHomographyEstimator(0.1, 0.999);

                this.matches    = new IntPoint[2][];
                this.matches[0] = new IntPoint[0];

                try {
                    if (matches[0].Length > 4)
                    {
                        ransac.Estimate(matches);
                        int[] inliers = ransac.Inliers;

                        filtered = new List<IFeaturePoint>();

                        this.matches    = new IntPoint[2][];
                        this.matches[0] = new IntPoint[inliers.Length];
                        this.matches[1] = new IntPoint[inliers.Length];

                        for (int i = 0; i < inliers.Length; i++)
                        {
                            int x = matches[1][inliers[i]].X;
                            int y = matches[1][inliers[i]].Y;

                            this.matches[0][i] = matches[0][inliers[i]];
                            this.matches[1][i] = matches[1][inliers[i]];

                            if (depth[x][y] > 0)
                            {
                                filtered.Add(descriptors[matches[1][inliers[i]]]);
                            }
                        }
                    }
                }
                catch (Accord.ConvergenceException) {
                    // just continue as if not enough points were found
                }
            }
            else
            {
                for (int i = 0; i < keypointsF.Count; i++)
                {
                    int x = (int)keypointsF[i].X;
                    int y = (int)keypointsF[i].Y;

                    if (depth[x][y] > 0)
                    {
                        filtered.Add(keypointsF[i]);
                    }
                }
            }

            this.prevprevkeypoints = this.prevkeypoints;
            this.prevkeypoints     = keypointsF;

            foreach (var point in filtered)
            {
                int x = (int)point.X;
                int y = (int)point.Y;

                keypoints.Add(new SparseItem(x, y, depth[x][y]));
            }

            return keypoints;
        }
Code Example #19
 public FeaturePointMotionDetector()
 {
     _surf       = new SpeededUpRobustFeaturesDetector();
     _matcher    = new KNearestNeighborMatching(5);
     _prevPoints = null;
 }
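Only the constructor of this motion detector survives in the snippet. A hypothetical sketch of how its three fields would typically cooperate on each frame (the ProcessFrame name, its return convention, and the SpeededUpRobustFeaturePoint[] type of _prevPoints are assumptions, not part of the original class):

 public IntPoint[][] ProcessFrame(Bitmap frame)
 {
     // Detect SURF points in the incoming frame
     SpeededUpRobustFeaturePoint[] points = _surf.ProcessImage(frame).ToArray();

     IntPoint[][] matches = null;
     if (_prevPoints != null)
     {
         // Correlate the new frame's points against the previous frame's
         matches = _matcher.Match(_prevPoints, points);
     }

     _prevPoints = points;
     return matches;
 }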
Code Example #20
        private Bitmap CompareAndDrawImage(Bitmap modelImage, Bitmap observedImage, SurfSettings setting)
        {
            Stopwatch watch1 = new Stopwatch();
            Stopwatch watch2 = new Stopwatch();

            Bitmap returnBitmap;

            watch2.Start();
            watch1.Reset(); watch1.Start();
            double hessianThreshold  = setting.HessianThresh.HasValue ? setting.HessianThresh.Value : 500;
            float  hessianThreshold2 = (float)hessianThreshold / 1000000;

            Debug.WriteLine("hessianThreshold2: {0}", hessianThreshold2);
            SpeededUpRobustFeaturesDetector    surf        = new SpeededUpRobustFeaturesDetector(hessianThreshold2);
            List<SpeededUpRobustFeaturePoint> surfPoints1 = surf.ProcessImage(modelImage);
            List<SpeededUpRobustFeaturePoint> surfPoints2 = surf.ProcessImage(observedImage);


            Debug.WriteLine("Surf points count: {0}", surfPoints1.Count);
            Debug.WriteLine("Surf points count: {0}", surfPoints2.Count);
            //long memoryFootprint = MemorySize.GetBlobSizeinKb(surfPoints2);
            //Debug.WriteLine("Surf extractor: {0} kb", memoryFootprint);

            watch1.Stop();
            Debug.WriteLine("Surf Detection tooked {0} ms", watch1.ElapsedMilliseconds);

            watch1.Reset(); watch1.Start();
            // Show the marked points in the original images
            Bitmap img1mark = new FeaturesMarker(surfPoints1, 2).Apply(modelImage);
            Bitmap img2mark = new FeaturesMarker(surfPoints2, 2).Apply(observedImage);
            // Concatenate the two images together in a single image (just to show on screen)
            Concatenate concatenate = new Concatenate(img1mark);

            returnBitmap = concatenate.Apply(img2mark);
            watch1.Stop();
            Debug.WriteLine("Surf point plotting tooked {0} ms", watch1.ElapsedMilliseconds);


            //watch1.Reset(); watch1.Start();
            //List<IntPoint>[] coretionalMatches = getMatches(surfPoints1, surfPoints2);
            //watch1.Stop();
            //Debug.WriteLine("Correctional Match tooked {0} ms", watch1.ElapsedMilliseconds);

            //// Get the two sets of points
            //IntPoint[] correlationPoints11 = coretionalMatches[0].ToArray();
            //IntPoint[] correlationPoints22 = coretionalMatches[1].ToArray();

            //Debug.WriteLine("Correclation points count: {0}", correlationPoints11.Length);
            //Debug.WriteLine("Correclation points count: {0}", correlationPoints22.Length);

            Debug.WriteLine("Threshold: {0}", setting.UniquenessThreshold.Value);
            watch1.Reset(); watch1.Start();
            // Step 2: Match feature points using a k-NN
            KNearestNeighborMatching matcher = new KNearestNeighborMatching(2);

            matcher.Threshold = setting.UniquenessThreshold.Value;
            IntPoint[][] matches = matcher.Match(surfPoints1, surfPoints2);
            watch1.Stop();
            Debug.WriteLine("Knn Match tooked {0} ms", watch1.ElapsedMilliseconds);

            // Get the two sets of points
            IntPoint[] correlationPoints1 = matches[0];
            IntPoint[] correlationPoints2 = matches[1];

            Debug.WriteLine("Knn points count: {0}", correlationPoints1.Length);
            Debug.WriteLine("Knn points count: {0}", correlationPoints2.Length);

            //watch1.Reset(); watch1.Start();
            //// Show the marked correlations in the concatenated image
            //PairsMarker pairs = new PairsMarker(
            //    correlationPoints1, // Add image1's width to the X points to show the markings correctly
            //    correlationPoints2.Apply(p => new IntPoint(p.X + modelImage.Width, p.Y)), Color.Blue);

            //returnBitmap = pairs.Apply(returnBitmap);
            //watch1.Stop();
            //Debug.WriteLine("Match pair marking tooked {0} ms", watch1.ElapsedMilliseconds);

            if (correlationPoints1.Length < 4 || correlationPoints2.Length < 4)
            {
                MessageBox.Show("Insufficient points to attempt a fit.");
                return null;
            }

            watch1.Reset(); watch1.Start();
            // Step 3: Create the homography matrix using a robust estimator
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homography = ransac.Estimate(correlationPoints1, correlationPoints2);

            watch1.Stop();
            Debug.WriteLine("Ransac tooked {0} ms", watch1.ElapsedMilliseconds);

            watch1.Reset(); watch1.Start();
            // Plot RANSAC results against correlation results
            IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
            IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);
            watch1.Stop();
            Debug.WriteLine("Ransac SubMatrix {0} ms", watch1.ElapsedMilliseconds);

            Debug.WriteLine("Ransac points count: {0}", inliers1.Length);
            Debug.WriteLine("Ransac points count: {0}", inliers2.Length);

            watch1.Reset(); watch1.Start();
            PairsMarker inlierPairs = new PairsMarker(
                inliers1, // Add image1's width to the X points to show the markings correctly
                inliers2.Apply(p => new IntPoint(p.X + modelImage.Width, p.Y)), Color.Red);

            returnBitmap = inlierPairs.Apply(returnBitmap);
            watch1.Stop();
            Debug.WriteLine("Ransac plotting tooked {0} ms", watch1.ElapsedMilliseconds);

            watch2.Stop();
            return returnBitmap;
        }
Code Example #21
        public void MatchTest3()
        {
            FastCornersDetector fast = new FastCornersDetector(threshold: 10);

            FastRetinaKeypointDetector freak = new FastRetinaKeypointDetector(fast);

            var keyPoints1 = freak.ProcessImage(Properties.Resources.old).ToArray();
            var keyPoints2 = freak.ProcessImage(Properties.Resources.flower01).ToArray();

            var matcher = new KNearestNeighborMatching<byte[]>(5, Distance.BitwiseHamming);

            { // direct
                IntPoint[][] matches = matcher.Match(keyPoints1, keyPoints2);
                Assert.AreEqual(2, matches.Length);
                Assert.AreEqual(143, matches[0].Length);
                Assert.AreEqual(143, matches[1].Length);
                Assert.AreEqual(532, matches[0][0].X);
                Assert.AreEqual(159, matches[0][0].Y);
                Assert.AreEqual(keyPoints2[0].ToIntPoint(), matches[1][0]);
            }

            { // reverse
                IntPoint[][] matches = matcher.Match(keyPoints2, keyPoints1);
                Assert.AreEqual(2, matches.Length);
                Assert.AreEqual(143, matches[0].Length);
                Assert.AreEqual(143, matches[1].Length);
                Assert.AreEqual(keyPoints2[0].ToIntPoint(), matches[0][0]);
                Assert.AreEqual(532, matches[1][0].X);
                Assert.AreEqual(159, matches[1][0].Y);
            }

        }
Code Example #22
        public List<ImageRecord> QueryImage(string queryImagePath, out string messageToLog, SurfSettings surfSetting = null)
        {
            List<ImageRecord> rtnImageList = new List<ImageRecord>();

            #region Diagnostic Region
            Stopwatch sw = new Stopwatch();
            Stopwatch sw1 = new Stopwatch();
            long      _loadingTime = 0, _modelImageDectionlong = 0, _queryingTime = 0, _matchingTime = 0;
            #endregion Diagnostic Region

            #region Surf Detector Region
            double hessianThresh       = 500;
            double uniquenessThreshold = 0.8;
            int    minGoodMatchPercent = 0;

            if (surfSetting != null)
            {
                hessianThresh       = surfSetting.HessianThresh.Value;
                uniquenessThreshold = surfSetting.UniquenessThreshold.Value;
                minGoodMatchPercent = surfSetting.GoodMatchThreshold.Value;
            }
            float hessianThreshold2 = (float)hessianThresh / 1000000;
            SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector(hessianThreshold2);
            #endregion Surf Detector Region

            #region Get Model Detection and Validation
            sw.Reset(); sw.Start();
            List<SpeededUpRobustFeaturePoint> modelImageSurfPoints;
            using (Bitmap modelImage = (Bitmap)Image.FromFile(queryImagePath))
            {
                modelImageSurfPoints = surf.ProcessImage(modelImage);
            }

            if (modelImageSurfPoints == null ||
                modelImageSurfPoints.Count < 4)
            {
                throw new InvalidOperationException("Insufficient interest points in query image, try another query image");
            }
            sw.Stop();
            _modelImageDectionlong = sw.ElapsedMilliseconds;
            #endregion

            #region Search Images
            sw.Reset(); sw.Start();
            string fullFileName = Path.Combine(DirectoryHelper.SaveDirectoryPath, "SurfAccordLinear.bin");
            if (!File.Exists(fullFileName))
            {
                string exMsg = string.Format("Can't get the Surf Index at {0}, please index first", fullFileName);
                throw new FileNotFoundException(exMsg, fullFileName);
            }
            using (FileStream fs = new FileStream(fullFileName, FileMode.Open, FileAccess.Read, FileShare.None))
            {
                System.Runtime.Serialization.Formatters.Binary.BinaryFormatter bf
                    = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
                long fileLength = fs.Length;
                while (fs.Position < fileLength)
                {
                    SURFAccordRecord3        record  = (SURFAccordRecord3)bf.Deserialize(fs);
                    KNearestNeighborMatching matcher = new KNearestNeighborMatching(2);
                    matcher.Threshold = uniquenessThreshold;
                    sw1.Start();
                    AForge.IntPoint[][] matches = matcher.Match(modelImageSurfPoints, record.SurfDescriptors);
                    sw1.Stop();
                    var countOfMatchPoint = matches[0].Length;
                    if (countOfMatchPoint > 0)
                    {
                        double totalnumberOfModelFeature = modelImageSurfPoints.Count;
                        double matchPercentage           = ((totalnumberOfModelFeature - (double)countOfMatchPoint) / totalnumberOfModelFeature);
                        matchPercentage = (1 - matchPercentage) * 100;
                        matchPercentage = Math.Round(matchPercentage);
                        if (matchPercentage >= minGoodMatchPercent)
                        {
                            record.Distance = matchPercentage;
                            rtnImageList.Add(record.Clone());
                        }
                    }
                    record = null;
                }
                fs.Close();
            }
            sw.Stop();
            _matchingTime = sw1.ElapsedMilliseconds;
            _queryingTime = sw.ElapsedMilliseconds;
            #endregion

            string msg = String.Format("Loading: {0}, Model detection: {1}, Querying: {2}, Matching: {3}",
                                       _loadingTime, _modelImageDectionlong, _queryingTime, _matchingTime);
            messageToLog = msg;

            if (rtnImageList.Count > 0)
            {
                rtnImageList = rtnImageList.OrderByDescending(rec => rec.Distance)
                               .ToList<ImageRecord>();
            }
            return rtnImageList;
        }
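The match-percentage arithmetic in the loop above takes three steps, but it reduces algebraically: (1 - (total - count) / total) * 100 is simply count / total * 100. An equivalent form with the same rounding, using the loop's own variables, would be:

                        double matchPercentage = Math.Round(
                            100.0 * countOfMatchPoint / modelImageSurfPoints.Count);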
Code Example #23
File: BlendTest.cs Project: CanerPatir/framework
        public void Panorama_Example1()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // Let's start with two pictures that have been
            // taken from slightly different points of view:
            //
            Bitmap img1 = Resources.dc_left;
            Bitmap img2 = Resources.dc_right;

            // Those pictures are shown below:
            // ImageBox.Show(img1, PictureBoxSizeMode.Zoom, 640, 480);
            // ImageBox.Show(img2, PictureBoxSizeMode.Zoom, 640, 480);


            // Step 1: Detect feature points using Surf Corners Detector
            var surf = new SpeededUpRobustFeaturesDetector();

            var points1 = surf.ProcessImage(img1);
            var points2 = surf.ProcessImage(img2);

            // Step 2: Match feature points using a k-NN
            var matcher = new KNearestNeighborMatching(5);
            var matches = matcher.Match(points1, points2);

            // Step 3: Create the matrix using a robust estimator
            var ransac = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homographyMatrix = ransac.Estimate(matches);

            Assert.AreEqual(1.15707409, homographyMatrix.Elements[0], 1e-5);
            Assert.AreEqual(-0.0233834628, homographyMatrix.Elements[1], 1e-5);
            Assert.AreEqual(-261.8217, homographyMatrix.Elements[2], 1e-2);
            Assert.AreEqual(0.08801343, homographyMatrix.Elements[3], 1e-5);
            Assert.AreEqual(1.12451434, homographyMatrix.Elements[4], 1e-5);
            Assert.AreEqual(-171.191208, homographyMatrix.Elements[5], 1e-2);
            Assert.AreEqual(0.000127789128, homographyMatrix.Elements[6], 1e-5);
            Assert.AreEqual(0.00006173445, homographyMatrix.Elements[7], 1e-5);
            Assert.AreEqual(8, homographyMatrix.Elements.Length);


            // Step 4: Project and blend using the homography
            Blend blend = new Blend(homographyMatrix, img1);


            // Compute the blending algorithm
            Bitmap result = blend.Apply(img2);

            // Show on screen
            // ImageBox.Show(result, PictureBoxSizeMode.Zoom, 640, 480);

            //result.Save(@"C:\Projects\Accord.NET\net35.png", ImageFormat.Png);

#if NET35
            Bitmap image = Properties.Resources.blend_net35;
#else
            Bitmap image = Properties.Resources.blend_net45;
#endif

#pragma warning disable 618
            double[,] expected = image.ToDoubleMatrix(0);
            double[,] actual = result.ToDoubleMatrix(0);
            Assert.IsTrue(Matrix.IsEqual(expected, actual, 0.1));
#pragma warning restore 618
        }
Code Example #24
File: HarrisDetector.cs Project: rachwal/RTM-Tools
 private void UpdateDetector()
 {
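     // Rebuild the corner detector and the k-NN matcher so that any
     // configuration change takes effect the next time they are used.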
     harris = new HarrisCornersDetector(configuration.K)
     {
         Threshold = configuration.Threshold,
         Sigma = configuration.Sigma
     };
     matcher = new KNearestNeighborMatching(configuration.NoNearestNeighborMatching);
 }
Code Example #25
File: BlendTest.cs Project: zadiran/framework
        public void Panorama_Example1()
        {
            Accord.Math.Random.Generator.Seed = 0;

            // Let's start with two pictures that have been
            // taken from slightly different points of view:
            //
            Bitmap img1 = Accord.Imaging.Image.Clone(Resources.dc_left);
            Bitmap img2 = Accord.Imaging.Image.Clone(Resources.dc_right);

            // Those pictures are shown below:
            // ImageBox.Show(img1, PictureBoxSizeMode.Zoom, 640, 480);
            // ImageBox.Show(img2, PictureBoxSizeMode.Zoom, 640, 480);


            // Step 1: Detect feature points using Surf Corners Detector
            var surf = new SpeededUpRobustFeaturesDetector();

            var points1 = surf.ProcessImage(img1);
            var points2 = surf.ProcessImage(img2);

            // Step 2: Match feature points using a k-NN
            var matcher = new KNearestNeighborMatching(5);
            var matches = matcher.Match(points1, points2);

            // Step 3: Create the matrix using a robust estimator
            var     ransac           = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homographyMatrix = ransac.Estimate(matches);

            Assert.AreEqual(1.15707409, homographyMatrix.Elements[0], 1e-5);
            Assert.AreEqual(-0.0233834628, homographyMatrix.Elements[1], 1e-5);
            Assert.AreEqual(-261.8217, homographyMatrix.Elements[2], 1e-2);
            Assert.AreEqual(0.08801343, homographyMatrix.Elements[3], 1e-5);
            Assert.AreEqual(1.12451434, homographyMatrix.Elements[4], 1e-5);
            Assert.AreEqual(-171.191208, homographyMatrix.Elements[5], 1e-2);
            Assert.AreEqual(0.000127789128, homographyMatrix.Elements[6], 1e-5);
            Assert.AreEqual(0.00006173445, homographyMatrix.Elements[7], 1e-5);
            Assert.AreEqual(8, homographyMatrix.Elements.Length);


            // Step 4: Project and blend using the homography
            Blend blend = new Blend(homographyMatrix, img1);


            // Compute the blending algorithm
            Bitmap result = blend.Apply(img2);

            // Show on screen
            // ImageBox.Show(result, PictureBoxSizeMode.Zoom, 640, 480);


            result = Accord.Imaging.Image.Clone(result);

#if NET35
            // result.Save(@"C:\Projects\Accord.NET\framework\Unit Tests\Accord.Tests.Imaging\Resources\blend_net35.png", ImageFormat.Png);
            Bitmap image = Accord.Imaging.Image.Clone(Resources.blend_net35);
#else
            // result.Save(@"C:\Projects\Accord.NET\framework\Unit Tests\Accord.Tests.Imaging\Resources\blend_net45.png", ImageFormat.Png);
            Bitmap image = Accord.Imaging.Image.Clone(Resources.blend_net45);
#endif

#pragma warning disable 618
            double[,] expected = image.ToDoubleMatrix(channel: 0);
            double[,] actual   = result.ToDoubleMatrix(channel: 0);
            Assert.IsTrue(Matrix.IsEqual(expected, actual, atol: 0.1));
#pragma warning restore 618
        }
Code Example #26
File: Form1.cs Project: totitot/SpecialTopicsFinals
        private void process1()
        {
            var bitmap1 = (Bitmap)sourcebox1.Image;
            var bitmap2 = (Bitmap)sourcebox2.Image;
            var hash1   = ImagePhash.ComputeDigest(bitmap1.ToLuminanceImage());
            var hash2   = ImagePhash.ComputeDigest(bitmap2.ToLuminanceImage());
            var score   = ImagePhash.GetCrossCorrelation(hash1, hash2);

            Console.WriteLine("score: {0}", score);

            //threshold value
            var thres = new Threshold(110);

            Grayscale filter = new Grayscale(0.2125, 0.7154, 0.0721);
            // apply the filter to the model
            Bitmap grey1 = filter.Apply(bitmap1);

            thres.ApplyInPlace(grey1);

            // Apply the filter to the observed image
            Bitmap grey2 = filter.Apply(bitmap2);

            thres.ApplyInPlace(grey2);

            int modelPoints = 0, matchingPoints = 0;

            var skewChecker     = new DocumentSkewChecker();
            var angle1          = skewChecker.GetSkewAngle(grey1);
            var rotationFilter1 = new RotateBicubic(-angle1);

            rotationFilter1.FillColor = Color.White;
            grey1 = rotationFilter1.Apply(grey1);

            var angle2          = skewChecker.GetSkewAngle(grey2);
            var rotationFilter2 = new RotateBicubic(-angle2);

            rotationFilter2.FillColor = Color.White;
            grey2 = rotationFilter2.Apply(grey2);

            //CorrelationMatching matcher = new CorrelationMatching(5, grey1, grey2);
            //var results = matcher.GetHashCode();
            var detector = new FastCornersDetector(15);
            var freak    = new FastRetinaKeypointDetector(detector);

            FastRetinaKeypoint[] features1 = freak.Transform(grey1).ToArray();
            modelPoints = features1.Count();

            Console.WriteLine("count: {0}", modelPoints);

            FastRetinaKeypoint[] features2 = freak.Transform(grey2).ToArray();

            Console.WriteLine("count: {0}", features2.Count());

            KNearestNeighborMatching matcher = new KNearestNeighborMatching(7);

            //var length = 0;

            IntPoint[][] results = matcher.Match(features1, features2);
            matchingPoints = results[0].Count(); // number of matched pairs (results[0] and results[1] have the same length)

            Console.WriteLine("matched points: {0}", matchingPoints);

            sourcebox1.Image = bitmap1;
            sourcebox2.Image = bitmap2;
            var marker1 = new FeaturesMarker(features1, 30);
            var marker2 = new FeaturesMarker(features2, 30);



            double similPercent = 0;

            if (matchingPoints <= 0)
            {
                similPercent = 0.0;
            }
            else
            {
                similPercent = (matchingPoints * 100d) / (double)modelPoints;
            }

            Console.WriteLine("score: {0}", similPercent);

            simil1.Text = similPercent.ToString("##.##") + "%";
            simil2.Text = (score * 100.00d).ToString("##.##") + "%";

            angle_text.Text  = angle2.ToString("##.##") + "°";
            resultbox1.Image = marker1.Apply(grey1);
            resultbox2.Image = marker2.Apply(grey2);
        }
Code Example #27
File: SurfQuery4.cs Project: gjtjx/ImageDatabase
        public List<ImageRecord> QueryImage(string queryImagePath, out string messageToLog, SurfSettings surfSetting = null)
        {
            List<ImageRecord> rtnImageList = new List<ImageRecord>();

            #region Diagnostic Region
            Stopwatch sw = new Stopwatch();
            Stopwatch sw1 = new Stopwatch();
            long _loadingTime = 0, _modelImageDectionlong = 0, _queryingTime = 0, _matchingTime = 0;
            #endregion Diagnostic Region

            #region Surf Detector Region
            double hessianThresh = 500;
            double uniquenessThreshold = 0.8;
            int minGoodMatchPercent = 0;

            if (surfSetting != null)
            {
                hessianThresh = surfSetting.HessianThresh.Value;
                uniquenessThreshold = surfSetting.UniquenessThreshold.Value;
                minGoodMatchPercent = surfSetting.GoodMatchThreshold.Value;
            }
            float hessianThreshold2 = (float)hessianThresh / 1000000;
            SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector(hessianThreshold2);
            #endregion Surf Detector Region

            #region Get Model Detection and Validation
            sw.Reset(); sw.Start();
            List<SpeededUpRobustFeaturePoint> modelImageSurfPoints;
            using (Bitmap modelImage = (Bitmap)Image.FromFile(queryImagePath))
            {
                modelImageSurfPoints = surf.ProcessImage(modelImage);
            }

            if (modelImageSurfPoints == null
                || modelImageSurfPoints.Count < 4)
            {
                throw new InvalidOperationException("Insufficient interest points in query image, try another query image");
            }
            sw.Stop();
            _modelImageDectionlong = sw.ElapsedMilliseconds;
            #endregion

            #region Search Images
            sw.Reset(); sw.Start();
            string fullFileName = Path.Combine(DirectoryHelper.SaveDirectoryPath, "SurfAccordLinear.bin");
            if (!File.Exists(fullFileName))
            {
                string exMsg = string.Format("Can't get the Surf Index at {0}, please index first", fullFileName);
                throw new FileNotFoundException(exMsg, fullFileName);
            }
            using (FileStream fs = new FileStream(fullFileName, FileMode.Open, FileAccess.Read, FileShare.None))
            {
                System.Runtime.Serialization.Formatters.Binary.BinaryFormatter bf
                   = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
                long fileLength = fs.Length;
                while (fs.Position < fileLength)
                {
                    SURFAccordRecord3 record = (SURFAccordRecord3)bf.Deserialize(fs);
                    KNearestNeighborMatching matcher = new KNearestNeighborMatching(2);
                    matcher.Threshold = uniquenessThreshold;
                    sw1.Start();
                    AForge.IntPoint[][] matches = matcher.Match(modelImageSurfPoints, record.SurfDescriptors);
                    sw1.Stop();
                    var countOfMatchPoint = matches[0].Length;
                    if (countOfMatchPoint > 0)
                    {
                        double totalnumberOfModelFeature = modelImageSurfPoints.Count;
                        double matchPercentage = ((totalnumberOfModelFeature - (double)countOfMatchPoint) / totalnumberOfModelFeature);
                        matchPercentage = (1 - matchPercentage) * 100;
                        matchPercentage = Math.Round(matchPercentage);
                        if (matchPercentage >= minGoodMatchPercent)
                        {
                            record.Distance = matchPercentage;
                            rtnImageList.Add(record.Clone());
                        }
                    }
                    record = null;
                }
                fs.Close();
            }
            sw.Stop();
            _matchingTime = sw1.ElapsedMilliseconds;
            _queryingTime = sw.ElapsedMilliseconds;
            #endregion

            string msg = String.Format("Loading: {0}, Model detection: {1}, Querying: {2}, Matching: {3}",
                                                _loadingTime, _modelImageDectionlong, _queryingTime, _matchingTime);
            messageToLog = msg;

            if (rtnImageList.Count > 0)
            {
                rtnImageList = rtnImageList.OrderByDescending(rec => rec.Distance)
                                       .ToList<ImageRecord>();
            }
            return rtnImageList;
        }
Code Example #28
        private Bitmap CompareAndDrawImage(Bitmap modelImage, Bitmap observedImage, SurfSettings setting)
        {
            Stopwatch watch1 = new Stopwatch();
            Stopwatch watch2 = new Stopwatch();

            Bitmap returnBitmap;

            watch2.Start();
            watch1.Reset(); watch1.Start();
            double hessianThreshold = setting.HessianThresh.HasValue ? setting.HessianThresh.Value : 500;
            float hessianThreshold2 = (float)hessianThreshold / 1000000;

            Debug.WriteLine("hessianThreshold2: {0}", hessianThreshold2);
            SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector(hessianThreshold2);
            List<SpeededUpRobustFeaturePoint> surfPoints1 = surf.ProcessImage(modelImage);
            List<SpeededUpRobustFeaturePoint> surfPoints2 = surf.ProcessImage(observedImage);

            Debug.WriteLine("Surf points count: {0}", surfPoints1.Count);
            Debug.WriteLine("Surf points count: {0}", surfPoints2.Count);
            //long memoryFootprint = MemorySize.GetBlobSizeinKb(surfPoints2);
            //Debug.WriteLine("Surf extractor: {0} kb", memoryFootprint);

            watch1.Stop();
            Debug.WriteLine("Surf Detection tooked {0} ms", watch1.ElapsedMilliseconds);

            watch1.Reset(); watch1.Start();
            // Show the marked points in the original images
            Bitmap img1mark = new FeaturesMarker(surfPoints1, 2).Apply(modelImage);
            Bitmap img2mark = new FeaturesMarker(surfPoints2, 2).Apply(observedImage);
            // Concatenate the two images together in a single image (just to show on screen)
            Concatenate concatenate = new Concatenate(img1mark);
            returnBitmap = concatenate.Apply(img2mark);
            watch1.Stop();
            Debug.WriteLine("Surf point plotting tooked {0} ms", watch1.ElapsedMilliseconds);

            //watch1.Reset(); watch1.Start();
            //List<IntPoint>[] coretionalMatches = getMatches(surfPoints1, surfPoints2);
            //watch1.Stop();
            //Debug.WriteLine("Correctional Match tooked {0} ms", watch1.ElapsedMilliseconds);

            //// Get the two sets of points
            //IntPoint[] correlationPoints11 = coretionalMatches[0].ToArray();
            //IntPoint[] correlationPoints22 = coretionalMatches[1].ToArray();

            //Debug.WriteLine("Correclation points count: {0}", correlationPoints11.Length);
            //Debug.WriteLine("Correclation points count: {0}", correlationPoints22.Length);

            Debug.WriteLine("Threshold: {0}", setting.UniquenessThreshold.Value);
            watch1.Reset(); watch1.Start();
            // Step 2: Match feature points using a k-NN
            KNearestNeighborMatching matcher = new KNearestNeighborMatching(2);
            matcher.Threshold = setting.UniquenessThreshold.Value;
            IntPoint[][] matches = matcher.Match(surfPoints1, surfPoints2);
            watch1.Stop();
            Debug.WriteLine("Knn Match tooked {0} ms", watch1.ElapsedMilliseconds);

            // Get the two sets of points
            IntPoint[] correlationPoints1 = matches[0];
            IntPoint[] correlationPoints2 = matches[1];

            Debug.WriteLine("Knn points count: {0}", correlationPoints1.Length);
            Debug.WriteLine("Knn points count: {0}", correlationPoints2.Length);

            //watch1.Reset(); watch1.Start();
            //// Show the marked correlations in the concatenated image
            //PairsMarker pairs = new PairsMarker(
            //    correlationPoints1, // Add image1's width to the X points to show the markings correctly
            //    correlationPoints2.Apply(p => new IntPoint(p.X + modelImage.Width, p.Y)), Color.Blue);

            //returnBitmap = pairs.Apply(returnBitmap);
            //watch1.Stop();
            //Debug.WriteLine("Match pair marking tooked {0} ms", watch1.ElapsedMilliseconds);

            if (correlationPoints1.Length < 4 || correlationPoints2.Length < 4)
            {
                MessageBox.Show("Insufficient points to attempt a fit.");
                return null;
            }

            watch1.Reset(); watch1.Start();
            // Step 3: Create the homography matrix using a robust estimator
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homography = ransac.Estimate(correlationPoints1, correlationPoints2);
            watch1.Stop();
            Debug.WriteLine("Ransac tooked {0} ms", watch1.ElapsedMilliseconds);

            watch1.Reset(); watch1.Start();
            // Plot RANSAC results against correlation results
            IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
            IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);
            watch1.Stop();
            Debug.WriteLine("Ransac SubMatrix {0} ms", watch1.ElapsedMilliseconds);

            Debug.WriteLine("Ransac points count: {0}", inliers1.Length);
            Debug.WriteLine("Ransac points count: {0}", inliers2.Length);

            watch1.Reset(); watch1.Start();
            PairsMarker inlierPairs = new PairsMarker(
               inliers1, // Add image1's width to the X points to show the markings correctly
               inliers2.Apply(p => new IntPoint(p.X + modelImage.Width, p.Y)), Color.Red);

            returnBitmap = inlierPairs.Apply(returnBitmap);
            watch1.Stop();
            Debug.WriteLine("Ransac plotting tooked {0} ms", watch1.ElapsedMilliseconds);

            watch2.Stop();
            return returnBitmap;
        }