Example #1
    //Relies less on neighbor pixels and more on RANSAC
    protected void fastHarrisRansacBlend(List <Bitmap> imgs)
    {
        List <IntPoint[]> harrisPoints = new List <IntPoint[]>();
        MatrixH           homography;

        //Calculate all the Harris Points
        HarrisCornersDetector harris = new HarrisCornersDetector(0.03f, 10000f);

        for (int i = 0; i < imgs.Count; i++)
        {
            harrisPoints.Add(harris.ProcessImage(imgs[i]).ToArray());
        }

        Bitmap final = imgs[0];

        for (int i = 1; i < imgs.Count; i++)
        {
            IntPoint[] harrisFinal = harris.ProcessImage(final).ToArray();

            //Correlate the Harris pts between imgs
            CorrelationMatching matcher = new CorrelationMatching(5, final, imgs[i]);
            IntPoint[][]        matches = matcher.Match(harrisFinal, harrisPoints[i]);

            //Create the homography matrix using ransac
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.025, 0.99);
            homography = ransac.Estimate(matches[0], matches[1]);

            Blend blend = new Blend(homography, final);
            blend.Gradient = true;
            final          = blend.Apply(imgs[i]);
        }

        showImage(final);
    }
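Several of the stitching examples on this page (#1, #7, #8, #10-#12) finish by calling a showImage helper that is not included in the snippets. A minimal stand-in, assuming a WinForms form with a PictureBox field named pictureBox (as used in Examples #2 and #22) and the usual System.Drawing/System.Windows.Forms usings, might look like this:

    //Hypothetical helper, not part of the original snippets: display the stitched result
    //in a PictureBox (the pictureBox field name is an assumption)
    protected void showImage(Bitmap img)
    {
        pictureBox.SizeMode = PictureBoxSizeMode.Zoom;
        pictureBox.Image    = img;
    }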
Example #2
        private void btnRansac_Click(object sender, EventArgs e)
        {
            if (correlationPoints1 == null)
            {
                MessageBox.Show("Please, click Nearest Neighbor button first! :-)");
                return;
            }

            if (correlationPoints1.Length < 4 || correlationPoints2.Length < 4)
            {
                MessageBox.Show("Insufficient points to attempt a fit.");
                return;
            }

            // Step 3: Create the homography matrix using a robust estimator
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);

            homography = ransac.Estimate(correlationPoints1, correlationPoints2);

            // Plot RANSAC results against correlation results
            IntPoint[] inliers1 = correlationPoints1.Get(ransac.Inliers);
            IntPoint[] inliers2 = correlationPoints2.Get(ransac.Inliers);

            // Concatenate the two images in a single image (just to show on screen)
            Concatenate concat = new Concatenate(img1);
            Bitmap      img3   = concat.Apply(img2);

            // Show the marked correlations in the concatenated image
            PairsMarker pairs = new PairsMarker(
                inliers1, // Add image1's width to the X points to show the markings correctly
                inliers2.Apply(p => new IntPoint(p.X + img1.Width, p.Y)));

            pictureBox.Image = pairs.Apply(img3);
        }
Example #3
        /*
         * Creates the homography matrix using a robust estimator.
         */
        private void HomographyEstimator()
        {
            //First parameter is the threshold, second parameter the probability
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);

            homography = ransac.Estimate(correlationPoints1, correlationPoints2);
        }
Example #4
        public int[,] HomographyProjection(int[,] alignPoints, int[,] recordedPoints, int[,] padPoints)
        {
            Accord.IntPoint[] correlationPoints1 = new Accord.IntPoint[alignPoints.GetLength(0)];
            Accord.IntPoint[] correlationPoints2 = new Accord.IntPoint[recordedPoints.GetLength(0)];
            for (int i = 0; i < alignPoints.GetLength(0); i++)
            {
                correlationPoints1[i] = new Accord.IntPoint(alignPoints[i, 0], alignPoints[i, 1]);
                correlationPoints2[i] = new Accord.IntPoint(recordedPoints[i, 0], recordedPoints[i, 1]);
            }
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
            var homography = ransac.Estimate(correlationPoints1, correlationPoints2);

            PointF[] testPoints = new PointF[padPoints.GetLength(0)];
            for (int i = 0; i < padPoints.GetLength(0); i++)
            {
                testPoints[i] = new PointF(padPoints[i, 0], padPoints[i, 1]);
            }

            var transformed = homography.TransformPoints(testPoints);



            int[,] results = new int[padPoints.GetLength(0), 3];
            float[] equ = equation_plane(recordedPoints[0, 0], recordedPoints[0, 1], recordedPoints[0, 2],
                                         recordedPoints[1, 0], recordedPoints[1, 1], recordedPoints[1, 2],
                                         recordedPoints[2, 0], recordedPoints[2, 1], recordedPoints[2, 2]
                                         );
            for (int i = 0; i < padPoints.GetLength(0); i++)
            {
                results[i, 0] = (int)transformed[i].X;
                results[i, 1] = (int)transformed[i].Y;
                results[i, 2] = (int)(results[i, 0] * equ[0] + results[i, 1] * equ[1] + equ[2]);
            }
            return(results);
        }
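The equation_plane helper called above is not included in the snippet. Judging from how its result is consumed (the projected z value is computed as equ[0]*x + equ[1]*y + equ[2]), a plausible reading is a plane fitted through the first three recorded 3D points. The sketch below is an assumption, not the original implementation:

        //Hypothetical sketch of the missing equation_plane helper: fit the plane through
        //(x1,y1,z1), (x2,y2,z2), (x3,y3,z3) and return it as z = c[0]*x + c[1]*y + c[2].
        //Assumes the three points are not collinear and the plane is not vertical (c != 0).
        private float[] equation_plane(int x1, int y1, int z1,
                                       int x2, int y2, int z2,
                                       int x3, int y3, int z3)
        {
            //Plane normal (a, b, c) = (p2 - p1) x (p3 - p1)
            float a = (y2 - y1) * (z3 - z1) - (z2 - z1) * (y3 - y1);
            float b = (z2 - z1) * (x3 - x1) - (x2 - x1) * (z3 - z1);
            float c = (x2 - x1) * (y3 - y1) - (y2 - y1) * (x3 - x1);
            float d = -(a * x1 + b * y1 + c * z1);

            //Rearrange a*x + b*y + c*z + d = 0 into z = -(a/c)*x - (b/c)*y - d/c
            return new float[] { -a / c, -b / c, -d / c };
        }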
Example #5
        private void timer2_Tick(object sender, EventArgs e)
        {
            try
            {
                Bitmap img1 = ImageL;
                Bitmap img2 = ImageR;
                FastRetinaKeypointDetector freak = new FastRetinaKeypointDetector();
                FastRetinaKeypoint[]       keyPoints1;
                FastRetinaKeypoint[]       keyPoints2;
                keyPoints1 = freak.ProcessImage(img1).ToArray();
                keyPoints2 = freak.ProcessImage(img2).ToArray();

                var          matcher = new KNearestNeighborMatching <byte[]>(5, new Hamming());
                IntPoint[][] matches = matcher.Match(keyPoints1, keyPoints2);

                // Get the two sets of points
                correlationPoints1 = matches[0];
                correlationPoints2 = matches[1];

                RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
                homography = ransac.Estimate(correlationPoints1, correlationPoints2);

                // Plot RANSAC results against correlation results
                //IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
                //IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);

                Blend blend = new Blend(homography, img1);
                m_stitpic_box.Image = blend.Apply(img2);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.ToString());
            }
        }
Example #6
        private void button2_Click(object sender, EventArgs e)
        {
            // Step 3: Create the homography matrix using a robust estimator
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);

            homography = ransac.Estimate(correlationPoints1, correlationPoints2);

            // Plot RANSAC results against correlation results
            IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
            IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);

            // Concatenate the two images in a single image (just to show on screen)
            Concatenate concat = new Concatenate(img1);
            Bitmap      img3   = concat.Apply(img2);

            // Show the marked correlations in the concatenated image
            PairsMarker pairs = new PairsMarker(
                inliers1, // Add image1's width to the X points to show the markings correctly
                inliers2.Apply(p => new IntPoint(p.X + img1.Width, p.Y)));

            pictureBox1.Image = pairs.Apply(img3);

            numA.Value = (decimal)homography.Elements[0];
            numB.Value = (decimal)homography.Elements[1];
            numC.Value = (decimal)homography.Elements[2];

            numD.Value = (decimal)homography.Elements[3];
            numE.Value = (decimal)homography.Elements[4];
            numF.Value = (decimal)homography.Elements[5];

            numG.Value = (decimal)homography.Elements[6];
            numH.Value = (decimal)homography.Elements[7];
        }
Example #7
    protected void surfRansacBlendStraight(List <Bitmap> imgs)
    {
        MatrixH homography;

        List <SpeededUpRobustFeaturePoint[]> surfPoints = new List <SpeededUpRobustFeaturePoint[]>();
        //Calculate all the Surf Points
        SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector();
        double lastAngle = 0;

        for (int i = 0; i < imgs.Count; i++)
        {
            //Convert to grayscale so the document skew checker can estimate the rotation angle
            AForge.Imaging.Filters.GrayscaleBT709 grayscale = new AForge.Imaging.Filters.GrayscaleBT709();
            AForge.Imaging.DocumentSkewChecker    skew      = new AForge.Imaging.DocumentSkewChecker();

            double angle = skew.GetSkewAngle(grayscale.Apply(imgs[i]));

            //Less than 5 deg change in angle to account for wobble, ignore big shifts
            if (Math.Abs(angle - lastAngle) < 5)
            {
                AForge.Imaging.Filters.RotateBilinear rotate = new AForge.Imaging.Filters.RotateBilinear(angle);
                rotate.FillColor = Color.FromArgb(0, 255, 255, 255);
                imgs[i]          = rotate.Apply(imgs[i]);
                lastAngle        = angle;
            }
            showImage(imgs[i]);
            surfPoints.Add(surf.ProcessImage(imgs[i]).ToArray());
        }


        Bitmap final = imgs[0];

        for (int i = 1; i < imgs.Count; i++)
        {
            SpeededUpRobustFeaturePoint[] surfFinal = surf.ProcessImage(final).ToArray();

            //Match the SURF points between imgs with k-NN
            KNearestNeighborMatching matcher = new KNearestNeighborMatching(5);
            matcher.Threshold = 0.05;

            IntPoint[][] matches = matcher.Match(surfFinal, surfPoints[i]);

            //Create the homography matrix using RANSAC
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.015, 1);
            homography = ransac.Estimate(matches[0], matches[1]);

            Blend blend = new Blend(homography, final);
            blend.Gradient = true;
            final          = blend.Apply(imgs[i]);
        }

        //Smooth/Sharpen if I wanted to
        AForge.Imaging.Filters.Sharpen filter = new AForge.Imaging.Filters.Sharpen();
        //AForge.Imaging.Filters.GaussianBlur filter = new AForge.Imaging.Filters.GaussianBlur(5);
        //filter.ApplyInPlace(final);

        showImage(final);
    }
Example #8
    protected void fastHarrisRansacBlendStraight(List <Bitmap> imgs)
    {
        List <IntPoint[]> harrisPoints = new List <IntPoint[]>();
        MatrixH           homography;

        //Calculate all the Harris Points
        HarrisCornersDetector harris = new HarrisCornersDetector(0.03f, 10000f);

        for (int i = 0; i < imgs.Count; i++)
        {
            harrisPoints.Add(harris.ProcessImage(imgs[i]).ToArray());
        }

        Bitmap final = imgs[0];

        for (int i = 1; i < imgs.Count; i++)
        {
            //Convert the frames to grayscale so their skew angles can be measured and aligned
            AForge.Imaging.Filters.GrayscaleBT709 grayscale = new AForge.Imaging.Filters.GrayscaleBT709();
            AForge.Imaging.DocumentSkewChecker    skew      = new AForge.Imaging.DocumentSkewChecker();

            double finalAngle = skew.GetSkewAngle(grayscale.Apply(final));
            double imgAngle   = skew.GetSkewAngle(grayscale.Apply(imgs[i]));

            //Less than 5 degrees difference to account for human error with rotations and wobbles
            if (Math.Abs(finalAngle - imgAngle) < 5)
            {
                AForge.Imaging.Filters.RotateBilinear rotate = new AForge.Imaging.Filters.RotateBilinear(finalAngle - imgAngle);
                rotate.FillColor = Color.FromArgb(0, 255, 255, 255);
                imgs[i]          = rotate.Apply(imgs[i]);

                //Update harris
                harrisPoints[i] = harris.ProcessImage(imgs[i]).ToArray();
            }

            IntPoint[] harrisFinal = harris.ProcessImage(final).ToArray();

            //Correlate the Harris pts between imgs
            CorrelationMatching matcher = new CorrelationMatching(5, final, imgs[i]);
            IntPoint[][]        matches = matcher.Match(harrisFinal, harrisPoints[i]);

            //Create the homography matrix using ransac
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.025, 0.99);
            homography = ransac.Estimate(matches[0], matches[1]);

            Blend blend = new Blend(homography, final);
            blend.Gradient = true;
            final          = blend.Apply(imgs[i]);
        }

        showImage(final);
    }
Example #9
        public Task <MatrixH> Adjust(Bitmap baseBitmap, Size baseOriginalSize, Bitmap targetBitmap, Size targetOriginalSize)
        {
            return(Task.Run(() =>
            {
                var basePoints = keypointDetector.Transform(baseBitmap).Select(p => new IntPoint((int)Math.Round(p.X), (int)Math.Round(p.Y))).ToArray();
                var targetPoints = keypointDetector.Transform(targetBitmap).Select(p => new IntPoint((int)Math.Round(p.X), (int)Math.Round(p.Y))).ToArray();

                var matches = new CorrelationMatching(9, 50, baseBitmap, targetBitmap).Match(basePoints, targetPoints);

                var baseMatchPoints = matches[0].Select(p => new IntPoint(p.X * baseOriginalSize.Width / baseBitmap.Width, p.Y * baseOriginalSize.Height / baseBitmap.Height)).ToArray();
                var targetMatchPoints = matches[1].Select(p => new IntPoint(p.X * targetOriginalSize.Width / targetBitmap.Width, p.Y * targetOriginalSize.Height / targetBitmap.Height)).ToArray();

                var tasks = Enumerable.Range(0, 20).AsParallel().Select(a =>
                {
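                    // Each of the 20 parallel trials runs an independent RANSAC estimate; the caller
                    // below keeps the homography with the most inliers, breaking ties by the lowest
                    // variance of the inlier match directions (a rough consistency check on the motion).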
                    RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
                    ransac.Ransac.MaxEvaluations = 1000;
                    ransac.Ransac.MaxSamplings = 100;

                    var homography = ransac.Estimate(baseMatchPoints, targetMatchPoints);

                    if (ransac.Inliers.Length > 0)
                    {
                        var directions =
                            Accord.Math.Matrix.Get(baseMatchPoints, ransac.Inliers).Zip(
                                Accord.Math.Matrix.Get(targetMatchPoints, ransac.Inliers),
                                (point1, point2) => Math.Atan2(point2.Y - point1.Y, point2.X - point1.X) + Math.PI * 2).ToArray();

                        var avgDirection = directions.Average();
                        var variance = directions.Average(d => Math.Pow(d - avgDirection, 2));

                        return new
                        {
                            homography,
                            ransac.Inliers.Length,
                            variance
                        };
                    }
                    else
                    {
                        return null;
                    }
                }).Where(h => h != null).OrderByDescending(h => h.Length).ThenBy(h => h.variance).ToArray();

                return tasks.FirstOrDefault()?.homography;
            }));
        }
Example #10
    protected void drawFastHarrisFeaturesCorrelations(List <Bitmap> imgs)
    {
        List <IntPoint[]> harrisPoints = new List <IntPoint[]>();
        MatrixH           homography;
        //Calculate all the Harris Points
        HarrisCornersDetector harris = new HarrisCornersDetector(0.03f, 10000f);

        foreach (Bitmap img in imgs)
        {
            harrisPoints.Add(harris.ProcessImage(img).ToArray());
        }

        //Map them and draw them!
        Bitmap harrisImg = imgs[0];

        for (int i = 0; i < imgs.Count - 1; i++)
        {
            //Correlate the Harris pts between imgs
            CorrelationMatching matcher = new CorrelationMatching(5, imgs[i], imgs[i + 1]);
            IntPoint[][]        matches = matcher.Match(harrisPoints[i], harrisPoints[i + 1]);

            //Create the homography matrix using ransac
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.025, 0.99);
            homography = ransac.Estimate(matches[0], matches[1]);

            Concatenate concat = new Concatenate(harrisImg);
            Bitmap      img    = concat.Apply(imgs[i + 1]);

            Color color = Color.White;
            if (i % 3 == 1)
            {
                color = Color.OrangeRed;
            }
            if (i % 3 == 2)
            {
                color = Color.Blue;
            }
            PairsMarker pairs = new PairsMarker(matches[0].Apply(p => new IntPoint(p.X + harrisImg.Width - imgs[0].Width, p.Y)), matches[1].Apply(p => new IntPoint(p.X + harrisImg.Width, p.Y)), color);
            harrisImg = pairs.Apply(img);
        }

        showImage(harrisImg);
    }
Example #11
    protected void surfRansacBlend(List <Bitmap> imgs)
    {
        MatrixH homography;

        List <SpeededUpRobustFeaturePoint[]> surfPoints = new List <SpeededUpRobustFeaturePoint[]>();
        //Calculate all the Surf Points
        SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector();

        for (int i = 0; i < imgs.Count; i++)
        {
            surfPoints.Add(surf.ProcessImage(imgs[i]).ToArray());
        }


        Bitmap final = imgs[0];

        for (int i = 1; i < imgs.Count; i++)
        {
            SpeededUpRobustFeaturePoint[] surfFinal = surf.ProcessImage(final).ToArray();

            //Match the SURF points between imgs with k-NN
            KNearestNeighborMatching matcher = new KNearestNeighborMatching(5);
            matcher.Threshold = 0.05;

            IntPoint[][] matches = matcher.Match(surfFinal, surfPoints[i]);

            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.015, 1);
            homography = ransac.Estimate(matches[0], matches[1]);

            Blend blend = new Blend(homography, final);
            blend.Gradient = true;
            final          = blend.Apply(imgs[i]);
        }

        //Smooth/Sharpen if I wanted to
        AForge.Imaging.Filters.Sharpen filter = new AForge.Imaging.Filters.Sharpen();
        //AForge.Imaging.Filters.GaussianBlur filter = new AForge.Imaging.Filters.GaussianBlur(5);
        //filter.ApplyInPlace(final);

        showImage(final);
    }
Example #12
    protected void freakRansacBlend(List <Bitmap> imgs)
    {
        MatrixH homography;

        List <FastRetinaKeypoint[]> freakPoints = new List <FastRetinaKeypoint[]>();
        //Calculate all the FREAK Points
        FastRetinaKeypointDetector freak = new FastRetinaKeypointDetector();

        foreach (Bitmap img in imgs)
        {
            freakPoints.Add(freak.ProcessImage(img).ToArray());
        }

        //Map them and draw them!
        Bitmap final = imgs[0];

        for (int i = 1; i < imgs.Count; i++)
        {
            FastRetinaKeypoint[] freakFinal = freak.ProcessImage(final).ToArray();

            KNearestNeighborMatching matcher = new KNearestNeighborMatching(500);
            matcher.Threshold = 0.005;
            IntPoint[][] matches = matcher.Match(freakFinal, freakPoints[i]);

            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.015, 1);
            homography = ransac.Estimate(matches[0], matches[1]);

            Blend blend = new Blend(homography, final);
            blend.Gradient = true;
            final          = blend.Apply(imgs[i]);
        }

        //Smooth/Sharpen if I wanted to
        AForge.Imaging.Filters.Sharpen filter = new AForge.Imaging.Filters.Sharpen();
        //AForge.Imaging.Filters.GaussianBlur filter = new AForge.Imaging.Filters.GaussianBlur(5);
        //filter.ApplyInPlace(final);

        showImage(final);
    }
Example #13
        /// <summary>
        /// </summary>
        /// <param name="sender">
        /// </param>
        /// <param name="e">
        /// </param>
        private void BtnRansac_OnClick(object sender, RoutedEventArgs e)
        {
            // Step 3: Create the homography matrix using a robust estimator
            var ransac = new RansacHomographyEstimator(0.001, 0.99);

            this.homography = ransac.Estimate(this.correlationPoints1, this.correlationPoints2);

            // Plot RANSAC results against correlation results
            var inliers1 = this.correlationPoints1.Submatrix(ransac.Inliers);
            var inliers2 = this.correlationPoints2.Submatrix(ransac.Inliers);

            // Concatenate the two images in a single image (just to show on screen)
            var concat = new Concatenate(this.img1);
            var img3   = concat.Apply(this.img2);

            // Show the marked correlations in the concatenated image
            var pairs = new PairsMarker(
                inliers1, // Add image1's width to the X points to show the markings correctly
                inliers2.Apply(p => new IntPoint(p.X + this.img1.Width, p.Y)));

            this.PictureBox.Source = (ImageSource)pairs.Apply(img3);
        }
Example #14
        // Robust homography estimation
        private void RansacRobustHomographer()
        {
            // Step 3: Create the homography matrix using a robust estimator
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);

            homography = ransac.Estimate(correlationPoints1, correlationPoints2);

            // Plot RANSAC results against correlation results
            IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
            IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);

            // Concatenate the two images in a single image (just to show on screen)
            Concatenate concat = new Concatenate(_img1);
            Bitmap      img3   = concat.Apply(_img2);

            // Show the marked correlations in the concatenated image
            PairsMarker pairs = new PairsMarker(
                inliers1, // Add image1's width to the X points to show the markings correctly
                inliers2.Apply(p => new IntPoint(p.X + _img1.Width, p.Y)));

            _processImage3 = pairs.Apply(img3);
        }
Example #15
        public void Example1()
        {
            // Let's start with two pictures that have been
            // taken from slightly different points of view:
            //
            Bitmap img1 = Resources.dc_left;
            Bitmap img2 = Resources.dc_right;

            // Those pictures are shown below:
            ImageBox.Show(img1, PictureBoxSizeMode.Zoom, 640, 480);
            ImageBox.Show(img2, PictureBoxSizeMode.Zoom, 640, 480);


            // Step 1: Detect feature points using Surf Corners Detector
            var surf = new SpeededUpRobustFeaturesDetector();

            var points1 = surf.ProcessImage(img1);
            var points2 = surf.ProcessImage(img2);

            // Step 2: Match feature points using a k-NN
            var matcher = new KNearestNeighborMatching(5);
            var matches = matcher.Match(points1, points2);

            // Step 3: Create the matrix using a robust estimator
            var     ransac           = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homographyMatrix = ransac.Estimate(matches);

            // Step 4: Project and blend using the homography
            Blend blend = new Blend(homographyMatrix, img1);


            // Compute the blending algorithm
            Bitmap result = blend.Apply(img2);

            // Show on screen
            ImageBox.Show(result, PictureBoxSizeMode.Zoom, 640, 480);
        }
Example #16
        public static Geometry AutoAlign(Bitmap needle, Bitmap haystack, double retryThreshold = 1, int retryLimit = 10)
        {
            IntPoint[] harrisPoints1;
            IntPoint[] harrisPoints2;
            IntPoint[] correlationPoints1;
            IntPoint[] correlationPoints2;
            MatrixH    homography;

            var mi1 = new MagickImage(needle); mi1.Equalize(); needle = mi1.ToBitmap();
            var mi2 = new MagickImage(haystack); mi2.Equalize(); haystack = mi2.ToBitmap();

            HarrisCornersDetector harris = new HarrisCornersDetector(0.04f, 20000f);

            harrisPoints1 = harris.ProcessImage(needle).ToArray();
            harrisPoints2 = harris.ProcessImage(haystack).ToArray();

            CorrelationMatching matcher = new CorrelationMatching(9, needle, haystack);

            IntPoint[][] matches = matcher.Match(harrisPoints1, harrisPoints2);

            correlationPoints1 = matches[0];
            correlationPoints2 = matches[1];

            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.999);

            homography = ransac.Estimate(correlationPoints1, correlationPoints2);

            IntPoint[] inliers1 = correlationPoints1.Get(ransac.Inliers);
            IntPoint[] inliers2 = correlationPoints2.Get(ransac.Inliers);

            Concatenate concat = new Concatenate(needle);
            Bitmap      img3   = concat.Apply(haystack);

            PairsMarker pairs = new PairsMarker(
                inliers1,
                inliers2.Apply(p => new IntPoint(p.X + needle.Width, p.Y)));

            var Image = pairs.Apply(img3);

            Image.Save(@"C:\AutoAlignDebug.png");

            var pointCount = inliers1.Length;

            int[] xList1 = new int[pointCount];
            int[] yList1 = new int[pointCount];
            int[] xList2 = new int[pointCount];
            int[] yList2 = new int[pointCount];

            for (int n = 0; n < pointCount; n++)
            {
                xList1[n] = inliers1[n].X;
                yList1[n] = inliers1[n].Y;
                xList2[n] = inliers2[n].X;
                yList2[n] = inliers2[n].Y;
            }

            var f = new double[8] {
                xList1.Min(), yList1.Min(), xList1.Max(), yList1.Max(), xList2.Min(), yList2.Min(), xList2.Max(), yList2.Max()
            };

            double distFromX1  = f[0] / needle.Width;
            double distFromX2  = f[2] / needle.Width;
            double leftRatio   = f[0] / (f[2] - f[0]);
            double rightRatio  = (needle.Width - f[2]) / (f[2] - f[0]);
            double distFromY1  = f[1] / needle.Height;
            double distFromY2  = f[3] / needle.Height;
            double topRatio    = f[1] / (f[3] - f[1]);
            double bottomRatio = (needle.Height - f[3]) / (f[3] - f[1]);

            double leftDist   = (f[6] - f[4]) * leftRatio;
            double rightDist  = (f[6] - f[4]) * rightRatio;
            double topDist    = (f[7] - f[5]) * topRatio;
            double bottomDist = (f[7] - f[5]) * bottomRatio;

            double x      = f[4] - leftDist;
            double y      = f[5] - topDist;
            double width  = leftDist + (f[6] - f[4]) + rightDist;
            double height = topDist + (f[7] - f[5]) + bottomDist;

            mi1.Resize(new MagickGeometry((int)Math.Round(width), (int)Math.Round(height))
            {
                IgnoreAspectRatio = true
            });
            var mg = new MagickGeometry((int)Math.Round(x), (int)Math.Round(y), (int)Math.Round(width), (int)Math.Round(height))
            {
                IgnoreAspectRatio = true
            };

            mi2.Extent(mg, Gravity.Northwest, MagickColor.FromRgba(0, 0, 0, 0));

            double delta = mi1.Compare(mi2, ErrorMetric.NormalizedCrossCorrelation);

            Geometry outGeo = new Geometry(x, y, width, height);
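            // outGeo defaults to the geometry computed above; if the aligned crops still correlate
            // poorly (delta below the threshold), retry recursively with the measured correlation as
            // the new threshold and a decremented retry budget.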

            if (delta < retryThreshold && retryLimit > 0)
            {
                retryLimit--;
                outGeo = AutoAlign(needle, haystack, delta, retryLimit);
            }

            return(outGeo);
        }
Example #17
        public void Load(string fileName)
        {
            FileStream   strm   = new FileStream(fileName, FileMode.Open);
            StreamReader reader = new StreamReader(strm);

            // read header
            string rawLine = reader.ReadLine();
            int    counter = 0;

            while ((rawLine = reader.ReadLine()) != null)
            {
                string[] elements = rawLine.Split(new string[] { ";" }, StringSplitOptions.RemoveEmptyEntries);

                double parameter1 = Convert.ToDouble(elements[1]);
                double parameter2 = Convert.ToDouble(elements[2]);

                Line2D line = new Line2D(parameter1, parameter2, counter < m_rows);
                if (counter < m_rows)
                {
                    m_rowLines.Add(line);
                }
                else
                {
                    m_columnLines.Add(line);
                }

                counter++;
            }

            // now do the intersections
            for (int i = 0; i < m_rows; i++)
            {
                Line2D row = m_rowLines[i];
                for (int j = 0; j < m_cols; j++)
                {
                    Line2D column       = m_columnLines[j];
                    Point? intersection = row.IntersectWith(column);

                    if (intersection != null)
                    {
                        m_gridPoints[i, j] = intersection.Value;
                    }
                }
            }

            // now fill the polygons
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);

            for (int i = 0; i < m_rows - 1; i++)
            {
                for (int j = 0; j < m_cols - 1; j++)
                {
                    m_polygons[i, j] = new Polygon2D();
                    m_polygons[i, j].Points.Add(m_gridPoints[i, j]);
                    m_polygons[i, j].Points.Add(m_gridPoints[i, j + 1]);
                    m_polygons[i, j].Points.Add(m_gridPoints[i + 1, j + 1]);
                    m_polygons[i, j].Points.Add(m_gridPoints[i + 1, j]);

                    if (Constants.DebugPrint)
                    {
                        Debug.WriteLine("\tPolygon:");
                        Debug.WriteLine("\t\t" + m_polygons[i, j].Points[0]);
                        Debug.WriteLine("\t\t" + m_polygons[i, j].Points[1]);
                        Debug.WriteLine("\t\t" + m_polygons[i, j].Points[2]);
                        Debug.WriteLine("\t\t" + m_polygons[i, j].Points[3]);
                    }

                    Accord.Point[] templatePts = new Accord.Point[4];
                    templatePts[0] = new Accord.Point(j / (float)(m_cols - 1), i / (float)(m_rows - 1));
                    templatePts[1] = new Accord.Point((j + 1) / (float)(m_cols - 1), i / (float)(m_rows - 1));
                    templatePts[2] = new Accord.Point((j + 1) / (float)(m_cols - 1), (i + 1) / (float)(m_rows - 1));
                    templatePts[3] = new Accord.Point(j / (float)(m_cols - 1), (i + 1) / (float)(m_rows - 1));

                    Accord.Point[] realPoints = new Accord.Point[4];
                    for (int k = 0; k < 4; k++)
                    {
                        realPoints[k] = new Accord.Point((float)m_polygons[i, j].Points[k].X, (float)m_polygons[i, j].Points[k].Y);
                    }

                    Accord.Point[][] matchedPoints = new Accord.Point[2][];
                    matchedPoints[1] = templatePts;
                    matchedPoints[0] = realPoints;
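                    // matchedPoints pairs each cell's normalized template coordinates (matchedPoints[1])
                    // with its detected corner positions (matchedPoints[0]); the estimator below fits a
                    // homography relating the two sets.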

                    MatrixH homography = ransac.Estimate(matchedPoints);
                    m_homographies.Add(m_polygons[i, j], homography);
                }
            }

            reader.Close();
            reader.Dispose();
            reader = null;

            strm.Close();
            strm.Dispose();
            strm = null;
        }
Example #18
        private Bitmap CompareAndDrawImage(Bitmap modelImage, Bitmap observedImage, SurfSettings setting)
        {
            Stopwatch watch1 = new Stopwatch();
            Stopwatch watch2 = new Stopwatch();

            Bitmap returnBitmap;

            watch2.Start();
            watch1.Reset(); watch1.Start();
            double hessianThreshold  = setting.HessianThresh.HasValue ? setting.HessianThresh.Value : 500;
            float  hessianThreshold2 = (float)hessianThreshold / 1000000;

            Debug.WriteLine("hessianThreshold2: {0}", hessianThreshold2);
            SpeededUpRobustFeaturesDetector    surf        = new SpeededUpRobustFeaturesDetector(hessianThreshold2);
            List <SpeededUpRobustFeaturePoint> surfPoints1 = surf.ProcessImage(modelImage);
            List <SpeededUpRobustFeaturePoint> surfPoints2 = surf.ProcessImage(observedImage);


            Debug.WriteLine("Surf points count: {0}", surfPoints1.Count);
            Debug.WriteLine("Surf points count: {0}", surfPoints2.Count);
            //long memoryFootprint = MemorySize.GetBlobSizeinKb(surfPoints2);
            //Debug.WriteLine("Surf extractor: {0} kb", memoryFootprint);

            watch1.Stop();
            Debug.WriteLine("Surf Detection tooked {0} ms", watch1.ElapsedMilliseconds);

            watch1.Reset(); watch1.Start();
            // Show the marked points in the original images
            Bitmap img1mark = new FeaturesMarker(surfPoints1, 2).Apply(modelImage);
            Bitmap img2mark = new FeaturesMarker(surfPoints2, 2).Apply(observedImage);
            // Concatenate the two images together in a single image (just to show on screen)
            Concatenate concatenate = new Concatenate(img1mark);

            returnBitmap = concatenate.Apply(img2mark);
            watch1.Stop();
            Debug.WriteLine("Surf point plotting tooked {0} ms", watch1.ElapsedMilliseconds);


            //watch1.Reset(); watch1.Start();
            //List<IntPoint>[] coretionalMatches = getMatches(surfPoints1, surfPoints2);
            //watch1.Stop();
            //Debug.WriteLine("Correctional Match tooked {0} ms", watch1.ElapsedMilliseconds);

            //// Get the two sets of points
            //IntPoint[] correlationPoints11 = coretionalMatches[0].ToArray();
            //IntPoint[] correlationPoints22 = coretionalMatches[1].ToArray();

            //Debug.WriteLine("Correclation points count: {0}", correlationPoints11.Length);
            //Debug.WriteLine("Correclation points count: {0}", correlationPoints22.Length);

            Debug.WriteLine("Threshold: {0}", setting.UniquenessThreshold.Value);
            watch1.Reset(); watch1.Start();
            // Step 2: Match feature points using a k-NN
            KNearestNeighborMatching matcher = new KNearestNeighborMatching(2);

            matcher.Threshold = setting.UniquenessThreshold.Value;
            IntPoint[][] matches = matcher.Match(surfPoints1, surfPoints2);
            watch1.Stop();
            Debug.WriteLine("Knn Match tooked {0} ms", watch1.ElapsedMilliseconds);

            // Get the two sets of points
            IntPoint[] correlationPoints1 = matches[0];
            IntPoint[] correlationPoints2 = matches[1];

            Debug.WriteLine("Knn points count: {0}", correlationPoints1.Length);
            Debug.WriteLine("Knn points count: {0}", correlationPoints2.Length);

            //watch1.Reset(); watch1.Start();
            //// Show the marked correlations in the concatenated image
            //PairsMarker pairs = new PairsMarker(
            //    correlationPoints1, // Add image1's width to the X points to show the markings correctly
            //    correlationPoints2.Apply(p => new IntPoint(p.X + modelImage.Width, p.Y)), Color.Blue);

            //returnBitmap = pairs.Apply(returnBitmap);
            //watch1.Stop();
            //Debug.WriteLine("Match pair marking tooked {0} ms", watch1.ElapsedMilliseconds);

            if (correlationPoints1.Length < 4 || correlationPoints2.Length < 4)
            {
                MessageBox.Show("Insufficient points to attempt a fit.");
                return(null);
            }

            watch1.Reset(); watch1.Start();
            // Step 3: Create the homography matrix using a robust estimator
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homography = ransac.Estimate(correlationPoints1, correlationPoints2);

            watch1.Stop();
            Debug.WriteLine("Ransac tooked {0} ms", watch1.ElapsedMilliseconds);

            watch1.Reset(); watch1.Start();
            // Plot RANSAC results against correlation results
            IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
            IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);
            watch1.Stop();
            Debug.WriteLine("Ransac SubMatrix {0} ms", watch1.ElapsedMilliseconds);

            Debug.WriteLine("Ransac points count: {0}", inliers1.Length);
            Debug.WriteLine("Ransac points count: {0}", inliers2.Length);

            watch1.Reset(); watch1.Start();
            PairsMarker inlierPairs = new PairsMarker(
                inliers1, // Add image1's width to the X points to show the markings correctly
                inliers2.Apply(p => new IntPoint(p.X + modelImage.Width, p.Y)), Color.Red);

            returnBitmap = inlierPairs.Apply(returnBitmap);
            watch1.Stop();
            Debug.WriteLine("Ransac plotting tooked {0} ms", watch1.ElapsedMilliseconds);

            watch2.Stop();
            return(returnBitmap);
        }
Example #19
        public void Panorama_Example1()
        {
            Accord.Math.Random.Generator.Seed = 0;

            // Let's start with two pictures that have been
            // taken from slightly different points of view:
            //
            Bitmap img1 = Accord.Imaging.Image.Clone(Resources.dc_left);
            Bitmap img2 = Accord.Imaging.Image.Clone(Resources.dc_right);

            // Those pictures are shown below:
            // ImageBox.Show(img1, PictureBoxSizeMode.Zoom, 640, 480);
            // ImageBox.Show(img2, PictureBoxSizeMode.Zoom, 640, 480);


            // Step 1: Detect feature points using Surf Corners Detector
            var surf = new SpeededUpRobustFeaturesDetector();

            var points1 = surf.ProcessImage(img1);
            var points2 = surf.ProcessImage(img2);

            // Step 2: Match feature points using a k-NN
            var matcher = new KNearestNeighborMatching(5);
            var matches = matcher.Match(points1, points2);

            // Step 3: Create the matrix using a robust estimator
            var     ransac           = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homographyMatrix = ransac.Estimate(matches);

            Assert.AreEqual(1.15707409, homographyMatrix.Elements[0], 1e-5);
            Assert.AreEqual(-0.0233834628, homographyMatrix.Elements[1], 1e-5);
            Assert.AreEqual(-261.8217, homographyMatrix.Elements[2], 1e-2);
            Assert.AreEqual(0.08801343, homographyMatrix.Elements[3], 1e-5);
            Assert.AreEqual(1.12451434, homographyMatrix.Elements[4], 1e-5);
            Assert.AreEqual(-171.191208, homographyMatrix.Elements[5], 1e-2);
            Assert.AreEqual(0.000127789128, homographyMatrix.Elements[6], 1e-5);
            Assert.AreEqual(0.00006173445, homographyMatrix.Elements[7], 1e-5);
            Assert.AreEqual(8, homographyMatrix.Elements.Length);


            // Step 4: Project and blend using the homography
            Blend blend = new Blend(homographyMatrix, img1);


            // Compute the blending algorithm
            Bitmap result = blend.Apply(img2);

            // Show on screen
            // ImageBox.Show(result, PictureBoxSizeMode.Zoom, 640, 480);


            result = Accord.Imaging.Image.Clone(result);

#if NET35
            // result.Save(@"C:\Projects\Accord.NET\framework\Unit Tests\Accord.Tests.Imaging\Resources\blend_net35.png", ImageFormat.Png);
            Bitmap image = Accord.Imaging.Image.Clone(Resources.blend_net35);
#else
            // result.Save(@"C:\Projects\Accord.NET\framework\Unit Tests\Accord.Tests.Imaging\Resources\blend_net45.png", ImageFormat.Png);
            Bitmap image = Accord.Imaging.Image.Clone(Resources.blend_net45);
#endif

#pragma warning disable 618
            double[,] expected = image.ToDoubleMatrix(channel: 0);
            double[,] actual   = result.ToDoubleMatrix(channel: 0);
            Assert.IsTrue(Matrix.IsEqual(expected, actual, atol: 0.1));
#pragma warning restore 618
        }
Example #20
        public void EstimateTest()
        {
            Point[] points1 =
            {
                new Point(86,    3),
                new Point(262,   7),
                new Point(72,   12),
                new Point(233,  14),
                new Point(222,  16),
                new Point(242,  19),
                new Point(174,  21),
                new Point(199,  22),
                new Point(210,  23),
                new Point(245,  27),
                new Point(223,  28),
                new Point(171,  29),
                new Point(213,  32),
                new Point(206,  34),
                new Point(158,  36),
                new Point(215,  36),
                new Point(194,  40),
                new Point(155,  43),
                new Point(390, 145),
                new Point(325, 151),
                new Point(430, 165),
                new Point(569, 166),
                new Point(548, 171),
                new Point(486, 172),
                new Point(585, 174),
                new Point(513, 175),
                new Point(581, 178)
            };


            //Points 2
            Point[] points2 =
            {
                new Point(94,    3),
                new Point(129,  10),
                new Point(135,   6),
                new Point(100,  16),
                new Point(88,   18),
                new Point(109,  22),
                new Point(35,   23),
                new Point(63,   24),
                new Point(75,   25),
                new Point(112,  30),
                new Point(89,   31),
                new Point(32,   31),
                new Point(78,   35),
                new Point(70,   37),
                new Point(19,   38),
                new Point(80,   39),
                new Point(58,   43),
                new Point(15,   46),
                new Point(259, 151),
                new Point(194, 158),
                new Point(299, 171),
                new Point(433, 171),
                new Point(414, 176),
                new Point(354, 177),
                new Point(449, 178),
                new Point(380, 180),
                new Point(445, 183)
            };


            double[,] expected =
            {
                {     0.60628712500923021,  0.00059969215221173516, -85.656775800903588 },
                {    0.010863088422024825,     0.58853684011367191, -1.6919055825149059 },
                { 0.000088084825486304467, 0.000063754043404499572, 0.53717560168513312 }
            };

            expected = (double[, ])(new MatrixH(expected));

            // Set a fixed seed to transform RANSAC into a deterministic algorithm
            Accord.Math.Tools.SetupGenerator(0);

            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);

            double[,] actual = (double[, ])ransac.Estimate(points1, points2);


            for (int i = 0; i < 3; i++)
            {
                for (int j = 0; j < 3; j++)
                {
                    Assert.AreEqual(actual[i, j], expected[i, j], 0.001);
                }
            }
        }
Example #21
        /// <summary>
        /// Extract keypoints from an image using the FREAK methodology.
        /// </summary>
        /// <param name="image">Input color image.</param>
        /// <param name="depth">Input depth map.</param>
        /// <param name="width">Input image width.</param>
        /// <param name="height">Input image height.</param>
        /// <param name="threshold">Selection threshold value. Higher gives less keypoints.</param>
        /// <returns>List of keypoints in measurement space.</returns>
        private List <SparseItem> ExtractKeypoints(Color[] image, float[][] depth, int width, int height, int threshold)
        {
            List <IFeaturePoint> keypointsF = ExtractRawKeyPoints(image, width, height, KeypointFilter);
            List <SparseItem>    keypoints  = new List <SparseItem>();

            List <IFeaturePoint> filtered = new List <IFeaturePoint>();

            if (KeypointFilter && keypointsF.Count > 4 && prevkeypoints.Count > 4)
            {
                var descriptors = new Dictionary <IntPoint, IFeaturePoint>();

                foreach (var point in keypointsF)
                {
                    descriptors[new IntPoint((int)point.X, (int)point.Y)] = point;
                }

                var matcher = new KNearestNeighborMatching(3, Distance.Hamming);
                matcher.Threshold = 0.37;

                var matches = matcher.Match(prevkeypoints, keypointsF);
                var ransac  = new RansacHomographyEstimator(0.1, 0.999);

                this.matches    = new IntPoint[2][];
                this.matches[0] = new IntPoint[0];

                try {
                    if (matches[0].Length > 4)
                    {
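                        // The estimated homography itself is discarded here; only RANSAC's inlier set
                        // is used to keep geometrically consistent matches between frames.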
                        ransac.Estimate(matches);
                        int[] inliers = ransac.Inliers;

                        filtered = new List <IFeaturePoint>();

                        this.matches    = new IntPoint[2][];
                        this.matches[0] = new IntPoint[inliers.Length];
                        this.matches[1] = new IntPoint[inliers.Length];

                        for (int i = 0; i < inliers.Length; i++)
                        {
                            int x = matches[1][inliers[i]].X;
                            int y = matches[1][inliers[i]].Y;

                            this.matches[0][i] = matches[0][inliers[i]];
                            this.matches[1][i] = matches[1][inliers[i]];

                            if (depth[x][y] > 0)
                            {
                                filtered.Add(descriptors[matches[1][inliers[i]]]);
                            }
                        }
                    }
                }
                catch (Accord.ConvergenceException) {
                    // just continue as if not enough points were found
                }
            }
            else
            {
                for (int i = 0; i < keypointsF.Count; i++)
                {
                    int x = (int)keypointsF[i].X;
                    int y = (int)keypointsF[i].Y;

                    if (depth[x][y] > 0)
                    {
                        filtered.Add(keypointsF[i]);
                    }
                }
            }

            this.prevprevkeypoints = this.prevkeypoints;
            this.prevkeypoints     = keypointsF;

            foreach (var point in filtered)
            {
                int x = (int)point.X;
                int y = (int)point.Y;

                keypoints.Add(new SparseItem(x, y, depth[x][y]));
            }

            return(keypoints);
        }
Example #22
        void SurfMatch(Bitmap img1, Bitmap img2)
        {
            Stopwatch watch = Stopwatch.StartNew();

            ////Main-thread (single-threaded) version:
            //List<IPoint2> ipts1 = GetFtPntList(img1, thread);//feature points of image 1
            //List<IPoint2> ipts2 = GetFtPntList(img2, thread);//feature points of image 2
            //List<IPoint2>[] matches = Utils.getMatches(ipts1, ipts2);

            //Multi-threaded, with the images split into tiles
            ImageManager imgM = new ImageManager(img1, img2, thread, thread, cutSize);

            List <IPoint2>[] matches = imgM.GetMatchPoint();

            IntPoint[] correlationPoints1 = new IntPoint[matches[0].Count];
            IntPoint[] correlationPoints2 = new IntPoint[matches[1].Count];

            List <IPoint2> list1 = matches[0];
            int            num   = 0;

            foreach (IPoint2 kv in list1)
            {
                correlationPoints1[num] = new IntPoint {
                    X = (int)kv.x, Y = (int)kv.y
                };
                num++;
            }

            int            num1  = 0;
            List <IPoint2> list2 = matches[1];

            foreach (IPoint2 kv in list2)
            {
                correlationPoints2[num1] = new IntPoint {
                    X = (int)kv.x, Y = (int)kv.y
                };
                num1++;
            }

            if (correlationPoints1.Length > 0)
            {
                RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
                MatrixH homography = ransac.Estimate(correlationPoints1, correlationPoints2);

                Blend blend = new Blend(homography, img1);
                pictureBox.Image = blend.Apply(img2);

                //Measure elapsed time
                long matchTime = watch.ElapsedMilliseconds;
                this.Invoke(new Action(delegate()
                {
                    if (matchTime < 1000)
                    {
                        this.label1.Text = "完成!耗时 " + matchTime.ToString() + " 毫秒!";
                    }
                    else
                    {
                        this.label1.Text = "完成!耗时 " + (matchTime / 1000.0).ToString() + " 秒!";
                    }

                    this.btnSave.Visible  = true;
                    this.btnBlend.Enabled = true;
                }));
            }
            else
            {
                //Measure elapsed time
                long matchTime = watch.ElapsedMilliseconds;
                this.Invoke(new Action(delegate()
                {
                    this.label1.Text = "没有找到相同点!耗时 " + matchTime.ToString() + " 毫秒!";
                }));
            }

            watch.Stop();
            thd.Abort();
        }