RANSAC Robust Homography Matrix Estimator.

Fitting a homography using RANSAC is fairly straightforward. RANSAC is an iterative method: in each iteration, a random sample of four correspondences is selected from the given correspondence points and a homography H is computed from those four points.

The original points are then transformed by this homography, and the distance of each transformed point from its expected location is computed. Points that fall within a distance threshold are classified as inliers, and the remaining points as outliers.

After a given number of iterations, the iteration that produced the largest number of inliers is selected as the best estimate of H.

References: E. Dubrofsky, Homography Estimation. Master's thesis, University of British Columbia. Available at: http://www.cs.ubc.ca/~dubroe/courses/MastersEssay.pdf

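Before the individual examples, here is a minimal sketch of the call sequence they all share. The two point arrays are assumed to already contain matched correspondences, and the constructor arguments are taken to be the RANSAC inlier distance threshold (0.001) and the desired confidence of drawing an outlier-free sample (0.99):

        // Estimate the homography from two matched point sets
        RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
        MatrixH homography = ransac.Estimate(correlationPoints1, correlationPoints2);

        // The indices of the consensus set are exposed through ransac.Inliers,
        // so the correspondences that survived RANSAC can be recovered like this:
        IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
        IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);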
Example No. 1
        public void Panorama_Example1()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // Let's start with two pictures that have been
            // taken from slightly different points of view:
            //
            Bitmap img1 = Resources.dc_left;
            Bitmap img2 = Resources.dc_right;

            // Those pictures are shown below:
            // ImageBox.Show(img1, PictureBoxSizeMode.Zoom, 640, 480);
            // ImageBox.Show(img2, PictureBoxSizeMode.Zoom, 640, 480);


            // Step 1: Detect feature points using Surf Corners Detector
            var surf = new SpeededUpRobustFeaturesDetector();

            var points1 = surf.ProcessImage(img1);
            var points2 = surf.ProcessImage(img2);

            // Step 2: Match feature points using a k-NN
            var matcher = new KNearestNeighborMatching(5);
            var matches = matcher.Match(points1, points2);

            // Step 3: Create the matrix using a robust estimator
            var ransac = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homographyMatrix = ransac.Estimate(matches);

            Assert.AreEqual(1.13583624f, homographyMatrix.Elements[0], 1e-5);
            Assert.AreEqual(-0.0229569562f, homographyMatrix.Elements[1], 1e-5);
            Assert.AreEqual(-255.243988f, homographyMatrix.Elements[2], 1e-2);
            Assert.AreEqual(0.080111593f, homographyMatrix.Elements[3], 1e-5);
            Assert.AreEqual(1.11404252f, homographyMatrix.Elements[4], 1e-5);
            Assert.AreEqual(-167.362167f, homographyMatrix.Elements[5], 1e-2);
            Assert.AreEqual(0.00011207442f, homographyMatrix.Elements[6], 1e-5);
            Assert.AreEqual(0.0000529394056f, homographyMatrix.Elements[7], 1e-5);
            Assert.AreEqual(8, homographyMatrix.Elements.Length);


            // Step 4: Project and blend using the homography
            Blend blend = new Blend(homographyMatrix, img1);


            // Compute the blending algorithm
            Bitmap result = blend.Apply(img2);

            // Show on screen
            // ImageBox.Show(result, PictureBoxSizeMode.Zoom, 640, 480);

#pragma warning disable 618
            double[,] expected = Properties.Resources.blend_result.ToDoubleMatrix(0);
            double[,] actual = result.ToDoubleMatrix(0);
            Assert.IsTrue(Matrix.IsEqual(expected, actual, 0.1));
#pragma warning restore 618
        }
Example No. 2
        public void Example1()
        {
            // Let's start with two pictures that have been
            // taken from slightly different points of view:
            //
            Bitmap img1 = Resources.dc_left;
            Bitmap img2 = Resources.dc_right;

            // Those pictures are shown below:
            ImageBox.Show(img1, PictureBoxSizeMode.Zoom, 640, 480);
            ImageBox.Show(img2, PictureBoxSizeMode.Zoom, 640, 480);


            // Step 1: Detect feature points using Surf Corners Detector
            var surf = new SpeededUpRobustFeaturesDetector();

            var points1 = surf.ProcessImage(img1);
            var points2 = surf.ProcessImage(img2);

            // Step 2: Match feature points using a k-NN
            var matcher = new KNearestNeighborMatching(5);
            var matches = matcher.Match(points1, points2);

            // Step 3: Create the matrix using a robust estimator
            var ransac = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homographyMatrix = ransac.Estimate(matches);

            // Step 4: Project and blend using the homography
            Blend blend = new Blend(homographyMatrix, img1);


            // Compute the blending algorithm
            Bitmap result = blend.Apply(img2);

            // Show on screen
            ImageBox.Show(result, PictureBoxSizeMode.Zoom, 640, 480);
        }
Example No. 3
        private void btnRansac_Click(object sender, EventArgs e)
        {
            if (correlationPoints1 == null)
            {
                MessageBox.Show("Please, click Nearest Neighbor button first! :-)");
                return;
            }

            if (correlationPoints1.Length < 4 || correlationPoints2.Length < 4)
            {
                MessageBox.Show("Insufficient points to attempt a fit.");
                return;
            }

            // Step 3: Create the homography matrix using a robust estimator
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
            homography = ransac.Estimate(correlationPoints1, correlationPoints2);

            // Plot RANSAC results against correlation results
            IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
            IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);

            // Concatenate the two images in a single image (just to show on screen)
            Concatenate concat = new Concatenate(img1);
            Bitmap img3 = concat.Apply(img2);

            // Show the marked correlations in the concatenated image
            PairsMarker pairs = new PairsMarker(
                inliers1, // Add image1's width to the X points to show the markings correctly
                inliers2.Apply(p => new IntPoint(p.X + img1.Width, p.Y)));

            pictureBox.Image = pairs.Apply(img3);
        }
Example No. 4
        /// <summary>
        /// Matches detected keypoints.
        /// </summary>
        private void MatchKeypoints()
        {
            // matching keypoints step needs at least two images
            if (input_images.Count < 2) return;

            // using RANSAC homography estimator
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
            CorrelationMatching matcher = new CorrelationMatching(9);

            Bitmap mergedImage = new Bitmap(input_images.Count * input_images[0].Width, input_images[0].Height); // it is assumed all images are the same size!
            Graphics graphics = Graphics.FromImage(mergedImage);

            IntPoint[][] matches;

            int cumulativeWidth = 0;
            int keypoints_cntr = 0;

            // draw all images
            for (int i = 0; i < input_images.Count; i++) {
                graphics.DrawImage(input_images[i], cumulativeWidth, 0, input_images[i].Width, input_images[i].Height);
                cumulativeWidth += input_images[i].Width;
            }

            // iterate through all images
            for (int i = 0; i < input_images.Count; i++)
            {

                if (i != input_images.Count - 1)
                {
                    // match detected keypoints with maximum cross-correlation algorithm
                    matches = matcher.Match(
                        new Bitmap(input_images[i]), new Bitmap(input_images[i + 1]),
                        keypoints[keypoints_cntr].ToArray(), keypoints[keypoints_cntr + 1].ToArray());

                    IntPoint[] correlationPoints1 = matches[0];
                    IntPoint[] correlationPoints2 = matches[1];

                    // Plot RANSAC results against correlation results
                    homography = ransac.Estimate(correlationPoints1, correlationPoints2);

                    // take only inliers - good matches
                    IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
                    IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);

                    PairsMarker pairs = new PairsMarker(correlationPoints1, correlationPoints2.Apply(p => new IntPoint(p.X + input_images[i].Width, p.Y)));

                    // draw all matching pairs
                    for (int j = 0; j < pairs.Points1.Count(); j++)
                    {
                        if (i % 2 == 0)
                            graphics.DrawLine(new Pen(Color.Red, 1.5f),
                                new System.Drawing.Point(correlationPoints1[j].X + i * input_images[i].Width, correlationPoints1[j].Y),
                                new System.Drawing.Point(correlationPoints2[j].X + (i + 1) * input_images[i].Width, correlationPoints2[j].Y));
                    }

                    // draw inliers
                    for (int j = 0; j < inliers1.Count(); j++)
                    {
                        Pen pen = (i % 2 == 0) ? new Pen(Color.GreenYellow, 1.5f) : new Pen(Color.Blue, 1.5f);
                        graphics.DrawLine(pen,
                            new System.Drawing.Point(inliers1[j].X + i * input_images[i].Width, inliers1[j].Y),
                            new System.Drawing.Point(inliers2[j].X + (i + 1) * input_images[i].Width, inliers2[j].Y));
                    }

                    keypoints_cntr += 2;
                }
            }

            ShowThumbnail(mergedImage, true, PanoramaPhase.MatchKeypoints);
        }
Example No. 5
        public void EstimateTest()
        {

            Point[] points1 = 
            {
                new Point(86, 3),
                new Point(262, 7),
                new Point(72, 12),
                new Point(233, 14),
                new Point(222, 16),
                new Point(242, 19),
                new Point(174, 21),
                new Point(199, 22),
                new Point(210, 23),
                new Point(245, 27),
                new Point(223, 28),
                new Point(171, 29),
                new Point(213, 32),
                new Point(206, 34),
                new Point(158, 36),
                new Point(215, 36),
                new Point(194, 40),
                new Point(155, 43),
                new Point(390, 145),
                new Point(325, 151),
                new Point(430, 165),
                new Point(569, 166),
                new Point(548, 171),
                new Point(486, 172),
                new Point(585, 174),
                new Point(513, 175),
                new Point(581, 178)
            };


            //Points 2
            Point[] points2 = 
            {
                new Point(94, 3),
                new Point(129, 10),
                new Point(135, 6),
                new Point(100, 16),
                new Point(88, 18),
                new Point(109, 22),
                new Point(35, 23),
                new Point(63, 24),
                new Point(75, 25),
                new Point(112, 30),
                new Point(89, 31),
                new Point(32, 31),
                new Point(78, 35),
                new Point(70, 37),
                new Point(19, 38),
                new Point(80, 39),
                new Point(58, 43),
                new Point(15, 46),
                new Point(259, 151),
                new Point(194, 158),
                new Point(299, 171),
                new Point(433, 171),
                new Point(414, 176),
                new Point(354, 177),
                new Point(449, 178),
                new Point(380, 180),
                new Point(445, 183)
            };


            double[,] expected = 
            {
                { 0.60628712500923021,     0.00059969215221173516, -85.656775800903588   },
                { 0.010863088422024825,    0.58853684011367191,     -1.6919055825149059  },
                { 0.000088084825486304467, 0.000063754043404499572,  0.53717560168513312 }
            };

            expected = (double[,])(new MatrixH(expected));

            // Set a fixed seed to transform RANSAC into a deterministic algorithm
            Accord.Math.Random.Generator.Seed = 0;

            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
            double[,] actual = (double[,])ransac.Estimate(points1, points2);


            for (int i = 0; i < 3; i++)
                for (int j = 0; j < 3; j++)
                    Assert.AreEqual(actual[i, j], expected[i, j], 0.001);
        }
Example No. 6
        private Bitmap CompareAndDrawImage(Bitmap modelImage, Bitmap observedImage, SurfSettings setting)
        {
            Stopwatch watch1 = new Stopwatch();
            Stopwatch watch2 = new Stopwatch();

            Bitmap returnBitmap;

            watch2.Start();
            watch1.Reset(); watch1.Start();
            double hessianThreshold = setting.HessianThresh.HasValue ? setting.HessianThresh.Value : 500;
            float hessianThreshold2 = (float)hessianThreshold / 1000000;

            Debug.WriteLine("hessianThreshold2: {0}", hessianThreshold2);
            SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector(hessianThreshold2);
            List<SpeededUpRobustFeaturePoint> surfPoints1 = surf.ProcessImage(modelImage);
            List<SpeededUpRobustFeaturePoint> surfPoints2 = surf.ProcessImage(observedImage);

            Debug.WriteLine("Surf points count: {0}", surfPoints1.Count);
            Debug.WriteLine("Surf points count: {0}", surfPoints2.Count);
            //long memoryFootprint = MemorySize.GetBlobSizeinKb(surfPoints2);
            //Debug.WriteLine("Surf extractor: {0} kb", memoryFootprint);

            watch1.Stop();
            Debug.WriteLine("Surf Detection tooked {0} ms", watch1.ElapsedMilliseconds);

            watch1.Reset(); watch1.Start();
            // Show the marked points in the original images
            Bitmap img1mark = new FeaturesMarker(surfPoints1, 2).Apply(modelImage);
            Bitmap img2mark = new FeaturesMarker(surfPoints2, 2).Apply(observedImage);
            // Concatenate the two images together in a single image (just to show on screen)
            Concatenate concatenate = new Concatenate(img1mark);
            returnBitmap = concatenate.Apply(img2mark);
            watch1.Stop();
            Debug.WriteLine("Surf point plotting tooked {0} ms", watch1.ElapsedMilliseconds);

            //watch1.Reset(); watch1.Start();
            //List<IntPoint>[] coretionalMatches = getMatches(surfPoints1, surfPoints2);
            //watch1.Stop();
            //Debug.WriteLine("Correctional Match tooked {0} ms", watch1.ElapsedMilliseconds);

            //// Get the two sets of points
            //IntPoint[] correlationPoints11 = coretionalMatches[0].ToArray();
            //IntPoint[] correlationPoints22 = coretionalMatches[1].ToArray();

            //Debug.WriteLine("Correclation points count: {0}", correlationPoints11.Length);
            //Debug.WriteLine("Correclation points count: {0}", correlationPoints22.Length);

            Debug.WriteLine("Threshold: {0}", setting.UniquenessThreshold.Value);
            watch1.Reset(); watch1.Start();
            // Step 2: Match feature points using a k-NN
            KNearestNeighborMatching matcher = new KNearestNeighborMatching(2);
            matcher.Threshold = setting.UniquenessThreshold.Value;
            IntPoint[][] matches = matcher.Match(surfPoints1, surfPoints2);
            watch1.Stop();
            Debug.WriteLine("Knn Match tooked {0} ms", watch1.ElapsedMilliseconds);

            // Get the two sets of points
            IntPoint[] correlationPoints1 = matches[0];
            IntPoint[] correlationPoints2 = matches[1];

            Debug.WriteLine("Knn points count: {0}", correlationPoints1.Length);
            Debug.WriteLine("Knn points count: {0}", correlationPoints2.Length);

            //watch1.Reset(); watch1.Start();
            //// Show the marked correlations in the concatenated image
            //PairsMarker pairs = new PairsMarker(
            //    correlationPoints1, // Add image1's width to the X points to show the markings correctly
            //    correlationPoints2.Apply(p => new IntPoint(p.X + modelImage.Width, p.Y)), Color.Blue);

            //returnBitmap = pairs.Apply(returnBitmap);
            //watch1.Stop();
            //Debug.WriteLine("Match pair marking tooked {0} ms", watch1.ElapsedMilliseconds);

            if (correlationPoints1.Length < 4 || correlationPoints2.Length < 4)
            {
                MessageBox.Show("Insufficient points to attempt a fit.");
                return null;
            }

            watch1.Reset(); watch1.Start();
            // Step 3: Create the homography matrix using a robust estimator
            //RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homography = ransac.Estimate(correlationPoints1, correlationPoints2);
            watch1.Stop();
            Debug.WriteLine("Ransac tooked {0} ms", watch1.ElapsedMilliseconds);

            watch1.Reset(); watch1.Start();
            // Plot RANSAC results against correlation results
            IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
            IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);
            watch1.Stop();
            Debug.WriteLine("Ransac SubMatrix {0} ms", watch1.ElapsedMilliseconds);

            Debug.WriteLine("Ransac points count: {0}", inliers1.Length);
            Debug.WriteLine("Ransac points count: {0}", inliers2.Length);

            watch1.Reset(); watch1.Start();
            PairsMarker inlierPairs = new PairsMarker(
               inliers1, // Add image1's width to the X points to show the markings correctly
               inliers2.Apply(p => new IntPoint(p.X + modelImage.Width, p.Y)), Color.Red);

            returnBitmap = inlierPairs.Apply(returnBitmap);
            watch1.Stop();
            Debug.WriteLine("Ransac plotting tooked {0} ms", watch1.ElapsedMilliseconds);

            watch2.Stop();
            return returnBitmap;
        }
Example No. 7
        private bool GenerateKeypointMatches(Bitmap target, Bitmap frame, Rectangle prevTarget)
        {
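            // Detect Harris corner points in both the target and the current frame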
            HarrisCornersDetector harrisDetector = new HarrisCornersDetector(HarrisCornerMeasure.Harris, 1000f, 1f, 2);
            targetKeyPoints = harrisDetector.ProcessImage(target).ToArray();
            frameKeyPoints = harrisDetector.ProcessImage(frame).ToArray();
            //Console.WriteLine("tr={0} fr={1}", targetKeyPoints.Length, frameKeyPoints.Length);
            if (targetKeyPoints.Length == 0 || frameKeyPoints.Length == 0)
            {
                return false;
            }

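            // Match the detected corners between the two images by window correlation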
            CorrelationMatching matcher = new CorrelationMatching(15);
            IntPoint[][] matches = matcher.Match(target, frame, targetKeyPoints, frameKeyPoints);
            targetMacthes = matches[0];
            frameMatches = matches[1];
            if (targetMacthes.Length < 4 || frameMatches.Length < 4)
            {
                return false;
            }
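            // Estimate the homography with RANSAC; Estimate can throw (for instance when
            // no model can be fitted), in which case the method simply returns false below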
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.1, 0.50);
            MatrixH estiment = new MatrixH();
            try
            {

                estiment = ransac.Estimate(targetMacthes, frameMatches);
            }
            catch
            {
                return false;
            }
            IntPoint[] targetGoodMatch = targetMacthes.Submatrix(ransac.Inliers);
            IntPoint[] frameGoodMatch = frameMatches.Submatrix(ransac.Inliers);
            CalculatePosChange(targetGoodMatch, frameGoodMatch, prevTarget);
            return true;
        }
Example No. 8
        private void BtnRansac_OnClick(object sender, RoutedEventArgs e)
        {
            // Step 3: Create the homography matrix using a robust estimator
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
            homography = ransac.Estimate(correlationPoints1, correlationPoints2);

            // Plot RANSAC results against correlation results
            IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
            IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);

            // Concatenate the two images in a single image (just to show on screen)
            Concatenate concat = new Concatenate(img1);
            Bitmap img3 = concat.Apply(img2);

            // Show the marked correlations in the concatenated image
            PairsMarker pairs = new PairsMarker(
                inliers1, // Add image1's width to the X points to show the markings correctly
                inliers2.Apply(p => new IntPoint(p.X + img1.Width, p.Y)));

            PictureBox.Source = pairs.Apply(img3);
        }
Example No. 9
        public void Panorama_Example1()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // Let's start with two pictures that have been
            // taken from slightly different points of view:
            //
            Bitmap img1 = Resources.dc_left;
            Bitmap img2 = Resources.dc_right;

            // Those pictures are shown below:
            // ImageBox.Show(img1, PictureBoxSizeMode.Zoom, 640, 480);
            // ImageBox.Show(img2, PictureBoxSizeMode.Zoom, 640, 480);


            // Step 1: Detect feature points using Surf Corners Detector
            var surf = new SpeededUpRobustFeaturesDetector();

            var points1 = surf.ProcessImage(img1);
            var points2 = surf.ProcessImage(img2);

            // Step 2: Match feature points using a k-NN
            var matcher = new KNearestNeighborMatching(5);
            var matches = matcher.Match(points1, points2);

            // Step 3: Create the matrix using a robust estimator
            var ransac = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homographyMatrix = ransac.Estimate(matches);

            Assert.AreEqual(1.15707409, homographyMatrix.Elements[0], 1e-5);
            Assert.AreEqual(-0.0233834628, homographyMatrix.Elements[1], 1e-5);
            Assert.AreEqual(-261.8217, homographyMatrix.Elements[2], 1e-2);
            Assert.AreEqual(0.08801343, homographyMatrix.Elements[3], 1e-5);
            Assert.AreEqual(1.12451434, homographyMatrix.Elements[4], 1e-5);
            Assert.AreEqual(-171.191208, homographyMatrix.Elements[5], 1e-2);
            Assert.AreEqual(0.000127789128, homographyMatrix.Elements[6], 1e-5);
            Assert.AreEqual(0.00006173445, homographyMatrix.Elements[7], 1e-5);
            Assert.AreEqual(8, homographyMatrix.Elements.Length);


            // Step 4: Project and blend using the homography
            Blend blend = new Blend(homographyMatrix, img1);


            // Compute the blending algorithm
            Bitmap result = blend.Apply(img2);

            // Show on screen
            // ImageBox.Show(result, PictureBoxSizeMode.Zoom, 640, 480);

            //result.Save(@"C:\Projects\Accord.NET\net35.png", ImageFormat.Png);

#if NET35
            Bitmap image = Properties.Resources.blend_net35;
#else
            Bitmap image = Properties.Resources.blend_net45;
#endif

#pragma warning disable 618
            double[,] expected = image.ToDoubleMatrix(0);
            double[,] actual = result.ToDoubleMatrix(0);
            Assert.IsTrue(Matrix.IsEqual(expected, actual, 0.1));
#pragma warning restore 618
        }
Example No. 10
        private void button2_Click(object sender, EventArgs e)
        {
            // Step 3: Create the homography matrix using a robust estimator
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
            homography = ransac.Estimate(correlationPoints1, correlationPoints2);

            // Plot RANSAC results against correlation results
            IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
            IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);

            // Concatenate the two images in a single image (just to show on screen)
            Concatenate concat = new Concatenate(img1);
            Bitmap img3 = concat.Apply(img2);

            // Show the marked correlations in the concatenated image
            PairsMarker pairs = new PairsMarker(
                inliers1, // Add image1's width to the X points to show the markings correctly
                inliers2.Apply(p => new IntPoint(p.X + img1.Width, p.Y)));

            pictureBox1.Image = pairs.Apply(img3);

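            // Display the eight estimated homography coefficients (MatrixH stores the
            // 3x3 matrix as eight elements, with the last entry normalized to 1)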
            numA.Value = (decimal)homography.Elements[0];
            numB.Value = (decimal)homography.Elements[1];
            numC.Value = (decimal)homography.Elements[2];

            numD.Value = (decimal)homography.Elements[3];
            numE.Value = (decimal)homography.Elements[4];
            numF.Value = (decimal)homography.Elements[5];

            numG.Value = (decimal)homography.Elements[6];
            numH.Value = (decimal)homography.Elements[7];
        }
Example No. 11
        // PANORAMIC STITCHING CODE
        // Accord.NET and AForge.NET frameworks and code examples provided from
        // http://www.codeproject.com/KB/recipes/automatic_panoramas.aspx
        private void panoramicStitchingToolStripMenuItem_Click(object sender, EventArgs e)
        {
            // Save a copy of the current image, ask user to open another image to merge with
            Bitmap img2 = img;
            openToolStripMenuItem_Click(sender, e);

            // Check whether the current loaded image is different
            // (If the user cancelled the open image operation, the current image in the viewer
            // is still the same image object)
            if (img2 != img)
            {
                Bitmap img1 = img;

                AForge.IntPoint[] harrisPoints1;
                AForge.IntPoint[] harrisPoints2;

                AForge.IntPoint[] correlationPoints1;
                AForge.IntPoint[] correlationPoints2;

                MatrixH homography;

                // Use Harris Corners Detector to find points of interest
                HarrisCornersDetector harris = new HarrisCornersDetector(0.04f, 1000f);
                harrisPoints1 = harris.ProcessImage(img1).ToArray();
                harrisPoints2 = harris.ProcessImage(img2).ToArray();

                // This check fixes an out of bounds exception generated by matcher.Match() below when a
                // monocolour image (0 harris points) is stitched with a non-monocolour image
                if (harrisPoints1.Length == 0 || harrisPoints2.Length == 0)
                {
                    MessageBox.Show("Panoramic stitching cannot continue because at least one of the images does not contain any Harris points.", "Warning");
                }
                else
                {
                    // Match detected points using correlation
                    CorrelationMatching matcher = new CorrelationMatching(9);
                    AForge.IntPoint[][] matches = matcher.Match(img1, img2, harrisPoints1, harrisPoints2);

                    // Separate the two arrays
                    correlationPoints1 = matches[0];
                    correlationPoints2 = matches[1];

                    // Find homography matrix using RANSAC algorithm
                    RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);

                    // This check is to handle the ransac.Estimate() function
                    // which throws an exception if the array parameters do not contain at least 4 elements
                    if (correlationPoints1.Length < 4 || correlationPoints2.Length < 4)
                    {
                        MessageBox.Show("Panoramic stitching cannot continue because at least one of the images does not contain at least 4 correlation points.", "Warning");
                    }
                    else
                    {
                        homography = ransac.Estimate(correlationPoints1, correlationPoints2);

                        // Merge the images
                        Blend blend = new Blend(homography, img1);
                        img = blend.Apply(img2);

                        //save the image properly and resize main form
                        origImg.Dispose();
                        origImg = new Bitmap(img);
                        pictureBox.Image = img;
                        mainForm.ActiveForm.Width = img.Width + widthPad;
                        mainForm.ActiveForm.Height = img.Height + heightPad;
                    }
                }
            }
        }