public void ApplyTest()
{
    // Blend an image with a copy of itself shifted 32px to the
    // right: the output canvas must grow horizontally only.
    var overlay = Properties.Resources.image2;
    var baseImage = Properties.Resources.image2;

    var shiftRight = new MatrixH(1, 0, 32, 0, 1, 0, 0, 0);
    var actual = new Blend(shiftRight, overlay).Apply(baseImage);
    Assert.AreEqual(64, actual.Size.Width);
    Assert.AreEqual(32, actual.Size.Height);

    // A diagonal shift (32px right, 32px down) must enlarge the
    // canvas in both dimensions.
    var shiftDiagonal = new MatrixH(1, 0, 32, 0, 1, 32, 0, 0);
    actual = new Blend(shiftDiagonal, overlay).Apply(baseImage);
    Assert.AreEqual(64, actual.Size.Width);
    Assert.AreEqual(64, actual.Size.Height);

    // ImageBox.Show(img3, PictureBoxSizeMode.Zoom);
}
public void Panorama_Example1()
{
    // Fix the RNG seed so RANSAC produces a deterministic homography.
    Accord.Math.Tools.SetupGenerator(0);

    // Two photographs of the same scene taken from slightly
    // different points of view:
    Bitmap leftImage = Resources.dc_left;
    Bitmap rightImage = Resources.dc_right;

    // Those pictures are shown below:
    // ImageBox.Show(leftImage, PictureBoxSizeMode.Zoom, 640, 480);
    // ImageBox.Show(rightImage, PictureBoxSizeMode.Zoom, 640, 480);

    // Step 1: Detect feature points using Surf Corners Detector
    var surfDetector = new SpeededUpRobustFeaturesDetector();
    var leftPoints = surfDetector.ProcessImage(leftImage);
    var rightPoints = surfDetector.ProcessImage(rightImage);

    // Step 2: Match feature points using a k-NN
    var knn = new KNearestNeighborMatching(5);
    var matches = knn.Match(leftPoints, rightPoints);

    // Step 3: Create the matrix using a robust estimator
    var estimator = new RansacHomographyEstimator(0.001, 0.99);
    MatrixH homographyMatrix = estimator.Estimate(matches);

    // With the seeded generator the estimate is reproducible:
    var elements = homographyMatrix.Elements;
    Assert.AreEqual(1.13583624f, elements[0], 1e-5);
    Assert.AreEqual(-0.0229569562f, elements[1], 1e-5);
    Assert.AreEqual(-255.243988f, elements[2], 1e-2);
    Assert.AreEqual(0.080111593f, elements[3], 1e-5);
    Assert.AreEqual(1.11404252f, elements[4], 1e-5);
    Assert.AreEqual(-167.362167f, elements[5], 1e-2);
    Assert.AreEqual(0.00011207442f, elements[6], 1e-5);
    Assert.AreEqual(0.0000529394056f, elements[7], 1e-5);
    Assert.AreEqual(8, elements.Length);

    // Step 4: Project and blend using the homography
    var blender = new Blend(homographyMatrix, leftImage);

    // Compute the blending algorithm
    Bitmap result = blender.Apply(rightImage);

    // Show on screen
    // ImageBox.Show(result, PictureBoxSizeMode.Zoom, 640, 480);

#pragma warning disable 618
    double[,] expected = Properties.Resources.blend_result.ToDoubleMatrix(0);
    double[,] actual = result.ToDoubleMatrix(0);
    Assert.IsTrue(Matrix.IsEqual(expected, actual, 0.1));
#pragma warning restore 618
}
private void btnBlend_Click(object sender, EventArgs e)
{
    // The homography comes from the RANSAC step; without it there
    // is nothing to project, so tell the user and bail out.
    if (homography == null)
    {
        MessageBox.Show("Please, click RANSAC button first! :-)");
        return;
    }

    // Step 4: Project and blend the second image using the homography
    var blender = new Blend(homography, img1);
    pictureBox.Image = blender.Apply(img2);
}
public void ApplyTest2()
{
    // Four references to the same 8bpp-indexed resource image
    // (the indexed format is pinned by the assert near the end);
    // Blend always emits 32bpp ARGB output regardless of input format.
    var img1 = Properties.Resources.image2;
    var img2 = Properties.Resources.image2;
    var img3 = Properties.Resources.image2;
    var img4 = Properties.Resources.image2;

    MatrixH homography;
    Blend blend;

    // Pairwise blends with a 32px horizontal shift.
    // NOTE: Assert.AreEqual takes (expected, actual) — the constant
    // goes first so failure messages read correctly (the original
    // had the arguments swapped).
    homography = new MatrixH(1, 0, 32, 0, 1, 0, 0, 0);
    blend = new Blend(homography, img1);
    var img12 = blend.Apply(img2);
    //ImageBox.Show("Blend of 1 and 2", img12, PictureBoxSizeMode.Zoom);
    Assert.AreEqual(PixelFormat.Format32bppArgb, img12.PixelFormat);

    blend = new Blend(homography, img3);
    var img34 = blend.Apply(img4);
    //ImageBox.Show("Blend of 3 and 4", img34, PictureBoxSizeMode.Zoom);
    Assert.AreEqual(PixelFormat.Format32bppArgb, img34.PixelFormat);

    // Blend the two intermediate results with a 64px shift.
    homography = new MatrixH(1, 0, 64, 0, 1, 0, 0, 0);
    blend = new Blend(homography, img12);
    var img1234 = blend.Apply(img34);
    //ImageBox.Show("Blend of 1, 2, 3, 4", img1234, PictureBoxSizeMode.Zoom);
    Assert.AreEqual(PixelFormat.Format32bppArgb, img1234.PixelFormat);

    // Blend of 1 and 5 (8bpp and 32bpp): mixing pixel formats must
    // still yield a 32bpp ARGB result.
    homography = new MatrixH(1, 0, 0, 0, 1, 32, 0, 0);
    //ImageBox.Show("Image 1", img1, PictureBoxSizeMode.Zoom);
    blend = new Blend(homography, img1234);
    var img15 = blend.Apply(img1);
    //ImageBox.Show("Blend of 1 and 5", img15, PictureBoxSizeMode.Zoom);
    Assert.AreEqual(PixelFormat.Format32bppArgb, img1234.PixelFormat);
    Assert.AreEqual(PixelFormat.Format8bppIndexed, img1.PixelFormat);
    Assert.AreEqual(PixelFormat.Format32bppArgb, img15.PixelFormat);
    Assert.AreEqual(128, img15.Width);
    Assert.AreEqual(64, img15.Height);
}
/// <summary>
/// Merges the two input images into a panorama using the previously
/// estimated homography matrix, then shows the result as a thumbnail.
/// </summary>
private void BlendImages()
{
    // The original leaked both Bitmap instances (unmanaged GDI
    // handles); dispose them once the blended result is produced.
    // Blend only needs the overlay while Apply is running, so
    // disposing afterwards is safe.
    using (var overlayImage = new Bitmap(input_images[0]))
    using (var targetImage = new Bitmap(input_images[1]))
    {
        Blend blend = new Blend(homography, overlayImage);
        panorama = blend.Apply(targetImage);
    }

    ShowThumbnail(panorama, true, PanoramaPhase.Blend);
}
private void BtnBlend_OnClick(object sender, RoutedEventArgs e)
{
    // Step 4: project the first image through the homography and
    // blend it over the second one, showing the result on screen.
    PictureBox.Source = new Blend(homography, img1).Apply(img2);
}
public void Panorama_Example1()
{
    // Fix the RNG seed so RANSAC produces a deterministic homography.
    Accord.Math.Tools.SetupGenerator(0);

    // Two photographs of the same scene taken from slightly
    // different points of view:
    Bitmap leftImage = Resources.dc_left;
    Bitmap rightImage = Resources.dc_right;

    // Those pictures are shown below:
    // ImageBox.Show(leftImage, PictureBoxSizeMode.Zoom, 640, 480);
    // ImageBox.Show(rightImage, PictureBoxSizeMode.Zoom, 640, 480);

    // Step 1: Detect feature points using Surf Corners Detector
    var surfDetector = new SpeededUpRobustFeaturesDetector();
    var leftPoints = surfDetector.ProcessImage(leftImage);
    var rightPoints = surfDetector.ProcessImage(rightImage);

    // Step 2: Match feature points using a k-NN
    var knn = new KNearestNeighborMatching(5);
    var matches = knn.Match(leftPoints, rightPoints);

    // Step 3: Create the matrix using a robust estimator
    var estimator = new RansacHomographyEstimator(0.001, 0.99);
    MatrixH homographyMatrix = estimator.Estimate(matches);

    // With the seeded generator the estimate is reproducible:
    var elements = homographyMatrix.Elements;
    Assert.AreEqual(1.15707409, elements[0], 1e-5);
    Assert.AreEqual(-0.0233834628, elements[1], 1e-5);
    Assert.AreEqual(-261.8217, elements[2], 1e-2);
    Assert.AreEqual(0.08801343, elements[3], 1e-5);
    Assert.AreEqual(1.12451434, elements[4], 1e-5);
    Assert.AreEqual(-171.191208, elements[5], 1e-2);
    Assert.AreEqual(0.000127789128, elements[6], 1e-5);
    Assert.AreEqual(0.00006173445, elements[7], 1e-5);
    Assert.AreEqual(8, elements.Length);

    // Step 4: Project and blend using the homography
    var blender = new Blend(homographyMatrix, leftImage);

    // Compute the blending algorithm
    Bitmap result = blender.Apply(rightImage);

    // Show on screen
    // ImageBox.Show(result, PictureBoxSizeMode.Zoom, 640, 480);

    //result.Save(@"C:\Projects\Accord.NET\net35.png", ImageFormat.Png);

    // The reference image differs between runtimes, so pick the
    // baseline matching the framework this test was built for.
#if NET35
    Bitmap image = Properties.Resources.blend_net35;
#else
    Bitmap image = Properties.Resources.blend_net45;
#endif

#pragma warning disable 618
    double[,] expected = image.ToDoubleMatrix(0);
    double[,] actual = result.ToDoubleMatrix(0);
    Assert.IsTrue(Matrix.IsEqual(expected, actual, 0.1));
#pragma warning restore 618
}
// PANORAMIC STICHING CODE
// Accord.NET and AForge.NET frameworks and code examples provided from
// http://www.codeproject.com/KB/recipes/automatic_panoramas.aspx
private void panoramicStitchingToolStripMenuItem_Click(object sender, EventArgs e)
{
    // Keep a reference to the current image, then ask the user to
    // open the second image to merge with.
    Bitmap savedImage = img;
    openToolStripMenuItem_Click(sender, e);

    // If the user cancelled the open-image operation, the viewer
    // still holds the same image object and there is nothing to do.
    if (savedImage == img)
        return;

    Bitmap openedImage = img;

    // Use the Harris detector to find points of interest in both images.
    var harris = new HarrisCornersDetector(0.04f, 1000f);
    AForge.IntPoint[] harrisPoints1 = harris.ProcessImage(openedImage).ToArray();
    AForge.IntPoint[] harrisPoints2 = harris.ProcessImage(savedImage).ToArray();

    // This check fixes an out-of-bounds exception raised by
    // matcher.Match() when a mono-colour image (0 Harris points)
    // is stitched with a non-mono-colour image.
    if (harrisPoints1.Length == 0 || harrisPoints2.Length == 0)
    {
        MessageBox.Show("Panoramic stitching cannot continue because at least one of the images does not contain any Harris points.", "Warning");
        return;
    }

    // Pair the detected points by window correlation.
    var matcher = new CorrelationMatching(9);
    AForge.IntPoint[][] matches = matcher.Match(openedImage, savedImage, harrisPoints1, harrisPoints2);
    AForge.IntPoint[] correlationPoints1 = matches[0];
    AForge.IntPoint[] correlationPoints2 = matches[1];

    // Find the homography with RANSAC; Estimate() throws unless
    // both arrays contain at least 4 elements, so guard first.
    var ransac = new RansacHomographyEstimator(0.001, 0.99);
    if (correlationPoints1.Length < 4 || correlationPoints2.Length < 4)
    {
        MessageBox.Show("Panoramic stitching cannot continue because at least one of the images does not contain at least 4 correlation points.", "Warning");
        return;
    }

    MatrixH homography = ransac.Estimate(correlationPoints1, correlationPoints2);

    // Merge the images.
    Blend blend = new Blend(homography, openedImage);
    img = blend.Apply(savedImage);

    // Save the image properly and resize the main form.
    origImg.Dispose();
    origImg = new Bitmap(img);
    pictureBox.Image = img;
    mainForm.ActiveForm.Width = img.Width + widthPad;
    mainForm.ActiveForm.Height = img.Height + heightPad;
}
public void Example1()
{
    // Two pictures of the same scene taken from slightly
    // different points of view:
    Bitmap leftImage = Resources.dc_left;
    Bitmap rightImage = Resources.dc_right;

    // Display the source pictures.
    ImageBox.Show(leftImage, PictureBoxSizeMode.Zoom, 640, 480);
    ImageBox.Show(rightImage, PictureBoxSizeMode.Zoom, 640, 480);

    // Step 1: find interest points with the SURF corner detector.
    var surfDetector = new SpeededUpRobustFeaturesDetector();
    var leftPoints = surfDetector.ProcessImage(leftImage);
    var rightPoints = surfDetector.ProcessImage(rightImage);

    // Step 2: pair the feature points with a k-nearest-neighbour matcher.
    var knn = new KNearestNeighborMatching(5);
    var matches = knn.Match(leftPoints, rightPoints);

    // Step 3: robustly estimate the homography matrix with RANSAC.
    var estimator = new RansacHomographyEstimator(0.001, 0.99);
    MatrixH homographyMatrix = estimator.Estimate(matches);

    // Step 4: project the left image through the homography and
    // blend it over the right one.
    var blender = new Blend(homographyMatrix, leftImage);
    Bitmap panorama = blender.Apply(rightImage);

    // Display the stitched result.
    ImageBox.Show(panorama, PictureBoxSizeMode.Zoom, 640, 480);
}
private void btnBlend_Click(object sender, EventArgs e)
{
    // Guard against clicking Blend before RANSAC has estimated the
    // homography; without this the handler throws on a null matrix.
    // (Matches the sibling blend handler's behavior and message.)
    if (homography == null)
    {
        MessageBox.Show("Please, click RANSAC button first! :-)");
        return;
    }

    // Step 4: Project and blend the second image using the homography
    Blend blend = new Blend(homography, img1);
    pictureBox.Image = blend.Apply(img2);
}