/// <summary>
/// Stitches a list of images into a single panorama: detects Harris corners,
/// pre-rotates each new frame so its document skew matches the running panorama,
/// correlates the corners, estimates a RANSAC homography, and gradient-blends.
/// </summary>
/// <param name="imgs">Frames to stitch; imgs[0] seeds the panorama.</param>
protected void fastHarrisRansacBlendStraight(List<Bitmap> imgs)
{
    MatrixH homography;

    // Detect Harris corners once per input frame.
    HarrisCornersDetector harris = new HarrisCornersDetector(0.03f, 10000f);
    List<IntPoint[]> harrisPoints = new List<IntPoint[]>();
    foreach (Bitmap img in imgs)
    {
        harrisPoints.Add(harris.ProcessImage(img).ToArray());
    }

    Bitmap final = imgs[0];
    for (int i = 1; i < imgs.Count; i++)
    {
        // Grayscale copies let the skew checker estimate each frame's angle.
        AForge.Imaging.Filters.GrayscaleBT709 grayscale = new AForge.Imaging.Filters.GrayscaleBT709();
        AForge.Imaging.DocumentSkewChecker skew = new AForge.Imaging.DocumentSkewChecker();

        double finalAngle = skew.GetSkewAngle(grayscale.Apply(final));
        double imgAngle = skew.GetSkewAngle(grayscale.Apply(imgs[i]));

        // Only correct small differences (under 5 degrees) — enough to absorb
        // human error from hand-held rotations and wobbles.
        if (Math.Abs(finalAngle - imgAngle) < 5)
        {
            AForge.Imaging.Filters.RotateBilinear rotate =
                new AForge.Imaging.Filters.RotateBilinear(finalAngle - imgAngle);
            rotate.FillColor = Color.FromArgb(0, 255, 255, 255);
            imgs[i] = rotate.Apply(imgs[i]);

            // The rotation invalidates the cached corners for this frame; re-detect.
            harrisPoints[i] = harris.ProcessImage(imgs[i]).ToArray();
        }

        // The panorama grows each iteration, so its corners must be re-detected.
        IntPoint[] harrisFinal = harris.ProcessImage(final).ToArray();

        // Correlate the Harris points between the panorama and the new frame.
        CorrelationMatching matcher = new CorrelationMatching(5, final, imgs[i]);
        IntPoint[][] matches = matcher.Match(harrisFinal, harrisPoints[i]);

        // Estimate the homography from the matched pairs with RANSAC.
        RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.025, 0.99);
        homography = ransac.Estimate(matches[0], matches[1]);

        // Warp the new frame into the panorama and gradient-blend the seam.
        Blend blend = new Blend(homography, final);
        blend.Gradient = true;
        final = blend.Apply(imgs[i]);
    }

    showImage(final);
}
/// <summary>
/// Blend-button handler: projects img2 through the previously estimated
/// homography and shows the blended result in the picture box.
/// </summary>
private void btnBlend_Click(object sender, EventArgs e)
{
    // The homography is produced by the RANSAC step; without it there is nothing to blend.
    if (homography == null)
    {
        MessageBox.Show("Please, click RANSAC button first! :-)");
        return;
    }

    // Step 4: Project and blend the second image using the homography.
    Blend blend = new Blend(homography, img1);
    pictureBox.Image = blend.Apply(img2);
}
public override void Apply() { Blend.Apply(Material); Material.SetMatrix(P3dShader._Matrix, Matrix.inverse); Material.SetColor(P3dShader._Color, Color); Material.SetFloat(P3dShader._Opacity, Opacity); Material.SetFloat(P3dShader._Hardness, Hardness); Material.SetFloat(P3dShader._Squash, Squash); Material.SetTexture(P3dShader._Texture, Texture); Material.SetFloat(P3dShader._Strength, Strength); Material.SetFloat(P3dShader._Tiling, Tiling); }
/// <summary>
/// Pushes the shared blend state and this painter's shader uniforms
/// (direction, color, opacity, hardness, textures, front/back normals).
/// </summary>
public override void Apply()
{
    Blend.Apply(Material);

    // The shader consumes the inverse of the paint matrix.
    var inverse = Matrix.inverse;
    Material.SetMatrix(P3dShader._Matrix, inverse);
    Material.SetVector(P3dShader._Direction, Direction);
    Material.SetColor(P3dShader._Color, Color);
    Material.SetFloat(P3dShader._Opacity, Opacity);
    Material.SetFloat(P3dShader._Hardness, Hardness);
    Material.SetTexture(P3dShader._Texture, Texture);
    Material.SetTexture(P3dShader._Shape, Shape);
    Material.SetVector(P3dShader._NormalFront, NormalFront);
    Material.SetVector(P3dShader._NormalBack, NormalBack);
}
/// <summary>
/// Pushes the shared blend state and this painter's shader uniforms
/// (two line-segment endpoints, color, opacity, hardness, tile settings).
/// </summary>
public override void Apply()
{
    Blend.Apply(Material);

    // Segment endpoints of the two paint strokes.
    Material.SetVector(P3dShader._Position, Position);
    Material.SetVector(P3dShader._EndPosition, EndPosition);
    Material.SetVector(P3dShader._Position2, Position2);
    Material.SetVector(P3dShader._EndPosition2, EndPosition2);

    // The shader consumes the inverse of the paint matrix.
    var inverse = Matrix.inverse;
    Material.SetMatrix(P3dShader._Matrix, inverse);
    Material.SetColor(P3dShader._Color, Color);
    Material.SetFloat(P3dShader._Opacity, Opacity);
    Material.SetFloat(P3dShader._Hardness, Hardness);

    // Tiled-texture settings.
    Material.SetTexture(P3dShader._TileTexture, TileTexture);
    Material.SetMatrix(P3dShader._TileMatrix, TileMatrix);
    Material.SetFloat(P3dShader._TileBlend, TileBlend);
}
/// <summary>
/// Stitches a list of images into a single panorama using SURF features:
/// detects feature points per frame, matches them with k-NN, estimates a
/// RANSAC homography, and gradient-blends each frame into the result.
/// </summary>
/// <param name="imgs">Frames to stitch; imgs[0] seeds the panorama.</param>
protected void surfRansacBlend(List<Bitmap> imgs)
{
    MatrixH homography;

    // Detect SURF feature points once per input frame.
    SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector();
    List<SpeededUpRobustFeaturePoint[]> surfPoints = new List<SpeededUpRobustFeaturePoint[]>();
    for (int i = 0; i < imgs.Count; i++)
    {
        surfPoints.Add(surf.ProcessImage(imgs[i]).ToArray());
    }

    Bitmap final = imgs[0];
    for (int i = 1; i < imgs.Count; i++)
    {
        // The panorama changes each iteration, so its features must be re-detected.
        SpeededUpRobustFeaturePoint[] surfFinal = surf.ProcessImage(final).ToArray();

        // Match the SURF points (the original comment said "Harris" — these are SURF)
        // between the panorama and the new frame.
        KNearestNeighborMatching matcher = new KNearestNeighborMatching(5);
        matcher.Threshold = 0.05;
        IntPoint[][] matches = matcher.Match(surfFinal, surfPoints[i]);

        // Estimate the homography from the matched pairs with RANSAC.
        RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.015, 1);
        homography = ransac.Estimate(matches[0], matches[1]);

        // Warp the new frame into the panorama and gradient-blend the seam.
        Blend blend = new Blend(homography, final);
        blend.Gradient = true;
        final = blend.Apply(imgs[i]);
    }

    // NOTE(review): the original constructed an AForge Sharpen filter here but never
    // applied it; that dead allocation has been removed. To post-sharpen, create
    // the filter and call filter.ApplyInPlace(final) before showing the image.

    showImage(final);
}
/// <summary>
/// Stitches a list of images into a single panorama using FREAK keypoints:
/// detects keypoints per frame, matches them with k-NN, estimates a
/// RANSAC homography, and gradient-blends each frame into the result.
/// </summary>
/// <param name="imgs">Frames to stitch; imgs[0] seeds the panorama.</param>
protected void freakRansacBlend(List<Bitmap> imgs)
{
    MatrixH homography;

    // Detect FREAK keypoints once per input frame.
    FastRetinaKeypointDetector freak = new FastRetinaKeypointDetector();
    List<FastRetinaKeypoint[]> freakPoints = new List<FastRetinaKeypoint[]>();
    foreach (Bitmap img in imgs)
    {
        freakPoints.Add(freak.ProcessImage(img).ToArray());
    }

    // Map each subsequent frame onto the growing panorama.
    Bitmap final = imgs[0];
    for (int i = 1; i < imgs.Count; i++)
    {
        // The panorama changes each iteration, so its keypoints must be re-detected.
        FastRetinaKeypoint[] freakFinal = freak.ProcessImage(final).ToArray();

        // Match keypoints between the panorama and the new frame.
        KNearestNeighborMatching matcher = new KNearestNeighborMatching(500);
        matcher.Threshold = 0.005;
        IntPoint[][] matches = matcher.Match(freakFinal, freakPoints[i]);

        // Estimate the homography from the matched pairs with RANSAC.
        RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.015, 1);
        homography = ransac.Estimate(matches[0], matches[1]);

        // Warp the new frame into the panorama and gradient-blend the seam.
        Blend blend = new Blend(homography, final);
        blend.Gradient = true;
        final = blend.Apply(imgs[i]);
    }

    // NOTE(review): the original constructed an AForge Sharpen filter here but never
    // applied it; that dead allocation has been removed. To post-sharpen, create
    // the filter and call filter.ApplyInPlace(final) before showing the image.

    showImage(final);
}
/// <summary>
/// Pushes the shared blend state and this painter's shader uniforms
/// (segment endpoints, direction, color/opacity/hardness, wrapping,
/// shape textures and channels, front/back normals, tile settings).
/// </summary>
public override void Apply()
{
    Blend.Apply(Material);

    // Segment endpoints of the two paint strokes.
    Material.SetVector(P3dShader._Position, Position);
    Material.SetVector(P3dShader._EndPosition, EndPosition);
    Material.SetVector(P3dShader._Position2, Position2);
    Material.SetVector(P3dShader._EndPosition2, EndPosition2);

    // The shader consumes the inverse of the paint matrix.
    var inverse = Matrix.inverse;
    Material.SetMatrix(P3dShader._Matrix, inverse);
    Material.SetVector(P3dShader._Direction, Direction);
    Material.SetColor(P3dShader._Color, Color);
    Material.SetFloat(P3dShader._Opacity, Opacity);
    Material.SetFloat(P3dShader._Hardness, Hardness);
    Material.SetFloat(P3dShader._Wrapping, Wrapping);

    // Brush shape and surface normals.
    Material.SetTexture(P3dShader._Texture, Texture);
    Material.SetTexture(P3dShader._Shape, Shape);
    Material.SetVector(P3dShader._ShapeChannel, ShapeChannel);
    Material.SetVector(P3dShader._NormalFront, NormalFront);
    Material.SetVector(P3dShader._NormalBack, NormalBack);

    // Tiled-texture settings.
    Material.SetTexture(P3dShader._TileTexture, TileTexture);
    Material.SetMatrix(P3dShader._TileMatrix, TileMatrix);
    Material.SetFloat(P3dShader._TileBlend, TileBlend);
}
/// <summary>
/// Panorama example: detects SURF points in two overlapping pictures,
/// matches them with k-NN, estimates a RANSAC homography, and shows
/// the blended result.
/// </summary>
public void Example1()
{
    // Two pictures taken from slightly different points of view.
    Bitmap img1 = Resources.dc_left;
    Bitmap img2 = Resources.dc_right;

    // Show the input pictures.
    ImageBox.Show(img1, PictureBoxSizeMode.Zoom, 640, 480);
    ImageBox.Show(img2, PictureBoxSizeMode.Zoom, 640, 480);

    // Step 1: Detect feature points using the SURF corners detector.
    var surf = new SpeededUpRobustFeaturesDetector();
    var points1 = surf.ProcessImage(img1);
    var points2 = surf.ProcessImage(img2);

    // Step 2: Match feature points using a k-NN matcher.
    var matcher = new KNearestNeighborMatching(5);
    var matches = matcher.Match(points1, points2);

    // Step 3: Estimate the homography with a robust (RANSAC) estimator.
    var ransac = new RansacHomographyEstimator(0.001, 0.99);
    MatrixH homographyMatrix = ransac.Estimate(matches);

    // Step 4: Project img2 through the homography and blend it over img1.
    Blend blend = new Blend(homographyMatrix, img1);
    Bitmap result = blend.Apply(img2);

    // Show the stitched result on screen.
    ImageBox.Show(result, PictureBoxSizeMode.Zoom, 640, 480);
}
/// <summary>
/// Pushes the shared blend state and this painter's shader uniforms.
/// Segment endpoints are pre-transformed into matrix-local space, and the
/// color is converted from gamma before upload.
/// </summary>
public override void Apply()
{
    Blend.Apply(Material);

    // Endpoints are sent in the inverse-matrix (local) space the shader expects.
    var inverse = Matrix.inverse;
    Material.SetFloat(P3dShader._In3D, In3D ? 1.0f : 0.0f);
    Material.SetVector(P3dShader._Position, inverse.MultiplyPoint(Position));
    Material.SetVector(P3dShader._EndPosition, inverse.MultiplyPoint(EndPosition));
    Material.SetVector(P3dShader._Position2, inverse.MultiplyPoint(Position2));
    Material.SetVector(P3dShader._EndPosition2, inverse.MultiplyPoint(EndPosition2));
    Material.SetMatrix(P3dShader._Matrix, inverse);

    // Color is converted out of gamma space before upload.
    Material.SetColor(P3dShader._Color, P3dHelper.FromGamma(Color));
    Material.SetFloat(P3dShader._Opacity, Opacity);
    Material.SetFloat(P3dShader._Hardness, Hardness);

    // Tiled-texture settings.
    Material.SetTexture(P3dShader._TileTexture, TileTexture);
    Material.SetMatrix(P3dShader._TileMatrix, TileMatrix);
    Material.SetFloat(P3dShader._TileOpacity, TileOpacity);
    Material.SetFloat(P3dShader._TileTransition, TileTransition);

    // Mask settings (note: the _MaskTexture uniform is fed from MaskShape).
    Material.SetMatrix(P3dShader._MaskMatrix, MaskMatrix);
    Material.SetTexture(P3dShader._MaskTexture, MaskShape);
    Material.SetVector(P3dShader._MaskChannel, MaskChannel);
    Material.SetVector(P3dShader._MaskStretch, MaskStretch);
}
/// <summary>
/// Chains several Blend operations with pure-translation homographies and
/// verifies the output pixel format and final dimensions, including the
/// mixed 8bpp-over-32bpp case.
/// </summary>
public void ApplyTest2()
{
    // All four sources are the same 8bpp test image.
    var img1 = Properties.Resources.image2;
    var img2 = Properties.Resources.image2;
    var img3 = Properties.Resources.image2;
    var img4 = Properties.Resources.image2;

    // Pure horizontal translation by 32 pixels: (1, 0, 32, 0, 1, 0, 0, 0).
    MatrixH homography = new MatrixH(1, 0, 32, 0, 1, 0, 0, 0);

    // Blend 1 over 2.
    Blend blend = new Blend(homography, img1);
    var img12 = blend.Apply(img2);
    //ImageBox.Show("Blend of 1 and 2", img12, PictureBoxSizeMode.Zoom);
    Assert.AreEqual(img12.PixelFormat, PixelFormat.Format32bppArgb);

    // Blend 3 over 4 with the same translation.
    blend = new Blend(homography, img3);
    var img34 = blend.Apply(img4);
    //ImageBox.Show("Blend of 3 and 4", img34, PictureBoxSizeMode.Zoom);
    Assert.AreEqual(img34.PixelFormat, PixelFormat.Format32bppArgb);

    // Blend the two intermediate panoramas, shifted by 64 pixels.
    homography = new MatrixH(1, 0, 64, 0, 1, 0, 0, 0);
    blend = new Blend(homography, img12);
    var img1234 = blend.Apply(img34);
    //ImageBox.Show("Blend of 1, 2, 3, 4", img1234, PictureBoxSizeMode.Zoom);
    Assert.AreEqual(img1234.PixelFormat, PixelFormat.Format32bppArgb);

    // Blend of 1 and 5: mixes an 8bpp source with the 32bpp panorama,
    // shifted down by 32 pixels.
    homography = new MatrixH(1, 0, 0, 0, 1, 32, 0, 0);
    //ImageBox.Show("Image 1", img1, PictureBoxSizeMode.Zoom);
    blend = new Blend(homography, img1234);
    var img15 = blend.Apply(img1);
    //ImageBox.Show("Blend of 1 and 5", img15, PictureBoxSizeMode.Zoom);
    Assert.AreEqual(img1234.PixelFormat, PixelFormat.Format32bppArgb);
    Assert.AreEqual(img1.PixelFormat, PixelFormat.Format8bppIndexed);
    Assert.AreEqual(img15.PixelFormat, PixelFormat.Format32bppArgb);
    Assert.AreEqual(128, img15.Width);
    Assert.AreEqual(64, img15.Height);
}
/// <summary>
/// End-to-end panorama regression test: runs the full SURF → k-NN → RANSAC →
/// Blend pipeline with a fixed random seed and pins both the estimated
/// homography elements and the final blended image against stored references.
/// </summary>
public void Panorama_Example1()
{
    // Fixed seed so the RANSAC sampling (and hence the homography) is reproducible.
    Accord.Math.Random.Generator.Seed = 0;

    // Let's start with two pictures that have been
    // taken from slightly different points of view:
    Bitmap img1 = Accord.Imaging.Image.Clone(Resources.dc_left);
    Bitmap img2 = Accord.Imaging.Image.Clone(Resources.dc_right);

    // Those pictures are shown below:
    // ImageBox.Show(img1, PictureBoxSizeMode.Zoom, 640, 480);
    // ImageBox.Show(img2, PictureBoxSizeMode.Zoom, 640, 480);

    // Step 1: Detect feature points using Surf Corners Detector
    var surf = new SpeededUpRobustFeaturesDetector();
    var points1 = surf.ProcessImage(img1);
    var points2 = surf.ProcessImage(img2);

    // Step 2: Match feature points using a k-NN
    var matcher = new KNearestNeighborMatching(5);
    var matches = matcher.Match(points1, points2);

    // Step 3: Create the matrix using a robust estimator
    var ransac = new RansacHomographyEstimator(0.001, 0.99);
    MatrixH homographyMatrix = ransac.Estimate(matches);

    // Pin every element of the estimated homography against known-good values
    // (looser tolerance on the translation terms, which are larger in magnitude).
    Assert.AreEqual(1.15707409, homographyMatrix.Elements[0], 1e-5);
    Assert.AreEqual(-0.0233834628, homographyMatrix.Elements[1], 1e-5);
    Assert.AreEqual(-261.8217, homographyMatrix.Elements[2], 1e-2);
    Assert.AreEqual(0.08801343, homographyMatrix.Elements[3], 1e-5);
    Assert.AreEqual(1.12451434, homographyMatrix.Elements[4], 1e-5);
    Assert.AreEqual(-171.191208, homographyMatrix.Elements[5], 1e-2);
    Assert.AreEqual(0.000127789128, homographyMatrix.Elements[6], 1e-5);
    Assert.AreEqual(0.00006173445, homographyMatrix.Elements[7], 1e-5);
    Assert.AreEqual(8, homographyMatrix.Elements.Length);

    // Step 4: Project and blend using the homography
    Blend blend = new Blend(homographyMatrix, img1);

    // Compute the blending algorithm
    Bitmap result = blend.Apply(img2);

    // Show on screen
    // ImageBox.Show(result, PictureBoxSizeMode.Zoom, 640, 480);

    result = Accord.Imaging.Image.Clone(result);

    // The reference image differs by target framework (NET35 uses a different
    // build of the imaging pipeline), so pick the matching stored baseline.
#if NET35
    // result.Save(@"C:\Projects\Accord.NET\framework\Unit Tests\Accord.Tests.Imaging\Resources\blend_net35.png", ImageFormat.Png);
    Bitmap image =
        Accord.Imaging.Image.Clone(Resources.blend_net35);
#else
    // result.Save(@"C:\Projects\Accord.NET\framework\Unit Tests\Accord.Tests.Imaging\Resources\blend_net45.png", ImageFormat.Png);
    Bitmap image = Accord.Imaging.Image.Clone(Resources.blend_net45);
#endif

    // Compare channel 0 of the result against the stored reference with a small
    // tolerance; ToDoubleMatrix is obsolete, hence the pragma suppression.
#pragma warning disable 618
    double[,] expected = image.ToDoubleMatrix(channel: 0);
    double[,] actual = result.ToDoubleMatrix(channel: 0);
    Assert.IsTrue(Matrix.IsEqual(expected, actual, atol: 0.1));
#pragma warning restore 618
}
/// <summary>
/// Matches SURF feature points between two images (multi-threaded, tiled),
/// estimates a RANSAC homography, blends the images into the picture box,
/// and reports the elapsed time on the UI thread. The user-facing status
/// strings are in Chinese and are left unchanged.
/// </summary>
void SurfMatch(Bitmap img1, Bitmap img2)
{
    Stopwatch watch = Stopwatch.StartNew();

    //// Main-thread (single-threaded) variant, kept for reference:
    //List<IPoint2> ipts1 = GetFtPntList(img1, thread); // feature points of image 1
    //List<IPoint2> ipts2 = GetFtPntList(img2, thread); // feature points of image 2
    //List<IPoint2>[] matches = Utils.getMatches(ipts1, ipts2);

    // Multi-threaded matching, with the images split into tiles.
    ImageManager imgM = new ImageManager(img1, img2, thread, thread, cutSize);
    List<IPoint2>[] matches = imgM.GetMatchPoint();

    // Convert the matched feature points into Accord IntPoint arrays,
    // truncating the floating-point coordinates.
    IntPoint[] correlationPoints1 = new IntPoint[matches[0].Count];
    IntPoint[] correlationPoints2 = new IntPoint[matches[1].Count];
    List<IPoint2> list1 = matches[0];
    int num = 0;
    foreach (IPoint2 kv in list1)
    {
        correlationPoints1[num] = new IntPoint { X = (int)kv.x, Y = (int)kv.y };
        num++;
    }
    int num1 = 0;
    List<IPoint2> list2 = matches[1];
    foreach (IPoint2 kv in list2)
    {
        correlationPoints2[num1] = new IntPoint { X = (int)kv.x, Y = (int)kv.y };
        num1++;
    }

    if (correlationPoints1.Length > 0)
    {
        // Estimate the homography from the matched pairs and blend img2 over img1.
        RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
        MatrixH homography = ransac.Estimate(correlationPoints1, correlationPoints2);
        Blend blend = new Blend(homography, img1);
        pictureBox.Image = blend.Apply(img2);

        // Report the elapsed time ("done, took N ms/s") and enable the buttons,
        // marshalled onto the UI thread.
        long matchTime = watch.ElapsedMilliseconds;
        this.Invoke(new Action(delegate()
        {
            if (matchTime < 1000)
            {
                this.label1.Text = "完成!耗时 " + matchTime.ToString() + " 毫秒!";
            }
            else
            {
                this.label1.Text = "完成!耗时 " + (matchTime / 1000.0).ToString() + " 秒!";
            }
            this.btnSave.Visible = true;
            this.btnBlend.Enabled = true;
        }));
    }
    else
    {
        // No matching points were found; report the elapsed time.
        long matchTime = watch.ElapsedMilliseconds;
        this.Invoke(new Action(delegate()
        {
            this.label1.Text = "没有找到相同点!耗时 " + matchTime.ToString() + " 毫秒!";
        }));
    }

    watch.Stop();
    // NOTE(review): Thread.Abort is obsolete and throws PlatformNotSupportedException
    // on modern .NET; confirm whether thd can be stopped cooperatively instead.
    thd.Abort();
}
/// <summary>
/// Renders a single chixel (character + colors) of the render matrix to the
/// terminal at screen position (area.Source + x, y), with optional recording
/// to / restoring from an internal memory image and just-in-time opacity
/// blending against what is already on screen.
/// </summary>
/// <param name="x">Column within the render matrix.</param>
/// <param name="y">Row within the render matrix.</param>
/// <param name="option">Flag set controlling record/restore/reset/cleanup/blank behavior.</param>
/// <param name="outmode">Pointer-advance mode forwarded to Terminal.Put.</param>
public void RenderChixel(ushort x, ushort y, RenderOption option, PointerAdvance outmode)
{
    // Debug aid: surface the current screen origin in the console title.
    Console.Title = "[Sx Sy]: [" + area.Source.X + " " + area.Source.Y + "]";
    Chixel final = new Chixel();
    Chixel current;

    // update the internal image of the screen
    if ((RenderOption.Record & option) == RenderOption.Record)
    {
        memory[x, y] = new Chixel(matrix[x, y]);
    }

    // do not render if chixel is not on the screen
    // NOTE(review): the second clause only checks the upper bounds (<= Width/Height),
    // not >= 0, and is OR'd with the pointer-equality check — confirm this matches
    // the intended clipping behavior.
    if ((Terminal.PointerX == area.Source.X + x && Terminal.PointerY == area.Source.Y + y) ||
        (area.Source.X + x <= Terminal.Width && area.Source.Y + y <= Terminal.Height))
    {
        Terminal.Pointer((ushort)(area.Source.X + x), (ushort)(area.Source.Y + y));
    }
    else
    {
        return;
    }

    // just-in-time opacity merging: only blend when the chixel is not fully opaque
    if (matrix[x, y].MeshColor.Opacity != Color.ARGB_MAX)
    {
        // override view level for opacity blending
        if ((RenderOption.Reset & option) == RenderOption.Reset)
        {
            Terminal.Colors(Chixel.DEFAULT_FG.Console, Chixel.DEFAULT_BG.Console);
            Terminal.Put(Chixel.EMPTY_GLYPH, (Terminal.OutputMode)outmode, GetHashCode());
        }

        // Read back what is currently on screen at this cell (glyph + colors).
        current = new Chixel(Terminal.GlyphAt((ushort)(area.Source.X + x), (ushort)(area.Source.Y + y), VIEW_LEVEL),
                             new Color(Terminal.ForeAt((ushort)(area.Source.X + x), (ushort)(area.Source.Y + y), VIEW_LEVEL)),
                             new Color(Terminal.BackAt((ushort)(area.Source.X + x), (ushort)(area.Source.Y + y), VIEW_LEVEL)));

        // Blend the new chixel's color over the on-screen color, then emit.
        final.MeshColor = Blend.Apply(matrix[x, y].MeshColor, current.MeshColor,
                                      Blend.DEFAULT_POS, Blend.DEFAULT_USE_GAMMA, Blend.DEFAULT_MODE);
        Terminal.Colors(final.Fore.Console, final.Back.Console);
        Terminal.Put(final.Glyph, (Terminal.OutputMode)outmode, GetHashCode());
    }
    else
    {
        // Fully opaque: emit the chixel directly, no blending needed.
        Terminal.Colors(matrix[x, y].Fore.Console, matrix[x, y].Back.Console);
        Terminal.Put(matrix[x, y].Glyph, (Terminal.OutputMode)outmode, GetHashCode());
    }

    // return the render matrix to the internal image of the screen
    if ((RenderOption.Restore & option) == RenderOption.Restore)
    {
        matrix[x, y] = new Chixel(memory[x, y]);
    }

    // reset the chixels in the internal image
    if ((RenderOption.CleanUp & option) == RenderOption.CleanUp)
    {
        memory[x, y] = new Chixel();
    }

    // reset the chixels in the render matrix
    if ((RenderOption.Blank & option) == RenderOption.Blank)
    {
        matrix[x, y] = new Chixel();
    }
}