// Menu handler: grayscales the image in pictureBox1 (BT709) and runs a
// difference edge detector on it, showing the result in pictureBox2.
private void differenceToolStripMenuItem_Click(object sender, EventArgs e)
{
    // Grayscale first: DifferenceEdgeDetector accepts only 8bpp grayscale input.
    GrayscaleBT709 grayObject = new GrayscaleBT709();
    // Fix: the original assigned the grayscale bitmap to pictureBox2.Image and
    // then overwrote that assignment with the edge image, leaking the
    // intermediate. Keep it local and dispose it instead.
    using (Bitmap gray = grayObject.Apply((Bitmap)pictureBox1.Image))
    {
        DifferenceEdgeDetector filter = new DifferenceEdgeDetector();
        pictureBox2.Image = filter.Apply(gray);
    }
}
// Grayscales `source`, runs the configured edge filter over it, and paints
// the filtered result back onto `source` to emulate ApplyInPlace.
private void ProcessSingleImage(Bitmap source)
{
    using (Bitmap gray = Grayscale.CommonAlgorithms.BT709.Apply(source))
    using (Bitmap tmp = filter.Apply(gray))
    // Fix: the Graphics object was never disposed, leaking a GDI handle on
    // every call.
    using (Graphics g = Graphics.FromImage(source))
    {
        // Paint the result on the source to emulate ApplyInPlace.
        g.DrawImage(tmp, 0, 0);
    }
}
/// <summary>
/// Produces a difference-edge-detected frame from <paramref name="source"/>.
/// Takes ownership of the input bitmap: both it and the intermediate
/// grayscale frame are disposed before returning.
/// </summary>
public Bitmap GetNewFrame(Bitmap source)
{
    Bitmap grayFrame = Grayscale.CommonAlgorithms.RMY.Apply(source);
    source.Dispose();

    Bitmap edgeFrame = differenceEdgeDetector.Apply(grayFrame);
    grayFrame.Dispose();

    return edgeFrame;
}
// Background-worker entry: runs the selected edge detector (Sobel /
// Difference / Canny) over the chosen source image and replaces the image in
// outputImageBox with the result.
public void ApplyEdge(object sender, DoWorkEventArgs e)
{
    // Select the source: grayscaled original input, or the current output.
    Bitmap raw_image = null;
    if (edgeInputRB.Checked)
    {
        raw_image = Accord.Imaging.Filters.Grayscale.CommonAlgorithms.BT709.Apply((Bitmap)input_PB.Image.Clone());
    }
    else if (edgeOutputRb.Checked)
    {
        raw_image = (Bitmap)outputImageBox.Image.Clone();
    }

    // Fix: the original dereferenced raw_image unconditionally, throwing a
    // NullReferenceException when neither source radio button was checked.
    if (raw_image == null)
    {
        return;
    }

    UnmanagedImage source = UnmanagedImage.FromManagedImage(raw_image);
    UnmanagedImage res = null;
    try
    {
        if (sobelRb.Checked)
        {
            res = new SobelEdgeDetector().Apply(source);
        }
        else if (prewittRb.Checked)
        {
            // NOTE(review): the "Prewitt" radio button runs a
            // DifferenceEdgeDetector, same as the original code — confirm intent.
            res = new DifferenceEdgeDetector().Apply(source);
        }
        else if (CannyRb.Checked)
        {
            CannyEdgeDetector canny = new CannyEdgeDetector();
            // Parse the UI threshold fields; invalid text still throws
            // FormatException, as before.
            canny.HighThreshold = byte.Parse(textBox3.Text);
            canny.LowThreshold = byte.Parse(textBox2.Text);
            canny.GaussianSigma = double.Parse(textBox1.Text);
            canny.GaussianSize = int.Parse(textBox4.Text);
            res = canny.Apply(source);
        }

        if (res != null)
        {
            outputImageBox.Image.Dispose();
            // ToManagedImage() copies, so res can be disposed afterwards.
            outputImageBox.Image = res.ToManagedImage();
        }
    }
    finally
    {
        // Fix: the original leaked raw_image and every UnmanagedImage.
        source.Dispose();
        if (res != null)
        {
            res.Dispose();
        }
        raw_image.Dispose();
    }
}
// Loads an image, boosts its contrast and saves it, then runs difference edge
// detection over the contrast-corrected image and saves the edge map.
private static void function()
{
    string path = "filePath";
    using (Bitmap image = (Bitmap)Bitmap.FromFile(path))
    {
        ContrastCorrection contrastCorrection = new ContrastCorrection(200);
        using (Bitmap newImage = contrastCorrection.Apply(image))
        {
            newImage.Save("filterresult.png");

            // Fix: DifferenceEdgeDetector accepts only 8bpp grayscale images;
            // applying it directly to the color bitmap throws
            // UnsupportedImageFormatException. Grayscale first.
            using (Bitmap gray = Grayscale.CommonAlgorithms.BT709.Apply(newImage))
            {
                DifferenceEdgeDetector filter2 = new DifferenceEdgeDetector();
                using (Bitmap myFile = filter2.Apply(gray))
                {
                    // File name kept for compatibility, although the filter is
                    // a difference detector, not Sobel.
                    myFile.Save("sobel.png");
                }
            }
        }
    }
    // Note: the original also configured a BlobCounter here that was never
    // used; the dead local has been removed.
}
// Shows a difference-edge-detected version of the image at `path` in a new
// tab-workspace smart part (replacing the previous one, if any).
public void DifferenceClickHandler(object sender, EventArgs e)
{
    if (image != null)
    {
        // Fix: the original reassigned `imx` at each step, leaking the loaded
        // and grayscaled intermediates; dispose each once consumed.
        Bitmap loaded = new Bitmap(path);
        Bitmap grayed = Grayscale.CommonAlgorithms.Y.Apply(loaded);
        loaded.Dispose();

        DifferenceEdgeDetector gb = new DifferenceEdgeDetector();
        Bitmap imx = gb.Apply(grayed);
        grayed.Dispose();

        if (mov != null)
        {
            this.WorkItem.Workspaces[WorkspaceNames.TabWorkspace].Close(mov);
        }
        mov = this.WorkItem.SmartParts.AddNew<ImageAView>();
        mov.panAndZoomPictureBox1.Image = imx;
        SmartPartInfo spi = new SmartPartInfo("Difference", "MyOwnDescription");
        this.WorkItem.Workspaces[WorkspaceNames.TabWorkspace].Show(mov, spi);
    }
}
// Click handler: grayscale -> difference edge detection -> threshold over the
// input image, with a 3-step progress bar; any failure is shown in a dialog.
private void BtnDetectEdges_Click(object sender, EventArgs e)
{
    try
    {
        ProgressBar.Maximum = 3;
        ProgressBar.Value = 0;

        UnmanagedImage image = UnmanagedImage.FromManagedImage((Bitmap)ImgInput.Image);
        try
        {
            // 1 - grayscaling (skipped when the source is already 8bpp indexed)
            UnmanagedImage grayImage;
            bool ownsGray = false;
            if (image.PixelFormat == PixelFormat.Format8bppIndexed)
            {
                grayImage = image;
            }
            else
            {
                grayImage = UnmanagedImage.Create(image.Width, image.Height, PixelFormat.Format8bppIndexed);
                ownsGray = true;
                Grayscale.CommonAlgorithms.BT709.Apply(image, grayImage);
            }
            ProgressBar.Value++;

            // 2 - Edge detection
            DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
            UnmanagedImage edgesImage = edgeDetector.Apply(grayImage);
            ProgressBar.Value++;

            // 3 - Threshold edges
            Threshold thresholdFilter = new Threshold((int)NumericTrashold.Value);
            thresholdFilter.ApplyInPlace(edgesImage);
            ProgressBar.Value++;

            // ToManagedImage() copies, so the unmanaged buffers can be freed.
            ImgOutput.Image = edgesImage.ToManagedImage();

            // Fix: the original leaked every UnmanagedImage allocated here.
            edgesImage.Dispose();
            if (ownsGray)
            {
                grayImage.Dispose();
            }
        }
        finally
        {
            image.Dispose();
        }
    }
    catch (Exception exception)
    {
        MessageBox.Show(exception.Message);
    }
}
// Click handler: runs the image-processing pipeline selected in comboBox4.
// The button label toggles to "处理中" ("processing") while working.
//   0 - oil painting effect
//   1 - difference edge detection + threshold + quadrilateral blob search
//   2 - template matching between two fixed files on disk
//   3 - blob filtering over a fixed edge image on disk
//   4 - grayscale + Canny edge detection
private void button2_Click(object sender, EventArgs e)
{
    button2.Text = "处理中";
    switch (comboBox4.SelectedIndex)
    {
        case 0:
        {
            Bitmap temp = (Bitmap)pictureBox1.Image;
            OilPainting filter3 = new OilPainting(10);
            // apply the filter
            filter3.ApplyInPlace(temp);
            this.pictureBox2.Image = ResizeBitmap(temp);
            break;
        }

        case 1:
        {
            // Grayscale -> difference edge detection -> threshold, then blob
            // analysis looking for quadrilateral shapes.
            Bitmap temp = (Bitmap)pictureBox1.Image;
            temp = new Grayscale(0.2125, 0.7154, 0.0721).Apply(temp);
            DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
            temp = edgeDetector.Apply(temp);
            temp = new Threshold((int)numericUpDown1.Value).Apply(temp);
            //FillHoles filter2 = new FillHoles();
            //filter2.MaxHoleHeight = MinHeight;
            //filter2.MaxHoleWidth = MaxWidth;
            //filter2.CoupledSizeFiltering = false;
            // apply the filter
            //temp = filter2.Apply(temp);
            //HorizontalRunLengthSmoothing hrls = new HorizontalRunLengthSmoothing(40);
            // apply the filter
            //hrls.ApplyInPlace(temp);
            /*AForge.Imaging.Filters.BlobsFiltering filter = new AForge.Imaging.Filters.BlobsFiltering();
             * // set filtering conditions (objects at least 70 wide/tall)
             * filter.CoupledSizeFiltering = true;
             * filter.MaxWidth = (int)numericUpDown3.Value;
             * filter.MaxHeight = (int)numericUpDown4.Value;
             * filter.MinWidth = (int)numericUpDown5.Value;
             * filter.MinHeight = (int)numericUpDown6.Value;
             * filter.ApplyInPlace(temp);*/
            BlobCounter blobCounter = new BlobCounter();
            blobCounter.MinHeight = 32;
            blobCounter.MinWidth = 32;
            blobCounter.FilterBlobs = true;
            blobCounter.ObjectsOrder = ObjectsOrder.Size;
            // 4 - find all stand alone blobs
            blobCounter.ProcessImage(temp);
            Blob[] blobs = blobCounter.GetObjectsInformation();
            SimpleShapeChecker shapeChecker = new SimpleShapeChecker();
            List<IntPoint> corners = null;
            // NOTE(review): corners2 is declared but never assigned below.
            List<IntPoint> corners2 = null;
            for (int i = 0, n = blobs.Length; i < n; i++)
            {
                List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
                // does it look like a quadrilateral ?
                if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
                {
                    // get edge points on the left and on the right side
                    List<IntPoint> leftEdgePoints, rightEdgePoints;
                    blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);
                    listBox1.DataSource = leftEdgePoints;
                    listBox2.DataSource = rightEdgePoints;
                }
            }
            //listBox1.DataSource = corners;
            //listBox2.DataSource = corners2;
            // NOTE(review): result is written back into pictureBox1 (the
            // source box), unlike cases 0/4 which target pictureBox2 — confirm intent.
            this.pictureBox1.Image = temp;
            break;
        }

        case 2:
        {
            Bitmap bt2 = new Bitmap(@"D:\TCL条码\截图01.bmp");
            Bitmap bt1 = new Bitmap(@"D:\TCL条码\截图03.bmp");
            //Bitmap bt1 = new Bitmap(pictureBox2.Image);
            ExhaustiveTemplateMatching tm = new ExhaustiveTemplateMatching(0.80f);
            // obtain matching blocks above the similarity threshold
            TemplateMatch[] matchings = tm.ProcessImage(bt1, bt2);
            BitmapData data = bt1.LockBits(
                new Rectangle(0, 0, bt1.Width, bt1.Height),
                ImageLockMode.ReadWrite, bt1.PixelFormat);
            // highlight each match with a red rectangle
            foreach (TemplateMatch m in matchings)
            {
                Drawing.Rectangle(data, m.Rectangle, Color.Red);
            }
            bt1.UnlockBits(data);
            pictureBox2.Image = bt1;
            break;
        }

        case 3:
        {
            Bitmap bt2 = new Bitmap(@"D:\TCL条码\Canny算法.png");
            AForge.Imaging.Filters.BlobsFiltering filter = new AForge.Imaging.Filters.BlobsFiltering();
            // set filtering conditions (size bounds from the numeric controls)
            filter.CoupledSizeFiltering = true;
            filter.MaxWidth = (int)numericUpDown3.Value;
            filter.MaxHeight = (int)numericUpDown4.Value;
            filter.MinWidth = (int)numericUpDown5.Value;
            filter.MinHeight = (int)numericUpDown6.Value;
            filter.ApplyInPlace(bt2);
            pictureBox1.Image = bt2;
            // NOTE(review): RESULT is computed but never used here.
            byte[] RESULT = BitmapToBytes(bt2);
            break;
        }

        case 4:
        {
            Bitmap temp = (Bitmap)pictureBox1.Image;
            temp = new Grayscale(0.2125, 0.7154, 0.0721).Apply(temp);
            AForge.Imaging.Filters.CannyEdgeDetector filter = new AForge.Imaging.Filters.CannyEdgeDetector();
            filter.ApplyInPlace(temp);
            pictureBox2.Image = temp;
            break;
        }
    }
    button2.Text = "处理";
}
// Applies the filter identified by `filter` (an index looked up in
// Filters.filters) to `img` and returns the result normalized to 32bpp ARGB.
// Branches that use AForge edge detectors / dithering / threshold grayscale
// the source first (those filters require 8bpp input); branches that need
// 24bpp RGB convert accordingly. Unknown filter ids return the (converted)
// source unchanged.
public static Bitmap FilterImage(Bitmap img, int filter)
{
    Bitmap sourceImage = img;
    sourceImage = ImageUtil.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
    IFilter myFilter;
    // Default: the unfiltered (converted) source when no branch matches.
    Bitmap filteredImage = sourceImage;
    if (filter == Filters.filters["Greyscale"])
    {
        sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
        filteredImage = sourceImage;
    }
    else if (filter == Filters.filters["Sepia"])
    {
        myFilter = new Sepia();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Invert"])
    {
        sourceImage = ImageUtil.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        myFilter = new Invert();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["RotateChannel"])
    {
        myFilter = new RotateChannels();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Threshold"])
    {
        sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
        myFilter = new Threshold();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["FloydFilter"])
    {
        FloydSteinbergColorDithering myReduction = new FloydSteinbergColorDithering();
        filteredImage = myReduction.Apply(sourceImage);
    }
    else if (filter == Filters.filters["OrderedDithering"])
    {
        sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
        myFilter = new OrderedDithering();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Sharpen"])
    {
        myFilter = new Sharpen();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["DifferenceEdgeDetector"])
    {
        sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
        myFilter = new DifferenceEdgeDetector();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["HomogenityEdgeDetector"])
    {
        sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
        myFilter = new HomogenityEdgeDetector();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Sobel"])
    {
        sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
        myFilter = new SobelEdgeDetector();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Jitter"])
    {
        myFilter = new Jitter(); //Needs Expand
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["OilPainting"])
    {
        sourceImage = ImageUtil.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        myFilter = new OilPainting(); //Needs Expand
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["TextureFiltering"])
    {
        sourceImage = ImageUtil.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        myFilter = new Texturer(new TextileTexture(), 1.0, 0.8); //Needs Expand
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Median"])
    {
        sourceImage = ImageUtil.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        myFilter = new Median();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Mean"])
    {
        myFilter = new Mean();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Blur"])
    {
        myFilter = new GaussianBlur();
        filteredImage = myFilter.Apply(sourceImage);
    }
    //Console.Write(filteredImage.PixelFormat.ToString());
    //Console.Write(sourceImage.PixelFormat.ToString());
    // Normalize the output back to 32bpp ARGB for the caller.
    filteredImage = ImageUtil.convert(filteredImage, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
    return(filteredImage);
}
/// <summary>
/// This is the method that actually does the work.
/// </summary>
/// <param name="DA">The DA object can be used to retrieve data from input parameters and
/// to store data in output parameters.</param>
protected override void SolveInstance(IGH_DataAccess DA)
{
    // Inputs: 0 = source bitmap, 1 = filter name (string key of the switch).
    Bitmap sourceImage = null;
    DA.GetData(0, ref sourceImage);
    string filter = "";
    DA.GetData(1, ref filter);
    // Normalize the input to 32bpp ARGB before filtering.
    sourceImage = ImageUtilities.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
    IFilter myFilter;
    // Default: the unfiltered (converted) source when no case matches.
    Bitmap filteredImage = sourceImage;
    //Grayscale.CommonAlgorithms.Y.Apply
    switch (filter)
    {
        case "Greyscale":
            Console.Write("Applying: " + filter);
            sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
            filteredImage = sourceImage;
            break;
        case "Sepia":
            Console.Write("Applying: " + filter);
            myFilter = new Sepia();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Invert":
            Console.Write("Applying: " + filter);
            sourceImage = ImageUtilities.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
            myFilter = new Invert();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "RotateChannel":
            Console.Write("Applying: " + filter);
            myFilter = new RotateChannels();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Threshold": //Need Extended Version
            Console.Write("Applying: " + filter);
            // Threshold requires an 8bpp grayscale source.
            sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
            myFilter = new Threshold();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "FloydFilter":
            Console.Write("Applying: " + filter);
            //sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
            //myFilter = new FloydSteinbergColorDithering();
            FloydSteinbergColorDithering myReduction = new FloydSteinbergColorDithering();
            filteredImage = myReduction.Apply(sourceImage);
            //filteredImage = myFilter.Apply(sourceImage);
            break;
        case "OrderedDithering":
            Console.Write("Applying: " + filter);
            sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
            myFilter = new OrderedDithering();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Sharpen":
            Console.Write("Applying: " + filter);
            myFilter = new Sharpen();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "DifferenceEdgeDetector":
            Console.Write("Applying: " + filter);
            // Edge detectors require an 8bpp grayscale source.
            sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
            myFilter = new DifferenceEdgeDetector();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "HomogenityEdgeDetector":
            Console.Write("Applying: " + filter);
            sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
            myFilter = new HomogenityEdgeDetector();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Sobel":
            Console.Write("Applying: " + filter);
            sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
            myFilter = new SobelEdgeDetector();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Jitter":
            Console.Write("Applying: " + filter);
            myFilter = new Jitter(); //Needs Expand
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "OilPainting":
            Console.Write("Applying: " + filter);
            myFilter = new OilPainting(); //Needs Expand
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "TextureFiltering":
            Console.Write("Applying: " + filter);
            sourceImage = ImageUtilities.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
            myFilter = new Texturer(new TextileTexture(), 1.0, 0.8); //Needs Expand
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Median":
            Console.Write("Applying: " + filter);
            myFilter = new Median();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Mean":
            Console.Write("Applying: " + filter);
            myFilter = new Mean();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Blur": //Need Extended Version
            Console.Write("Applying: " + filter);
            myFilter = new GaussianBlur();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        default:
            Console.Write("No Filter");
            break;
    }
    Console.Write(filteredImage.PixelFormat.ToString());
    Console.Write(sourceImage.PixelFormat.ToString());
    // Normalize the output back to 32bpp ARGB before handing it to Grasshopper.
    filteredImage = ImageUtilities.convert(filteredImage, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
    DA.SetData(0, filteredImage);
}
/// <summary>
/// Runs a difference edge detector over the first input bitmap.
/// The input is converted to 8bpp indexed first, as the detector requires.
/// </summary>
public override Bitmap ApplyFilter(List<Bitmap> bitmaps)
{
    Bitmap grayscale = bitmaps[0].ConvertPixelFormat(PixelFormat.Format8bppIndexed);
    DifferenceEdgeDetector detector = new DifferenceEdgeDetector();
    return detector.Apply(grayscale);
}
/// <summary>
/// Search for glyphs in the specified image and recognize them.
/// </summary>
///
/// <param name="image">Image to search glyphs in.</param>
///
/// <returns>Return a list of found glyphs.</returns>
///
/// <remarks><para>The method does processing of the specified image and searches for glyphs in it of
/// the specified <see cref="GlyphSize">size</see>. In the case if <see cref="GlyphDatabase">glyphs' database</see>
/// is set, it tries to find a matching glyph in it for each found glyph in the image. If matching is found,
/// then <see cref="ExtractedGlyphData.RecognizedGlyph">RecognizedGlyph</see> and
/// <see cref="ExtractedGlyphData.RecognizedQuadrilateral">RecognizedQuadrilateral</see>
/// properties of <see cref="ExtractedGlyphData"/> are set correspondingly.</para></remarks>
///
/// <exception cref="UnsupportedImageFormatException">Pixel format of the specified image is not supported.
/// It must be 8 bpp indexed or 24/32 bpp color image.</exception>
///
public List<ExtractedGlyphData> FindGlyphs(UnmanagedImage image)
{
    List<ExtractedGlyphData> extractedGlyphs = new List<ExtractedGlyphData>( );

    // Reject pixel formats the BT709 grayscaler cannot translate.
    if ((image.PixelFormat != PixelFormat.Format8bppIndexed) &&
        (!Grayscale.CommonAlgorithms.BT709.FormatTranslations.ContainsKey(image.PixelFormat)))
    {
        throw new UnsupportedImageFormatException("Pixel format of the specified image is not supported.");
    }

    // 1 - grayscaling (reuse the source when it is already 8bpp indexed)
    UnmanagedImage grayImage = null;

    if (image.PixelFormat == PixelFormat.Format8bppIndexed)
    {
        grayImage = image;
    }
    else
    {
        grayImage = UnmanagedImage.Create(image.Width, image.Height, PixelFormat.Format8bppIndexed);
        Grayscale.CommonAlgorithms.BT709.Apply(image, grayImage);
    }

    // 2 - Edge detection
    UnmanagedImage edgesImage = edgeDetector.Apply(grayImage);

    // 3 - Threshold edges
    thresholdFilter.ApplyInPlace(edgesImage);

    // 4 - Blob Counter
    blobCounter.ProcessImage(edgesImage);
    Blob[] blobs = blobCounter.GetObjectsInformation( );

    // 5 - check each blob
    for (int i = 0, n = blobs.Length; i < n; i++)
    {
        List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
        List<IntPoint> corners = null;

        // does it look like a quadrilateral ?
        if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
        {
            // get edge points on the left and on the right side
            List<IntPoint> leftEdgePoints, rightEdgePoints;

            blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);

            // calculate average difference between pixel values from outside of the shape and from inside
            float diff = CalculateAverageEdgesBrightnessDifference(
                leftEdgePoints, rightEdgePoints, grayImage);

            // check average difference, which tells how much outside is lighter than inside on the average
            if (diff > 20)
            {
                // perform glyph recognition
                ExtractedGlyphData glyphData = RecognizeGlyph(grayImage, corners);

                if (glyphData != null)
                {
                    extractedGlyphs.Add(glyphData);

                    // stop early once the configured glyph limit is reached
                    if (extractedGlyphs.Count >= maxNumberOfGlyphsToSearch)
                    {
                        break;
                    }
                }
            }
        }
    }

    // dispose resources (grayImage is owned here only if it was created here)
    if (image.PixelFormat != PixelFormat.Format8bppIndexed)
    {
        grayImage.Dispose( );
    }
    edgesImage.Dispose( );

    return(extractedGlyphs);
}
// Detects square markers in `image` (edge detection + thresholding + blob
// analysis), classifies each marker's orientation/color, reports the marker
// centers, and replaces `image` with the selected debug view on return.
public void Detect(ref Bitmap image)
{
    List<List<IntPoint>> markers = new List<List<IntPoint>>();
    Bitmap tmp = image;
    BitmapData bitmapData = image.LockBits(new Rectangle(0, 0, image.Width, image.Height),
        ImageLockMode.ReadOnly, image.PixelFormat);
    UnmanagedImage unmanagedImage = new UnmanagedImage(bitmapData);
    UnmanagedImage grayImage = UnmanagedImage.Create(unmanagedImage.Width, unmanagedImage.Height,
        PixelFormat.Format8bppIndexed);
    Grayscale.CommonAlgorithms.BT709.Apply(unmanagedImage, grayImage);
    DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
    UnmanagedImage edgesImage = edgeDetector.Apply(grayImage);
    image.UnlockBits(bitmapData);
    // Optional debug view: raw edges before thresholding.
    if (this.edgeImage.Checked)
    {
        tmp = edgesImage.ToManagedImage().Clone(new Rectangle(0, 0, edgesImage.Width, edgesImage.Height),
            PixelFormat.Format24bppRgb);
    }
    Threshold thresholdFilter = new Threshold(this.binThreshold);
    thresholdFilter.ApplyInPlace(edgesImage);
    // Optional debug view: edges after thresholding.
    if (this.thresholdEdgeImage.Checked)
    {
        tmp = edgesImage.ToManagedImage().Clone(new Rectangle(0, 0, edgesImage.Width, edgesImage.Height),
            PixelFormat.Format24bppRgb);
    }
    this.blobCounter.ProcessImage(edgesImage);
    Blob[] blobs = blobCounter.GetObjectsInformation();
    for (int i = 0, n = blobs.Length; i < n; i++)
    {
        List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
        List<IntPoint> corners = null;
        if (this.isSquare(edgePoints, out corners))
        {
            List<IntPoint> leftEdgePoints, rightEdgePoints;
            blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);
            float diff = calculateAverageEdgesBrightnessDifference(
                leftEdgePoints, rightEdgePoints, grayImage);
            // Accept only squares whose outside is noticeably lighter than inside.
            if (diff > 50)
            {
                markers.Add(corners);
            }
        }
    }
    foreach (List<IntPoint> marker in markers)
    {
        Color markerColor;
        IntPoint markerOrientation = this.markerOrientation(image, marker, out markerColor);
        // Marker center = midpoint of diagonal corners 0 and 2.
        IntPoint center = marker[2] - marker[0];
        center.X = marker[0].X + Convert.ToInt32(center.X * 0.5);
        center.Y = marker[0].Y + Convert.ToInt32(center.Y * 0.5);
        if (this.drawMarkersOnVideo.Checked)
        {
            // Draw onto whichever bitmap will be returned (debug view or original).
            if ((this.edgeImage.Checked) || (this.thresholdEdgeImage.Checked))
            {
                this.drawMarker(tmp, marker, markerOrientation, markerColor);
            }
            else
            {
                this.drawMarker(image, marker, markerOrientation, markerColor);
            }
        }
        ColorDiscriminator discriminator = new ColorDiscriminator();
        discriminator.Color = markerColor;
        LocationSourceManager.Instance.updateLocationSource(discriminator, center);
    }
    image = tmp;
}
//eventhandler if new frame is ready
// Runs on the capture thread: computes FPS stats every 10 frames, samples the
// tracked pixel's HSL values, runs grayscale/edge/threshold processing plus
// per-color (red/green/blue) HSL isolation, and marshals all UI updates
// through Invoke.
private void video_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    Bitmap img = (Bitmap)eventArgs.Frame.Clone();
    // Every 10 frames, estimate FPS and push FPS/resolution to the UI thread.
    if (counterImg == 10)
    {
        double delaisImage = DateTime.Now.TimeOfDay.TotalMilliseconds - _mill_last_pic;
        _mill_last_pic = DateTime.Now.TimeOfDay.TotalMilliseconds;
        // NOTE(review): evaluates as (1/delaisImage)*1000*counterImg + 1 —
        // confirm the trailing "+ 1" and the operator precedence are intended.
        double FPS = 1 / delaisImage * 1000 * counterImg + 1;
        // txt_nb_fps.Text = FPS.ToString() ;
        //txt_resolution.Text = "" + videoSource.DesiredFrameSize.Height + " * " + videoSource.DesiredFrameSize.Width;
        string resolutionTxt = "" + img.Width + " * " + img.Height;
        if (this != null && (!this.IsDisposed))
        {
            try
            {
                this.Invoke((ProcessNewFPS)UpdateNewFPS, FPS);
                this.Invoke((ProcessNewResolution)UpdateNewResolution, resolutionTxt);
            }
            catch (ObjectDisposedException) // the window was closing
            {
            }
        }
        counterImg = 0;
    }
    counterImg++;
    //Rectangle rect = new Rectangle(0,0,eventArgs.Frame.Width,eventArgs.Frame.Height);
    // 1 - grayscaling
    UnmanagedImage image = UnmanagedImage.FromManagedImage(img);
    // Working copies for the per-color HSL filters below.
    UnmanagedImage imageRouge = image.Clone();
    UnmanagedImage imageBleu = image.Clone();
    UnmanagedImage imageVert = image.Clone();
    UnmanagedImage grayImage = null;
    // Report hue/luminance/saturation of the tracked pixel to the UI thread.
    Color colorPoint = image.GetPixel(posX, posY);
    this.Invoke((ProcessLalbelText)ChangeLabelText, new object[] { colorPoint.GetHue().ToString(), lbl_hue });
    this.Invoke((ProcessLalbelText)ChangeLabelText, new object[] { colorPoint.GetBrightness().ToString(), lbl_lum });
    this.Invoke((ProcessLalbelText)ChangeLabelText, new object[] { colorPoint.GetSaturation().ToString(), lbl_sat });
    if (image.PixelFormat == PixelFormat.Format8bppIndexed)
    {
        grayImage = image;
    }
    else
    {
        grayImage = UnmanagedImage.Create(image.Width, image.Height, PixelFormat.Format8bppIndexed);
        Grayscale.CommonAlgorithms.BT709.Apply(image, grayImage);
    }
    // 2 - Edge detection
    DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
    UnmanagedImage edgesImage = edgeDetector.Apply(grayImage);
    // 3 - Threshold edges (separate thresholds for glyph and color pipelines)
    Threshold thresholdFilterGlyph = new Threshold((short)numericUpDown3.Value);
    Threshold thresholdFilterCouleur = new Threshold((short)numericUpDown2.Value);
    thresholdFilterGlyph.ApplyInPlace(edgesImage);
    /*
     * Bitmap image = (Bitmap)eventArgs.Frame.Clone();
     *
     * //Reference : http://www.aforgenet.com/framework/docs/html/743311a9-6c27-972d-39d2-ddc383dd1dd4.htm
     *
     * private HSLFiltering filter = new HSLFiltering();
     * // set color ranges to keep red-orange
     * filter.Hue = new IntRange(0, 20);
     * filter.Saturation = new DoubleRange(0.5, 1);
     *
     * // apply the filter
     * filter.ApplyInPlace(image);
     */
    /*RGB colorRed = new RGB(215, 30, 30);
     * RGB colorBlue = new RGB(10, 10, 215);
     * RGB colorVert = new RGB(30, 215, 30);
     * RGB colorBlanc = new RGB(225, 219, 160);*/
    // Isolate each target color by hue range (shared saturation/luminance band).
    HSLFiltering filter = new HSLFiltering();
    // create filter
    // EuclideanColorFiltering filter = new EuclideanColorFiltering();
    //filter.Radius = (short)numericUpDown1.Value;
    filter.Hue = new IntRange(40, 140);
    filter.Saturation = new Range(0.5f, 1.0f);
    filter.Luminance = new Range(0.2f, 1.0f);
    //filter.CenterColor = colorRed;
    filter.ApplyInPlace(imageRouge);
    filter.Hue = new IntRange(100, 180);
    //filter.CenterColor = colorBlanc;
    filter.ApplyInPlace(imageVert);
    filter.Hue = new IntRange(0, 40);
    //filter.CenterColor = colorBlue;
    filter.ApplyInPlace(imageBleu);
    // Channel-weighted grayscale conversions for the filtered copies.
    // NOTE(review): filterVert is created but never applied — confirm intent.
    Grayscale filterRouge = new Grayscale(0.800, 0.200, 0.200);
    Grayscale filterVert = new Grayscale(0.200, 0.800, 0.200);
    Grayscale filterBleu = new Grayscale(0.200, 0.200, 0.800);
    UnmanagedImage grayRougeImage = filterRouge.Apply(imageRouge);
    UnmanagedImage grayBleuImage = filterBleu.Apply(imageBleu);
    UnmanagedImage edgesRougeImage = edgeDetector.Apply(grayRougeImage);
    UnmanagedImage edgesBleuImage = edgeDetector.Apply(grayBleuImage);
    thresholdFilterCouleur.ApplyInPlace(edgesRougeImage);
    thresholdFilterCouleur.ApplyInPlace(edgesBleuImage);
    // All the image processing is done here...
    // pictureBox1.Image = image.ToManagedImage();
    // Push the processed views to the picture boxes on the UI thread.
    if (this != null && (!this.IsDisposed)) // if we are not disposing the window
    {
        try
        {
            this.Invoke((ProcessNewImage)DisplayNewImage, new object[] { image, pic_ImageNormal });
            this.Invoke((ProcessNewImage)DisplayNewImage, new object[] { edgesImage, pic_ImageEdge });
            this.Invoke((ProcessNewImage)DisplayNewImage, new object[] { imageRouge, pic_ImageRouge });
            this.Invoke((ProcessNewImage)DisplayNewImage, new object[] { imageBleu, pic_ImageBleu });
            this.Invoke((ProcessNewImage)DisplayNewImage, new object[] { imageVert, pic_ImageVert });
        }
        catch (ObjectDisposedException) // the window was closing
        {
        }
    }
    /*pictureBox2.Image = grayImage.ToManagedImage();
     * pictureBox3.Image = edgesImage.ToManagedImage();
     * pictureBox4.Image = imageRouge.ToManagedImage();*/
}
// Counts blobs in the source picture whose outside is noticeably lighter than
// the inside (average edge brightness difference >= 50), logs each difference
// to txtOut, and shows the thresholded edge image in picResult.
private void AugmentedMethod2()
{
    UnmanagedImage image = UnmanagedImage.FromManagedImage(new Bitmap(picSource.Image));
    // 1 - grayscaling
    UnmanagedImage grayImage = null;
    if (image.PixelFormat == PixelFormat.Format8bppIndexed)
    {
        grayImage = image;
    }
    else
    {
        grayImage = UnmanagedImage.Create(image.Width, image.Height, PixelFormat.Format8bppIndexed);
        Grayscale.CommonAlgorithms.BT709.Apply(image, grayImage);
    }
    // 2 - Edge detection
    DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
    UnmanagedImage edgesImage = edgeDetector.Apply(grayImage);
    // 3 - Threshold edges
    Threshold thresholdFilter = new Threshold(40);
    thresholdFilter.ApplyInPlace(edgesImage);
    // create and configure blob counter
    BlobCounter blobCounter = new BlobCounter();
    blobCounter.MinHeight = 32;
    blobCounter.MinWidth = 32;
    blobCounter.FilterBlobs = true;
    blobCounter.ObjectsOrder = ObjectsOrder.Size;
    // 4 - find all stand alone blobs
    blobCounter.ProcessImage(edgesImage);
    Blob[] blobs = blobCounter.GetObjectsInformation();
    // NOTE(review): shapeChecker is unused while the quadrilateral check at
    // the bottom of the loop stays commented out.
    SimpleShapeChecker shapeChecker = new SimpleShapeChecker();
    int counter = 0;
    // 5 - check each blob
    for (int i = 0, n = blobs.Length; i < n; i++)
    {
        // get edge points on the left and on the right side
        List<IntPoint> leftEdgePoints, rightEdgePoints;
        blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);
        // calculate average difference between pixel values from outside of the
        // shape and from inside
        float diff = CalculateAverageEdgesBrightnessDifference(
            leftEdgePoints, rightEdgePoints, grayImage);
        // check average difference, which tells how much outside is lighter than
        // inside on the average
        if (diff >= 50)
        {
            ++counter;
        }
        txtOut.AppendText(diff + ",");
        /*List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
         * List<IntPoint> corners = null;
         *
         * // does it look like a quadrilateral ?
         * if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
         * { ++counter;
         * }*/
    }
    txtOut.AppendText(Environment.NewLine);
    lblCount.Text = counter.ToString();
    picResult.Image = edgesImage.ToManagedImage();
}
// Scans the grayscale frame for quadrilateral glyph candidates, extracts each
// candidate with a perspective transform, recognizes its 5x5 glyph grid, and
// checks the grid against the four known corner-marker patterns. Candidate
// corners are accumulated and published to `Corners` at the end.
private void scan_code()
{
    List<IntPoint> TempCorners = new List<IntPoint>();

    // 2 - Edge detection
    DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
    UnmanagedImage edgesImage = edgeDetector.Apply(grayImage);

    // 3 - Threshold edges
    Threshold thresholdFilter = new Threshold(40);
    thresholdFilter.ApplyInPlace(edgesImage);

    // create and configure blob counter
    BlobCounter blobCounter = new BlobCounter();
    blobCounter.MinHeight = 32;
    blobCounter.MinWidth = 32;
    blobCounter.FilterBlobs = true;
    blobCounter.ObjectsOrder = ObjectsOrder.Size;

    // 4 - find all stand alone blobs
    blobCounter.ProcessImage(edgesImage);
    Blob[] blobs = blobCounter.GetObjectsInformation();

    // The four expected 5x5 marker patterns.
    // Fix: the original built these with ~100 individual element assignments
    // and compared them with four copy-pasted nested loops whose inner `break`
    // did not even exit the outer loop; array initializers plus one shared
    // comparison helper produce identical behavior.
    byte[,] LeftUpMarker =
    {
        { 0, 0, 0, 0, 0 },
        { 0, 0, 1, 0, 0 },
        { 0, 1, 0, 1, 0 },
        { 0, 0, 1, 0, 0 },
        { 0, 0, 0, 0, 0 },
    };
    byte[,] RightUpMarker =
    {
        { 0, 0, 0, 0, 0 },
        { 0, 1, 0, 1, 0 },
        { 0, 0, 0, 0, 0 },
        { 0, 1, 0, 1, 0 },
        { 0, 0, 0, 0, 0 },
    };
    byte[,] LeftDownMarker =
    {
        { 0, 0, 0, 0, 0 },
        { 0, 0, 1, 0, 0 },
        { 0, 1, 1, 1, 0 },
        { 0, 0, 1, 0, 0 },
        { 0, 0, 0, 0, 0 },
    };
    byte[,] ReightDownMarker =
    {
        { 0, 0, 0, 0, 0 },
        { 0, 1, 1, 1, 0 },
        { 0, 1, 0, 1, 0 },
        { 0, 1, 1, 1, 0 },
        { 0, 0, 0, 0, 0 },
    };

    // 5 - check each blob
    for (int i = 0, n = blobs.Length; i < n; i++)
    {
        List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
        List<IntPoint> corners = null;

        // does it look like a quadrilateral ?
        SimpleShapeChecker shapeChecker = new SimpleShapeChecker();
        if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
        {
            TempCorners.AddRange(corners);

            // get edge points on the left and on the right side
            List<IntPoint> leftEdgePoints, rightEdgePoints;
            blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);

            // average brightness difference between outside and inside
            float diff = CalculateAverageEdgesBrightnessDifference(
                leftEdgePoints, rightEdgePoints, grayImage);

            // the outside must be noticeably lighter than the inside
            if (diff > 20)
            {
                QuadrilateralTransformation quadrilateralTransformation =
                    new QuadrilateralTransformation(corners, 100, 100);
                UnmanagedImage glyphImage = quadrilateralTransformation.Apply(grayImage);

                // binarize the extracted glyph (Otsu thresholding)
                OtsuThreshold otsuThresholdFilter = new OtsuThreshold();
                otsuThresholdFilter.ApplyInPlace(glyphImage);
                image = glyphImage;

                // recognize the raw 5x5 glyph grid
                float confidence;
                byte[,] glyphValues = Recognize(glyphImage,
                    new System.Drawing.Rectangle(0, 0, glyphImage.Width, glyphImage.Height),
                    out confidence);

                if (MatchesPattern(LeftUpMarker, glyphValues))
                {
                    Debug.Log("Marker erkannt");
                }
                if (MatchesPattern(RightUpMarker, glyphValues))
                {
                    Debug.Log("Marker erkannt");
                }
                if (MatchesPattern(LeftDownMarker, glyphValues))
                {
                    Debug.Log("Marker erkannt");
                }
                if (MatchesPattern(ReightDownMarker, glyphValues))
                {
                    Debug.Log("Marker erkannt");
                }
            }
        }
    }

    if (TempCorners.Count > 0)
    {
        Corners = TempCorners;
    }
}

// Returns true when every cell of the recognized 5x5 glyph grid equals the
// expected marker pattern.
private static bool MatchesPattern(byte[,] pattern, byte[,] glyphValues)
{
    for (int row = 0; row < 5; row++)
    {
        for (int col = 0; col < 5; col++)
        {
            if (pattern[row, col] != glyphValues[row, col])
            {
                return false;
            }
        }
    }
    return true;
}
// Process the specified image trying to recognize the counter's image.
// Every intermediate bitmap added to the log is owned by the log afterwards,
// so those bitmaps are intentionally NOT disposed here.
public void Process(Bitmap image, IImageProcessingLog log)
{
    log.AddMessage("Image size: " + image.Width + " x " + image.Height);

    // 1 - Grayscale
    Bitmap grayImage = Grayscale.CommonAlgorithms.BT709.Apply(image);
    log.AddImage("Grayscale", grayImage);

    // 2 - Edge detection
    DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector( );
    Bitmap edges = edgeDetector.Apply(grayImage);
    log.AddImage("Edges", edges);

    // 3 - Threshold edges (binarize so the blob counter sees solid outlines)
    Threshold thresholdFilter = new Threshold(40);
    thresholdFilter.ApplyInPlace(edges);
    log.AddImage("Thresholded Edges", edges);

    // 4 - Blob Counter: keep only reasonably large blobs, biggest first
    BlobCounter blobCounter = new BlobCounter( );
    blobCounter.MinHeight = 32;
    blobCounter.MinWidth = 32;
    blobCounter.FilterBlobs = true;
    blobCounter.ObjectsOrder = ObjectsOrder.Size;
    blobCounter.ProcessImage(edges);
    Blob[] blobs = blobCounter.GetObjectsInformation( );

    // create unmanaged copy of source image, so we could draw on it
    UnmanagedImage imageData = UnmanagedImage.FromManagedImage(image);
    // get unmanaged copy of grayscale image, so we could access it's pixel values
    UnmanagedImage grayUI = UnmanagedImage.FromManagedImage(grayImage);

    // list of found dark/black quadrilaterals surrounded by white area
    List <List <IntPoint> > foundObjects = new List <List <IntPoint> >( );
    // shape checker for checking quadrilaterals
    SimpleShapeChecker shapeChecker = new SimpleShapeChecker( );

    // 5 - check each blob
    for (int i = 0, n = blobs.Length; i < n; i++)
    {
        List <IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
        List <IntPoint> corners = null;

        // does it look like a quadrilateral ?
        if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
        {
            // do some more checks to filter so unacceptable shapes
            // if ( CheckIfShapeIsAcceptable( corners ) )
            {
                log.AddMessage("Blob size: " + blobs[i].Rectangle.Width + " x " + blobs[i].Rectangle.Height);

                // get edge points on the left and on the right side
                List <IntPoint> leftEdgePoints, rightEdgePoints;
                blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);

                // calculate average difference between pixel values from outside of the shape and from inside
                float diff = this.CalculateAverageEdgesBrightnessDifference(
                    leftEdgePoints, rightEdgePoints, grayUI);
                log.AddMessage("Avg Diff: " + diff);

                // check average difference, which tells how much outside is lighter than inside on the average
                if (diff > 20)
                {
                    Drawing.Polygon(imageData, corners, Color.FromArgb(255, 255, 0, 0));
                    // add the object to the list of interesting objects for further processing
                    foundObjects.Add(corners);
                }
            }
        }
    }

    log.AddImage("Potential glyps", imageData.ToManagedImage());

    // FIX: the unmanaged copies were leaked before. ToManagedImage() above makes a
    // managed copy by default (AForge), and grayUI is no longer needed, so both can
    // be released now.
    imageData.Dispose( );
    grayUI.Dispose( );

    int counter = 1;

    // further processing of each potential glyph
    foreach (List <IntPoint> corners in foundObjects)
    {
        log.AddMessage("Glyph #" + counter);
        log.AddMessage(string.Format("Corners: ({0}), ({1}), ({2}), ({3})",
            corners[0], corners[1], corners[2], corners[3]));

        // 6 - do quadrilateral transformation to a fixed 250x250 view
        QuadrilateralTransformation quadrilateralTransformation =
            new QuadrilateralTransformation(corners, 250, 250);
        Bitmap transformed = quadrilateralTransformation.Apply(grayImage);
        log.AddImage("Transformed #" + counter, transformed);

        // 7 - otsu thresholding
        OtsuThreshold otsuThresholdFilter = new OtsuThreshold( );
        Bitmap transformedOtsu = otsuThresholdFilter.Apply(transformed);
        log.AddImage("Transformed Otsu #" + counter, transformedOtsu);

        // 8 - recognize the glyph's 5x5 cell grid (the recognizer also draws
        // grid lines onto the bitmap passed by ref)
        int glyphSize = 5;
        SquareBinaryGlyphRecognizer gr = new SquareBinaryGlyphRecognizer(glyphSize);
        bool[,] glyphValues = gr.Recognize(ref transformedOtsu, new Rectangle(0, 0, 250, 250));
        log.AddImage("Glyph lines #" + counter, transformedOtsu);

        // output recognized glyph (confidence + cell matrix) to the log
        log.AddMessage(string.Format("glyph: {0:F2}%", gr.confidence * 100));
        for (int i = 0; i < glyphSize; i++)
        {
            StringBuilder sb = new StringBuilder(" ");
            for (int j = 0; j < glyphSize; j++)
            {
                sb.Append((glyphValues[i, j]) ? "1 " : "0 ");
            }
            log.AddMessage(sb.ToString( ));
        }
        counter++;
    }
}
/// <summary>
/// Search for glyphs in the specified image and recognize them.
/// </summary>
///
/// <param name="image">Image to search glyphs in.</param>
///
/// <returns>Return a list of found glyphs.</returns>
///
/// <remarks><para>The method does processing of the specified image and searches for glyphs in it of
/// the specified <see cref="GlyphSize">size</see>. In the case if <see cref="GlyphDatabase">glyphs' database</see>
/// is set, it tries to find a matching glyph in it for each found glyph in the image. If matching is found,
/// then <see cref="ExtractedGlyphData.RecognizedGlyph">RecognizedGlyph</see> and
/// <see cref="ExtractedGlyphData.RecognizedQuadrilateral">RecognizedQuadrilateral</see>
/// properties of <see cref="ExtractedGlyphData"/> are set correspondingly.</para></remarks>
///
/// <exception cref="UnsupportedImageFormatException">Pixel format of the specified image is not supported.
/// It must be 8 bpp indexed or 24/32 bpp color image.</exception>
///
public List <ExtractedGlyphData> FindGlyphs(UnmanagedImage image)
{
    List <ExtractedGlyphData> extractedGlyphs = new List <ExtractedGlyphData>( );

    if ((image.PixelFormat != PixelFormat.Format8bppIndexed) &&
        (!Grayscale.CommonAlgorithms.BT709.FormatTranslations.ContainsKey(image.PixelFormat)))
    {
        throw new UnsupportedImageFormatException("Pixel format of the specified image is not supported.");
    }

    // 1 - grayscaling (the source is reused directly when it is already 8 bpp)
    UnmanagedImage grayImage = null;

    if (image.PixelFormat == PixelFormat.Format8bppIndexed)
    {
        grayImage = image;
    }
    else
    {
        grayImage = UnmanagedImage.Create(image.Width, image.Height, PixelFormat.Format8bppIndexed);
        Grayscale.CommonAlgorithms.BT709.Apply(image, grayImage);
    }

    // 2 - Edge detection
    UnmanagedImage edgesImage = edgeDetector.Apply(grayImage);

    // 3 - Threshold edges
    thresholdFilter.ApplyInPlace(edgesImage);

    // 4 - Blob Counter
    blobCounter.ProcessImage(edgesImage);
    Blob[] blobs = blobCounter.GetObjectsInformation( );

    // --- HLE
    // create copy of source image, so we could draw candidate outlines on it
    Bitmap imageCopy = image.ToManagedImage();
    System.Drawing.Imaging.BitmapData imageData = imageCopy.LockBits(
        new Rectangle(0, 0, image.Width, image.Height),
        System.Drawing.Imaging.ImageLockMode.ReadWrite, imageCopy.PixelFormat);
    // --- HLE

    // 5 - check each blob
    int counter = 1; // Counter -> HLE

    for (int i = 0, n = blobs.Length; i < n; i++)
    {
        List <IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
        List <IntPoint> corners = null;

        // does it look like a quadrilateral ?
        if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
        {
            // get edge points on the left and on the right side
            List <IntPoint> leftEdgePoints, rightEdgePoints;
            blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);

            // calculate average difference between pixel values from outside of the shape and from inside
            float diff = CalculateAverageEdgesBrightnessDifference(
                leftEdgePoints, rightEdgePoints, grayImage);

            // check average difference, which tells how much outside is lighter than inside on the average
            if (diff > 20)
            {
                // --- HLE: draw the candidate quadrilateral onto the debug copy
                Drawing.Polygon(imageData, corners, Color.Red);
                // NOTE(review): counter is incremented BEFORE its first use, so the
                // first recognized glyph is numbered 2 — confirm this is intended.
                counter++; //counter -> HLE
                // --- HLE

                // perform glyph recognition
                ExtractedGlyphData glyphData = RecognizeGlyph(grayImage, corners, counter);

                if (glyphData != null)
                {
                    extractedGlyphs.Add(glyphData);

                    if (extractedGlyphs.Count >= maxNumberOfGlyphsToSearch)
                    {
                        break;
                    }
                }
            }
        }
    }

    // FIX: the GDI+ lock taken above was never released and the bitmap copy was
    // never disposed — both leaked on every call.
    imageCopy.UnlockBits(imageData);
    imageCopy.Dispose( );

    // dispose resources (only dispose grayImage when we allocated it ourselves)
    if (image.PixelFormat != PixelFormat.Format8bppIndexed)
    {
        grayImage.Dispose( );
    }
    edgesImage.Dispose( );

    return(extractedGlyphs);
}
// Runs quadrilateral detection on the loaded bitmap: grayscale -> edge detection ->
// threshold -> blob analysis, collecting the corner lists of all quadrilateral blobs
// into Corners. No-op when edge detection has already run.
// NOTE(review): HasRunEdgeDetection is not set inside this method — confirm a caller
// or property setter updates it, otherwise the guard never triggers.
public void RunEdgeDetection(EdgeDetectionOptions options)
{
    if (HasRunEdgeDetection)
    {
        return;
    }

    using (Bitmap newBitmap = LoadBitmap())
    {
        Rectangle rect = new Rectangle(0, 0, newBitmap.Width, newBitmap.Height);

        // FIX: keep the BitmapData handle so the lock can actually be released;
        // previously it was passed straight into UnmanagedImage and never unlocked.
        BitmapData bitmapData = newBitmap.LockBits(rect, ImageLockMode.ReadWrite, newBitmap.PixelFormat);
        try
        {
            using (UnmanagedImage image = new UnmanagedImage(bitmapData))
            {
                using (UnmanagedImage grayImage = UnmanagedImage.Create(image.Width, image.Height, PixelFormat.Format8bppIndexed))
                {
                    Grayscale.CommonAlgorithms.BT709.Apply(image, grayImage);

                    // FIX: removed an unused duplicate Threshold local that was created
                    // here and immediately discarded.
                    using (UnmanagedImage edgesImage = EDGE_DETECTOR.Apply(grayImage))
                    {
                        // binarize the edge image with the caller-supplied threshold
                        Threshold thresholdFilter = new Threshold(options.Threshold);
                        thresholdFilter.ApplyInPlace(edgesImage);

                        if (options.ShowEdgesImage)
                        {
                            ImageForm.ShowImage("Enhanced Edges Image", edgesImage);
                        }

                        // find sufficiently large blobs, biggest first
                        BlobCounter blobCounter = new BlobCounter();
                        blobCounter.MinHeight = MINIMUM_BLOB_SIZE;
                        blobCounter.MinWidth = MINIMUM_BLOB_SIZE;
                        blobCounter.FilterBlobs = true;
                        blobCounter.ObjectsOrder = ObjectsOrder.Size;
                        blobCounter.ProcessImage(edgesImage);
                        Blob[] blobs = blobCounter.GetObjectsInformation();

                        Corners.Clear();

                        foreach (Blob blob in blobs)
                        {
                            List <IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blob);
                            List <IntPoint> corners = null;

                            if (SHAPE_CHECKER.IsQuadrilateral(edgePoints, out corners))
                            {
                                List <IntPoint> leftEdgePoints, rightEdgePoints;
                                blobCounter.GetBlobsLeftAndRightEdges(blob, out leftEdgePoints, out rightEdgePoints);

                                Corners.Add(corners);

                                if (options.ShowBlobImages)
                                {
                                    // warp the quadrilateral into a fixed 200x200 preview
                                    QuadrilateralTransformation quadTransformation =
                                        new QuadrilateralTransformation(corners, 200, 200);
                                    using (UnmanagedImage quadImage = quadTransformation.Apply(image))
                                    {
                                        ImageForm.ShowImage("Quad Image", quadImage);
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        finally
        {
            // FIX: release the GDI+ lock before the bitmap is disposed
            newBitmap.UnlockBits(bitmapData);
        }
    }
}