// Draws the convex hull of every blob found in the supplied bitmap onto a
// copy of it and shows the result in pictureBox2.
// See: http://www.aforgenet.com/framework/features/blobs_processing.html
private void convexHullBlov(Bitmap saus)
{
    Bitmap olahData = (Bitmap)saus.Clone();

    // Locate all blobs on the original image.
    BlobCounter blCount = new BlobCounter();
    blCount.ProcessImage(saus);

    GrahamConvexHull hullFinder = new GrahamConvexHull();

    // Lock the copy so the hull polygons can be drawn directly on its pixels.
    BitmapData data = olahData.LockBits(
        new Rectangle(0, 0, olahData.Width, olahData.Height),
        ImageLockMode.ReadWrite, olahData.PixelFormat);

    foreach (Blob blob in blCount.GetObjectsInformation())
    {
        // Collect the blob's left and right edge points...
        List<IntPoint> leftPoints, rightPoints;
        blCount.GetBlobsLeftAndRightEdges(blob, out leftPoints, out rightPoints);

        List<IntPoint> edgePoints = new List<IntPoint>();
        edgePoints.AddRange(leftPoints);
        edgePoints.AddRange(rightPoints);

        // ...and outline their convex hull in red.
        Drawing.Polygon(data, hullFinder.FindHull(edgePoints), Color.Red);
    }

    olahData.UnlockBits(data);
    pictureBox2.Image = olahData;
}
// Filters the bitmap by the configured colour (Red/Green/Blue within Radius),
// grayscales it, optionally inverts it, then returns the convex hull of a
// detected blob as the corner list.
// NOTE(review): the loop below overwrites `corners` on every iteration, so
// only the hull of the LAST blob enumerated (smallest, given
// ObjectsOrder.Size) is returned — confirm this is the intended behaviour.
private List<IntPoint> FindCorners(Bitmap bitmap)
{
    List<IntPoint> corners = new List<IntPoint>();

    using (var clone = bitmap.Clone() as Bitmap)
    {
        // Keep only pixels close to the target colour.
        new EuclideanColorFiltering(
            new AForge.Imaging.RGB((byte)Red, (byte)Green, (byte)Blue), Radius).ApplyInPlace(clone);

        using (var grayscaledBitmap = Grayscale.CommonAlgorithms.BT709.Apply(clone))
        {
            //new Threshold(Threshold).ApplyInPlace(grayscaledBitmap);
            if (Inverted)
            {
                new Invert().ApplyInPlace(grayscaledBitmap);
            }

            // Find blobs of at least 50x50 px, ordered by size.
            BlobCounter blobCounter = new BlobCounter();
            blobCounter.FilterBlobs = true;
            blobCounter.MinWidth = 50;
            blobCounter.MinHeight = 50;
            blobCounter.ObjectsOrder = ObjectsOrder.Size;
            blobCounter.ProcessImage(grayscaledBitmap);

            GrahamConvexHull hullFinder = new GrahamConvexHull();

            foreach (Blob blob in blobCounter.GetObjectsInformation())
            {
                // Merge left and right edges into one point set.
                List<IntPoint> leftPoints, rightPoints;
                blobCounter.GetBlobsLeftAndRightEdges(blob, out leftPoints, out rightPoints);

                List<IntPoint> edgePoints = new List<IntPoint>();
                edgePoints.AddRange(leftPoints);
                edgePoints.AddRange(rightPoints);

                corners = hullFinder.FindHull(edgePoints);
            }
        }
    }

    return corners;
}
// Returns the convex hull of the given blob, using a blob counter built from
// the supplied grayscale image to look up the blob's edge points.
private List<IntPoint> getHull(Blob blob, Bitmap grayImage)
{
    BlobCounter blobCounter = getBlobCounter(grayImage);

    // Combine the blob's left and right edges into a single point set.
    List<IntPoint> leftPoints, rightPoints;
    blobCounter.GetBlobsLeftAndRightEdges(blob, out leftPoints, out rightPoints);

    List<IntPoint> edgePoints = new List<IntPoint>(leftPoints);
    edgePoints.AddRange(rightPoints);

    return new GrahamConvexHull().FindHull(edgePoints);
}
// Finds every blob in the image shown by pictureBox1, draws each blob's
// convex hull in red on the bitmap, and puts the result back into the box.
private void convexHull() // Convex Hull
{
    Bitmap bmp = new Bitmap(pictureBox1.Image);

    // Locate all blobs in the image.
    BlobCounter blobCounter = new BlobCounter();
    blobCounter.ProcessImage(bmp);

    GrahamConvexHull hullFinder = new GrahamConvexHull();

    // Lock the bitmap so polygons can be drawn on the raw pixel data.
    BitmapData data = bmp.LockBits(
        new Rectangle(0, 0, bmp.Width, bmp.Height),
        ImageLockMode.ReadWrite, bmp.PixelFormat);

    foreach (Blob blob in blobCounter.GetObjectsInformation())
    {
        // Gather the blob's edge points from both sides...
        List<IntPoint> leftPoints, rightPoints;
        blobCounter.GetBlobsLeftAndRightEdges(blob, out leftPoints, out rightPoints);

        List<IntPoint> outline = new List<IntPoint>(leftPoints);
        outline.AddRange(rightPoints);

        // ...and draw the hull of those points.
        Drawing.Polygon(data, hullFinder.FindHull(outline), Color.Red);
    }

    bmp.UnlockBits(data);
    pictureBox1.Image = bmp;
}
// Computes the convex hull of every previously detected blob and stores the
// hulls as polygons in the `hulls` field.
private void extractConvexHull()
{
    GrahamConvexHull hullFinder = new GrahamConvexHull();

    hulls = new List<Polygon>();

    foreach (Blob blob in blobs)
    {
        // Merge the blob's left and right edges into one point list.
        List<IntPoint> leftPoints, rightPoints;
        BlobCounter.GetBlobsLeftAndRightEdges(blob, out leftPoints, out rightPoints);

        List<IntPoint> edgePoints = new List<IntPoint>();
        edgePoints.AddRange(leftPoints);
        edgePoints.AddRange(rightPoints);

        hulls.Add(new Polygon(hullFinder.FindHull(edgePoints)));
    }
}
/// <summary>
/// Finds component shapes in the given bitmap: locates blobs, builds each
/// blob's convex hull, simplifies it, then derives the component's center and
/// alignment from the hull's longest edge and the normal through its midpoint.
/// </summary>
/// <param name="vp">Video processing context, forwarded to SetVideoProcessing.</param>
/// <param name="bitmap">Image to search; blobs smaller than 8x8 px are filtered out.</param>
/// <returns>The list of components found (also registered via SetVideoProcessing).</returns>
public static List <Shapes.Component> FindComponents(VideoProcessing vp, Bitmap bitmap)
{
    // Locating objects
    BlobCounter blobCounter = new BlobCounter();
    blobCounter.FilterBlobs = true;
    blobCounter.MinHeight = 8;
    blobCounter.MinWidth = 8;
    blobCounter.ProcessImage(bitmap);
    Blob[] blobs = blobCounter.GetObjectsInformation();

    // create convex hull searching algorithm and shape simplifiers
    GrahamConvexHull hullFinder = new GrahamConvexHull();
    ClosePointsMergingOptimizer optimizer1 = new ClosePointsMergingOptimizer();
    FlatAnglesOptimizer optimizer2 = new FlatAnglesOptimizer();
    List <Shapes.Component> Components = new List <Shapes.Component>();

    // process each blob
    foreach (Blob blob in blobs)
    {
        List <IntPoint> leftPoints, rightPoints, edgePoints = new List <IntPoint>();
        if ((blob.Rectangle.Height > 400) && (blob.Rectangle.Width > 600))
        {
            // The whole image could be a blob, discard that.
            // NOTE(review): `break` abandons ALL remaining blobs, not just this
            // one — verify `continue` was not intended here.
            break;
        }
        // get blob's edge points
        blobCounter.GetBlobsLeftAndRightEdges(blob, out leftPoints, out rightPoints);
        edgePoints.AddRange(leftPoints);
        edgePoints.AddRange(rightPoints);

        // blob's convex hull, simplified: flat angles (>170 deg) straightened,
        // then points closer than 4 px merged
        List <IntPoint> Outline = hullFinder.FindHull(edgePoints);
        optimizer1.MaxDistanceToMerge = 4;
        optimizer2.MaxAngleToKeep = 170F;
        Outline = optimizer2.OptimizeShape(Outline);
        Outline = optimizer1.OptimizeShape(Outline);

        // find longest line segment of the hull
        float dist = 0;
        LineSegment Longest = new LineSegment(Outline[0], Outline[1]);
        LineSegment line;
        dist = Longest.Length;
        int LongestInd = 0;
        for (int i = 1; i < Outline.Count; i++)
        {
            if (i != Outline.Count - 1)
            {
                line = new LineSegment(Outline[i], Outline[i + 1]);
            }
            else
            {
                // last iteration: closing segment, unless polygon is already closed
                if (Outline[i] == Outline[0])
                {
                    break;
                }
                line = new LineSegment(Outline[i], Outline[0]);
            }
            if (line.Length > dist)
            {
                Longest = line;
                dist = line.Length;
                LongestInd = i;
            }
        }

        // Get the center point of the longest segment
        Point LongestCenter = new Point();
        LongestCenter.X = (float)Math.Round((Longest.End.X - Longest.Start.X) / 2.0 + Longest.Start.X);
        LongestCenter.Y = (float)Math.Round((Longest.End.Y - Longest.Start.Y) / 2.0 + Longest.Start.Y);
        Point NormalStart = new Point();
        Point NormalEnd = new Point();
        // Find normal:
        // start = longest.start rotated +90deg relative to center
        // end   = longest.end rotated -90deg relative to center
        // If you rotate point (px, py) around point (ox, oy) by angle theta you'll get:
        // p'x = cos(theta) * (px-ox) - sin(theta) * (py-oy) + ox
        // p'y = sin(theta) * (px-ox) + cos(theta) * (py-oy) + oy
        // cos90 = 0, sin90 = 1 =>
        // p'x = -(py-oy) + ox = oy-py+ox,  p'y = (px-ox) + oy
        NormalStart.X = LongestCenter.Y - Longest.Start.Y + LongestCenter.X;
        NormalStart.Y = (Longest.Start.X - LongestCenter.X) + LongestCenter.Y;
        // cos-90 = 0, sin-90 = -1 =>
        // p'x = (py-oy) + ox
        // p'y = -(px-ox)+oy = ox-px+oy
        NormalEnd.X = (Longest.Start.Y - LongestCenter.Y) + LongestCenter.X;
        NormalEnd.Y = LongestCenter.X - Longest.Start.X + LongestCenter.Y;
        // Make line out of the points
        Line Normal = Line.FromPoints(NormalStart, NormalEnd);

        // Find the furthest intersection of the outline with the normal
        // (skipping the Longest segment itself)
        Point InterSection = new Point();
        Point Furthest = new Point();
        bool FurhtestAssinged = false;   // (sic) set once a furthest point is found
        LineSegment seg;
        dist = 0;
        for (int i = 0; i < Outline.Count; i++)
        {
            if (i == LongestInd)
            {
                continue;
            }
            if (i != Outline.Count - 1)
            {
                seg = new LineSegment(Outline[i], Outline[i + 1]);
            }
            else
            {
                // last iteration: closing segment, unless polygon is already closed
                if (Outline[i] == Outline[0])
                {
                    break;
                }
                seg = new LineSegment(Outline[i], Outline[0]);
            }
            if (seg.GetIntersectionWith(Normal) == null)
            {
                continue;
            }
            InterSection = (Point)seg.GetIntersectionWith(Normal);
            if (InterSection.DistanceTo(LongestCenter) > dist)
            {
                Furthest = InterSection;
                FurhtestAssinged = true;
                dist = InterSection.DistanceTo(LongestCenter);
            }
        }

        // Check if there is an edge point close to the normal (within 1.5 px)
        // that is even further away
        Point fPoint = new Point();
        for (int i = 0; i < Outline.Count; i++)
        {
            fPoint.X = Outline[i].X;
            fPoint.Y = Outline[i].Y;
            if (Normal.DistanceToPoint(fPoint) < 1.5)
            {
                if (fPoint.DistanceTo(LongestCenter) > dist)
                {
                    Furthest = fPoint;
                    FurhtestAssinged = true;
                    dist = fPoint.DistanceTo(LongestCenter);
                }
            }
        }

        Point ComponentCenter = new Point();
        if (FurhtestAssinged)
        {
            // Find the midpoint of LongestCenter and Furthest: this is the
            // centerpoint of the component
            ComponentCenter.X = (float)Math.Round((LongestCenter.X - Furthest.X) / 2.0 + Furthest.X);
            ComponentCenter.Y = (float)Math.Round((LongestCenter.Y - Furthest.Y) / 2.0 + Furthest.Y);
            // Alignment is the angle of the longest segment (0 when near-vertical,
            // since the slope would blow up)
            double Alignment;
            if (Math.Abs(Longest.End.X - Longest.Start.X) < 0.001)
            {
                Alignment = 0;
            }
            else
            {
                Alignment = Math.Atan((Longest.End.Y - Longest.Start.Y) / (Longest.End.X - Longest.Start.X));
                Alignment = Alignment * 180.0 / Math.PI;   // in deg.
            }
            Components.Add(new Shapes.Component(ComponentCenter, Alignment, Outline, Longest, NormalStart, NormalEnd));
        }
    }
    SetVideoProcessing(Components, vp);
    return(Components);
}
/// <summary>
/// Scans the (field) grayscale image for glyph markers: detects and thresholds
/// edges, finds sufficiently large blobs, and for each quadrilateral blob whose
/// interior is darker than its surroundings, extracts the glyph and compares
/// its 5x5 cell pattern against the four known corner markers. Collected
/// quadrilateral corners are published to the Corners field.
/// </summary>
private void scan_code()
{
    List<IntPoint> TempCorners = new List<IntPoint>();

    // 2 - Edge detection
    DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
    UnmanagedImage edgesImage = edgeDetector.Apply(grayImage);

    // 3 - Threshold edges
    Threshold thresholdFilter = new Threshold(40);
    thresholdFilter.ApplyInPlace(edgesImage);

    // create and configure blob counter
    BlobCounter blobCounter = new BlobCounter();
    blobCounter.MinHeight = 32;
    blobCounter.MinWidth = 32;
    blobCounter.FilterBlobs = true;
    blobCounter.ObjectsOrder = ObjectsOrder.Size;

    // 4 - find all stand alone blobs
    blobCounter.ProcessImage(edgesImage);
    Blob[] blobs = blobCounter.GetObjectsInformation();

    // The four known 5x5 marker patterns, compared against the output of
    // Recognize(). (Hoisted out of the loop: they are constant per scan.)
    byte[,] leftUpMarker = new byte[5, 5]
    {
        { 0, 0, 0, 0, 0 },
        { 0, 0, 1, 0, 0 },
        { 0, 1, 0, 1, 0 },
        { 0, 0, 1, 0, 0 },
        { 0, 0, 0, 0, 0 },
    };
    byte[,] rightUpMarker = new byte[5, 5]
    {
        { 0, 0, 0, 0, 0 },
        { 0, 1, 0, 1, 0 },
        { 0, 0, 0, 0, 0 },
        { 0, 1, 0, 1, 0 },
        { 0, 0, 0, 0, 0 },
    };
    byte[,] leftDownMarker = new byte[5, 5]
    {
        { 0, 0, 0, 0, 0 },
        { 0, 0, 1, 0, 0 },
        { 0, 1, 1, 1, 0 },
        { 0, 0, 1, 0, 0 },
        { 0, 0, 0, 0, 0 },
    };
    byte[,] rightDownMarker = new byte[5, 5]
    {
        { 0, 0, 0, 0, 0 },
        { 0, 1, 1, 1, 0 },
        { 0, 1, 0, 1, 0 },
        { 0, 1, 1, 1, 0 },
        { 0, 0, 0, 0, 0 },
    };

    // 5 - check each blob
    for (int i = 0, n = blobs.Length; i < n; i++)
    {
        List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
        List<IntPoint> corners = null;

        // does it look like a quadrilateral ?
        SimpleShapeChecker shapeChecker = new SimpleShapeChecker();
        if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
        {
            TempCorners.AddRange(corners);

            // get edge points on the left and on the right side
            List<IntPoint> leftEdgePoints, rightEdgePoints;
            blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);

            // calculate average difference between pixel values from outside of
            // the shape and from inside
            float diff = CalculateAverageEdgesBrightnessDifference(
                leftEdgePoints, rightEdgePoints, grayImage);

            // check average difference, which tells how much outside is lighter
            // than inside on the average
            if (diff > 20)
            {
                // warp the quadrilateral into a 100x100 square
                QuadrilateralTransformation quadrilateralTransformation =
                    new QuadrilateralTransformation(corners, 100, 100);
                UnmanagedImage glyphImage = quadrilateralTransformation.Apply(grayImage);

                // binarize with Otsu thresholding
                OtsuThreshold otsuThresholdFilter = new OtsuThreshold();
                otsuThresholdFilter.ApplyInPlace(glyphImage);
                image = glyphImage;

                // recognize raw glyph cells
                float confidence;
                byte[,] glyphValues = Recognize(glyphImage,
                    new System.Drawing.Rectangle(0, 0, glyphImage.Width, glyphImage.Height),
                    out confidence);

                // compare against each known marker pattern
                if (MatchesGlyphPattern(leftUpMarker, glyphValues))
                {
                    Debug.Log("Marker erkannt");
                }
                if (MatchesGlyphPattern(rightUpMarker, glyphValues))
                {
                    Debug.Log("Marker erkannt");
                }
                if (MatchesGlyphPattern(leftDownMarker, glyphValues))
                {
                    Debug.Log("Marker erkannt");
                }
                if (MatchesGlyphPattern(rightDownMarker, glyphValues))
                {
                    Debug.Log("Marker erkannt");
                }
            }
        }
    }

    if (TempCorners.Count > 0)
    {
        Corners = TempCorners;
    }
}

// Returns true when every cell of the recognized 5x5 glyph matches the
// given marker pattern. Replaces four duplicated element-wise compare loops.
private static bool MatchesGlyphPattern(byte[,] pattern, byte[,] glyphValues)
{
    for (int row = 0; row < 5; row++)
    {
        for (int col = 0; col < 5; col++)
        {
            if (pattern[row, col] != glyphValues[row, col])
            {
                return false;
            }
        }
    }
    return true;
}
/// <summary>
/// Search for glyphs in the specified image and recognize them.
/// </summary>
///
/// <param name="image">Image to search glyphs in.</param>
///
/// <returns>Return a list of found glyphs.</returns>
///
/// <remarks><para>The method does processing of the specified image and searches for glyphs in it of
/// the specified <see cref="GlyphSize">size</see>. In the case if <see cref="GlyphDatabase">glyphs' database</see>
/// is set, it tries to find a matching glyph in it for each found glyph in the image. If matching is found,
/// then <see cref="ExtractedGlyphData.RecognizedGlyph">RecognizedGlyph</see> and
/// <see cref="ExtractedGlyphData.RecognizedQuadrilateral">RecognizedQuadrilateral</see>
/// properties of <see cref="ExtractedGlyphData"/> are set correspondingly.</para></remarks>
///
/// <exception cref="UnsupportedImageFormatException">Pixel format of the specified image is not supported.
/// It must be 8 bpp indexed or 24/32 bpp color image.</exception>
///
public List<ExtractedGlyphData> FindGlyphs(UnmanagedImage image)
{
    List<ExtractedGlyphData> extractedGlyphs = new List<ExtractedGlyphData>();

    bool needsGrayscaling = (image.PixelFormat != PixelFormat.Format8bppIndexed);

    if (needsGrayscaling &&
        !Grayscale.CommonAlgorithms.BT709.FormatTranslations.ContainsKey(image.PixelFormat))
    {
        throw new UnsupportedImageFormatException("Pixel format of the specified image is not supported.");
    }

    // 1 - grayscaling (reuse the source when it is already 8 bpp indexed)
    UnmanagedImage grayImage;
    if (needsGrayscaling)
    {
        grayImage = UnmanagedImage.Create(image.Width, image.Height, PixelFormat.Format8bppIndexed);
        Grayscale.CommonAlgorithms.BT709.Apply(image, grayImage);
    }
    else
    {
        grayImage = image;
    }

    // 2 - Edge detection
    UnmanagedImage edgesImage = edgeDetector.Apply(grayImage);

    // 3 - Threshold edges
    thresholdFilter.ApplyInPlace(edgesImage);

    // 4 - Blob Counter
    blobCounter.ProcessImage(edgesImage);

    // 5 - inspect each blob
    foreach (Blob blob in blobCounter.GetObjectsInformation())
    {
        List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blob);
        List<IntPoint> corners;

        // only quadrilateral blobs can be glyphs
        if (!shapeChecker.IsQuadrilateral(edgePoints, out corners))
        {
            continue;
        }

        // get edge points on the left and on the right side
        List<IntPoint> leftEdgePoints, rightEdgePoints;
        blobCounter.GetBlobsLeftAndRightEdges(blob, out leftEdgePoints, out rightEdgePoints);

        // average brightness difference between just outside and just inside
        // the shape; a glyph's surrounding is lighter than its interior
        float diff = CalculateAverageEdgesBrightnessDifference(
            leftEdgePoints, rightEdgePoints, grayImage);

        if (diff > 20)
        {
            // perform glyph recognition
            ExtractedGlyphData glyphData = RecognizeGlyph(grayImage, corners);
            if (glyphData != null)
            {
                extractedGlyphs.Add(glyphData);
                if (extractedGlyphs.Count >= maxNumberOfGlyphsToSearch)
                {
                    break;
                }
            }
        }
    }

    // dispose temporary images
    if (needsGrayscaling)
    {
        grayImage.Dispose();
    }
    edgesImage.Dispose();

    return extractedGlyphs;
}
/// <summary>
/// Calculates calibration grid points from the detected blobs: builds the set
/// of expected reference positions for the current calibration step, clusters
/// the blob centers around them, refines border points to the blob's outer
/// edge, and registers each resolved point via Grid.AddPoint.
/// </summary>
/// <param name="bc">Blob counter that has already processed the current frame.</param>
/// <returns>true when every cluster could be resolved (at most one missing point);
/// false on any inconsistency.</returns>
private bool ProcessBlobs(BlobCounter bc)
{
    var blobs = new List <Blob>(bc.GetObjectsInformation());

    // Discard blobs outside the calibration grid and blobs whose bounding box
    // is less than half filled (noise).
    blobs.RemoveAll(x => !Grid.Contains(x.CenterOfGravity));
    blobs.RemoveAll(x => x.Fullness < 0.5);
    //if (blobs.Count < 4)
    //{
    //    Debug.WriteLine("too few blobs");
    //    return false;
    //}
    //if (blobs.Count > 5)
    //{
    //    Debug.WriteLine("too many blobs");
    //    return false;
    //}

    // Expected reference locations in image space -> their (i, j) grid index.
    var refs = new Dictionary <AForge.Point, Point>();

    // Grid resolution doubles with each calibration step.
    var parts = (int)Math.Pow(2, _calibrationStep - 2);
    var w = _vs.Width / parts;
    var h = _vs.Height / parts;
    Point? missing = null;

    // Build the reference points by interpolating already-known grid points.
    for (int i = 0; i <= parts; i++)
    {
        for (int j = 0; j <= parts; j++)
        {
            if (i % 2 == 1)
            {
                if (j % 2 == 1)
                {
                    // center of a rectangle: average of the four corners
                    var tl = Grid.GetRefPoint((i - 1) * w, (j - 1) * h, 1);
                    var tr = Grid.GetRefPoint((i + 1) * w, (j - 1) * h, 1);
                    var bl = Grid.GetRefPoint((i - 1) * w, (j + 1) * h, 1);
                    var br = Grid.GetRefPoint((i + 1) * w, (j + 1) * h, 1);
                    refs.Add(new AForge.Point((tl.X + tr.X + bl.X + br.X) / 4.0f,
                                              (tl.Y + tr.Y + bl.Y + br.Y) / 4.0f),
                             new Point(i, j));
                }
                else
                {
                    // middle of a horizontal outline segment
                    var l = Grid.GetRefPoint((i - 1) * w, (j) * h, 1);
                    var r = Grid.GetRefPoint((i + 1) * w, (j) * h, 1);
                    refs.Add(new AForge.Point((l.X + r.X) / 2.0f, (l.Y + r.Y) / 2.0f),
                             new Point(i, j));
                }
            }
            else if (j % 2 == 1)
            {
                // middle of a vertical outline segment (between the point above
                // and the point below)
                var t = Grid.GetRefPoint((i) * w, (j - 1) * h, 1);
                var b = Grid.GetRefPoint((i) * w, (j + 1) * h, 1);
                refs.Add(new AForge.Point((t.X + b.X) / 2.0f, (t.Y + b.Y) / 2.0f),
                         new Point(i, j));
            }
        }
    }

    // Cluster the blob centers around the expected reference points.
    var clusters = KMeansCluster.KMeansClustering(
        blobs.Select(x => x.CenterOfGravity).ToList(), refs.Keys.ToList(), 5);

    foreach (var cluster in clusters)
    {
        // Each cluster must contain exactly one reference point.
        if (cluster.Points.Count(refs.ContainsKey) > 1)
        {
            Debug.WriteLine("Multiple Centers in a Cluster");
            return(false);
        }
        if (!cluster.Points.Any(refs.ContainsKey))
        {
            Debug.WriteLine("No Center in a Cluster");
            return(false);
        }

        var center = cluster.Points.First(refs.ContainsKey);
        var pos = refs[center];

        if (cluster.Points.Count == 1)
        {
            // Only the reference itself: the measured blob is missing.
            // Tolerate at most one missing point per frame.
            Debug.WriteLine("Point missing in cluster");
            if (missing.HasValue)
            {
                return(false);
            }
            missing = pos;
            continue;
        }
        if (cluster.Points.Count > 2)
        {
            Debug.WriteLine("Multiple Points in a cluster");
        }
        cluster.Points.Remove(center);
#if DEBUG
        // Annotate the debug image with the grid index of each clustered point.
        foreach (var point in cluster.Points)
        {
            using (var g = Graphics.FromImage(actImg))
            {
                g.DrawString(pos.X + "," + pos.Y,
                             new Font(FontFamily.GenericSansSerif, 8.0f),
                             new SolidBrush(Color.Red), point.X, point.Y);
                g.Flush();
                //Debug.WriteLine("wrote to image");
            }
        }
#endif
        var p = cluster.Points.First();

        // For border positions, snap the point to the blob's outer edge instead
        // of its center of gravity.
        // NOTE(review): when both a horizontal and a vertical border case apply
        // (possible when parts == 1), the second lookup below searches blobs by
        // the already-replaced point `p` and First() may throw — confirm this
        // combination cannot occur in practice.
        if (pos.X == 0)
        {
            //left
            var b = blobs.First(x => x.CenterOfGravity == p);
            List <IntPoint> rightEdge;
            List <IntPoint> leftEdge;
            bc.GetBlobsLeftAndRightEdges(b, out leftEdge, out rightEdge);
            //var ep = leftEdge.First(x => x.Y == (int)Math.Round(p.Y));
            //p = ep;
            //p.Y = (float) Math.Round(leftEdge.Average(x => x.Y));
            p = leftEdge.First(x => x.Y == (int)Math.Round(leftEdge.Average(y => y.Y)));
        }
        else if (pos.X == parts)
        {
            //right
            var b = blobs.First(x => x.CenterOfGravity == p);
            List <IntPoint> rightEdge;
            List <IntPoint> leftEdge;
            bc.GetBlobsLeftAndRightEdges(b, out leftEdge, out rightEdge);
            //var ep = rightEdge.First(x => x.Y == (int)Math.Round(p.Y));
            //p.X = ep.X;
            p = rightEdge.First(x => x.Y == (int)Math.Round(rightEdge.Average(y => y.Y)));
        }
        if (pos.Y == 0)
        {
            //top
            var b = blobs.First(x => x.CenterOfGravity == p);
            List <IntPoint> bottomEdge;
            List <IntPoint> topEdge;
            bc.GetBlobsTopAndBottomEdges(b, out topEdge, out bottomEdge);
            //var ep = topEdge.First(x => x.X == (int)Math.Round(p.X));
            //p = ep;
            //p.X = (float) topEdge.Average(x => x.X);
            p = topEdge.First(x => x.X == (int)Math.Round(topEdge.Average(y => y.X)));
        }
        else if (pos.Y == parts)
        {
            //bottom
            var b = blobs.First(x => x.CenterOfGravity == p);
            List <IntPoint> bottomEdge;
            List <IntPoint> topEdge;
            bc.GetBlobsTopAndBottomEdges(b, out topEdge, out bottomEdge);
            //var ep = bottomEdge.First(x => x.X == (int)Math.Round(p.X));
            //p.Y = ep.Y;
            p = bottomEdge.First(x => x.X == (int)Math.Round(bottomEdge.Average(y => y.X)));
        }

        var ip = p.Round();
        Grid.AddPoint(pos.X * w, pos.Y * h, ip.X, ip.Y);
    }
    return(true);
}
/// <summary>
/// Search for glyphs in the specified image and recognize them.
/// </summary>
///
/// <param name="image">Image to search glyphs in.</param>
///
/// <returns>Return a list of found glyphs.</returns>
///
/// <remarks><para>The method does processing of the specified image and searches for glyphs in it of
/// the specified <see cref="GlyphSize">size</see>. In the case if <see cref="GlyphDatabase">glyphs' database</see>
/// is set, it tries to find a matching glyph in it for each found glyph in the image. If matching is found,
/// then <see cref="ExtractedGlyphData.RecognizedGlyph">RecognizedGlyph</see> and
/// <see cref="ExtractedGlyphData.RecognizedQuadrilateral">RecognizedQuadrilateral</see>
/// properties of <see cref="ExtractedGlyphData"/> are set correspondingly.</para></remarks>
///
/// <exception cref="UnsupportedImageFormatException">Pixel format of the specified image is not supported.
/// It must be 8 bpp indexed or 24/32 bpp color image.</exception>
///
public List <ExtractedGlyphData> FindGlyphs(UnmanagedImage image)
{
    List <ExtractedGlyphData> extractedGlyphs = new List <ExtractedGlyphData>();

    if ((image.PixelFormat != PixelFormat.Format8bppIndexed) &&
        (!Grayscale.CommonAlgorithms.BT709.FormatTranslations.ContainsKey(image.PixelFormat)))
    {
        throw new UnsupportedImageFormatException("Pixel format of the specified image is not supported.");
    }

    // 1 - grayscaling
    UnmanagedImage grayImage = null;
    if (image.PixelFormat == PixelFormat.Format8bppIndexed)
    {
        grayImage = image;
    }
    else
    {
        grayImage = UnmanagedImage.Create(image.Width, image.Height, PixelFormat.Format8bppIndexed);
        Grayscale.CommonAlgorithms.BT709.Apply(image, grayImage);
    }

    // 2 - Edge detection
    UnmanagedImage edgesImage = edgeDetector.Apply(grayImage);

    // 3 - Threshold edges
    thresholdFilter.ApplyInPlace(edgesImage);

    // 4 - Blob Counter
    blobCounter.ProcessImage(edgesImage);
    Blob[] blobs = blobCounter.GetObjectsInformation();

    // --- HLE
    // create copy of source image, so we could draw on it
    Bitmap imageCopy = image.ToManagedImage();
    System.Drawing.Imaging.BitmapData imageData = imageCopy.LockBits(
        new Rectangle(0, 0, image.Width, image.Height),
        System.Drawing.Imaging.ImageLockMode.ReadWrite, imageCopy.PixelFormat);
    // --- HLE

    // 5 - check each blob
    int counter = 1; // Counter -> HLE
    for (int i = 0, n = blobs.Length; i < n; i++)
    {
        List <IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
        List <IntPoint> corners = null;

        // does it look like a quadrilateral ?
        if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
        {
            // get edge points on the left and on the right side
            List <IntPoint> leftEdgePoints, rightEdgePoints;
            blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);

            // calculate average difference between pixel values from outside of the shape and from inside
            float diff = CalculateAverageEdgesBrightnessDifference(
                leftEdgePoints, rightEdgePoints, grayImage);

            // check average difference, which tells how much outside is lighter than inside on the average
            if (diff > 20)
            {
                // --- HLE
                // - draw on image
                Drawing.Polygon(imageData, corners, Color.Red);
                counter++; //counter -> HLE
                // --- HLE
                // perform glyph recognition
                ExtractedGlyphData glyphData = RecognizeGlyph(grayImage, corners, counter);
                if (glyphData != null)
                {
                    extractedGlyphs.Add(glyphData);
                    if (extractedGlyphs.Count >= maxNumberOfGlyphsToSearch)
                    {
                        break;
                    }
                }
            }
        }
    }

    // BUGFIX: the locked bitmap data was never unlocked and the bitmap copy
    // never disposed, leaking pinned bits and GDI resources on every call.
    imageCopy.UnlockBits(imageData);

    // --- HLE
    //Save out image for checking
    //imageCopy.Save("C:\\Users\\heve\\Documents\\_Work_PointCloud\\AAG\\ImageProcessing\\FoundObjects_dll.png", System.Drawing.Imaging.ImageFormat.Png);
    // --- HLE
    imageCopy.Dispose();

    // dispose resources
    if (image.PixelFormat != PixelFormat.Format8bppIndexed)
    {
        grayImage.Dispose();
    }
    edgesImage.Dispose();

    return(extractedGlyphs);
}
/// <summary>
/// Detects glyphs in the pre-computed contour image (imgContour): finds
/// quadrilateral blobs, recognizes each glyph, draws optional overlays and,
/// when <paramref name="CalculTailleTerrain"/> is set, estimates the
/// field-size ratio from the first valid glyph found.
/// </summary>
/// <param name="CalculTailleTerrain">When true, also compute the width/height
/// scale ratio of the playing field from the first recognized glyph.</param>
/// <returns>A 2-element {widthRatio, heightRatio} array scaled by 0.7, or null
/// when no usable glyph/ratio was found.</returns>
public double[] detectionGlyph(bool CalculTailleTerrain)
{
    bool Trouve = false;
    double[] ratio = new double[2] { 0, 0 };
    SimpleShapeChecker shapeChecker = new SimpleShapeChecker();
    BlobCounter blobCounter = new BlobCounter();
    blobCounter.MinHeight = 23;
    blobCounter.MinWidth = 23;
    blobCounter.FilterBlobs = true;
    blobCounter.ObjectsOrder = ObjectsOrder.Size;

    // 4 - find all stand alone blobs
    blobCounter.ProcessImage(imgContour);
    Blob[] blobs = blobCounter.GetObjectsInformation();

    // 5 - check each blob
    for (int i = 0, n = blobs.Length; i < n; i++)
    {
        List <IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
        List <IntPoint> corners = null;

        // is the selected shape a quadrilateral?
        if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
        {
            // fetch the blob's contour points
            List <IntPoint> leftEdgePoints, rightEdgePoints, topEdgePoints, bottomEdgePoints;
            Line Horizontale = Line.FromPoints(new IntPoint(0, 0), new IntPoint(10, 0));
            blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);
            blobCounter.GetBlobsTopAndBottomEdges(blobs[i], out topEdgePoints, out bottomEdgePoints);

            // calculate average difference between pixel values from outside of the
            // shape and from inside
            float diff = CalculateAverageEdgesBrightnessDifference(leftEdgePoints, rightEdgePoints, imgNB);

            // check average difference, which tells how much outside is lighter than
            // inside on the average
            if (diff > 20)
            {
                // warp the received quadrilateral into a square for recognition
                QuadrilateralTransformation quadrilateralTransformation = new QuadrilateralTransformation(corners, 60, 60);
                UnmanagedImage glyphImage = quadrilateralTransformation.Apply(imgNB);

                // contrast filter
                OtsuThreshold otsuThresholdFilter = new OtsuThreshold();
                otsuThresholdFilter.ApplyInPlace(glyphImage);
                imgContour = glyphImage;

                // glyph recognition
                Glyph Gl = new Glyph(glyphImage, GlyphSize);
                Gl.ReconnaissanceGlyph(corners, imgNB);

                // if the glyph is valid
                if (Gl.getIdentifiant() > 0)
                {
                    if (AutAffichage[0])
                    {
                        // colour the contours of the detected zones
                        UnImgReel.SetPixels(leftEdgePoints, Color.Red);
                        UnImgReel.SetPixels(rightEdgePoints, Color.Red);
                        UnImgReel.SetPixels(topEdgePoints, Color.Red);
                        UnImgReel.SetPixels(bottomEdgePoints, Color.Red);
                    }

                    // find the middle as the intersection of the two diagonals
                    Line line = Line.FromPoints(corners[0], corners[2]);
                    Line line2 = Line.FromPoints(corners[1], corners[3]);
                    IntPoint intersection = (IntPoint)line.GetIntersectionWith(line2);
                    if (AutAffichage[1])
                    {
                        dessinePoint(intersection, UnImgReel, 4, Color.Yellow);
                    }

                    // compute the rotation against the horizontal reference line
                    Line ComparasionAngle = Line.FromPoints(corners[0], corners[1]);
                    Double rotation = (int)ComparasionAngle.GetAngleBetweenLines(Horizontale);
                    rotation += 90 * Gl.getNbRotation();
                    Gl.rotation = 360 - rotation;
                    rotation *= (Math.PI / 180.0);

                    // compute a point at the tip of the gripper
                    float Taille = corners[0].DistanceTo(corners[1]);
                    float taille = (Taille / BibliotequeGlyph.Biblioteque[Gl.getPosition()].taille) * BibliotequeGlyph.Biblioteque[Gl.getPosition()].DistancePince;
                    int x = -(int)(System.Math.Sin(rotation) * taille);
                    int y = -(int)(System.Math.Cos(rotation) * taille);
                    x += (int)intersection.X;
                    y += (int)intersection.Y;
                    Gl.Position = new int[2] { x, y };
                    if (AutAffichage[2])
                    {
                        dessinePoint(new IntPoint(x, y), UnImgReel, 4, Color.Cyan);
                    }
                    imgContour = Gl.getImage();
                    addGlyph(Gl);

                    // estimate the field size from the first valid glyph only
                    if (CalculTailleTerrain == true && Trouve == false)
                    {
                        Trouve = true;
                        int tailleglyph = BibliotequeGlyph.Biblioteque[Gl.getPosition()].taille;

                        // Pythagoras to derive the size
                        Rectangle a = blobs[i].Rectangle;
                        double angle = -Gl.rotation + 180;
                        List <IntPoint> coins = new List <IntPoint>();
                        coins.Add(new IntPoint(100, 100));
                        coins.Add(new IntPoint(100, 100 + tailleglyph));
                        coins.Add(new IntPoint(100 + tailleglyph, 100 + tailleglyph));
                        coins.Add(new IntPoint(100 + tailleglyph, 100));
                        IntPoint Centre = new IntPoint((coins[2].X + coins[0].X) / 2, (coins[2].Y + coins[0].Y) / 2);
                        int radius = (int)(0.5 * Math.Sqrt(coins[0].DistanceTo(coins[1]) * coins[0].DistanceTo(coins[1]) + coins[1].DistanceTo(coins[2]) * coins[1].DistanceTo(coins[2])));
                        double alpha = Math.Atan2(coins[0].DistanceTo(coins[1]), coins[1].DistanceTo(coins[2])) * (180 / Math.PI);
                        double ang = 0;

                        // BUGFIX: the original loop reused the OUTER blob index
                        // (`for (i = 0; i < 4; i++)`), which corrupted the outer
                        // loop and skipped all remaining blobs after this branch.
                        // A dedicated index preserves the outer iteration.
                        for (int c = 0; c < 4; c++)
                        {
                            IntPoint tmp = coins[c];
                            switch (c)
                            {
                            case 0:
                                ang = alpha - 180 + angle;
                                break;

                            case 1:
                                ang = +angle - alpha;
                                break;

                            case 2:
                                ang = +angle + alpha;
                                break;

                            case 3:
                                ang = -alpha + 180 + angle;
                                break;
                            }
                            ang *= (Math.PI / 180);
                            tmp.X = (int)(Centre.X + radius * Math.Cos(ang));
                            tmp.Y = (int)(Centre.Y + radius * Math.Sin(ang));
                            coins[c] = tmp;
                        }

                        // axis-aligned bounding box of the rotated reference square
                        Rectangle r = new Rectangle(
                            min(coins[0].X, coins[1].X, coins[2].X, coins[3].X),
                            min(coins[0].Y, coins[1].Y, coins[2].Y, coins[3].Y),
                            max(coins[0].X, coins[1].X, coins[2].X, coins[3].X) - min(coins[0].X, coins[1].X, coins[2].X, coins[3].X),
                            max(coins[0].Y, coins[1].Y, coins[2].Y, coins[3].Y) - min(coins[0].Y, coins[1].Y, coins[2].Y, coins[3].Y));
                        ratio[0] = ((double)r.Width / (double)a.Width) * 1.48;
                        ratio[1] = ((double)r.Height / (double)a.Height) * 1.48;
                    }
                }
            }
        }
    }

    // degenerate ratios (0 or 1) mean the estimation failed
    if (Trouve == false || ratio[0] == 0 || ratio[0] == 1 || ratio[1] == 0 || ratio[1] == 1)
    {
        return(null);
    }
    ratio[0] *= 0.7;
    ratio[1] *= 0.7;
    return(ratio);
}
private void btn_search_Click(object sender, EventArgs e)
{
    // Let the user pick an X-ray image, show it, then crop and edge-detect the
    // palm plus a fixed set of bone regions (radius, ulna, metacarpals,
    // phalanges) into their preview picture boxes.
    openFileDialog1.Title = "영상파일 열기";
    openFileDialog1.Filter = "All Files(*.*)|*.*| Bitmap File(*.bmp)|*.bmp|GIF File(*.gif)|*.gif|JPEG File(*.jpg)|*.jpg|PNG file(*.png)|*.png|TIFF(*.tif)|*.tif";
    if (openFileDialog1.ShowDialog() == DialogResult.OK)
    {
        openstrFilename = openFileDialog1.FileName;
        // NOTE(review): Image.FromFile keeps the file locked until the Image
        // is disposed — confirm that is acceptable for this application.
        image = System.Drawing.Image.FromFile(openstrFilename);
        myBitmap = new Bitmap(image);
        this.xray_preview.Image = myBitmap; // PictureBox holding the original image
    }
    if (myBitmap == null)
    {
        // Dialog cancelled and no image loaded previously — nothing to process.
        return;
    }

    // Remove the temporary file left over from a previous run, if any.
    string filePath = @"Image\temp.jpg";
    FileInfo file = new FileInfo(filePath);
    if (file.Exists)
    {
        file.Delete();
    }

    // ---------------- Palm contour extraction ----------------
    // Drop the top 100 rows (caption area), then normalize to 600x600.
    Bitmap CroppedImage = myBitmap.Clone(new System.Drawing.Rectangle(0, 100, myBitmap.Width, (myBitmap.Height - 100)), myBitmap.PixelFormat);
    int width = 600;
    int height = 600;
    Size resize = new Size(width, height);
    resizeImage = new Bitmap(CroppedImage, resize);
    gsImage = Grayscale.CommonAlgorithms.BT709.Apply(resizeImage);
    filter = new CannyEdgeDetector();
    edge = filter.Apply(gsImage);

    // Blur the edge image to join broken contour fragments.
    Blur hfilter = new Blur();
    hfilter.ApplyInPlace(edge);

    // Find blobs on the edge image.
    BlobCounter hblobCounter = new BlobCounter();
    hblobCounter.ProcessImage(edge);
    Blob[] hblobs = hblobCounter.GetObjectsInformation();

    // Convex hull searching algorithm.
    GrahamConvexHull hhullFinder = new GrahamConvexHull();

    // Lock the edge image so hull polygons can be drawn onto it.
    BitmapData hdata = edge.LockBits(new Rectangle(0, 0, edge.Width, edge.Height),
                                     ImageLockMode.ReadWrite, edge.PixelFormat);
    List<IntPoint> hhull = new List<IntPoint>();
    List<IntPoint> hedgePoints = new List<IntPoint>();
    int hblobcount = 0;
    int hminX = 0, hmaxX = 700, hminY = 0, hmaxY = 700;
    foreach (Blob blob in hblobs)
    {
        List<IntPoint> leftPoints, rightPoints;
        // Get blob's edge points.
        hblobCounter.GetBlobsLeftAndRightEdges(blob, out leftPoints, out rightPoints);
        // NOTE(review): hedgePoints intentionally accumulates across blobs, so
        // each hull covers every blob seen so far — preserved as-is.
        hedgePoints.AddRange(leftPoints);
        hedgePoints.AddRange(rightPoints);
        // Blob's convex hull.
        hhull = hhullFinder.FindHull(hedgePoints);
        foreach (IntPoint hulls in hhull)
        {
            // Track the outermost bounding box of all hull points.
            if (hblobcount == 0)
            {
                hminX = hulls.X; hmaxX = hulls.X;
                hminY = hulls.Y; hmaxY = hulls.Y;
            }
            if (hminX > hulls.X) { hminX = hulls.X; }
            else if (hmaxX < hulls.X) { hmaxX = hulls.X; }
            if (hminY > hulls.Y) { hminY = hulls.Y; }
            else if (hmaxY < hulls.Y) { hmaxY = hulls.Y; }
            hblobcount++;
        }
        Drawing.Polygon(hdata, hhull, Color.White);
    }
    // BUGFIX: the lock was never released before 'edge' was cloned/reassigned,
    // leaking the locked BitmapData (the UnlockBits calls at the end of the
    // original method were commented out and would have targeted the wrong bitmap).
    edge.UnlockBits(hdata);
    edge = edge.Clone(new Rectangle(hminX, hminY, hmaxX - hminX, hmaxY - hminY), myBitmap.PixelFormat);
    this.xray_preview.Image = edge;

    ///////////////////////////////////
    // Bone segmentation and feature extraction:
    //   wrist:   radius and ulna (2 sites)
    //   palm:    1st/3rd/5th metacarpals (3 sites)
    //   fingers: 1st/3rd/5th proximal and distal phalanges (6 sites)
    //   fingers: 3rd/5th middle phalanges (2 sites)
    ///////////////////////////////////

    // ---------------- Radius (요골) extraction ----------------
    // Crop 250x180 region, trim borders, normalize to 125x125.
    CroppedImage1 = myBitmap.Clone(new System.Drawing.Rectangle(270, 620, 250, 180), myBitmap.PixelFormat);
    CroppedImage1 = CroppedImage1.Clone(new System.Drawing.Rectangle(10, 0, 230, 150), myBitmap.PixelFormat);
    width = 125;
    height = 125;
    resize = new Size(width, height);
    resizeImage = new Bitmap(CroppedImage1, resize);

    // Preprocessing and feature extraction.
    gsImage = Grayscale.CommonAlgorithms.BT709.Apply(resizeImage);
    filter = new CannyEdgeDetector();
    edge = filter.Apply(gsImage);
    Blur Bfilter = new Blur();
    Bfilter.ApplyInPlace(edge);

    BlobCounter blobCounter = new BlobCounter();
    blobCounter.ProcessImage(edge);
    Blob[] blobs = blobCounter.GetObjectsInformation();
    GrahamConvexHull hullFinder = new GrahamConvexHull();
    BitmapData data = edge.LockBits(new Rectangle(0, 0, edge.Width, edge.Height),
                                    ImageLockMode.ReadWrite, edge.PixelFormat);
    List<IntPoint> hull = new List<IntPoint>();
    List<IntPoint> edgePoints = new List<IntPoint>();
    int blobcount = 0;
    int minX = 0, maxX = 125, minY = 0, maxY = 125;
    foreach (Blob blob in blobs)
    {
        List<IntPoint> leftPoints, rightPoints;
        blobCounter.GetBlobsLeftAndRightEdges(blob, out leftPoints, out rightPoints);
        edgePoints.AddRange(leftPoints);
        edgePoints.AddRange(rightPoints);
        hull = hullFinder.FindHull(edgePoints);
        foreach (IntPoint hulls in hull)
        {
            // Track the outermost bounding box of all hull points.
            if (blobcount == 0)
            {
                minX = hulls.X; maxX = hulls.X;
                minY = hulls.Y; maxY = hulls.Y;
            }
            if (minX > hulls.X) { minX = hulls.X; }
            else if (maxX < hulls.X) { maxX = hulls.X; }
            if (minY > hulls.Y) { minY = hulls.Y; }
            else if (maxY < hulls.Y) { maxY = hulls.Y; }
            blobcount++;
        }
        Drawing.Polygon(data, hull, Color.White);
    }
    // BUGFIX: release the lock before abandoning this edge bitmap.
    edge.UnlockBits(data);
    edge = resizeImage.Clone(new System.Drawing.Rectangle(minX, minY, maxX - minX, maxY - minY), myBitmap.PixelFormat);
    CroppedImage1.Save(@"Image\temp.jpg", ImageFormat.Jpeg);
    this.pB_radius.Image = edge;

    // ---------------- Ulna (척골) extraction ----------------
    CroppedImage2 = myBitmap.Clone(new System.Drawing.Rectangle(133, 620, 200, 180), myBitmap.PixelFormat);
    CroppedImage2 = CroppedImage2.Clone(new System.Drawing.Rectangle(0, 20, 200, 150), myBitmap.PixelFormat);
    width = 125;
    height = 125;
    resize = new Size(width, height);
    resizeImage = new Bitmap(CroppedImage2, resize);
    gsImage = Grayscale.CommonAlgorithms.BT709.Apply(resizeImage);
    filter = new CannyEdgeDetector();
    edge = filter.Apply(gsImage);
    Bfilter = new Blur();
    Bfilter.ApplyInPlace(edge);

    blobCounter = new BlobCounter();
    blobCounter.ProcessImage(edge);
    blobs = blobCounter.GetObjectsInformation();
    hullFinder = new GrahamConvexHull();
    BitmapData data1 = edge.LockBits(new Rectangle(0, 0, edge.Width, edge.Height),
                                     ImageLockMode.ReadWrite, edge.PixelFormat);
    hull = new List<IntPoint>();
    edgePoints = new List<IntPoint>();
    blobcount = 0;
    minX = 0; maxX = 125; minY = 0; maxY = 125;
    foreach (Blob blob in blobs)
    {
        List<IntPoint> leftPoints, rightPoints;
        blobCounter.GetBlobsLeftAndRightEdges(blob, out leftPoints, out rightPoints);
        edgePoints.AddRange(leftPoints);
        edgePoints.AddRange(rightPoints);
        hull = hullFinder.FindHull(edgePoints);
        foreach (IntPoint hulls in hull)
        {
            // Track the outermost bounding box of all hull points.
            if (blobcount == 0)
            {
                minX = hulls.X; maxX = hulls.X;
                minY = hulls.Y; maxY = hulls.Y;
            }
            if (minX > hulls.X) { minX = hulls.X; }
            else if (maxX < hulls.X) { maxX = hulls.X; }
            if (minY > hulls.Y) { minY = hulls.Y; }
            else if (maxY < hulls.Y) { maxY = hulls.Y; }
            blobcount++;
        }
        Drawing.Polygon(data1, hull, Color.White);
    }
    // BUGFIX: release the lock before the bitmap goes out of use.
    edge.UnlockBits(data1);
    Bitmap edge1 = resizeImage.Clone(new System.Drawing.Rectangle(minX, minY, (maxX - minX), (maxY - minY)), myBitmap.PixelFormat);
    this.pB_ulna.Image = edge1;

    // ---------------- Fixed-position bone crops ----------------
    CroppedImage3 = myBitmap.Clone(new System.Drawing.Rectangle(390, 500, 180, 180), myBitmap.PixelFormat); // 3. 1st metacarpal
    resizeImage = new Bitmap(CroppedImage3, resize);
    this.pB_Met1.Image = CroppedImage3;
    CroppedImage4 = myBitmap.Clone(new System.Drawing.Rectangle(266, 266, 180, 180), myBitmap.PixelFormat); // 4. 3rd proximal/middle phalanx
    resizeImage = new Bitmap(CroppedImage4, resize);
    this.pB_Met3.Image = CroppedImage4;
    CroppedImage5 = myBitmap.Clone(new System.Drawing.Rectangle(75, 335, 180, 180), myBitmap.PixelFormat); // 5. 5th metacarpal/proximal phalanx
    resizeImage = new Bitmap(CroppedImage5, resize);
    this.pB_Met5.Image = CroppedImage5;
    CroppedImage6 = myBitmap.Clone(new System.Drawing.Rectangle(534, 410, 180, 180), myBitmap.PixelFormat); // 6. 1st proximal phalanx
    resizeImage = new Bitmap(CroppedImage6, resize);
    this.pB_Pph1.Image = CroppedImage6;
    CroppedImage7 = myBitmap.Clone(new System.Drawing.Rectangle(266, 266, 180, 180), myBitmap.PixelFormat); // 7. 3rd proximal/middle phalanx
    resizeImage = new Bitmap(CroppedImage7, resize);
    this.pB_Pph3.Image = CroppedImage7;
    CroppedImage8 = myBitmap.Clone(new System.Drawing.Rectangle(75, 335, 180, 180), myBitmap.PixelFormat); // 8. 5th metacarpal/proximal phalanx
    resizeImage = new Bitmap(CroppedImage8, resize);
    this.pB_Pph5.Image = CroppedImage8;
    CroppedImage9 = myBitmap.Clone(new System.Drawing.Rectangle(260, 110, 180, 180), myBitmap.PixelFormat); // 9. 3rd middle phalanx
    resizeImage = new Bitmap(CroppedImage9, resize);
    this.pB_Mph3.Image = CroppedImage9;
    CroppedImage10 = myBitmap.Clone(new System.Drawing.Rectangle(0, 250, 180, 180), myBitmap.PixelFormat); // 10. 5th middle phalanx
    resizeImage = new Bitmap(CroppedImage10, resize);
    this.pB_Mph5.Image = CroppedImage10;
    CroppedImage11 = myBitmap.Clone(new System.Drawing.Rectangle(620, 320, 180, 180), myBitmap.PixelFormat); // 11. 1st distal phalanx
    resizeImage = new Bitmap(CroppedImage11, resize);
    this.pB_Dph1.Image = CroppedImage11;
    CroppedImage12 = myBitmap.Clone(new System.Drawing.Rectangle(260, 0, 180, 180), myBitmap.PixelFormat); // 12. 3rd distal phalanx
    resizeImage = new Bitmap(CroppedImage12, resize);
    this.pB_Dph3.Image = CroppedImage12;
    CroppedImage13 = myBitmap.Clone(new System.Drawing.Rectangle(0, 133, 180, 180), myBitmap.PixelFormat); // 13. 5th distal phalanx
    resizeImage = new Bitmap(CroppedImage13, resize);
    this.pB_Dph5.Image = CroppedImage13;
}
private void AugmentedMethod2()
{
    // Detect blobs whose outside is significantly lighter than their inside
    // (candidate dark glyphs) on the image shown in picSource. Writes each
    // blob's brightness difference to txtOut, the count of accepted blobs to
    // lblCount, and the thresholded edge image to picResult.
    UnmanagedImage image = UnmanagedImage.FromManagedImage(new Bitmap(picSource.Image));
    UnmanagedImage grayImage = null;
    UnmanagedImage edgesImage = null;
    try
    {
        // 1 - grayscaling (skip conversion if the source is already 8bpp indexed;
        // in that case grayImage aliases image).
        if (image.PixelFormat == PixelFormat.Format8bppIndexed)
        {
            grayImage = image;
        }
        else
        {
            grayImage = UnmanagedImage.Create(image.Width, image.Height, PixelFormat.Format8bppIndexed);
            Grayscale.CommonAlgorithms.BT709.Apply(image, grayImage);
        }

        // 2 - Edge detection
        DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
        edgesImage = edgeDetector.Apply(grayImage);

        // 3 - Threshold edges
        Threshold thresholdFilter = new Threshold(40);
        thresholdFilter.ApplyInPlace(edgesImage);

        // Create and configure blob counter.
        BlobCounter blobCounter = new BlobCounter();
        blobCounter.MinHeight = 32;
        blobCounter.MinWidth = 32;
        blobCounter.FilterBlobs = true;
        blobCounter.ObjectsOrder = ObjectsOrder.Size;

        // 4 - find all stand-alone blobs
        blobCounter.ProcessImage(edgesImage);
        Blob[] blobs = blobCounter.GetObjectsInformation();

        int counter = 0;

        // 5 - check each blob
        for (int i = 0, n = blobs.Length; i < n; i++)
        {
            // Get edge points on the left and on the right side.
            List<IntPoint> leftEdgePoints, rightEdgePoints;
            blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);

            // Average difference between pixel values just outside the shape
            // and just inside it; large positive means outside is lighter.
            float diff = CalculateAverageEdgesBrightnessDifference(
                leftEdgePoints, rightEdgePoints, grayImage);

            if (diff >= 50)
            {
                ++counter;
            }
            txtOut.AppendText(diff + ",");
        }
        txtOut.AppendText(Environment.NewLine);
        lblCount.Text = counter.ToString();
        picResult.Image = edgesImage.ToManagedImage();
    }
    finally
    {
        // BUGFIX: the unmanaged images were never disposed, leaking unmanaged
        // memory on every call. grayImage may alias image, so dispose it only
        // when it is a distinct buffer.
        edgesImage?.Dispose();
        if (grayImage != null && !ReferenceEquals(grayImage, image))
        {
            grayImage.Dispose();
        }
        image.Dispose();
    }
}
public int SetImage(Bitmap image)
{
    // Detect small (6x6..8x8) bright blobs on a threshold-filtered clone of
    // 'image', populate the per-blob edge/hull/quadrilateral dictionaries and
    // the centre-of-gravity list, save the processed clone to disk, and return
    // the number of blobs found.
    leftEdges.Clear();
    rightEdges.Clear();
    topEdges.Clear();
    bottomEdges.Clear();
    hulls.Clear();
    quadrilaterals.Clear();
    selectedBlobID = 0;

    Bitmap imageclone = AForge.Imaging.Image.Clone(image, image.PixelFormat);

    Median filterMedian = new Median();
    Threshold filterThreshold = new Threshold(250);
    BlobsFiltering filterBlobsFiltering = new BlobsFiltering();
    filterBlobsFiltering.CoupledSizeFiltering = false;
    filterBlobsFiltering.MinWidth = 6;
    filterBlobsFiltering.MinHeight = 6;
    filterBlobsFiltering.MaxWidth = 8;
    filterBlobsFiltering.MaxHeight = 8;

    // Pipeline order matters: binarize, drop out-of-range blobs, then median
    // smooth the survivors.
    filterThreshold.ApplyInPlace(imageclone);
    filterBlobsFiltering.ApplyInPlace(imageclone);
    filterMedian.ApplyInPlace(imageclone);

    imageWidth = imageclone.Width;
    imageHeight = imageclone.Height;
    blobCounter.ProcessImage(imageclone);
    blobs = blobCounter.GetObjectsInformation();
    center = new List<AForge.Point>();
    GrahamConvexHull grahamScan = new GrahamConvexHull();

    foreach (Blob blob in blobs)
    {
        // Collect edge points (the out parameters are assigned by the calls,
        // so no pre-initialization is needed).
        List<IntPoint> leftEdge, rightEdge, topEdge, bottomEdge;
        blobCounter.GetBlobsLeftAndRightEdges(blob, out leftEdge, out rightEdge);
        blobCounter.GetBlobsTopAndBottomEdges(blob, out topEdge, out bottomEdge);
        AForge.Point centering = blob.CenterOfGravity;
        leftEdges.Add(blob.ID, leftEdge);
        rightEdges.Add(blob.ID, rightEdge);
        topEdges.Add(blob.ID, topEdge);
        bottomEdges.Add(blob.ID, bottomEdge);

        // Find convex hull from the left+right edge points.
        List<IntPoint> edgePoints = new List<IntPoint>();
        edgePoints.AddRange(leftEdge);
        edgePoints.AddRange(rightEdge);
        List<IntPoint> hull = grahamScan.FindHull(edgePoints);
        hulls.Add(blob.ID, hull);

        center.Add(centering);

        // Find quadrilateral; degenerate hulls are used as-is.
        List<IntPoint> quadrilateral;
        if (hull.Count < 4)
        {
            quadrilateral = new List<IntPoint>(hull);
        }
        else
        {
            quadrilateral = PointsCloud.FindQuadrilateralCorners(hull);
        }
        quadrilaterals.Add(blob.ID, quadrilateral);

        // Shift all points by (1,1) for visualization.
        IntPoint shift = new IntPoint(1, 1);
        PointsCloud.Shift(leftEdge, shift);
        PointsCloud.Shift(rightEdge, shift);
        PointsCloud.Shift(topEdge, shift);
        PointsCloud.Shift(bottomEdge, shift);
        PointsCloud.Shift(hull, shift);
        PointsCloud.Shift(quadrilateral, shift);
    }

    if (image.PixelFormat != PixelFormat.Format24bppRgb)
    {
        // Restrict blobs to the region of interest, then locate the reference
        // point and pick position (magic numbers tuned for this camera setup).
        filterBlobX(516.0, 1117.0);
        filterBlobY(357.0, 460.0);
        refPointList = new List<AForge.Point>();
        findRef(20.0, 1.5);
        findPick(refPoint.X, refPoint.Y);
    }

    file = path + "visimg.bmp";
    imageclone.Save(file);
    imageclone.Dispose(); // the processed clone is no longer needed once saved

    // blobs.Length is never negative, so the original
    // "if (> 0) return length else return 0" collapses to this.
    return blobs.Length;
}
public void RunEdgeDetection(EdgeDetectionOptions options)
{
    // Run edge detection + quadrilateral search once per instance: detected
    // quadrilateral corners are accumulated in Corners; optionally shows the
    // intermediate edge image and each transformed quad.
    if (HasRunEdgeDetection)
    {
        return;
    }
    using (Bitmap newBitmap = LoadBitmap())
    {
        Rectangle rect = new Rectangle(0, 0, newBitmap.Width, newBitmap.Height);
        using (UnmanagedImage image = new UnmanagedImage(newBitmap.LockBits(rect, ImageLockMode.ReadWrite, newBitmap.PixelFormat)))
        {
            using (UnmanagedImage grayImage = UnmanagedImage.Create(image.Width, image.Height, PixelFormat.Format8bppIndexed))
            {
                Grayscale.CommonAlgorithms.BT709.Apply(image, grayImage);
                using (UnmanagedImage edgesImage = EDGE_DETECTOR.Apply(grayImage))
                {
                    // (Removed a dead duplicate 'Threshold threshold' local that
                    // was constructed but never applied.)
                    Threshold thresholdFilter = new Threshold(options.Threshold);
                    thresholdFilter.ApplyInPlace(edgesImage);
                    if (options.ShowEdgesImage)
                    {
                        ImageForm.ShowImage("Enhanced Edges Image", edgesImage);
                    }
                    BlobCounter blobCounter = new BlobCounter();
                    blobCounter.MinHeight = MINIMUM_BLOB_SIZE;
                    blobCounter.MinWidth = MINIMUM_BLOB_SIZE;
                    blobCounter.FilterBlobs = true;
                    blobCounter.ObjectsOrder = ObjectsOrder.Size;
                    blobCounter.ProcessImage(edgesImage);
                    Blob[] blobs = blobCounter.GetObjectsInformation();
                    Corners.Clear();
                    foreach (Blob blob in blobs)
                    {
                        List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blob);
                        List<IntPoint> corners = null;
                        if (SHAPE_CHECKER.IsQuadrilateral(edgePoints, out corners))
                        {
                            // (Removed an unused GetBlobsLeftAndRightEdges call
                            // whose results were never read.)
                            Corners.Add(corners);
                            if (options.ShowBlobImages)
                            {
                                // Warp the detected quad to a 200x200 preview.
                                QuadrilateralTransformation quadTransformation =
                                    new QuadrilateralTransformation(corners, 200, 200);
                                using (UnmanagedImage quadImage = quadTransformation.Apply(image))
                                {
                                    ImageForm.ShowImage("Quad Image", quadImage);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
public void Detect(ref Bitmap image)
{
    // Detect dark square markers in the frame, report each marker's centre to
    // the LocationSourceManager, and replace 'image' with the selected display
    // bitmap (camera frame, edge image, or thresholded edge image).
    List <List <IntPoint> > markers = new List <List <IntPoint> >();
    Bitmap tmp = image; // bitmap handed back through the ref parameter at the end

    // Grayscale the frame via an unmanaged wrapper around the locked bits.
    BitmapData bitmapData = image.LockBits(new Rectangle(0, 0, image.Width, image.Height), ImageLockMode.ReadOnly, image.PixelFormat);
    UnmanagedImage unmanagedImage = new UnmanagedImage(bitmapData);
    UnmanagedImage grayImage = UnmanagedImage.Create(unmanagedImage.Width, unmanagedImage.Height, PixelFormat.Format8bppIndexed);
    Grayscale.CommonAlgorithms.BT709.Apply(unmanagedImage, grayImage);

    // Edge detection, then unlock the source frame.
    DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
    UnmanagedImage edgesImage = edgeDetector.Apply(grayImage);
    image.UnlockBits(bitmapData);

    if (this.edgeImage.Checked)
    {
        // UI option: display the raw edge image instead of the camera frame.
        tmp = edgesImage.ToManagedImage().Clone(new Rectangle(0, 0, edgesImage.Width, edgesImage.Height), PixelFormat.Format24bppRgb);
    }

    // Binarize the edge image.
    Threshold thresholdFilter = new Threshold(this.binThreshold);
    thresholdFilter.ApplyInPlace(edgesImage);
    if (this.thresholdEdgeImage.Checked)
    {
        // UI option: display the thresholded edge image.
        tmp = edgesImage.ToManagedImage().Clone(new Rectangle(0, 0, edgesImage.Width, edgesImage.Height), PixelFormat.Format24bppRgb);
    }

    this.blobCounter.ProcessImage(edgesImage);
    Blob[] blobs = blobCounter.GetObjectsInformation();
    for (int i = 0, n = blobs.Length; i < n; i++)
    {
        List <IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
        List <IntPoint> corners = null;
        if (this.isSquare(edgePoints, out corners))
        {
            List <IntPoint> leftEdgePoints, rightEdgePoints;
            blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);
            // Markers are darker than their surroundings: keep the square only
            // when the outside is, on average, noticeably brighter than the inside.
            float diff = calculateAverageEdgesBrightnessDifference(
                leftEdgePoints, rightEdgePoints, grayImage);
            if (diff > 50)
            {
                markers.Add(corners);
            }
        }
    }

    foreach (List <IntPoint> marker in markers)
    {
        Color markerColor;
        IntPoint markerOrientation = this.markerOrientation(image, marker, out markerColor);
        // Centre = midpoint of the diagonal from corner[0] to corner[2].
        IntPoint center = marker[2] - marker[0];
        center.X = marker[0].X + Convert.ToInt32(center.X * 0.5);
        center.Y = marker[0].Y + Convert.ToInt32(center.Y * 0.5);
        if (this.drawMarkersOnVideo.Checked)
        {
            // Draw onto whichever bitmap is being displayed.
            if ((this.edgeImage.Checked) || (this.thresholdEdgeImage.Checked))
            {
                this.drawMarker(tmp, marker, markerOrientation, markerColor);
            }
            else
            {
                this.drawMarker(image, marker, markerOrientation, markerColor);
            }
        }
        // Report this marker's position, keyed by its colour.
        ColorDiscriminator discriminator = new ColorDiscriminator();
        discriminator.Color = markerColor;
        LocationSourceManager.Instance.updateLocationSource(discriminator, center);
    }
    image = tmp;
}
public void DetectBlobs()
{
    // Find blobs whose width/height ratio matches a mouth or an eye, and add
    // each matching blob's average edge-point position to blobCentres.

    // Set filtering options.
    blobCounter.FilterBlobs = true;
    blobCounter.MinWidth = 25;
    blobCounter.MaxWidth = 220;
    blobCounter.MinHeight = 15;
    blobCounter.MaxHeight = 100;
    blobCounter.ProcessImage(image);
    Blob[] blobs = blobCounter.GetObjectsInformation();

    // (Removed an unused GrahamConvexHull instance and a LockBits/UnlockBits
    // pair — nothing was ever drawn on the locked image.)

    foreach (Blob blob in blobs)
    {
        // Width/height ratio classifies the blob as mouth-like or eye-like.
        float blobRatio = (float)blob.Rectangle.Width / (float)blob.Rectangle.Height;
        bool mouthRatio = ((float)3.5 < blobRatio && (float)8 > blobRatio);
        bool eyeRatio = ((float)1.2 < blobRatio && (float)3.5 >= blobRatio);

        // Only blobs within the allowed ratios are processed.
        if (mouthRatio || eyeRatio)
        {
            List<IntPoint> leftPoints, rightPoints, edgePoints = new List<IntPoint>();
            System.Drawing.Point blobCenter;

            // Get the edge points of the blob.
            blobCounter.GetBlobsLeftAndRightEdges(blob, out leftPoints, out rightPoints);
            edgePoints.AddRange(leftPoints);
            edgePoints.AddRange(rightPoints);

            // Weighted centre = mean of all edge points. Blob edges are never
            // empty for a detected blob, so the division is safe.
            int avgX = 0;
            int avgY = 0;
            foreach (IntPoint edge in edgePoints)
            {
                avgX += edge.X;
                avgY += edge.Y;
            }
            avgX /= edgePoints.Count;
            avgY /= edgePoints.Count;

            blobCenter = new System.Drawing.Point(avgX, avgY);
            blobCentres.Add(blobCenter);
        }
    }
}
/// <summary>
/// Detect blobs in the bitmap image (gray or RGB): size-filter them, keep the
/// ones that pass the fullness and circle-shape tests, record their edges and
/// convex hulls, and draw the result via <c>DrawBlobImage</c>.
/// </summary>
/// <param name="inputImage">Input bitmap image (gray or RGB).</param>
/// <param name="minWidth">Minimum accepted blob width, pixels.</param>
/// <param name="maxWidth">Maximum accepted blob width, pixels.</param>
/// <param name="minHeight">Minimum accepted blob height, pixels.</param>
/// <param name="maxHeight">Maximum accepted blob height, pixels.</param>
/// <param name="minFullness">0 if you don't want a fullness filter.</param>
/// <param name="maxNumOfFilteredBlobs">How many filtered blobs may be saved in the blobs_Filtered list.</param>
public Blob_Aforge(Bitmap inputImage, int minWidth, int maxWidth, int minHeight, int maxHeight, double minFullness, int maxNumOfFilteredBlobs)
{
    try
    {
        leftEdges.Clear();
        rightEdges.Clear();
        topEdges.Clear();
        bottomEdges.Clear();
        hulls.Clear();

        // Work on a 24bpp clone of the input.
        this.image = AForge.Imaging.Image.Clone(inputImage, PixelFormat.Format24bppRgb);
        _imageWidth = this.image.Width;
        _imageHeight = this.image.Height;

        // Size filter.
        blobCounter.FilterBlobs = true;
        blobCounter.MinHeight = minHeight;
        blobCounter.MinWidth = minWidth;
        blobCounter.MaxHeight = maxHeight;
        blobCounter.MaxWidth = maxWidth;
        blobCounter.ObjectsOrder = ObjectsOrder.Area;

        // Detection.
        blobCounter.ProcessImage(this.image);
        blobs_all = blobCounter.GetObjectsInformation();

        GrahamConvexHull grahamScan = new GrahamConvexHull();

        // Shape-checker tolerances are loop-invariant; set them once.
        shapeChecker.MinAcceptableDistortion = (float)0.5;
        shapeChecker.RelativeDistortionLimit = (float)0.15;

        foreach (Blob blob in blobs_all)
        {
            fullness = blob.Fullness;
            // BUGFIX(idiom): use short-circuit && instead of non-short-circuit &.
            if (fullness > minFullness && blobs_Filtered.Count < maxNumOfFilteredBlobs) // fullness filter
            {
                // Collect edge points.
                List<IntPoint> leftEdge;
                List<IntPoint> rightEdge;
                blobCounter.GetBlobsLeftAndRightEdges(blob, out leftEdge, out rightEdge);
                leftEdges.Add(blob.ID, leftEdge);
                rightEdges.Add(blob.ID, rightEdge);

                // Find convex hull of the left+right edge points.
                List<IntPoint> edgePoints = new List<IntPoint>();
                edgePoints.AddRange(leftEdge);
                edgePoints.AddRange(rightEdge);

                if (shapeChecker.IsCircle(edgePoints))
                {
                    blobs_Filtered.Add(blob);
                    List<IntPoint> hull = grahamScan.FindHull(edgePoints);
                    // hulls keys are synchronized with blobs_Filtered indices.
                    hulls.Add(blobs_Filtered.Count - 1, hull);
                }
            }
        }
        DrawBlobImage();
    }
    catch (Exception)
    {
        // Deliberately swallowed: a detection failure leaves this instance
        // empty rather than crashing the caller. Consider logging here.
    }
}
private void process_Click(object sender, EventArgs e)
{
    // Segment coins in 'processed' (grayscale -> threshold -> erosion -> blob
    // filtering -> invert), draw each coin's convex hull, classify each coin
    // by blob area, and show the count and total value in the text boxes.

    // Grayscale.
    Grayscale filter1 = new Grayscale(0.2125, 0.7154, 0.0721);
    processed = filter1.Apply(processed);

    // Threshold.
    var filter2 = new AForge.Imaging.Filters.Threshold(175);
    processed = filter2.Apply(processed);

    // Erosion.
    // BUGFIX: the original called filter3.Apply(processed) and discarded the
    // returned image, so the erosion never took effect; apply it in place.
    Erosion filter3 = new Erosion();
    filter3.ApplyInPlace(processed);

    // Drop blobs smaller than 25x25 (both dimensions coupled).
    BlobsFiltering filter = new BlobsFiltering();
    filter.CoupledSizeFiltering = true;
    filter.MinWidth = 25;
    filter.MinHeight = 25;
    filter.ApplyInPlace(processed);

    Invert filterInvert = new Invert();
    filterInvert.ApplyInPlace(processed);

    BlobCounterBase bc = new BlobCounter();
    bc.FilterBlobs = true;
    bc.MinWidth = 30;
    bc.MinHeight = 30;
    // Apply both width and height as coupled constraints when identifying blobs.
    bc.CoupledSizeFiltering = true;
    bc.ProcessImage(processed);
    Blob[] blobs = bc.GetObjectsInformation();
    int count = bc.ObjectsCount;

    // Lock image to draw on it.
    BitmapData data = processed.LockBits(
        new Rectangle(0, 0, processed.Width, processed.Height),
        ImageLockMode.ReadWrite, processed.PixelFormat);

    // The hull finder is stateless; one instance serves every blob.
    IConvexHullAlgorithm hullFinder = new GrahamConvexHull();

    foreach (Blob blob in blobs)
    {
        List<IntPoint> leftPoints, rightPoints, edgePoints;
        edgePoints = new List<IntPoint>();

        // Get blob's edge points and its convex hull.
        bc.GetBlobsLeftAndRightEdges(blob, out leftPoints, out rightPoints);
        edgePoints.AddRange(leftPoints);
        edgePoints.AddRange(rightPoints);
        List<IntPoint> hull = hullFinder.FindHull(edgePoints);
        Drawing.Polygon(data, hull, Color.Yellow);

        // Classify the coin by blob area.
        // NOTE(review): areas in the gaps 8000-9000, 9000-11000 (no, 9000-11000
        // falls through) and 13000-16000 are counted as five pesos by the final
        // else — confirm this is intentional.
        if (blob.Area < 8000)
        {
            five_cents++;
        }
        else if (blob.Area < 9000 && blob.Area > 8000)
        {
            ten_cents++;
        }
        else if (blob.Area < 13000 && blob.Area > 11000)
        {
            twentyfive_cents++;
        }
        else if (blob.Area < 17000 && blob.Area > 16000)
        {
            one_peso++;
        }
        else
        {
            five_peso++;
        }
    }

    processed.UnlockBits(data);
    pictureBox2.Image = processed;
    pictureBox2.SizeMode = PictureBoxSizeMode.StretchImage;
    textBox1.Text += count;
    textBox2.Text += ((five_cents * .05) + (ten_cents * .10) + (twentyfive_cents * .25) + (one_peso * 1) + (five_peso * 5));
}
public ImageProcessorResult Process(Bitmap bitmap, bool rgb)
{
    // Detect square markers in the frame: quadrilateral blobs passing the
    // angle/length and dark-border tests become markers; each marker is then
    // orientation-corrected, colour-classified (RGB or black/white depending
    // on 'rgb'), and measured for distance/angles via the core service.
    var result = new ImageProcessorResult();

    _viewConfig = _configService.ViewConfig;
    _imageProcessorConfig = _configService.ImageProcessorConfig;

    new Blur().ApplyInPlace(bitmap);
    Bitmap overlay = bitmap;
    if (_viewConfig.BackgroundImage == ViewConfigBackgroundImage.CameraRaw)
    {
        // Raw camera frame as background.
        overlay = bitmap;
    }

    // Grayscale.
    var grayscale = Grayscale.CommonAlgorithms.BT709.Apply(bitmap);

    // Edge detection.
    var edges = new DifferenceEdgeDetector().Apply(grayscale);
    if (_viewConfig.BackgroundImage == ViewConfigBackgroundImage.Edge)
    {
        overlay = new GrayscaleToRGB().Apply(edges);
    }

    // Binarization (Otsu, so no fixed threshold is needed).
    var threshold = new OtsuThreshold().Apply(edges);
    if (_viewConfig.BackgroundImage == ViewConfigBackgroundImage.Binary)
    {
        overlay = new GrayscaleToRGB().Apply(threshold);
    }

    // Clone the overlay and lock it for drawing.
    overlay = overlay.CloneBitmap();
    var overlayData = overlay.LockBits(overlay.GetRectangle(), ImageLockMode.ReadWrite, overlay.PixelFormat);

    _blobCounter = new BlobCounter();
    _blobCounter.MinHeight = _imageProcessorConfig.BlobMinHeight;
    // BUGFIX: the original assigned BlobMinWidth to MinHeight a second time,
    // so MinWidth was never configured and MinHeight was overwritten.
    _blobCounter.MinWidth = _imageProcessorConfig.BlobMinWidth;
    _blobCounter.FilterBlobs = true;
    _blobCounter.ObjectsOrder = ObjectsOrder.XY;
    _blobCounter.ProcessImage(threshold);
    var blobs = _blobCounter.GetObjectsInformation();
    var shapeChecker = new SimpleShapeChecker();

    // Examine every blob.
    foreach (var blob in blobs)
    {
        // Candidate marker for this blob.
        Marker marker = new Marker();
        var edgePoints = _blobCounter.GetBlobsEdgePoints(blob);

        // Quadrilateral test.
        var points = new List<IntPoint>();
        if (shapeChecker.IsQuadrilateral(edgePoints, out points))
        {
            marker.Points = points;
            List<IntPoint> leftEdge, rightEdge;
            _blobCounter.GetBlobsLeftAndRightEdges(blob, out leftEdge, out rightEdge);
            // Angle and side-length test.
            if (MoreQuadTest(blob, marker, leftEdge, rightEdge))
            {
                // Dark-border test.
                if (BorderTest(blob, marker, grayscale, leftEdge, rightEdge))
                {
                    // Accepted.
                    result.Markers.Add(marker);
                    // Centre of gravity relative to the image centre (Y flipped).
                    marker.X = (int)(blob.CenterOfGravity.X - (threshold.Width / 2));
                    marker.Y = (int)(-(blob.CenterOfGravity.Y - (threshold.Height / 2)));
                    // Area as computed by the framework.
                    marker.FrameworkArea = blob.Area;
                    // Draw the marker outline on the overlay.
                    ApplyOverlay(overlayData, marker.Points);
                }
            }
        }
    }
    overlay.UnlockBits(overlayData);

    foreach (var marker in result.Markers)
    {
        var points = marker.Points;

        // Orientation correction: rotate the corner list when the quad is
        // "lying down" relative to its first side.
        var sideLength = points[0].DistanceTo(points[1]);
        if (points[2].Y - points[1].Y < sideLength / 1.6)
        {
            points = new List<IntPoint>(
                new IntPoint[] { points[1], points[2], points[3], points[0] });
            marker.Points = points;
        }

        // Undo the perspective distortion.
        var quadrilateralTransformation = new QuadrilateralTransformation(points,
            _imageProcessorConfig.QuadrilateralTransformationWidth,
            _imageProcessorConfig.QuadrilateralTransformationHeight);
        var transformed = quadrilateralTransformation.Apply(bitmap);

        // Split into quadrants for rotation and colour classification.
        int halfWidth = _imageProcessorConfig.QuadrilateralTransformationWidth / 2,
            halfHeight = _imageProcessorConfig.QuadrilateralTransformationHeight / 2;
        // Index x maps to quadrant x + 1.
        var crops = new[]
        {
            new Crop(new Rectangle(halfWidth, 0, halfWidth, halfHeight)),
            new Crop(new Rectangle(0, 0, halfWidth, halfHeight)),
            new Crop(new Rectangle(0, halfHeight, halfWidth, halfHeight)),
            new Crop(new Rectangle(halfWidth, halfHeight, halfWidth, halfHeight))
        };
        var quadImage = new[]
        {
            crops[0].Apply(transformed),
            crops[1].Apply(transformed),
            crops[2].Apply(transformed),
            crops[3].Apply(transformed)
        };
        // Per-quadrant luminance after filtering for each candidate colour.
        var filteredResult = new[]
        {
            new { Img = quadImage[0], Red = Filter(quadImage[0], MarkerColor.Red).Luminance(), Green = Filter(quadImage[0], MarkerColor.Green).Luminance(), Blue = Filter(quadImage[0], MarkerColor.Blue).Luminance(), White = Filter(quadImage[0], MarkerColor.White).Luminance() },
            new { Img = quadImage[1], Red = Filter(quadImage[1], MarkerColor.Red).Luminance(), Green = Filter(quadImage[1], MarkerColor.Green).Luminance(), Blue = Filter(quadImage[1], MarkerColor.Blue).Luminance(), White = Filter(quadImage[1], MarkerColor.White).Luminance() },
            new { Img = quadImage[2], Red = Filter(quadImage[2], MarkerColor.Red).Luminance(), Green = Filter(quadImage[2], MarkerColor.Green).Luminance(), Blue = Filter(quadImage[2], MarkerColor.Blue).Luminance(), White = Filter(quadImage[2], MarkerColor.White).Luminance() },
            new { Img = quadImage[3], Red = Filter(quadImage[3], MarkerColor.Red).Luminance(), Green = Filter(quadImage[3], MarkerColor.Green).Luminance(), Blue = Filter(quadImage[3], MarkerColor.Blue).Luminance(), White = Filter(quadImage[3], MarkerColor.White).Luminance() }
        };
        var whiteDesc = filteredResult.OrderByDescending(a => a.White).ToArray();

        if (rgb)
        {
            // RGB classification: ignore the whitest quadrant (orientation
            // patch) and pick the dominant channel over the rest.
            var colorQuad = whiteDesc.Skip(1);
            var red = colorQuad.Sum(a => a.Red);
            var green = colorQuad.Sum(a => a.Green);
            var blue = colorQuad.Sum(a => a.Blue);
            Console.WriteLine("{0}: {1} {2} {3}", colorQuad.Count(), red, green, blue);
            var max = Math.Max(red, Math.Max(green, blue));
            if (red == max)
            {
                marker.Color = MarkerColor.Red;
            }
            else if (green == max)
            {
                marker.Color = MarkerColor.Green;
            }
            else if (blue == max)
            {
                marker.Color = MarkerColor.Blue;
            }
        }
        else
        {
            // Black/white classification: compare the brightest quadrant
            // against the mean of the other three.
            var whiteMax = whiteDesc[0].White;
            var whiteRest = (whiteDesc[1].White + whiteDesc[2].White + whiteDesc[3].White) / 3;
            if (whiteMax - whiteRest < _imageProcessorConfig.ColorTestWhite)
            {
                marker.Color = MarkerColor.White;
            }
            else
            {
                marker.Color = MarkerColor.Black;
            }
        }

        // Rotation = index of the whitest quadrant.
        for (int i = 0; i < 4; i++)
        {
            if (filteredResult[i].White == whiteDesc.First().White)
            {
                marker.Rotate = (MarkerRotate)(i + 1);
                break;
            }
        }
        // White markers carry no rotation information.
        if (marker.Color == MarkerColor.White)
        {
            marker.Rotate = MarkerRotate.None;
        }

        // Translate corner coordinates to be relative to the image centre.
        for (int i = 0; i < marker.Points.Count; i++)
        {
            marker.Points[i] = new IntPoint
            {
                X = marker.Points[i].X - _configService.DeviceConfig.PixelWidth / 2,
                Y = marker.Points[i].Y - _configService.DeviceConfig.PixelHeight / 2
            };
        }

        // Geometric distance/angle estimation via the core service.
        var coreResult = _coreService.Query(marker.Points, _imageProcessorConfig.MarkerSize);
        marker.EuclideanDistance = coreResult.Distance;
        marker.TiltAngle = Math.Asin(coreResult.TranslationVector[1] / marker.EuclideanDistance);
        marker.PanAngle = Math.Asin(coreResult.TranslationVector[0] / marker.EuclideanDistance);
        if (marker.PanAngle > Math.PI)
        {
            // Map into the negative range.
            marker.PanAngle = 2 * Math.PI - marker.PanAngle;
        }
        marker.TransX = coreResult.TranslationVector[0];
        marker.TransY = coreResult.TranslationVector[1];
        marker.TransZ = coreResult.TranslationVector[2];
    }

    BackgroundBitmap = overlay;
    Console.WriteLine();
    foreach (var marker in result.Markers)
    {
        Console.WriteLine(marker.Color);
    }
    return result;
}
// Process specified image trying to recognize counter's image.
// Pipeline: grayscale -> difference edge detection -> fixed threshold ->
// blob extraction -> quadrilateral check -> brightness-contrast filter ->
// perspective correction -> Otsu binarization -> glyph recognition.
// Every intermediate image/message is written to the supplied log.
public void Process(Bitmap image, IImageProcessingLog log)
{
    log.AddMessage("Image size: " + image.Width + " x " + image.Height);

    // 1 - Grayscale
    Bitmap grayImage = Grayscale.CommonAlgorithms.BT709.Apply(image);
    log.AddImage("Grayscale", grayImage);

    // 2 - Edge detection
    DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector( );
    Bitmap edges = edgeDetector.Apply(grayImage);
    log.AddImage("Edges", edges);

    // 3 - Threshold edges
    Threshold thresholdFilter = new Threshold(40);
    thresholdFilter.ApplyInPlace(edges);
    log.AddImage("Thresholded Edges", edges);

    // 4 - Blob Counter (ignore blobs smaller than 32x32, biggest first)
    BlobCounter blobCounter = new BlobCounter( );
    blobCounter.MinHeight = 32;
    blobCounter.MinWidth = 32;
    blobCounter.FilterBlobs = true;
    blobCounter.ObjectsOrder = ObjectsOrder.Size;
    blobCounter.ProcessImage(edges);
    Blob[] blobs = blobCounter.GetObjectsInformation( );

    // create unmanaged copy of source image, so we could draw on it
    UnmanagedImage imageData = UnmanagedImage.FromManagedImage(image);
    // get unmanaged copy of grayscale image, so we could access its pixel values
    UnmanagedImage grayUI = UnmanagedImage.FromManagedImage(grayImage);

    // list of found dark/black quadrilaterals surrounded by white area
    List <List <IntPoint> > foundObjects = new List <List <IntPoint> >( );
    // shape checker for checking quadrilaterals
    SimpleShapeChecker shapeChecker = new SimpleShapeChecker( );

    // 5 - check each blob
    for (int i = 0, n = blobs.Length; i < n; i++)
    {
        List <IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
        List <IntPoint> corners = null;

        // does it look like a quadrilateral ?
        if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
        {
            // do some more checks to filter so unacceptable shapes
            // if ( CheckIfShapeIsAcceptable( corners ) )
            {
                log.AddMessage("Blob size: " + blobs[i].Rectangle.Width + " x " + blobs[i].Rectangle.Height);

                // get edge points on the left and on the right side
                List <IntPoint> leftEdgePoints, rightEdgePoints;
                blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);

                // calculate average difference between pixel values from outside of the shape and from inside
                float diff = this.CalculateAverageEdgesBrightnessDifference(
                    leftEdgePoints, rightEdgePoints, grayUI);
                log.AddMessage("Avg Diff: " + diff);

                // check average difference, which tells how much outside is lighter than inside on the average
                if (diff > 20)
                {
                    Drawing.Polygon(imageData, corners, Color.FromArgb(255, 255, 0, 0));
                    // add the object to the list of interesting objects for further processing
                    foundObjects.Add(corners);
                }
            }
        }
    }

    // grayUI is no longer needed; free its unmanaged buffer (was leaked before)
    grayUI.Dispose();

    // ToManagedImage() copies the pixels, so the unmanaged image can be freed here
    log.AddImage("Potential glyps", imageData.ToManagedImage());
    imageData.Dispose();

    int counter = 1;

    // further processing of each potential glyph
    foreach (List <IntPoint> corners in foundObjects)
    {
        log.AddMessage("Glyph #" + counter);
        log.AddMessage(string.Format("Corners: ({0}), ({1}), ({2}), ({3})",
            corners[0], corners[1], corners[2], corners[3]));

        // 6 - do quadrilateral transformation to a 250x250 top-down view
        QuadrilateralTransformation quadrilateralTransformation =
            new QuadrilateralTransformation(corners, 250, 250);
        Bitmap transformed = quadrilateralTransformation.Apply(grayImage);
        log.AddImage("Transformed #" + counter, transformed);

        // 7 - otsu thresholding
        OtsuThreshold otsuThresholdFilter = new OtsuThreshold( );
        Bitmap transformedOtsu = otsuThresholdFilter.Apply(transformed);
        log.AddImage("Transformed Otsu #" + counter, transformedOtsu);

        // 8 - recognize 5x5 binary glyph
        int glyphSize = 5;
        SquareBinaryGlyphRecognizer gr = new SquareBinaryGlyphRecognizer(glyphSize);
        bool[,] glyphValues = gr.Recognize(ref transformedOtsu, new Rectangle(0, 0, 250, 250));
        // NOTE(review): in the original source this string literal was broken across
        // a physical line break ("Glyph \n lines"), which does not compile; rejoined.
        log.AddImage("Glyph lines #" + counter, transformedOtsu);

        // output recognized glyph to log
        log.AddMessage(string.Format("glyph: {0:F2}%", gr.confidence * 100));
        for (int i = 0; i < glyphSize; i++)
        {
            StringBuilder sb = new StringBuilder(" ");
            for (int j = 0; j < glyphSize; j++)
            {
                sb.Append((glyphValues[i, j]) ? "1 " : "0 ");
            }
            log.AddMessage(sb.ToString( ));
        }
        counter++;
    }
}
// Set image to display by the control public int SetImage(Bitmap image) { leftEdges.Clear(); rightEdges.Clear(); topEdges.Clear(); bottomEdges.Clear(); hulls.Clear(); quadrilaterals.Clear(); selectedBlobID = 0; this.image = Accord.Imaging.Image.Clone(image, PixelFormat.Format24bppRgb); imageWidth = this.image.Width; imageHeight = this.image.Height; blobCounter.ProcessImage(this.image); blobs = blobCounter.GetObjectsInformation(); GrahamConvexHull grahamScan = new GrahamConvexHull(); foreach (Blob blob in blobs) { List<IntPoint> leftEdge = new List<IntPoint>(); List<IntPoint> rightEdge = new List<IntPoint>(); List<IntPoint> topEdge = new List<IntPoint>(); List<IntPoint> bottomEdge = new List<IntPoint>(); // collect edge points blobCounter.GetBlobsLeftAndRightEdges(blob, out leftEdge, out rightEdge); blobCounter.GetBlobsTopAndBottomEdges(blob, out topEdge, out bottomEdge); leftEdges.Add(blob.ID, leftEdge); rightEdges.Add(blob.ID, rightEdge); topEdges.Add(blob.ID, topEdge); bottomEdges.Add(blob.ID, bottomEdge); // find convex hull List<IntPoint> edgePoints = new List<IntPoint>(); edgePoints.AddRange(leftEdge); edgePoints.AddRange(rightEdge); List<IntPoint> hull = grahamScan.FindHull(edgePoints); hulls.Add(blob.ID, hull); List<IntPoint> quadrilateral = null; // find quadrilateral if (hull.Count < 4) { quadrilateral = new List<IntPoint>(hull); } else { quadrilateral = PointsCloud.FindQuadrilateralCorners(hull); } quadrilaterals.Add(blob.ID, quadrilateral); // shift all points for vizualization IntPoint shift = new IntPoint(1, 1); PointsCloud.Shift(leftEdge, shift); PointsCloud.Shift(rightEdge, shift); PointsCloud.Shift(topEdge, shift); PointsCloud.Shift(bottomEdge, shift); PointsCloud.Shift(hull, shift); PointsCloud.Shift(quadrilateral, shift); } UpdatePosition(); Invalidate(); return blobs.Length; }
// Click handler for the "process" button: runs the image-processing
// operation selected in comboBox4 and displays the result.
// Cases: 0 = oil-painting filter, 1 = edge detection + blob/quadrilateral
// analysis, 2 = template matching between two fixed files, 3 = blob size
// filtering on a fixed file, 4 = Canny edge detection.
private void button2_Click(object sender, EventArgs e)
{
    button2.Text = "处理中"; // "processing..." - restored to "处理" ("process") at the end

    switch (comboBox4.SelectedIndex)
    {
        case 0:
        {
            // oil-painting artistic filter, applied in place to the displayed image
            Bitmap temp = (Bitmap)pictureBox1.Image;
            OilPainting filter3 = new OilPainting(10);
            // apply the filter
            filter3.ApplyInPlace(temp);
            this.pictureBox2.Image = ResizeBitmap(temp);
            break;
        }
        case 1:
        {
            // grayscale -> difference edge detection -> user-set threshold
            Bitmap temp = (Bitmap)pictureBox1.Image;
            temp = new Grayscale(0.2125, 0.7154, 0.0721).Apply(temp);
            DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
            temp = edgeDetector.Apply(temp);
            temp = new Threshold((int)numericUpDown1.Value).Apply(temp);

            // find all stand-alone blobs at least 32x32, biggest first
            BlobCounter blobCounter = new BlobCounter();
            blobCounter.MinHeight = 32;
            blobCounter.MinWidth = 32;
            blobCounter.FilterBlobs = true;
            blobCounter.ObjectsOrder = ObjectsOrder.Size;
            blobCounter.ProcessImage(temp);
            Blob[] blobs = blobCounter.GetObjectsInformation();

            SimpleShapeChecker shapeChecker = new SimpleShapeChecker();

            for (int i = 0, n = blobs.Length; i < n; i++)
            {
                List <IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
                List <IntPoint> corners;

                // does it look like a quadrilateral ?
                if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
                {
                    // show edge points on the left and on the right side
                    List <IntPoint> leftEdgePoints, rightEdgePoints;
                    blobCounter.GetBlobsLeftAndRightEdges(blobs[i],
                        out leftEdgePoints, out rightEdgePoints);
                    listBox1.DataSource = leftEdgePoints;
                    listBox2.DataSource = rightEdgePoints;
                }
            }

            // NOTE(review): this branch writes the result back to pictureBox1
            // while every other branch targets pictureBox2 - confirm intended.
            this.pictureBox1.Image = temp;
            break;
        }
        case 2:
        {
            // template matching between two fixed screenshot files
            Bitmap bt2 = new Bitmap(@"D:\TCL条码\截图01.bmp");
            Bitmap bt1 = new Bitmap(@"D:\TCL条码\截图03.bmp");
            ExhaustiveTemplateMatching tm = new ExhaustiveTemplateMatching(0.80f);
            // find matching blocks above the similarity threshold
            TemplateMatch[] matchings = tm.ProcessImage(bt1, bt2);
            BitmapData data = bt1.LockBits(
                new Rectangle(0, 0, bt1.Width, bt1.Height),
                ImageLockMode.ReadWrite, bt1.PixelFormat);
            // highlight every match with a red rectangle
            foreach (TemplateMatch m in matchings)
            {
                Drawing.Rectangle(data, m.Rectangle, Color.Red);
            }
            bt1.UnlockBits(data);
            pictureBox2.Image = bt1;
            break;
        }
        case 3:
        {
            // blob filtering by size on a fixed file
            Bitmap bt2 = new Bitmap(@"D:\TCL条码\Canny算法.png");
            AForge.Imaging.Filters.BlobsFiltering filter = new AForge.Imaging.Filters.BlobsFiltering();
            // filter condition: blob width/height within the configured bounds
            filter.CoupledSizeFiltering = true;
            filter.MaxWidth = (int)numericUpDown3.Value;
            filter.MaxHeight = (int)numericUpDown4.Value;
            filter.MinWidth = (int)numericUpDown5.Value;
            filter.MinHeight = (int)numericUpDown6.Value;
            filter.ApplyInPlace(bt2);
            pictureBox1.Image = bt2;
            // original stored this in an unused local (RESULT);
            // the call is kept in case it has side effects - confirm and drop if pure
            BitmapToBytes(bt2);
            break;
        }
        case 4:
        {
            // grayscale + Canny edge detection
            Bitmap temp = (Bitmap)pictureBox1.Image;
            temp = new Grayscale(0.2125, 0.7154, 0.0721).Apply(temp);
            AForge.Imaging.Filters.CannyEdgeDetector filter = new AForge.Imaging.Filters.CannyEdgeDetector();
            filter.ApplyInPlace(temp);
            pictureBox2.Image = temp;
            break;
        }
    }

    button2.Text = "处理";
}