public IEnumerable<Sperm> Discover(Bitmap image)
{
    var classificator = new SpermClassificator();
    var blobFinder = new BlobFinder(provider);
    var blobs = blobFinder.FindBlobsByArea(image);

    GrahamConvexHull grahamScan = new GrahamConvexHull();

    foreach (var blob in blobs)
    {
        List<IntPoint> leftEdge;
        List<IntPoint> rightEdge;
        List<IntPoint> topEdge;
        List<IntPoint> bottomEdge;

        // collect edge points
        blobFinder.BlobCounter.GetBlobsLeftAndRightEdges(blob, out leftEdge, out rightEdge);
        blobFinder.BlobCounter.GetBlobsTopAndBottomEdges(blob, out topEdge, out bottomEdge);

        // find convex hull
        List<IntPoint> edgePoints = new List<IntPoint>();
        edgePoints.AddRange(leftEdge);
        edgePoints.AddRange(rightEdge);

        List<IntPoint> hull = grahamScan.FindHull(edgePoints);
        var points = hull.Select(x => new Point(x.X, x.Y)).ToList();
        var center = new Point(blob.CenterOfGravity.X, blob.CenterOfGravity.Y);

        blobFinder.BlobCounter.ExtractBlobsImage(image, blob, false);
        var blobBitmap = blob.Image.ToManagedImage(true);
        var spermType = classificator.Classify(blobBitmap);

        yield return new Sperm(points, center, spermType);
    }
}
public void FindTest()
{
    List<IntPoint> contour = new List<IntPoint>();

    int max = 100;
    for (int i = 0; i < max; i++) add(contour, i, max);
    for (int i = 0; i < max; i++) add(contour, max, i);
    for (int i = 0; i < max; i++) add(contour, 0, i);
    for (int i = 0; i < max / 2; i++) add(contour, i, i);
    for (int i = 0; i < max / 2; i++) add(contour, i + max / 2, max / 2 - i);

    PointsMarker marker = new PointsMarker(contour);
    var bitmap = AForge.Imaging.Image.CreateGrayscaleImage(max + 1, max + 1);
    bitmap = marker.Apply(bitmap);
    // Accord.Controls.ImageBox.Show(bitmap);

    GrahamConvexHull graham = new GrahamConvexHull();
    List<IntPoint> hull = graham.FindHull(contour);

    ConvexHullDefects hullDefects = new ConvexHullDefects(10);
    List<ConvexityDefect> defects = hullDefects.FindDefects(contour, hull);

    Assert.AreEqual(1, defects.Count);
    Assert.AreEqual(99, defects[0].Depth);
}
private void convexHullBlov(Bitmap saus)
{
    // http://www.aforgenet.com/framework/features/blobs_processing.html
    Bitmap olahData = (Bitmap)saus.Clone();

    BlobCounter blCount = new BlobCounter();
    blCount.ProcessImage(saus);
    Blob[] blobs = blCount.GetObjectsInformation();

    GrahamConvexHull hullFinder = new GrahamConvexHull();

    BitmapData data = olahData.LockBits(
        new Rectangle(0, 0, olahData.Width, olahData.Height),
        ImageLockMode.ReadWrite, olahData.PixelFormat);

    foreach (Blob blob in blobs)
    {
        List<IntPoint> leftPoints, rightPoints;
        List<IntPoint> edgePoints = new List<IntPoint>();

        // get blob's edge points
        blCount.GetBlobsLeftAndRightEdges(blob, out leftPoints, out rightPoints);
        edgePoints.AddRange(leftPoints);
        edgePoints.AddRange(rightPoints);

        // blob's convex hull
        List<IntPoint> hull = hullFinder.FindHull(edgePoints);
        Drawing.Polygon(data, hull, Color.Red);
    }

    olahData.UnlockBits(data);
    pictureBox2.Image = olahData;
}
// Set monochromeImage to display by the control
public int ScanImage(Bitmap monochromeImage)
{
    this.hulls.Clear();
    this.image = monochromeImage;

    this.blobCounter.ProcessImage(this.image);
    this.blobs = this.blobCounter.GetObjectsInformation();

    var grahamScan = new GrahamConvexHull();

    foreach (var blob in this.GetBlobs())
    {
        List<IntPoint> leftEdge;
        List<IntPoint> rightEdge;
        List<IntPoint> topEdge;
        List<IntPoint> bottomEdge;

        // collect edge points
        this.blobCounter.GetBlobsLeftAndRightEdges(blob, out leftEdge, out rightEdge);
        this.blobCounter.GetBlobsTopAndBottomEdges(blob, out topEdge, out bottomEdge);

        // find convex hull
        var edgePoints = new List<IntPoint>();
        edgePoints.AddRange(leftEdge);
        edgePoints.AddRange(rightEdge);

        List<IntPoint> hull = grahamScan.FindHull(edgePoints);
        this.hulls.Add(blob.ID, hull);
    }

    return this.blobs.Length;
}
private List<IntPoint> FindCorners(Bitmap bitmap)
{
    List<IntPoint> corners = new List<IntPoint>();

    using (var clone = bitmap.Clone() as Bitmap)
    {
        new EuclideanColorFiltering(
            new AForge.Imaging.RGB((byte)Red, (byte)Green, (byte)Blue), Radius).ApplyInPlace(clone);

        using (var grayscaledBitmap = Grayscale.CommonAlgorithms.BT709.Apply(clone))
        {
            //new Threshold(Threshold).ApplyInPlace(grayscaledBitmap);
            if (Inverted)
            {
                new Invert().ApplyInPlace(grayscaledBitmap);
            }

            BlobCounter blobCounter = new BlobCounter();
            blobCounter.FilterBlobs = true;
            blobCounter.MinWidth = 50;
            blobCounter.MinHeight = 50;
            blobCounter.ObjectsOrder = ObjectsOrder.Size;
            blobCounter.ProcessImage(grayscaledBitmap);
            Blob[] blobs = blobCounter.GetObjectsInformation();

            GrahamConvexHull hullFinder = new GrahamConvexHull();

            for (int i = 0, n = blobs.Length; i < n; i++)
            {
                List<IntPoint> leftPoints, rightPoints;
                List<IntPoint> edgePoints = new List<IntPoint>();

                blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftPoints, out rightPoints);
                edgePoints.AddRange(leftPoints);
                edgePoints.AddRange(rightPoints);

                corners = hullFinder.FindHull(edgePoints);
            }
        }
    }

    return corners;
}
public void FindHullTest()
{
    var grahamHull = new GrahamConvexHull();

    for (int i = 0, n = pointsLists.Count; i < n; i++)
    {
        ComparePointsLists(grahamHull.FindHull(pointsLists[i]), expectedHulls[i]);
    }
}
/// <summary>
/// Analysis button click event
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void btnAnalysis_Click(object sender, EventArgs e)
{
    if (sqPoints.Count() <= 0)
    {
        transfer();
    }

    if (sqPoints.Count() > 2)
    {
        // convex hull library
        IConvexHullAlgorithm hullFinder = new GrahamConvexHull();
        resultPoints = hullFinder.FindHull(sqPoints);

        IntPoint[] ip = resultPoints.ToArray();
        PointF[] pointf = new PointF[ip.Count<IntPoint>()];
        for (int i = 0; i < ip.Length; i++)
        {
            pointf[i].X = ip[i].X;
            pointf[i].Y = ip[i].Y;
        }

        for (int i = 0; i < resultPoints.Count; i++)
        {
            for (int j = 0; j < sqPoints.Count; j++)
            {
                if (sqPoints[j] == resultPoints[i])
                {
                    listIndex.Add(j);
                }
            }
        }

        // measure the perimeter of the convex hull
        double round = getRound(ip);
        int x = panel1.Size.Width / 2;
        int y = panel1.Size.Height / 2;
        System.Drawing.Point p = new System.Drawing.Point(x, y);
        lblRound.Location = p;
        lblRound.Visible = true;
        lblRound.Text = round.ToString("##.##");

        foreach (int t in listIndex)
        {
            S3.Points.AddXY(cirPoints[t].X, cirPoints[t].Y);
        }

        S1.Points.Clear();
        S2.Points.Clear();
        S4.Points.Clear();
        S3.Points.AddXY(cirPoints[listIndex[0]].X, cirPoints[listIndex[0]].Y);
    }
    else
    {
        MessageBox.Show("데이터가 부족합니다.", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
public void blobcounter_test()
{
    string basePath = Path.Combine(NUnit.Framework.TestContext.CurrentContext.TestDirectory, "Resources");

    #region doc_process
    // Load an example image containing blobs (such as the sample from the Blob Detection sample applications)
    // https://github.com/accord-net/framework/raw/development/Samples/Imaging/Detection%20(Blobs)/demo.png
    Bitmap image = Accord.Imaging.Image.FromFile(Path.Combine(basePath, "blob-input.png"));

    // Create a new blob counter object
    var blobCounter = new BlobCounter();

    // Process the image looking for blobs
    blobCounter.ProcessImage(image);

    // Get information about all the image blobs found:
    Blob[] blobs = blobCounter.GetObjectsInformation();

    // Prepare to extract their convex hull
    var grahamScan = new GrahamConvexHull();
    var colors = new ColorSequenceCollection();

    // For each blob in the image
    for (int i = 0; i < blobs.Length; i++)
    {
        // Get the blob
        Blob blob = blobs[i];

        // Collect edge points
        List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blob);

        // Find convex hull
        List<IntPoint> hull = grahamScan.FindHull(edgePoints);

        // Prepare to mark the hull in the image
        var marker = new PointsMarker(colors[i])
        {
            Points = hull,
            Connect = true // connect the points with line segments
        };

        // Draw the hull lines
        marker.ApplyInPlace(image);
    }

    // Save the image to disk
    image.Save(Path.Combine(basePath, "test.png"));
    #endregion

    Assert.AreEqual(25, blobs.Length);
}
private List<IntPoint> getHull(Blob blob, Bitmap grayImage)
{
    GrahamConvexHull hullFinder = new GrahamConvexHull();
    List<IntPoint> leftPoints, rightPoints, edgePoints = new List<IntPoint>();
    BlobCounter blobCounter = getBlobCounter(grayImage);

    // get blob's edge points
    blobCounter.GetBlobsLeftAndRightEdges(blob, out leftPoints, out rightPoints);
    edgePoints.AddRange(leftPoints);
    edgePoints.AddRange(rightPoints);

    // blob's convex hull
    return hullFinder.FindHull(edgePoints);
}
public List<IntPoint> FindConvexHull(double value)
{
    var points = new List<IntPoint>();

    for (int i = 0; i < ActualImageSize; i++)
    {
        for (int j = 0; j < ActualImageSize; j++)
        {
            if (_newTopo[i, j] < value)
            {
                points.Add(new IntPoint(i, j));
            }
        }
    }

    IConvexHullAlgorithm hullFinder = new GrahamConvexHull();
    return hullFinder.FindHull(points);
}
public void FindDefectsTest()
{
    Bitmap bmp = Properties.Resources.hand;

    Bitmap gray = AForge.Imaging.Filters.Grayscale.CommonAlgorithms.BT709.Apply(bmp);

    BlobCounter bc = new BlobCounter(gray);
    bc.ObjectsOrder = ObjectsOrder.Size;
    Blob[] blobs = bc.GetObjectsInformation();
    bc.ExtractBlobsImage(bmp, blobs[0], true);
    Bitmap blob = blobs[0].Image.ToManagedImage();

    BorderFollowing bf = new BorderFollowing();
    List<IntPoint> contour = bf.FindContour(blob);

    GrahamConvexHull graham = new GrahamConvexHull();
    List<IntPoint> hull = graham.FindHull(contour);

    ConvexHullDefects hullDefects = new ConvexHullDefects(10);
    List<ConvexityDefect> defects = hullDefects.FindDefects(contour, hull);

    /*
    PointsMarker marker = new PointsMarker(hull, Color.Green, 10);
    marker.ApplyInPlace(blob);
    ImageBox.Show(blob);
    */

    Assert.AreEqual(2, defects.Count);

    Assert.AreEqual(new IntPoint(130, 10), contour[defects[0].Start]);
    Assert.AreEqual(new IntPoint(93, 109), contour[defects[0].Point]);
    Assert.AreEqual(new IntPoint(64, 9), contour[defects[0].End]);
    Assert.AreEqual(99.549179077148438, defects[0].Depth, 1e-5);
    Assert.IsFalse(double.IsNaN(defects[0].Depth));
    // Assert.AreEqual(9912.9531239366424, defects[0].Area);

    Assert.AreEqual(new IntPoint(49, 18), contour[defects[1].Start]);
    Assert.AreEqual(new IntPoint(61, 106), contour[defects[1].Point]);
    Assert.AreEqual(new IntPoint(18, 127), contour[defects[1].End]);
    Assert.AreEqual(35.615153852366504, defects[1].Depth, 1e-5);
    Assert.IsFalse(double.IsNaN(defects[1].Depth));
    // Assert.AreEqual(2293.7535682510002, defects[1].Area);
}
public Bitmap ProcessFrame(Bitmap inputBitmap, int x, int y)
{
    // Create an image for AForge to process
    Bitmap workingImage = new Bitmap(inputBitmap.Width, inputBitmap.Height);
    workingImage = AForge.Imaging.Image.Clone(inputBitmap, PixelFormat.Format24bppRgb);

    // Create a mask for ROI selection
    Rectangle roi = new Rectangle(x - 30, y - 30, 80, 80);
    Crop roicrop = new Crop(roi);
    Bitmap outimage = roicrop.Apply(workingImage);

    BlobCounter blobCounter = new BlobCounter();
    blobCounter.ObjectsOrder = ObjectsOrder.Area;
    Blob[] blobs;

    // Find the blobs
    blobCounter.ProcessImage(outimage);
    blobs = blobCounter.GetObjectsInformation();

    List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[0]);
    GrahamConvexHull grahamScan = new GrahamConvexHull();
    List<IntPoint> hullPoints = grahamScan.FindHull(edgePoints);

    Graphics g = Graphics.FromImage(outimage);
    Pen redPen = new Pen(Color.Red, 2);
    g.DrawPolygon(redPen, ToPointsArray(hullPoints));

    //g.Clear(Color.Black);
    //g.DrawImage(handImage, x, y);
    //g.DrawRectangle(redPen, roi);
    //g.DrawEllipse(redPen, x, y, 20, 20);

    ResizeNearestNeighbor resizeFilter = new ResizeNearestNeighbor(160, 160);
    Bitmap resizedImage = resizeFilter.Apply(outimage);

    return resizedImage;
}
// Set monochromeImage to display by the control
public IList<Polygon> ScanImage(Bitmap monochromeImage)
{
    this.hulls.Clear();
    this.image = monochromeImage;

    this.blobCounter.ProcessImage(this.image);
    this.blobs = this.blobCounter.GetObjectsInformation();

    var grahamScan = new GrahamConvexHull();
    var polygons = new List<Polygon>();

    foreach (var blob in this.GetBlobs())
    {
        List<IntPoint> leftEdge;
        List<IntPoint> rightEdge;

        // collect edge points
        this.blobCounter.GetBlobsLeftAndRightEdges(blob, out leftEdge, out rightEdge);

        // find convex hull
        var edgePoints = new List<IntPoint>();
        edgePoints.AddRange(leftEdge);
        edgePoints.AddRange(rightEdge);

        var hull = grahamScan.FindHull(edgePoints);
        this.hulls.Add(blob.ID, hull);

        var minX = edgePoints.Min(x => x.X);
        var minY = edgePoints.Min(x => x.Y);
        var maxY = edgePoints.Max(x => x.Y);
        var maxX = edgePoints.Max(x => x.X);

        polygons.Add(new Polygon(new Point(minX, minY), new Point(maxX, maxY)));
    }

    return polygons;
}
private void convexHull() // Convex Hull
{
    // Declare image
    Bitmap bmp = new Bitmap(pictureBox1.Image);

    // process image with blob counter
    BlobCounter blobCounter = new BlobCounter();
    blobCounter.ProcessImage(bmp);
    Blob[] blobs = blobCounter.GetObjectsInformation();

    // create convex hull searching algorithm
    GrahamConvexHull hullFinder = new GrahamConvexHull();

    // lock image to draw on it
    BitmapData data = bmp.LockBits(
        new Rectangle(0, 0, bmp.Width, bmp.Height),
        ImageLockMode.ReadWrite, bmp.PixelFormat);

    // process each blob
    foreach (Blob blob in blobs)
    {
        List<IntPoint> leftPoints, rightPoints, edgePoints;
        edgePoints = new List<IntPoint>();

        // get blob's edge points
        blobCounter.GetBlobsLeftAndRightEdges(blob, out leftPoints, out rightPoints);
        edgePoints.AddRange(leftPoints);
        edgePoints.AddRange(rightPoints);

        // blob's convex hull
        List<IntPoint> hull = hullFinder.FindHull(edgePoints);
        Drawing.Polygon(data, hull, Color.Red);
    }

    bmp.UnlockBits(data);
    pictureBox1.Image = bmp;
}
private void button1_Click(object sender, EventArgs e)
{
    pointNum = Convert.ToInt16(textBox1.Text);

    Graphics graph = Graphics.FromImage(drawArea);
    graph.Clear(Color.White);

    List<IntPoint> list = new List<IntPoint>();
    Random rand = new Random();
    for (int i = 0; i < pointNum; i++)
    {
        list.Add(new IntPoint(rand.Next(200), rand.Next(200)));
    }

    foreach (var point in list)
    {
        graph.FillEllipse(Brushes.Black, point.X, point.Y, 10, 10);
    }

    pictureBox1.Image = drawArea;
    graph.Dispose();

    IConvexHullAlgorithm hullFinder = new GrahamConvexHull();
    List<IntPoint> hulls = hullFinder.FindHull(list);

    Bitmap bmp = new Bitmap(drawArea);
    Graphics graph2 = Graphics.FromImage(bmp);
    Pen pen = new Pen(Color.Lime, 5);

    for (int i = 0; i < hulls.Count - 1; i++)
    {
        graph2.DrawLine(pen,
            hulls.ElementAt(i).X, hulls.ElementAt(i).Y,
            hulls.ElementAt(i + 1).X, hulls.ElementAt(i + 1).Y);
    }

    graph2.DrawLine(pen,
        hulls.ElementAt(0).X, hulls.ElementAt(0).Y,
        hulls.ElementAt(hulls.Count - 1).X, hulls.ElementAt(hulls.Count - 1).Y);

    pictureBox2.Image = bmp;
}
private void extractConvexHull()
{
    GrahamConvexHull hullFinder = new GrahamConvexHull();

    // process each blob
    hulls = new List<Polygon>();
    foreach (Blob blob in blobs)
    {
        List<IntPoint> leftPoints, rightPoints, edgePoints;
        edgePoints = new List<IntPoint>();

        // get blob's edge points
        BlobCounter.GetBlobsLeftAndRightEdges(blob, out leftPoints, out rightPoints);
        edgePoints.AddRange(leftPoints);
        edgePoints.AddRange(rightPoints);

        // blob's convex hull
        List<IntPoint> hull = hullFinder.FindHull(edgePoints);
        hulls.Add(new Polygon(hull));
    }
}
private void btn_search_Click(object sender, EventArgs e)
{
    //bright = 0;
    //value = 10;
    openFileDialog1.Title = "영상파일 열기";
    openFileDialog1.Filter = "All Files(*.*)|*.*| Bitmap File(*.bmp)|*.bmp|GIF File(*.gif)|*.gif|JPEG File(*.jpg)|*.jpg|PNG file(*.png)|*.png|TIFF(*.tif)|*.tif";

    if (openFileDialog1.ShowDialog() == DialogResult.OK)
    {
        openstrFilename = openFileDialog1.FileName;
        image = System.Drawing.Image.FromFile(openstrFilename);
        myBitmap = new Bitmap(image);
        this.xray_preview.Image = myBitmap; // PictureBox holding the original image
    }

    // delete the temporary file
    string filePath = @"Image\temp.jpg";
    FileInfo file = new FileInfo(filePath);
    if (file.Exists) { file.Delete(); }

    /* Extract the palm outline */
    Bitmap CroppedImage = myBitmap.Clone(new System.Drawing.Rectangle(0, 100, myBitmap.Width, (myBitmap.Height - 100)), myBitmap.PixelFormat); // palm
    int width = 600;
    int height = 600;
    Size resize = new Size(width, height);
    resizeImage = new Bitmap(CroppedImage, resize);

    gsImage = Grayscale.CommonAlgorithms.BT709.Apply(resizeImage);
    filter = new CannyEdgeDetector();
    edge = filter.Apply(gsImage);

    // blur the outline
    Blur hfilter = new Blur();
    // apply the filter
    hfilter.ApplyInPlace(edge);

    ///////////////////////////
    // process image with blob counter
    BlobCounter hblobCounter = new BlobCounter();
    hblobCounter.ProcessImage(edge);
    Blob[] hblobs = hblobCounter.GetObjectsInformation();

    // create convex hull searching algorithm
    GrahamConvexHull hhullFinder = new GrahamConvexHull();

    // lock image to draw on it
    BitmapData hdata = edge.LockBits(new Rectangle(0, 0, edge.Width, edge.Height), ImageLockMode.ReadWrite, edge.PixelFormat);

    // process each blob
    List<IntPoint> hhull = new List<IntPoint>();
    List<IntPoint> hedgePoints = new List<IntPoint>();
    int hblobcount = 0;
    int hminX = 0, hmaxX = 700, hminY = 0, hmaxY = 700;

    foreach (Blob blob in hblobs)
    {
        List<IntPoint> leftPoints, rightPoints;

        // get blob's edge points
        hblobCounter.GetBlobsLeftAndRightEdges(blob, out leftPoints, out rightPoints);
        hedgePoints.AddRange(leftPoints);
        hedgePoints.AddRange(rightPoints);

        // blob's convex hull
        hhull = hhullFinder.FindHull(hedgePoints);

        foreach (IntPoint hulls in hhull)
        {
            // find the outer bounding box of the convex hull
            if (hblobcount == 0) { hminX = hulls.X; hmaxX = hulls.X; hminY = hulls.Y; hmaxY = hulls.Y; }
            if (hminX > hulls.X) { hminX = hulls.X; } else if (hmaxX < hulls.X) { hmaxX = hulls.X; }
            if (hminY > hulls.Y) { hminY = hulls.Y; } else if (hmaxY < hulls.Y) { hmaxY = hulls.Y; }
            hblobcount++;
        }

        Drawing.Polygon(hdata, hhull, Color.White);
    }

    edge = edge.Clone(new Rectangle(hminX, hminY, hmaxX - hminX, hmaxY - hminY), myBitmap.PixelFormat);
    this.xray_preview.Image = edge;

    ///////////////////////////////////
    // Segmentation of the hand and finger bones and feature extraction
    // Wrist: radius and ulna (2 regions)
    // Palm: 1st, 3rd and 5th metacarpals (3 regions)
    // Fingers: proximal and distal phalanges of the 1st, 3rd and 5th fingers (6 regions)
    // Fingers: middle phalanges of the 3rd and 5th fingers (2 regions)
    ///////////////////////////////

    // Radius extraction and recognition algorithm
    CroppedImage1 = myBitmap.Clone(new System.Drawing.Rectangle(270, 620, 250, 180), myBitmap.PixelFormat); //1. Radius

    // normalize the image size: radius crop image 250 x 180 -> 125 x 125
    CroppedImage1 = CroppedImage1.Clone(new System.Drawing.Rectangle(10, 0, 230, 150), myBitmap.PixelFormat);
    width = 125;
    height = 125;
    resize = new Size(width, height);
    resizeImage = new Bitmap(CroppedImage1, resize);

    // preprocessing and feature extraction routine
    //DetectCorners(CroppedImage1);
    gsImage = Grayscale.CommonAlgorithms.BT709.Apply(resizeImage);
    filter = new CannyEdgeDetector();
    edge = filter.Apply(gsImage);

    // blur the outline
    Blur Bfilter = new Blur();
    // apply the filter
    Bfilter.ApplyInPlace(edge);

    ///////////////////////////
    // process image with blob counter
    BlobCounter blobCounter = new BlobCounter();
    blobCounter.ProcessImage(edge);
    Blob[] blobs = blobCounter.GetObjectsInformation();

    // create convex hull searching algorithm
    GrahamConvexHull hullFinder = new GrahamConvexHull();

    // lock image to draw on it
    BitmapData data = edge.LockBits(new Rectangle(0, 0, edge.Width, edge.Height), ImageLockMode.ReadWrite, edge.PixelFormat);

    // process each blob
    List<IntPoint> hull = new List<IntPoint>();
    List<IntPoint> edgePoints = new List<IntPoint>();
    int blobcount = 0;
    int minX = 0, maxX = 125, minY = 0, maxY = 125;

    foreach (Blob blob in blobs)
    {
        List<IntPoint> leftPoints, rightPoints;

        // get blob's edge points
        blobCounter.GetBlobsLeftAndRightEdges(blob, out leftPoints, out rightPoints);
        edgePoints.AddRange(leftPoints);
        edgePoints.AddRange(rightPoints);

        // blob's convex hull
        hull = hullFinder.FindHull(edgePoints);

        foreach (IntPoint hulls in hull) // find the outer bounding box of the convex hull
        {
            if (blobcount == 0) { minX = hulls.X; maxX = hulls.X; minY = hulls.Y; maxY = hulls.Y; }
            if (minX > hulls.X) { minX = hulls.X; } else if (maxX < hulls.X) { maxX = hulls.X; }
            if (minY > hulls.Y) { minY = hulls.Y; } else if (maxY < hulls.Y) { maxY = hulls.Y; }
            blobcount++;
        }

        Drawing.Polygon(data, hull, Color.White);
    }

    edge = resizeImage.Clone(new System.Drawing.Rectangle(minX, minY, maxX - minX, maxY - minY), myBitmap.PixelFormat);
    CroppedImage1.Save(@"Image\temp.jpg", ImageFormat.Jpeg);
    this.pB_radius.Image = edge;

    /////////////////////////////////////////////////////////////
    // Ulna
    //////////////
    CroppedImage2 = myBitmap.Clone(new System.Drawing.Rectangle(133, 620, 200, 180), myBitmap.PixelFormat); //2. Ulna

    // normalize the image size: crop image -> 125 x 125
    CroppedImage2 = CroppedImage2.Clone(new System.Drawing.Rectangle(0, 20, 200, 150), myBitmap.PixelFormat);
    width = 125;
    height = 125;
    resize = new Size(width, height);
    resizeImage = new Bitmap(CroppedImage2, resize);

    // preprocessing and feature extraction routine
    //DetectCorners(CroppedImage1);
    gsImage = Grayscale.CommonAlgorithms.BT709.Apply(resizeImage);
    filter = new CannyEdgeDetector();
    edge = filter.Apply(gsImage);

    // blur the outline
    Bfilter = new Blur();
    // apply the filter
    Bfilter.ApplyInPlace(edge);

    ///////////////////////////
    // process image with blob counter
    blobCounter = new BlobCounter();
    blobCounter.ProcessImage(edge);
    blobs = blobCounter.GetObjectsInformation();

    // create convex hull searching algorithm
    hullFinder = new GrahamConvexHull();

    // lock image to draw on it
    BitmapData data1 = edge.LockBits(new Rectangle(0, 0, edge.Width, edge.Height), ImageLockMode.ReadWrite, edge.PixelFormat);

    // process each blob
    hull = new List<IntPoint>();
    edgePoints = new List<IntPoint>();
    blobcount = 0;
    minX = 0; maxX = 125; minY = 0; maxY = 125;

    foreach (Blob blob in blobs)
    {
        List<IntPoint> leftPoints, rightPoints;

        // get blob's edge points
        blobCounter.GetBlobsLeftAndRightEdges(blob, out leftPoints, out rightPoints);
        edgePoints.AddRange(leftPoints);
        edgePoints.AddRange(rightPoints);

        // blob's convex hull
        hull = hullFinder.FindHull(edgePoints);

        foreach (IntPoint hulls in hull)
        {
            // find the outer bounding box of the convex hull
            if (blobcount == 0) { minX = hulls.X; maxX = hulls.X; minY = hulls.Y; maxY = hulls.Y; }
            if (minX > hulls.X) { minX = hulls.X; } else if (maxX < hulls.X) { maxX = hulls.X; }
            if (minY > hulls.Y) { minY = hulls.Y; } else if (maxY < hulls.Y) { maxY = hulls.Y; }
            blobcount++;
        }

        Drawing.Polygon(data1, hull, Color.White);
    }

    Bitmap edge1 = resizeImage.Clone(new System.Drawing.Rectangle(minX, minY, (maxX - minX), (maxY - minY)), myBitmap.PixelFormat);
    this.pB_ulna.Image = edge1;

    ///////////////////////////////////////
    CroppedImage3 = myBitmap.Clone(new System.Drawing.Rectangle(390, 500, 180, 180), myBitmap.PixelFormat); //3. 1st metacarpal
    resizeImage = new Bitmap(CroppedImage3, resize);
    this.pB_Met1.Image = CroppedImage3;

    CroppedImage4 = myBitmap.Clone(new System.Drawing.Rectangle(266, 266, 180, 180), myBitmap.PixelFormat); //4. 3rd proximal/middle phalanx
    resizeImage = new Bitmap(CroppedImage4, resize);
    this.pB_Met3.Image = CroppedImage4;

    CroppedImage5 = myBitmap.Clone(new System.Drawing.Rectangle(75, 335, 180, 180), myBitmap.PixelFormat); //5. 5th metacarpal/proximal phalanx
    resizeImage = new Bitmap(CroppedImage5, resize);
    this.pB_Met5.Image = CroppedImage5;

    CroppedImage6 = myBitmap.Clone(new System.Drawing.Rectangle(534, 410, 180, 180), myBitmap.PixelFormat); //6. 1st proximal phalanx
    resizeImage = new Bitmap(CroppedImage6, resize);
    this.pB_Pph1.Image = CroppedImage6;

    CroppedImage7 = myBitmap.Clone(new System.Drawing.Rectangle(266, 266, 180, 180), myBitmap.PixelFormat); //7. 3rd proximal/middle phalanx
    resizeImage = new Bitmap(CroppedImage7, resize);
    this.pB_Pph3.Image = CroppedImage7;

    CroppedImage8 = myBitmap.Clone(new System.Drawing.Rectangle(75, 335, 180, 180), myBitmap.PixelFormat); //8. 5th metacarpal/proximal phalanx
    resizeImage = new Bitmap(CroppedImage8, resize);
    this.pB_Pph5.Image = CroppedImage8;

    CroppedImage9 = myBitmap.Clone(new System.Drawing.Rectangle(260, 110, 180, 180), myBitmap.PixelFormat); //9. 3rd middle phalanx
    resizeImage = new Bitmap(CroppedImage9, resize);
    this.pB_Mph3.Image = CroppedImage9;

    CroppedImage10 = myBitmap.Clone(new System.Drawing.Rectangle(0, 250, 180, 180), myBitmap.PixelFormat); //10. 5th middle phalanx
    resizeImage = new Bitmap(CroppedImage10, resize);
    this.pB_Mph5.Image = CroppedImage10;

    CroppedImage11 = myBitmap.Clone(new System.Drawing.Rectangle(620, 320, 180, 180), myBitmap.PixelFormat); //11. 1st distal phalanx
    resizeImage = new Bitmap(CroppedImage11, resize);
    this.pB_Dph1.Image = CroppedImage11;

    CroppedImage12 = myBitmap.Clone(new System.Drawing.Rectangle(260, 0, 180, 180), myBitmap.PixelFormat); //12. 3rd distal phalanx
    resizeImage = new Bitmap(CroppedImage12, resize);
    this.pB_Dph3.Image = CroppedImage12;

    CroppedImage13 = myBitmap.Clone(new System.Drawing.Rectangle(0, 133, 180, 180), myBitmap.PixelFormat); //13. 5th distal phalanx
    resizeImage = new Bitmap(CroppedImage13, resize);
    this.pB_Dph5.Image = CroppedImage13;

    //edge.UnlockBits(data1);
    //edge.UnlockBits(data);
}
static void Main(string[] args)
{
    Threshold thresh = new Threshold(10);
    Median median = new Median(9);
    Erosion3x3 erode = new Erosion3x3();
    Dilatation3x3 dilate = new Dilatation3x3();
    GrahamConvexHull hullFinder = new GrahamConvexHull();
    ConnectedComponentsLabeling ccLabeler = new ConnectedComponentsLabeling();
    BorderFollowing contourFinder = new BorderFollowing();
    GrayscaleToRGB rgb = new GrayscaleToRGB();
    ConvexHullDefects defectFinder = new ConvexHullDefects(10);

    Bitmap img = (Bitmap)Bitmap.FromFile("hand3.jpg");

    Bitmap image = Grayscale.CommonAlgorithms.BT709.Apply(img);
    thresh.ApplyInPlace(image);
    //median.ApplyInPlace(image);
    erode.ApplyInPlace(image);
    dilate.ApplyInPlace(image);

    BlobCounter counter = new BlobCounter(image);
    counter.ObjectsOrder = ObjectsOrder.Area;

    Blob[] blobs = counter.GetObjectsInformation();

    if (blobs.Length > 0)
    {
        counter.ExtractBlobsImage(image, blobs[0], true);
        UnmanagedImage hand = blobs[0].Image;

        var contour = contourFinder.FindContour(hand);

        if (contour.Count() > 0)
        {
            var initialHull = hullFinder.FindHull(contour);
            var defects = defectFinder.FindDefects(contour, initialHull);
            var filteredHull = initialHull.ClusterHullPoints().FilterLinearHullPoints();
            var palmCenter = defects.Centroid(contour);
            var wristPoints = filteredHull.SelectWristPoints(defects, contour);

            Bitmap color = rgb.Apply(hand).ToManagedImage();
            //BitmapData data = color.LockBits(new Rectangle(0, 0, color.Width, color.Height), ImageLockMode.ReadWrite, color.PixelFormat);
            //Drawing.Polyline(data, contour, Color.Blue);
            //Drawing.Polygon(data, filteredHull, Color.Red);
            //color.UnlockBits(data);

            Graphics gr = Graphics.FromImage(color);
            gr.DrawPolygon(new Pen(Brushes.Red, 3), filteredHull.ToPtArray());
            gr.DrawLines(new Pen(Brushes.Blue, 3), contour.ToPtArray());
            gr.DrawEllipse(new Pen(Brushes.Red, 3), palmCenter.X - 10, palmCenter.Y - 10, 20, 20);

            foreach (ConvexityDefect defect in defects)
            {
                gr.DrawEllipse(new Pen(Brushes.Green, 6), contour[defect.Point].X - 10, contour[defect.Point].Y - 10, 20, 20);
            }

            foreach (AForge.IntPoint pt in filteredHull)
            {
                gr.DrawEllipse(new Pen(Brushes.Yellow, 6), pt.X - 10, pt.Y - 10, 20, 20);
            }

            foreach (AForge.IntPoint pt in wristPoints)
            {
                gr.DrawEllipse(new Pen(Brushes.PowderBlue, 6), pt.X - 10, pt.Y - 10, 20, 20);
            }

            ImageBox.Show(color);
        }
    }
}
private void process_Click(object sender, EventArgs e)
{
    // grayscale
    Grayscale filter1 = new Grayscale(0.2125, 0.7154, 0.0721);
    processed = filter1.Apply(processed);

    // threshold
    var filter2 = new AForge.Imaging.Filters.Threshold(175);
    processed = filter2.Apply(processed);

    // erosion
    Erosion filter3 = new Erosion();
    filter3.Apply(processed);

    // create filter
    BlobsFiltering filter = new BlobsFiltering();
    // configure filter
    filter.CoupledSizeFiltering = true;
    filter.MinWidth = 25;
    filter.MinHeight = 25;
    // apply the filter
    filter.ApplyInPlace(processed);

    Invert filterInvert = new Invert();
    // apply the filter
    filterInvert.ApplyInPlace(processed);

    BlobCounterBase bc = new BlobCounter();
    bc.FilterBlobs = true;
    bc.MinWidth = 30;  // give required value or ignore
    bc.MinHeight = 30; // give required value or ignore
    bc.CoupledSizeFiltering = true; // set to true if both width and height should be applied as constraints when identifying blobs
    bc.ProcessImage(processed);
    Blob[] blobs = bc.GetObjectsInformation();
    int count = bc.ObjectsCount;

    // lock image to draw on it
    BitmapData data = processed.LockBits(
        new Rectangle(0, 0, processed.Width, processed.Height),
        ImageLockMode.ReadWrite, processed.PixelFormat);

    // process each blob
    foreach (Blob blob in blobs)
    {
        List<IntPoint> leftPoints, rightPoints, edgePoints;
        edgePoints = new List<IntPoint>();

        // get blob's edge points
        bc.GetBlobsLeftAndRightEdges(blob, out leftPoints, out rightPoints);
        edgePoints.AddRange(leftPoints);
        edgePoints.AddRange(rightPoints);

        IConvexHullAlgorithm hullFinder = new GrahamConvexHull();

        // blob's convex hull
        List<IntPoint> hull = hullFinder.FindHull(edgePoints);
        Drawing.Polygon(data, hull, Color.Yellow);

        // classify the coin by blob area
        if (blob.Area < 8000)
        {
            five_cents++;
        }
        else if (blob.Area < 9000 && blob.Area > 8000)
        {
            ten_cents++;
        }
        else if (blob.Area < 13000 && blob.Area > 11000)
        {
            twentyfive_cents++;
        }
        else if (blob.Area < 17000 && blob.Area > 16000)
        {
            one_peso++;
        }
        else
        {
            five_peso++;
        }
    }

    processed.UnlockBits(data);
    pictureBox2.Image = processed;
    pictureBox2.SizeMode = PictureBoxSizeMode.StretchImage;
    textBox1.Text += count;
    textBox2.Text += ((five_cents * .05) + (ten_cents * .10) + (twentyfive_cents * .25) + (one_peso * 1) + (five_peso * 5));
}
private void PatternRecognition(Bitmap bitmap)
{
    // Step 1 - grayscale the original image
    Bitmap frame = grayscaleFilter.Apply(bitmap);
    BitmapData frameData = frame.LockBits(new Rectangle(0, 0, frame.Width, frame.Height), ImageLockMode.ReadWrite, frame.PixelFormat);

    // Step 2 - edge detection using the Sobel filter
    sobelFilter.ApplyInPlace(frameData);

    // Step 3 - convert the image to black and white with threshold = 100 (0-155 is treated as black, 156-255 as white)
    thresholdFilter.ApplyInPlace(frameData);

    // Step 4 - dilation / reinforce the white pixels
    dilitationFilter.ApplyInPlace(frameData);

    // Step 5 - create the binary image
    frame = frame.Clone(new Rectangle(0, 0, frame.Width, frame.Height), PixelFormat.Format8bppIndexed);

    // Step 6 - find candidate shapes in the image
    blobCounter.ProcessImage(frameData);
    Blob[] blobs = blobCounter.GetObjectsInformation();

    // for drawing on the original image
    Graphics g = Graphics.FromImage(bitmap);

    // Step 7 - check each shape
    foreach (Blob blob in blobs)
    {
        List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blob);
        List<IntPoint> hullPoints = hullFinder.FindHull(edgePoints);
        List<IntPoint> corners = null;

        // is it a quadrilateral?
        if (shapeChecker.IsQuadrilateral(hullPoints, out corners))
        {
            // is it a square?
            if (shapeChecker.CheckPolygonSubType(corners) == PolygonSubType.Square)
            {
                if (!detected)
                {
                    // Step 8 - determine the center of gravity and the top-left corner
                    FindNewCorners(corners);

                    // Step 9 - extract the recognized square from the original image into a new 100x100 image
                    SimpleQuadrilateralTransformation quadrilateralTransformation = new SimpleQuadrilateralTransformation(corners, 100, 100);
                    Bitmap recognizedSquare = quadrilateralTransformation.Apply(bitmap);
                    recognizedSquare = recognizedSquare.Clone(new Rectangle(0, 0, recognizedSquare.Width, recognizedSquare.Height), PixelFormat.Format8bppIndexed);

                    // Step 10 - convert the new image back to black and white
                    otsuThresholdFilter.ApplyInPlace(recognizedSquare);

                    // Step 11 - invert the colors
                    invertFilter.ApplyInPlace(recognizedSquare);

                    // Step 12 - recognize the pattern (build the matrix)
                    float confidence; // probability that the correct pattern was recognized (ratio of black to white pixels in a cell)
                    byte[,] pattern = binaryGlyphRecognizer.Recognize(recognizedSquare, new Rectangle(0, 0, recognizedSquare.Width, recognizedSquare.Height), out confidence);
                    recognizedSquare.Dispose();

                    if (confidence >= 0.6)
                    {
                        oldPatterns.Add(pattern);
                        Boolean canDraw = CheckPrevious();
                        if (canDraw)
                        {
                            // Step 13 - draw the matrix
                            DrawPattern(pattern);

                            detected = true;
                            // add a 3 s delay after a pattern has been recognized
                            new Task(() =>
                            {
                                Thread.Sleep(3 * 1000);
                                detected = false;
                            }).Start();

                            // Communication with the warehouse device.
                            //new Thread(new RS232Communication(shape).Run).Start();
                        }
                    }
                }

                // draw the outline around the recognized square
                g.DrawPolygon(pen, ToPointsArray(hullPoints));
            }
        }
    }

    g.Dispose();
    frame.UnlockBits(frameData);
    frame.Dispose();
}
/// <summary>
/// Detect blobs in the bitmap image
/// </summary>
/// <param name="inputImage">Input bitmap image (gray or RGB)</param>
/// <param name="minFullness">0 if you don't want a fullness filter</param>
/// <param name="maxNumOfFilteredBlobs">How many filtered blobs should be saved in the blobs_Filtered list</param>
/// <returns></returns>
public Blob_Aforge(Bitmap inputImage, int minWidth, int maxWidth, int minHeight, int maxHeight, double minFullness, int maxNumOfFilteredBlobs)
{
    try
    {
        leftEdges.Clear();
        rightEdges.Clear();
        topEdges.Clear();
        bottomEdges.Clear();
        hulls.Clear();
        //quadrilaterals.Clear();

        // Get image
        this.image = AForge.Imaging.Image.Clone(inputImage, PixelFormat.Format24bppRgb);
        _imageWidth = this.image.Width;
        _imageHeight = this.image.Height;

        // Size filter
        blobCounter.FilterBlobs = true;
        blobCounter.MinHeight = minHeight;
        blobCounter.MinWidth = minWidth;
        blobCounter.MaxHeight = maxHeight;
        blobCounter.MaxWidth = maxWidth;
        blobCounter.ObjectsOrder = ObjectsOrder.Area;

        // Detection
        blobCounter.ProcessImage(this.image);
        blobs_all = blobCounter.GetObjectsInformation();

        GrahamConvexHull grahamScan = new GrahamConvexHull();

        foreach (Blob blob in blobs_all)
        {
            fullness = blob.Fullness;

            // Fullness filter
            if (fullness > minFullness & blobs_Filtered.Count < maxNumOfFilteredBlobs)
            {
                List<IntPoint> leftEdge = new List<IntPoint>();
                List<IntPoint> rightEdge = new List<IntPoint>();
                // List<IntPoint> topEdge = new List<IntPoint>();
                // List<IntPoint> bottomEdge = new List<IntPoint>();

                // collect edge points
                blobCounter.GetBlobsLeftAndRightEdges(blob, out leftEdge, out rightEdge);
                // blobCounter.GetBlobsTopAndBottomEdges(blob, out topEdge, out bottomEdge);

                leftEdges.Add(blob.ID, leftEdge);
                rightEdges.Add(blob.ID, rightEdge);
                // topEdges.Add(blob.ID, topEdge);
                // bottomEdges.Add(blob.ID, bottomEdge);

                // find convex hull
                List<IntPoint> edgePoints = new List<IntPoint>();
                edgePoints.AddRange(leftEdge);
                edgePoints.AddRange(rightEdge);

                shapeChecker.MinAcceptableDistortion = (float)0.5;
                shapeChecker.RelativeDistortionLimit = (float)0.15;

                if (shapeChecker.IsCircle(edgePoints))
                {
                    blobs_Filtered.Add(blob);

                    List<IntPoint> hull = grahamScan.FindHull(edgePoints);
                    hulls.Add(blobs_Filtered.Count - 1, hull); // synchronized with blobs_Filtered items
                }
            }
        }

        DrawBlobImage();
    }
    catch (Exception error)
    {
        //System.Windows.Forms.MessageBox.Show(error.ToString());
        //METState.Current.ErrorSound.Play();
    }
}
public int SetImage(Bitmap image)
{
    leftEdges.Clear();
    rightEdges.Clear();
    topEdges.Clear();
    bottomEdges.Clear();
    hulls.Clear();
    quadrilaterals.Clear();
    selectedBlobID = 0;

    Bitmap imageclone = AForge.Imaging.Image.Clone(image, image.PixelFormat);

    // create filters
    Median filterMedian = new Median();
    Threshold filterThreshold = new Threshold(250);
    BlobsFiltering filterBlobsFiltering = new BlobsFiltering();
    filterBlobsFiltering.CoupledSizeFiltering = false;
    filterBlobsFiltering.MinWidth = 6;  //7
    filterBlobsFiltering.MinHeight = 6; //7
    filterBlobsFiltering.MaxWidth = 8;  //8
    filterBlobsFiltering.MaxHeight = 8; //8

    // apply the filters
    filterThreshold.ApplyInPlace(imageclone);
    filterBlobsFiltering.ApplyInPlace(imageclone);
    filterMedian.ApplyInPlace(imageclone);

    //this.image = AForge.Imaging.Image.Clone(image, PixelFormat.Format16bppGrayScale);
    //imageclone = AForge.Imaging.Image.Clone(image);
    imageWidth = imageclone.Width;
    imageHeight = imageclone.Height;

    blobCounter.ProcessImage(imageclone);
    blobs = blobCounter.GetObjectsInformation();
    center = new List<AForge.Point>();

    GrahamConvexHull grahamScan = new GrahamConvexHull();

    foreach (Blob blob in blobs)
    {
        List<IntPoint> leftEdge = new List<IntPoint>();
        List<IntPoint> rightEdge = new List<IntPoint>();
        List<IntPoint> topEdge = new List<IntPoint>();
        List<IntPoint> bottomEdge = new List<IntPoint>();

        // collect edge points
        blobCounter.GetBlobsLeftAndRightEdges(blob, out leftEdge, out rightEdge);
        blobCounter.GetBlobsTopAndBottomEdges(blob, out topEdge, out bottomEdge);

        AForge.Point centering = blob.CenterOfGravity;

        leftEdges.Add(blob.ID, leftEdge);
        rightEdges.Add(blob.ID, rightEdge);
        topEdges.Add(blob.ID, topEdge);
        bottomEdges.Add(blob.ID, bottomEdge);

        // find convex hull
        List<IntPoint> edgePoints = new List<IntPoint>();
        edgePoints.AddRange(leftEdge);
        edgePoints.AddRange(rightEdge);

        List<IntPoint> hull = grahamScan.FindHull(edgePoints);
        hulls.Add(blob.ID, hull);

        List<IntPoint> quadrilateral = null;

        // list of blob centers of gravity
        center.Add(centering);

        // find quadrilateral
        if (hull.Count < 4)
        {
            quadrilateral = new List<IntPoint>(hull);
        }
        else
        {
            quadrilateral = PointsCloud.FindQuadrilateralCorners(hull);
        }
        quadrilaterals.Add(blob.ID, quadrilateral);

        // shift all points for visualization
        IntPoint shift = new IntPoint(1, 1);
        PointsCloud.Shift(leftEdge, shift);
        PointsCloud.Shift(rightEdge, shift);
        PointsCloud.Shift(topEdge, shift);
        PointsCloud.Shift(bottomEdge, shift);
        PointsCloud.Shift(hull, shift);
        PointsCloud.Shift(quadrilateral, shift);
    }

    double xhair = imageWidth / 2;
    double yhair = imageHeight / 2;

    if (image.PixelFormat != PixelFormat.Format24bppRgb)
    {
        //filterBlobX(516.0, 670.0);
        //filterBlobY(360.0, 520.0);
        filterBlobX(516.0, 1117.0);
        filterBlobY(357.0, 460.0);
        refPointList = new List<AForge.Point>();
        //findRef(388.0, 0.5);
        findRef(20.0, 1.5);// findPick(refPoint.X, refPoint.Y);
    }

    //UpdatePosition();
    //Invalidate();

    //if (!Directory.Exists(path))
    //{
    //    System.IO.Directory.CreateDirectory(path);
    //}
    ////file = path + DateTime.Now.ToString(datePatt);
    file = path + "visimg.bmp";
    imageclone.Save(file);

    if (blobs.Length > 0)
    {
        return blobs.Length;
    }
    else
    {
        return 0;
    }
}
// Set image to display by the control
public int SetImage(Bitmap image)
{
    leftEdges.Clear();
    rightEdges.Clear();
    topEdges.Clear();
    bottomEdges.Clear();
    hulls.Clear();
    quadrilaterals.Clear();
    selectedBlobID = 0;

    this.image = Accord.Imaging.Image.Clone(image, PixelFormat.Format24bppRgb);
    imageWidth = this.image.Width;
    imageHeight = this.image.Height;

    blobCounter.ProcessImage(this.image);
    blobs = blobCounter.GetObjectsInformation();

    GrahamConvexHull grahamScan = new GrahamConvexHull();

    foreach (Blob blob in blobs)
    {
        List<IntPoint> leftEdge = new List<IntPoint>();
        List<IntPoint> rightEdge = new List<IntPoint>();
        List<IntPoint> topEdge = new List<IntPoint>();
        List<IntPoint> bottomEdge = new List<IntPoint>();

        // collect edge points
        blobCounter.GetBlobsLeftAndRightEdges(blob, out leftEdge, out rightEdge);
        blobCounter.GetBlobsTopAndBottomEdges(blob, out topEdge, out bottomEdge);

        leftEdges.Add(blob.ID, leftEdge);
        rightEdges.Add(blob.ID, rightEdge);
        topEdges.Add(blob.ID, topEdge);
        bottomEdges.Add(blob.ID, bottomEdge);

        // find convex hull
        List<IntPoint> edgePoints = new List<IntPoint>();
        edgePoints.AddRange(leftEdge);
        edgePoints.AddRange(rightEdge);

        List<IntPoint> hull = grahamScan.FindHull(edgePoints);
        hulls.Add(blob.ID, hull);

        List<IntPoint> quadrilateral = null;

        // find quadrilateral
        if (hull.Count < 4)
        {
            quadrilateral = new List<IntPoint>(hull);
        }
        else
        {
            quadrilateral = PointsCloud.FindQuadrilateralCorners(hull);
        }
        quadrilaterals.Add(blob.ID, quadrilateral);

        // shift all points for visualization
        IntPoint shift = new IntPoint(1, 1);
        PointsCloud.Shift(leftEdge, shift);
        PointsCloud.Shift(rightEdge, shift);
        PointsCloud.Shift(topEdge, shift);
        PointsCloud.Shift(bottomEdge, shift);
        PointsCloud.Shift(hull, shift);
        PointsCloud.Shift(quadrilateral, shift);
    }

    UpdatePosition();
    Invalidate();

    return blobs.Length;
}
public static List<Shapes.Component> FindComponents(VideoProcessing vp, Bitmap bitmap)
{
    // Locating objects
    BlobCounter blobCounter = new BlobCounter();
    blobCounter.FilterBlobs = true;
    blobCounter.MinHeight = 8;
    blobCounter.MinWidth = 8;
    blobCounter.ProcessImage(bitmap);
    Blob[] blobs = blobCounter.GetObjectsInformation();

    // create convex hull searching algorithm
    GrahamConvexHull hullFinder = new GrahamConvexHull();
    ClosePointsMergingOptimizer optimizer1 = new ClosePointsMergingOptimizer();
    FlatAnglesOptimizer optimizer2 = new FlatAnglesOptimizer();

    List<Shapes.Component> Components = new List<Shapes.Component>();

    // process each blob
    foreach (Blob blob in blobs)
    {
        List<IntPoint> leftPoints, rightPoints, edgePoints = new List<IntPoint>();

        if ((blob.Rectangle.Height > 400) && (blob.Rectangle.Width > 600))
        {
            break; // The whole image could be a blob, discard that
        }

        // get blob's edge points
        blobCounter.GetBlobsLeftAndRightEdges(blob, out leftPoints, out rightPoints);
        edgePoints.AddRange(leftPoints);
        edgePoints.AddRange(rightPoints);

        // blob's convex hull
        List<IntPoint> Outline = hullFinder.FindHull(edgePoints);
        optimizer1.MaxDistanceToMerge = 4;
        optimizer2.MaxAngleToKeep = 170F;
        Outline = optimizer2.OptimizeShape(Outline);
        Outline = optimizer1.OptimizeShape(Outline);

        // find the longest line segment
        float dist = 0;
        LineSegment Longest = new LineSegment(Outline[0], Outline[1]);
        LineSegment line;
        dist = Longest.Length;
        int LongestInd = 0;
        for (int i = 1; i < Outline.Count; i++)
        {
            if (i != Outline.Count - 1)
            {
                line = new LineSegment(Outline[i], Outline[i + 1]);
            }
            else
            {
                // last iteration
                if (Outline[i] == Outline[0]) { break; }
                line = new LineSegment(Outline[i], Outline[0]);
            }
            if (line.Length > dist)
            {
                Longest = line;
                dist = line.Length;
                LongestInd = i;
            }
        }

        // Get the center point of it
        Point LongestCenter = new Point();
        LongestCenter.X = (float)Math.Round((Longest.End.X - Longest.Start.X) / 2.0 + Longest.Start.X);
        LongestCenter.Y = (float)Math.Round((Longest.End.Y - Longest.Start.Y) / 2.0 + Longest.Start.Y);

        Point NormalStart = new Point();
        Point NormalEnd = new Point();
        // Find normal:
        // start = longest.start rotated +90deg relative to center
        // end = longest.end rotated -90deg relative to center
        // If you rotate point (px, py) around point (ox, oy) by angle theta you'll get:
        // p'x = cos(theta) * (px-ox) - sin(theta) * (py-oy) + ox
        // p'y = sin(theta) * (px-ox) + cos(theta) * (py-oy) + oy
        // cos90 = 0, sin90 = 1 =>
        // p'x = -(py-oy) + ox = oy-py+ox, p'y = (px-ox) + oy
        NormalStart.X = LongestCenter.Y - Longest.Start.Y + LongestCenter.X;
        NormalStart.Y = (Longest.Start.X - LongestCenter.X) + LongestCenter.Y;
        // cos-90 = 0, sin-90 = -1 =>
        // p'x = (py-oy) + ox
        // p'y = -(px-ox) + oy = ox-px+oy
        NormalEnd.X = (Longest.Start.Y - LongestCenter.Y) + LongestCenter.X;
        NormalEnd.Y = LongestCenter.X - Longest.Start.X + LongestCenter.Y;

        // Make line out of the points
        Line Normal = Line.FromPoints(NormalStart, NormalEnd);

        // Find the furthest intersection with the normal (skip the longest segment itself)
        Point InterSection = new Point();
        Point Furthest = new Point();
        bool FurhtestAssinged = false;
        LineSegment seg;
        dist = 0;
        for (int i = 0; i < Outline.Count; i++)
        {
            if (i == LongestInd) { continue; }
            if (i != Outline.Count - 1)
            {
                seg = new LineSegment(Outline[i], Outline[i + 1]);
            }
            else
            {
                // last iteration
                if (Outline[i] == Outline[0]) { break; }
                seg = new LineSegment(Outline[i], Outline[0]);
            }
            if (seg.GetIntersectionWith(Normal) == null) { continue; }
            InterSection = (Point)seg.GetIntersectionWith(Normal);
            if (InterSection.DistanceTo(LongestCenter) > dist)
            {
                Furthest = InterSection;
                FurhtestAssinged = true;
                dist = InterSection.DistanceTo(LongestCenter);
            }
        }

        // Check if there is an edge point close to the normal that is even further away
        Point fPoint = new Point();
        for (int i = 0; i < Outline.Count; i++)
        {
            fPoint.X = Outline[i].X;
            fPoint.Y = Outline[i].Y;
            if (Normal.DistanceToPoint(fPoint) < 1.5)
            {
                if (fPoint.DistanceTo(LongestCenter) > dist)
                {
                    Furthest = fPoint;
                    FurhtestAssinged = true;
                    dist = fPoint.DistanceTo(LongestCenter);
                }
            }
        }

        Point ComponentCenter = new Point();
        if (FurhtestAssinged)
        {
            // Find the midpoint of LongestCenter and Furthest: this is the center point of the component
            ComponentCenter.X = (float)Math.Round((LongestCenter.X - Furthest.X) / 2.0 + Furthest.X);
            ComponentCenter.Y = (float)Math.Round((LongestCenter.Y - Furthest.Y) / 2.0 + Furthest.Y);

            // Alignment is the angle of the longest segment
            double Alignment;
            if (Math.Abs(Longest.End.X - Longest.Start.X) < 0.001)
            {
                Alignment = 0;
            }
            else
            {
                Alignment = Math.Atan((Longest.End.Y - Longest.Start.Y) / (Longest.End.X - Longest.Start.X));
                Alignment = Alignment * 180.0 / Math.PI; // in deg.
            }
            Components.Add(new Shapes.Component(ComponentCenter, Alignment, Outline, Longest, NormalStart, NormalEnd));
        }
    }

    SetVideoProcessing(Components, vp);
    return Components;
}
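Most of the examples above repeat the same core pattern: run a BlobCounter over the bitmap, concatenate each blob's left and right edge points, pass them to GrahamConvexHull.FindHull, and draw the resulting polygon. Below is a minimal, consolidated sketch of that pattern using only the AForge.NET calls that appear in these examples (BlobCounter, GetBlobsLeftAndRightEdges, GrahamConvexHull.FindHull, Drawing.Polygon); the class and method names (HullSketch, DrawBlobHulls) are illustrative only, and the input is assumed to be in a pixel format BlobCounter accepts (8 bpp grayscale or 24/32 bpp RGB).

using System.Collections.Generic;
using System.Drawing;
using System.Drawing.Imaging;
using AForge;                 // IntPoint
using AForge.Imaging;         // BlobCounter, Blob, Drawing
using AForge.Math.Geometry;   // GrahamConvexHull

public static class HullSketch
{
    // Illustrative helper: outline every detected blob with its convex hull, drawn in red.
    public static Bitmap DrawBlobHulls(Bitmap source)
    {
        Bitmap result = (Bitmap)source.Clone();

        // locate blobs in the image
        BlobCounter blobCounter = new BlobCounter();
        blobCounter.ProcessImage(result);
        Blob[] blobs = blobCounter.GetObjectsInformation();

        GrahamConvexHull hullFinder = new GrahamConvexHull();

        // lock the bitmap so AForge's Drawing helpers can write into it
        BitmapData data = result.LockBits(
            new Rectangle(0, 0, result.Width, result.Height),
            ImageLockMode.ReadWrite, result.PixelFormat);

        foreach (Blob blob in blobs)
        {
            // collect the blob's left and right edge points
            List<IntPoint> leftPoints, rightPoints;
            blobCounter.GetBlobsLeftAndRightEdges(blob, out leftPoints, out rightPoints);

            List<IntPoint> edgePoints = new List<IntPoint>();
            edgePoints.AddRange(leftPoints);
            edgePoints.AddRange(rightPoints);

            // compute and draw the convex hull
            List<IntPoint> hull = hullFinder.FindHull(edgePoints);
            Drawing.Polygon(data, hull, Color.Red);
        }

        result.UnlockBits(data);
        return result;
    }
}

The variations above differ mainly in how the input points are obtained (GetBlobsEdgePoints or a BorderFollowing contour instead of left/right edges) and in what is done with the hull afterwards (PointsMarker, ConvexHullDefects, FindQuadrilateralCorners, or shape checking).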