private void button2_Click(object sender, EventArgs e)
{
    if (imgInput == null)
    {
        return;
    }

    try
    {
        // Binarize the input, then dilate/erode once to close small gaps and remove specks.
        var temp = imgInput.Convert<Gray, byte>()
                           .ThresholdBinary(new Gray(100), new Gray(255))
                           .Dilate(1)
                           .Erode(1);

        Mat imgLabel = new Mat();
        Mat stats = new Mat();
        Mat centroids = new Mat();
        var nLabels = CvInvoke.ConnectedComponentsWithStats(temp, imgLabel, stats, centroids);

        imgCC = imgLabel.ToImage<Gray, byte>();

        // Copy the centroid and stats matrices into managed arrays (one entry per label).
        centroidPoints = new MCvPoint2D64f[nLabels];
        centroids.CopyTo(centroidPoints);

        statsop = new CCStatsOp[nLabels];
        stats.CopyTo(statsop);

        pictureBox2.Image = temp.Bitmap;
    }
    catch (Exception ee)
    {
        MessageBox.Show(ee.Message);
    }
}
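// Note: CCStatsOp is not an Emgu CV type; stats.CopyTo(statsop) only works if it is a blittable
// struct whose layout matches the five Int32 stats columns returned per label
// (left, top, width, height, area). A minimal sketch of such a struct, assuming
// System.Drawing.Rectangle for the first four fields (requires System.Runtime.InteropServices
// and System.Drawing):
[StructLayout(LayoutKind.Sequential)]
public struct CCStatsOp
{
    public Rectangle Rectangle; // left, top, width, height of the component's bounding box
    public int Area;            // number of pixels belonging to the component
}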
private int find_components(Mat edges, Mat Stats_Array, int max_components = 10, bool imshow = false)
{
    int count, n, Components = 0;
    //IOutputArray dilated_image = null, CCWS_Centroids_image = null, CCWS_labels_image = null;
    Mat dilated_image = new Mat();
    Mat CCWS_labels_image = new Mat();
    Mat CCWS_Centroids_image = new Mat();
    //Mat CCWS_Output_Stats = new Mat();
    Mat Kernel = CvInvoke.GetStructuringElement(Emgu.CV.CvEnum.ElementShape.Rectangle, new Size(3, 3), new Point(-1, -1));

    // Dilate the image until there are just a few connected components.
    count = max_components + 1;
    n = 1;
    //components = None;
    Image<Gray, Byte> edges_temp = edges.ToImage<Gray, Byte>();

    while (count > max_components)
    {
        n += 1;
        // Don't divide into 255
        CvInvoke.Dilate(edges_temp, dilated_image, Kernel, new Point(-1, -1), n, BorderType.Default, new MCvScalar(1.0));
        Components = CvInvoke.ConnectedComponentsWithStats(dilated_image, CCWS_labels_image, Stats_Array, CCWS_Centroids_image, LineType.EightConnected);
        count = Components;
    }

    CvInvoke.Imwrite("Edge_temp.jpg", edges_temp);
    CvInvoke.Imwrite("Dilate.jpg", dilated_image);

    Image<Gray, Byte> dilated_image_2 = dilated_image.ToImage<Gray, Byte>();
    Console.WriteLine("{0} {1} ", dilated_image_2.Height, dilated_image_2.Width);

    return Components;
}
private void processToolStripMenuItem_Click(object sender, EventArgs e)
{
    if (imgInput == null)
    {
        return;
    }

    try
    {
        var tempImg = imgInput.Convert<Gray, byte>()
                              .ThresholdBinary(new Gray(65), new Gray(255))
                              .Dilate(1)
                              .Erode(1);

        Mat mLabel = new Mat();
        Mat stats = new Mat();
        Mat centr = new Mat();
        int nLabels = CvInvoke.ConnectedComponentsWithStats(tempImg, mLabel, stats, centr);

        cc = mLabel.ToImage<Gray, byte>();

        centrPoints = new MCvPoint2D64f[nLabels];
        centr.CopyTo(centrPoints);

        statsOP = new CCStatsOP[nLabels];
        stats.CopyTo(statsOP);

        pictureBox2.Image = tempImg.ToBitmap();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
private double[][] GetvisibleData(Mat thresholdImg, Mat depthFrame)
{
    //int minArea = Properties.UserSettings.Default.DataIndicatorMinimumArea;

    // Get connected components in the frame
    using (Mat labels = new Mat(), stats = new Mat(), centroids = new Mat())
    {
        int n;
        n = CvInvoke.ConnectedComponentsWithStats(thresholdImg, labels, stats, centroids, LineType.EightConnected, DepthType.Cv16U);

        // Copy centroid points to point array
        this.numbOfPoints = n - 2;
        if (this.maxPoints < numbOfPoints)
        {
            return null;
        }

        MCvPoint2D64f[] centroidPointsEmgu;
        centroidPointsEmgu = new MCvPoint2D64f[n];
        centroids.CopyTo(centroidPointsEmgu);

        // Convert centroid points to a jagged array
        double[][] centroidPoints = GetCentroidPoints(centroidPointsEmgu, n);

        // Add z-coordinates to the tracked points
        AssignZCoordinatesSurroundingBox(centroidPoints, stats, depthFrame);
        cameraData.ScreenToWorldCoordinates(centroidPoints);

        return centroidPoints;
    }
}
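// GetCentroidPoints is not shown in this snippet. Purely as an illustration, a minimal sketch of
// what such a conversion might look like, assuming each tracked point is stored as { x, y, z }
// with z filled in later by AssignZCoordinatesSurroundingBox:
private static double[][] GetCentroidPoints(MCvPoint2D64f[] centroids, int n)
{
    var points = new double[n][];
    for (int i = 0; i < n; i++)
    {
        // z is initialised to 0 here; the depth-based z-coordinate is assigned afterwards.
        points[i] = new double[] { centroids[i].X, centroids[i].Y, 0.0 };
    }
    return points;
}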
static bool CheckConnectedComponents(Mat grayImage)
{
    // Threshold using Otsu bi-modal (black & white) assumption
    Mat binaryImage = grayImage.Clone();
    double otsuThreshold = CvInvoke.Threshold(grayImage, binaryImage, 0.0, 255.0, Emgu.CV.CvEnum.ThresholdType.Otsu | Emgu.CV.CvEnum.ThresholdType.Binary);

    // Dilate to connect the two squares
    Mat kernel = new Mat();
    CvInvoke.Dilate(binaryImage, binaryImage, kernel, new Point(-1, -1), 1, Emgu.CV.CvEnum.BorderType.Constant, CvInvoke.MorphologyDefaultBorderValue);
    CvInvoke.Imwrite("C:\\Temp\\Dilate.png", binaryImage, new KeyValuePair<Emgu.CV.CvEnum.ImwriteFlags, int>(Emgu.CV.CvEnum.ImwriteFlags.PngCompression, 3));

    // Compute the number of labels (should be 2: 0 for background, 1 for white)
    Mat labelRegion = new Mat(new System.Drawing.Size(binaryImage.Width, binaryImage.Height), Emgu.CV.CvEnum.DepthType.Cv32S, 1);
    Mat statistics = new Mat();
    Mat centroids = new Mat();
    var numberOfLabels = CvInvoke.ConnectedComponentsWithStats(binaryImage, labelRegion, statistics, centroids, Emgu.CV.CvEnum.LineType.EightConnected, Emgu.CV.CvEnum.DepthType.Cv32S);
    Console.WriteLine(" - Number of labels: {0}", numberOfLabels);
    if (numberOfLabels != 2)
    {
        return false;
    }

    // Compute centers of background and foreground (both should be close to the image center).
    // Use PointF for the image centre as well, so all three vectors passed to Norm share the same element type.
    Emgu.CV.Util.VectorOfPointF imageCentre = new Emgu.CV.Util.VectorOfPointF(new PointF[] { new PointF((int)(grayImage.Cols / 2.0f), (int)(grayImage.Rows / 2.0f)) });
    Emgu.CV.Util.VectorOfPointF blackCenter = new Emgu.CV.Util.VectorOfPointF(new PointF[] { new PointF((float)centroids.GetDoubleValue(0, 0), (float)centroids.GetDoubleValue(0, 1)) });
    Emgu.CV.Util.VectorOfPointF whiteCenter = new Emgu.CV.Util.VectorOfPointF(new PointF[] { new PointF((float)centroids.GetDoubleValue(1, 0), (float)centroids.GetDoubleValue(1, 1)) });

    var blackCentroidDistance = CvInvoke.Norm(blackCenter, imageCentre, Emgu.CV.CvEnum.NormType.L2);
    var whiteCentroidDistance = CvInvoke.Norm(whiteCenter, imageCentre, Emgu.CV.CvEnum.NormType.L2);

    for (var label = 0; label < numberOfLabels; label++)
    {
        Console.WriteLine(" - [{0}] centroid at ({1:F1},{2:F1})", label, centroids.GetDoubleValue(label, 0), centroids.GetDoubleValue(label, 1));
    }

    return numberOfLabels == 2 && blackCentroidDistance < 10.0 && whiteCentroidDistance < 10.0;
}
private void test_connectedComponentsWithStats()
{
    Mat frame = new Mat();
    Mat Output_Label = new Mat();
    Mat Output_Stats = new Mat();
    Mat Output_Centroid = new Mat();

    string path = "C:\\Users\\Asus\\Desktop\\Text_recognize\\Text_recognize\\bin\\Debug\\Edged_dilated.jpg";
    // ConnectedComponentsWithStats expects an 8-bit single-channel image, so load as grayscale.
    frame = CvInvoke.Imread(path, ImreadModes.Grayscale);

    CvInvoke.ConnectedComponentsWithStats(frame, Output_Label, Output_Stats, Output_Centroid, LineType.EightConnected);

    Console.WriteLine(Output_Stats.Height);
    Console.WriteLine(Output_Stats.Width);

    Image<Gray, Byte> Output_Stats_im = Output_Stats.ToImage<Gray, Byte>();
    Console.ReadLine();
}
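// Output_Stats above is an (nLabels x 5) Int32 matrix: one row per label with the columns
// left, top, width, height, area (label 0 is the background). A minimal sketch of a hypothetical
// DumpStats helper that prints those rows, using the same Mat.CopyTo(int[]) pattern seen in the
// other snippets here:
private static void DumpStats(Mat stats)
{
    int[] statsData = new int[stats.Rows * stats.Cols];
    stats.CopyTo(statsData); // flatten the 2-D Int32 matrix into a 1-D array

    for (int label = 0; label < stats.Rows; label++)
    {
        int offset = label * stats.Cols;
        Console.WriteLine("label {0}: x={1} y={2} w={3} h={4} area={5}",
            label, statsData[offset], statsData[offset + 1], statsData[offset + 2],
            statsData[offset + 3], statsData[offset + 4]);
    }
}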
public EdgeAlogorithm(string ImageFilePath)
{
    Mat src = CvInvoke.Imread(ImageFilePath, ImreadModes.Color);
    Mat dst = CvInvoke.Imread(ImageFilePath, ImreadModes.Color);
    Mat OtsuImage = CvInvoke.Imread(ImageFilePath, ImreadModes.Color);

    CvInvoke.GaussianBlur(src, src, new System.Drawing.Size(3, 3), 0, 0); // smoothing filter
    CvInvoke.BilateralFilter(src, dst, 9, 30, 30);                        // bilateral filter

    Mat dst1 = CvInvoke.Imread(ImageFilePath, ImreadModes.Color);
    Mat dst2 = CvInvoke.Imread(ImageFilePath, ImreadModes.Color);
    CvInvoke.Canny(dst, dst1, 10, 100, 3);                                // Canny edge detection
    CvInvoke.Threshold(dst1, dst2, 128, 255, ThresholdType.BinaryInv);    // invert the image so edges appear as black lines

    Mat GraylevelImage = CvInvoke.Imread(ImageFilePath, ImreadModes.Color);
    CvInvoke.CvtColor(src, GraylevelImage, ColorConversion.Rgb2Gray);
    CvInvoke.Threshold(GraylevelImage, OtsuImage, 0, 255, ThresholdType.Otsu);

    Mat labels = CvInvoke.Imread(ImageFilePath, ImreadModes.Color),
        stats = CvInvoke.Imread(ImageFilePath, ImreadModes.Color),
        centroids = CvInvoke.Imread(ImageFilePath, ImreadModes.Color);
    int nccomps = CvInvoke.ConnectedComponentsWithStats(OtsuImage, labels, stats, centroids);

    // Assign a random color to each component; components with a small area value stay black.
    List<uint[]> colors;
    colors = new List<uint[]>();
    colors.Add(new uint[] { 0, 0, 0 });
    for (int loopnum = 1; loopnum < nccomps; loopnum++)
    {
        Random r = new Random();
        if (stats.GetData(loopnum, 4)[0] < 200)
        {
            colors.Add(new uint[3] { 0, 0, 0 });
        }
        else
        {
            colors.Add(new uint[3] { (uint)r.Next(0, 256), (uint)r.Next(0, 256), (uint)r.Next(0, 256) });
        }
    }

    //var mat = new Mat(200, 200, DepthType.Cv64F, 3);
    for (int row = 0; row < src.Rows; row++)
    {
        for (int col = 0; col < src.Cols; col++)
        {
            src.SetValue(row, col, (byte)col);
            var value = src.GetValue(row, col);
            //Console.WriteLine("Value = " + value);
            /*if (value != 255 && Test < 50)
            {
                OtsuImage.SetValue(row, col, (byte)255);
                Test++;
                //System.Console.ReadKey();
            }*/
        }
    }

    ImageContour imagecontour = new ImageContour();
    Mat ImageContourImage = imagecontour.ImageContourMethod(OtsuImage);

    Step1 = dst;
    Step2 = dst2;
    Step3 = OtsuImage;
    ContourImage = ImageContourImage;

    /*
    String win1 = "Canny_1";                      // The name of the window
    CvInvoke.NamedWindow(win1);                   // Create the window using the specific name

    String win2 = "Canny_2";                      // The name of the window
    CvInvoke.NamedWindow(win2);                   // Create the window using the specific name

    String win3 = "OtsuImage";                    // The name of the window
    CvInvoke.NamedWindow(win3);                   // Create the window using the specific name

    String win4 = "OtsuImage_ImageContourMethod"; // The name of the window
    CvInvoke.NamedWindow(win4);                   // Create the window using the specific name

    CvInvoke.Imshow(win1, dst);                   // Show the image
    CvInvoke.Imshow(win2, dst2);                  // Show the image
    CvInvoke.Imshow(win3, OtsuImage);             // Show the image
    CvInvoke.Imshow(win4, ImageContourImage);     // Show the image
    CvInvoke.WaitKey(0);                          // Wait for the key pressing event
    CvInvoke.DestroyWindow(win1);                 // Destroy the window if key is pressed
    CvInvoke.DestroyWindow(win2);                 // Destroy the window if key is pressed
    CvInvoke.DestroyWindow(win3);                 // Destroy the window if key is pressed
    CvInvoke.DestroyWindow(win4);                 // Destroy the window if key is pressed
    */
}
public bool processImages()
{
    collectImmages(startDir);

    IEdgeFilter filter = new KirschEdgeFilter();
    int threshold_value = 150; // 0-255

    for (int i = 0; i < filePaths.Length; i++)
    {
        string aimDirThis = aimDir + "/res" + i + ".png";
        string aimDirThisPre = aimDir + "/respre" + i + ".png";

        Image original = Image.FromFile(filePaths[i]);
        Bitmap resized = new Bitmap(original, new Size(aimWidth, aimHeight));
        Bitmap greyscale = MakeGrayscale3(resized);

        MemoryStream outStream = new MemoryStream();
        imageProcessor.Load(greyscale);
        imageProcessor.DetectEdges(filter, false);
        imageProcessor.Save(aimDir + "/temp" + "/res" + i + ".png");
        imageProcessor.Save(outStream);

        // Next, binarize the edge image with a threshold.
        // Then keep only those pixels of the greyscale image that are 1 in the binary image.
        Image<Gray, Byte> img = new Image<Gray, Byte>(aimDir + "/temp" + "/res" + i + ".png");
        img = img.ThresholdBinary(new Gray(threshold_value), new Gray(255)).Dilate(1).Erode(1);

        var labels = new Mat();
        var stats = new Mat();
        var centroids = new Mat();
        var nLabels = CvInvoke.ConnectedComponentsWithStats(img, labels, stats, centroids);

        int biggestIndex = 0;
        int biggestArea = 0;
        int[] statsData = new int[stats.Rows * stats.Cols];
        stats.CopyTo(statsData); // flatten the 2-D stats matrix into a 1-D array

        // Find the largest white region.
        for (int j = 5; j < statsData.Length; j = j + 5) // the first component is usually the black background, so it can be ignored
        {
            var area = statsData[j + 4];
            if (area > biggestArea)
            {
                biggestArea = area;
                biggestIndex = j;
            }
        }

        /*
        var x = statsData[i * stats.Cols + 0];
        var y = statsData[i * stats.Cols + 1];
        var width = statsData[i * stats.Cols + 2];
        var height = statsData[i * stats.Cols + 3];
        var area = statsData[i * stats.Cols + 4];
        */

        //Bitmap source = greyscale;
        Bitmap edges = new Bitmap(outStream);

        int componentX = statsData[biggestIndex + 0];
        int componentY = statsData[biggestIndex + 1];
        int componentWidth = statsData[biggestIndex + 2];
        int componentHeight = statsData[biggestIndex + 3];

        Bitmap CroppedColor = cropping(componentX, componentY, componentWidth, componentHeight, resized, true);
        Bitmap CroppedImage = cropping(componentX, componentY, componentWidth, componentHeight, greyscale, false);

        string color = determineColor(CroppedColor);
        CroppedImage.Save(aimDir + "/res" + i + color + ".png", ImageFormat.Png);
        //CroppedColor.Save(aimDir + "/resC" + i + color + ".png", ImageFormat.Png);

        edges = null;
        CroppedImage = null;
        img = null;
    }

    return true;
}