// Extract the contour and get the center coordinates of the LED
Point GetCenterPointofLED(Mat grayImage)
{
    OpenCvSharp.CPlusPlus.Point centerPoint = new OpenCvSharp.CPlusPlus.Point();
    IplImage grayIpl = grayImage.ToIplImage().Clone();
    IplImage calibIpl = new IplImage(grayIpl.Size, BitDepth.U8, 3);

    // Detect the center: label the blobs and take the largest one
    CvBlobs blobs = new CvBlobs();
    blobs.Label(grayIpl);
    //blobs.FilterByArea(20, 1500);
    CvBlob blob = blobs.LargestBlob();
    try
    {
        if (blob != null)
        {
            centerPoint = new Point(blob.Centroid.X, blob.Centroid.Y);
            blobs.RenderBlobs(grayIpl, calibIpl);
        }
    }
    catch
    {
        Console.WriteLine("error: contour");
    }

    this.CalibrationImage = new Mat(calibIpl);
    Console.WriteLine(centerPoint);
    return centerPoint;
}
public static void Blob_CvBlobs(int filterArea = 0)
{
    Glb.DrawMatAndHist0(Glb.matSrc);

    var matThr = Glb.matSrc.CvtColor(ColorConversionCodes.BGR2GRAY).Threshold(128, 255, ThresholdTypes.Otsu);

    var blobs = new CvBlobs();
    Glb.TimerStart();
    int cnt = blobs.Label(matThr);
    Console.WriteLine("=> Label Time: {0}ms", Glb.TimerStop());

    blobs.FilterByArea(filterArea, int.MaxValue);

    var matDsp = new Mat(Glb.matSrc.Rows, Glb.matSrc.Cols, MatType.CV_8UC3);
    matDsp.SetTo(Scalar.Black);
    Glb.TimerStart();
    MyBlobRenderer.RenderBlobs(blobs, matDsp);
    Console.WriteLine("=> Render Time: {0}ms", Glb.TimerStop());

    Console.WriteLine("=> Blob Count: {0}", blobs.Count);

    Glb.DrawMatAndHist1(matThr);
    Glb.DrawMatAndHist2(matDsp);

    matThr.Dispose();
    matDsp.Dispose();
}
public IplImage BlobContourImage(IplImage src)
{
    blobcontour = new IplImage(src.Size, BitDepth.U8, 3);
    bin = this.Binary(src, 50);

    CvBlobs blobs = new CvBlobs();
    blobs.Label(bin);

    foreach (KeyValuePair<int, CvBlob> item in blobs)
    {
        CvBlob b = item.Value;
        CvContourChainCode cc = b.Contour;
        cc.Render(blobcontour);

        // External contour vertices drawn in blue
        CvContourPolygon ex_polygon = cc.ConvertToPolygon();
        foreach (CvPoint p in ex_polygon)
        {
            Cv.DrawCircle(blobcontour, p, 1, CvColor.Blue, -1);
        }

        // Internal (hole) contour vertices drawn in red
        for (int i = 0; i < b.InternalContours.Count; i++)
        {
            CvContourPolygon in_polygon = b.InternalContours[i].ConvertToPolygon();
            foreach (CvPoint p in in_polygon)
            {
                Cv.DrawCircle(blobcontour, p, 1, CvColor.Red, -1);
            }
        }
    }
    return blobcontour;
}
/// <summary>
/// Keeps only the largest blob
/// </summary>
/// <param name="imgSrc">Input binary image</param>
/// <param name="imgDst">Output image containing only the largest blob</param>
private void FilterByMaximumBlob(IplImage imgSrc, IplImage imgDst)
{
    CvBlobs blobs = new CvBlobs();
    imgDst.Zero();
    blobs.Label(imgSrc);

    CvBlob max = blobs.GreaterBlob();
    if (max == null)
    {
        return;
    }
    blobs.FilterByArea(max.Area, max.Area);
    blobs.FilterLabels(imgDst);
}
private static void FilterByMaximalBlob(IplImage imgSrc, IplImage imgDst)
{
    using (CvBlobs blobs = new CvBlobs())
    using (IplImage imgLabelData = new IplImage(imgSrc.Size, CvBlobLib.DepthLabel, 1))
    {
        imgDst.Zero();
        blobs.Label(imgSrc, imgLabelData);

        CvBlob max = blobs[blobs.GreaterBlob()];
        if (max == null)
        {
            return;
        }
        blobs.FilterByArea(max.Area, max.Area);
        blobs.FilterLabels(imgLabelData, imgDst);
    }
}
public Blob()
{
    using (var imgSrc = new IplImage(FilePath.Image.Shapes, LoadMode.Color))
    using (var imgBinary = new IplImage(imgSrc.Size, BitDepth.U8, 1))
    using (var imgRender = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    using (var imgContour = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    using (var imgPolygon = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    {
        Cv.CvtColor(imgSrc, imgBinary, ColorConversion.BgrToGray);
        Cv.Threshold(imgBinary, imgBinary, 100, 255, ThresholdType.Binary);

        CvBlobs blobs = new CvBlobs();
        blobs.Label(imgBinary);

        foreach (KeyValuePair<int, CvBlob> item in blobs)
        {
            CvBlob b = item.Value;
            Console.WriteLine("{0} | Centroid:{1} Area:{2}", item.Key, b.Centroid, b.Area);

            CvContourChainCode cc = b.Contour;
            cc.Render(imgContour);

            CvContourPolygon polygon = cc.ConvertToPolygon();
            foreach (CvPoint p in polygon)
            {
                imgPolygon.Circle(p, 1, CvColor.Red, -1);
            }

            /*
            CvPoint2D32f circleCenter;
            float circleRadius;
            GetEnclosingCircle(polygon, out circleCenter, out circleRadius);
            imgPolygon.Circle(circleCenter, (int) circleRadius, CvColor.Green, 2);
            */
        }

        blobs.RenderBlobs(imgSrc, imgRender);

        using (new CvWindow("render", imgRender))
        using (new CvWindow("contour", imgContour))
        using (new CvWindow("polygon vertices", imgPolygon))
        {
            Cv.WaitKey(0);
        }
    }
}
public static IplImage test(IplImage target)
{
    CvBlobs blobs = new CvBlobs();
    IplImage labelImg = new IplImage(target.Size, CvBlobLib.DepthLabel, 1); // label image (not used by this Label overload)
    IplImage retImg = new IplImage(target.Size, BitDepth.U8, 1);

    blobs.Label(target);
    CvBlob max = blobs.GreaterBlob();
    if (max == null)
    {
        return target;
    }
    blobs.FilterByArea(max.Area, max.Area);
    blobs.FilterLabels(retImg);
    return retImg;
}
// Update is called once per frame
void Update()
{
    IplImage frame = Cv.QueryFrame(capture);

    imgBinary = new IplImage(frame.Size, BitDepth.U8, 1);
    imgLabel = new IplImage(frame.Size, BitDepth.F32, 1);
    imgRender = new IplImage(frame.Size, BitDepth.U8, 3);
    imgContour = new IplImage(frame.Size, BitDepth.U8, 3);
    imgPolygon = new IplImage(frame.Size, BitDepth.U8, 3);
    Color[] cols = new Color[texture.width * texture.height];

    Cv.CvtColor(frame, imgBinary, ColorConversion.BgrToGray);
    Cv.Threshold(imgBinary, imgBinary, 100, 255, ThresholdType.Binary);

    CvBlobs blobs = new CvBlobs();
    uint result = blobs.Label(imgBinary, imgLabel);

    foreach (KeyValuePair<uint, CvBlob> item in blobs)
    {
        CvBlob b = item.Value;
        //Console.WriteLine ("{0} | Centroid:{1} Area:{2}", item.Key, b.Centroid, b.Area);

        CvContourChainCode cc = b.Contour;
        cc.RenderContourChainCode(imgContour);

        CvContourPolygon polygon = cc.ConvertChainCodesToPolygon();
        foreach (CvPoint p in polygon)
        {
            imgPolygon.Circle(p, 1, CvColor.Red, -1);
        }
    }

    blobs.RenderBlobs(imgLabel, frame, imgRender);

    for (int y = 0; y < texture.height; y++)
    {
        for (int x = 0; x < texture.width; x++)
        {
            CvColor col = imgRender.Get2D(y, x);
            cols[y * texture.width + x] = new Color(col.R / 255.0f, col.G / 255.0f, col.B / 255.0f, 1.0f);
        }
    }
    // int t2 = System.Environment.TickCount;
    texture.SetPixels(cols);
    //int t3 = System.Environment.TickCount;
    //Debug.Log("t2-t1=" + (t2 - t1) + " t3-t2=" + (t3 - t2));
    texture.Apply();
}
public BlobOld()
{
    using (IplImage imgSrc = new IplImage(Const.ImageShapes, LoadMode.Color))
    using (IplImage imgBinary = new IplImage(imgSrc.Size, BitDepth.U8, 1))
    using (IplImage imgLabel = new IplImage(imgSrc.Size, BitDepth.F32, 1))
    using (IplImage imgRender = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    using (IplImage imgContour = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    using (IplImage imgPolygon = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    {
        Cv.CvtColor(imgSrc, imgBinary, ColorConversion.BgrToGray);
        Cv.Threshold(imgBinary, imgBinary, 100, 255, ThresholdType.Binary);

        using (CvBlobs blobs = new CvBlobs())
        {
            uint result = blobs.Label(imgBinary, imgLabel);

            foreach (KeyValuePair<uint, CvBlob> item in blobs)
            {
                CvBlob b = item.Value;
                Console.WriteLine("{0} | Centroid:{1} Area:{2}", item.Key, b.Centroid, b.Area);

                CvContourChainCode cc = b.Contour;
                cc.RenderContourChainCode(imgContour);

                CvContourPolygon polygon = cc.ConvertChainCodesToPolygon();
                foreach (CvPoint p in polygon)
                {
                    imgPolygon.Circle(p, 1, CvColor.Red, -1);
                }
            }

            blobs.RenderBlobs(imgLabel, imgSrc, imgRender);

            using (new CvWindow("render", imgRender))
            using (new CvWindow("contour", imgContour))
            using (new CvWindow("polygon vertices", imgPolygon))
            {
                Cv.WaitKey(0);
            }
        }
    }
}
public static void CarbonPaper(int x1 = 100, int y1 = 300, int x2 = 1100, int y2 = 1600, ThresholdTypes thrType = ThresholdTypes.Binary, int thr = 128, int filterArea = 30)
{
    // 1. convert to grayscale
    var matGray = Glb.matSrc.CvtColor(ColorConversionCodes.BGR2GRAY);

    // 2. roi crop
    Rect roi = new Rect(x1, y1, x2 - x1 + 1, y2 - y1 + 1);
    var matGrayDrawRoi = Glb.matSrc.Clone();
    matGrayDrawRoi.Rectangle(roi, Scalar.Yellow);
    Glb.DrawMat0(matGrayDrawRoi);

    var matRoi = new Mat(matGray, roi);
    Glb.DrawHist0(matRoi);

    // 3. threshold
    var matThr = matRoi.Threshold(thr, 255, thrType);
    Glb.DrawMatAndHist1(matThr);

    // 4. blob with area filter
    CvBlobs blobs = new CvBlobs();
    blobs.Label(matThr);
    blobs.FilterByArea(filterArea, int.MaxValue);

    // 5. display blob
    var matDsp = new Mat(matRoi.Rows, matRoi.Cols, MatType.CV_8UC3);
    matDsp.SetTo(Scalar.Black);
    blobs.RenderBlobs(matDsp, matDsp, RenderBlobsModes.Color);
    Glb.DrawMatAndHist2(matDsp);

    Console.WriteLine("blobs.cnt = {0}", blobs.Count);

    matGray.Dispose();
    matGrayDrawRoi.Dispose();
    matRoi.Dispose();
    matThr.Dispose();
    matDsp.Dispose();
}
public IplImage BlobImage(IplImage src)
{
    blob = new IplImage(src.Size, BitDepth.U8, 3);
    bin = this.Binary(src, 50);

    CvBlobs blobs = new CvBlobs();
    blobs.Label(bin);
    blobs.RenderBlobs(src, blob);

    foreach (KeyValuePair<int, CvBlob> item in blobs)
    {
        CvBlob b = item.Value;
        Cv.PutText(blob, Convert.ToString(b.Label), b.Centroid, new CvFont(FontFace.HersheyComplex, 1, 1), CvColor.Red);
    }
    return blob;
}
public Blob()
{
    using (IplImage imgSrc = new IplImage(Const.ImageShapes, LoadMode.Color))
    using (IplImage imgBinary = new IplImage(imgSrc.Size, BitDepth.U8, 1))
    using (IplImage imgLabel = new IplImage(imgSrc.Size, BitDepth.F32, 1))
    using (IplImage imgRender = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    using (IplImage imgContour = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    using (IplImage imgPolygon = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    {
        Cv.CvtColor(imgSrc, imgBinary, ColorConversion.BgrToGray);
        Cv.Threshold(imgBinary, imgBinary, 100, 255, ThresholdType.Binary);

        CvBlobs blobs = new CvBlobs();
        blobs.Label(imgBinary);

        foreach (KeyValuePair<int, CvBlob> item in blobs)
        {
            CvBlob b = item.Value;
            Console.WriteLine("{0} | Centroid:{1} Area:{2}", item.Key, b.Centroid, b.Area);

            CvContourChainCode cc = b.Contour;
            cc.Render(imgContour);

            CvContourPolygon polygon = cc.ConvertToPolygon();
            foreach (CvPoint p in polygon)
            {
                imgPolygon.Circle(p, 1, CvColor.Red, -1);
            }
        }

        blobs.RenderBlobs(imgSrc, imgRender);

        using (new CvWindow("render", imgRender))
        using (new CvWindow("contour", imgContour))
        using (new CvWindow("polygon vertices", imgPolygon))
        {
            Cv.WaitKey(0);
        }
    }
}
/// <summary>
/// Label the connected parts of a binary image. (cvLabel)
/// </summary>
/// <param name="img">Input binary image (depth=IPL_DEPTH_8U and num. channels=1).</param>
/// <param name="blobs">List of blobs.</param>
/// <returns>Number of pixels that has been labeled.</returns>
public static int Label(Mat img, CvBlobs blobs)
{
    if (img == null)
        throw new ArgumentNullException(nameof(img));
    if (blobs == null)
        throw new ArgumentNullException(nameof(blobs));

    return blobs.Label(img);
}
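A minimal usage sketch for the Mat-based Label wrapper above. It assumes the wrapper is exposed on the library's CvBlobLib facade; the file name "shapes.png", the 128 threshold, and the 50-pixel area cutoff are illustrative assumptions, not part of the original API.

// Sketch only: "shapes.png", the 128 threshold, and the 50 px area cutoff are assumed placeholders.
Mat binary = Cv2.ImRead("shapes.png")
                .CvtColor(ColorConversionCodes.BGR2GRAY)
                .Threshold(128, 255, ThresholdTypes.Binary);   // cvLabel expects an 8-bit, 1-channel binary image
CvBlobs blobs = new CvBlobs();
int labeledPixels = CvBlobLib.Label(binary, blobs);            // equivalent to blobs.Label(binary)
blobs.FilterByArea(50, int.MaxValue);                          // discard tiny specks
Console.WriteLine("{0} blobs, {1} labeled pixels", blobs.Count, labeledPixels);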
// Worker-thread processing (capture)
private void worker_DoWork(object sender, DoWorkEventArgs e)
{
    BackgroundWorker bw = (BackgroundWorker)sender;
    Stopwatch sw = new Stopwatch();
    string str;
    id = 0;

    // UDP for sending PID data
    // Local port number to bind
    // FSI_PID_DATA pid_data = new FSI_PID_DATA();
    int localPort = mmFsiUdpPortMT3PV;
    System.Net.Sockets.UdpClient udpc2 = null;
    /*
    try
    {
        udpc2 = new System.Net.Sockets.UdpClient(localPort);
    }
    catch (Exception ex)
    {
        // Display via an anonymous delegate
        this.Invoke(new dlgSetString(ShowRText), new object[] { richTextBox1, ex.ToString() });
    }
    */

    // videoInput object
    const int DeviceID = 0;        // 0; // 3 (pro), 4 (piccolo), 7 (DMK)
    const int CaptureFps = 30;     // 30
    int interval = (int)(1000 / CaptureFps / 10);
    const int CaptureWidth = 640;
    const int CaptureHeight = 480;
    // Number of frames to save
    int mmFsiPostRec = 60;
    int save_counter = mmFsiPostRec;

    using (VideoInput vi = new VideoInput())
    {
        vi.SetIdealFramerate(DeviceID, CaptureFps);
        vi.SetupDevice(DeviceID, CaptureWidth, CaptureHeight);
        int width = vi.GetWidth(DeviceID);
        int height = vi.GetHeight(DeviceID);

        using (IplImage img = new IplImage(width, height, BitDepth.U8, 3))
        using (IplImage img_dark8 = Cv.LoadImage(@"C:\piccolo\MT3V_dark.bmp", LoadMode.GrayScale))
        //using (IplImage img_dark = new IplImage(width, height, BitDepth.U8, 3))
        using (IplImage img_mono = new IplImage(width, height, BitDepth.U8, 1))
        using (IplImage img2 = new IplImage(width, height, BitDepth.U8, 1))
        //using (Bitmap bitmap = new Bitmap(width, height, PixelFormat.Format24bppRgb))
        using (CvFont font = new CvFont(FontFace.HersheyComplex, 0.45, 0.45))
        //using (CvWindow window0 = new CvWindow("FIFO0", WindowMode.AutoSize))
        {
            //this.Size = new Size(width + 12, height + 148);
            double min_val, max_val;
            CvPoint min_loc, max_loc;
            int size = 15;
            int size2x = size / 2;
            int size2y = size / 2;
            int crop = 20;
            double sigma = 3;
            long elapsed0 = 0, elapsed1 = 0;
            double framerate0 = 0, framerate1 = 0;
            double alfa_fr = 0.99;

            sw.Start();
            while (bw.CancellationPending == false)
            {
                if (vi.IsFrameNew(DeviceID))
                {
                    DateTime dn = DateTime.Now; // capture time
                    vi.GetPixels(DeviceID, img.ImageData, false, true);

                    // Draw the timestamp on the frame
                    str = String.Format("Wide ID:{0:D2} ", id) + dn.ToString("yyyyMMdd_HHmmss_fff"); // + String.Format(" ({0,000:F2},{1,000:F2}) ({2,000:0},{3,000:0})({4,0:F1})", gx, gy, max_loc.X, max_loc.Y, max_val);
                    img.PutText(str, new CvPoint(10, 475), font, new CvColor(0, 100, 40));
                    Cv.CvtColor(img, img_mono, ColorConversion.BgrToGray);
                    Cv.Sub(img_mono, img_dark8, imgdata.img); // dark-frame subtraction
                    imgdata.id = ++id;
                    imgdata.t = dn;
                    imgdata.ImgSaveFlag = !(ImgSaveFlag != 0); // int -> bool conversion
                    if (fifo.Count == MaxFrame - 1)
                    {
                        fifo.EraseLast();
                    }
                    fifo.InsertFirst(imgdata);

                    #region Position detection 1 // MinMaxLoc
                    /*
                    // Position detection
                    Cv.Smooth(imgdata.img, img2, SmoothType.Gaussian, size, 0, sigma, 0);
                    CvRect rect;
                    if (PvMode == MyDETECT)
                    {
                        rect = new CvRect((int)(gx + 0.5) - size, (int)(gy + 0.5) - size, size * 2, size * 2);
                        Cv.SetImageROI(img2, rect);
                        Cv.MinMaxLoc(img2, out min_val, out max_val, out min_loc, out max_loc, null);
                        Cv.ResetImageROI(img2);
                        max_loc.X += (int)(gx + 0.5) - size; // +1 because the reference point is (1,1)
                        max_loc.Y += (int)(gy + 0.5) - size;
                    }
                    else
                    {
                        rect = new CvRect(crop, crop, width - (crop + crop), height - (crop + crop));
                        Cv.SetImageROI(img2, rect);
                        Cv.MinMaxLoc(img2, out min_val, out max_val, out min_loc, out max_loc, null);
                        Cv.ResetImageROI(img2);
                        max_loc.X += crop; // +1 because the reference point is (1,1)
                        max_loc.Y += crop;
                    }
                    window0.ShowImage(img2);

                    double m00, m10, m01;
                    size2x = size2y = size / 2;
                    if (max_loc.X - size2x < 0) size2x = max_loc.X;
                    if (max_loc.Y - size2y < 0) size2y = max_loc.Y;
                    if (max_loc.X + size2x >= width) size2x = width - max_loc.X - 1;
                    if (max_loc.Y + size2y >= height) size2y = height - max_loc.Y - 1;
                    rect = new CvRect(max_loc.X - size2x, max_loc.Y - size2y, size, size);
                    CvMoments moments;
                    Cv.SetImageROI(img2, rect);
                    Cv.Moments(img2, out moments, false);
                    Cv.ResetImageROI(img2);
                    m00 = Cv.GetSpatialMoment(moments, 0, 0);
                    m10 = Cv.GetSpatialMoment(moments, 1, 0);
                    m01 = Cv.GetSpatialMoment(moments, 0, 1);
                    gx = max_loc.X - size2x + m10 / m00;
                    gy = max_loc.Y - size2y + m01 / m00;
                    */
                    #endregion

                    #region Position detection 2 // Blob
                    Cv.Threshold(imgdata.img, img2, threshold_blob, 255, ThresholdType.Binary); // 2 ms
                    blobs.Label(img2, imgLabel);                                                // 1.4 ms
                    max_label = blobs.GreaterBlob();
                    elapsed1 = sw.ElapsedTicks;                                                 // 1.3 ms

                    if (blobs.Count > 1 && gx >= 0)
                    {
                        uint min_area = (uint)(threshold_min_area * blobs[max_label].Area);
                        blobs.FilterByArea(min_area, uint.MaxValue); // 0.001 ms

                        // Pick the best blob (large area and small distance from the previous position)
                        double x = blobs[max_label].Centroid.X;
                        double y = blobs[max_label].Centroid.Y;
                        uint area = blobs[max_label].Area;
                        //CvRect rect;
                        distance_min = ((x - gx) * (x - gx) + (y - gy) * (y - gy)); //Math.Sqrt()
                        foreach (var item in blobs)
                        {
                            //Console.WriteLine("{0} | Centroid:{1} Area:{2}", item.Key, item.Value.Centroid, item.Value.Area);
                            x = item.Value.Centroid.X;
                            y = item.Value.Centroid.Y;
                            //rect = item.Value.Rect;
                            distance = ((x - gx) * (x - gx) + (y - gy) * (y - gy)); // Mahalanobis distance in the future
                            if (distance < distance_min)
                            {
                                d_val = (item.Value.Area) / max_area;
                                if (distance <= 25) // close range (5 px)
                                {
                                    if (d_val >= 0.4) //&& d_val <= 1.2)
                                    {
                                        max_label = item.Key;
                                        distance_min = distance;
                                    }
                                }
                                else
                                {
                                    if (d_val >= 0.8 && d_val <= 1.5)
                                    {
                                        max_label = item.Key;
                                        distance_min = distance;
                                    }
                                }
                            }
                            //w.WriteLine("{0} {1} {2} {3} {4}", dis, dv, i, item.Key, item.Value.Area);
                        }
                        //gx = x; gy = y; max_val = area;
                    }

                    if (max_label > 0)
                    {
                        maxBlob = blobs[max_label];
                        max_centroid = maxBlob.Centroid;
                        gx = max_centroid.X;
                        gy = max_centroid.Y;
                        max_area = maxBlob.Area;
                        if (this.States == SAVE)
                        {
                            Pid_Data_Send();
                            timerSavePostTime.Stop();
                            timerSaveMainTime.Stop();
                            timerSaveMainTime.Start();
                        }
                    }
                    else
                    {
                        gx = gy = 0;
                        max_area = 0;
                    }
                    #endregion

                    // On-screen display
                    str = String.Format("ID:{0:D2} ", id) + dn.ToString("yyyyMMdd_HHmmss_fff")
                        + String.Format(" ({0,000:F2},{1,000:F2}) ({2,000:0},{3,000:0})({4,0:F1})", gx, gy, xoa, yoa, max_area);
                    if (imgdata.ImgSaveFlag)
                    {
                        str += " True";
                    }
                    img.PutText(str, new CvPoint(10, 20), font, new CvColor(0, 255, 100));
                    img.Circle(new CvPoint((int)gx, (int)gy), 10, new CvColor(255, 255, 100));
                    bw.ReportProgress(0, img);

                    // Processing speed
                    elapsed0 = sw.ElapsedTicks - elapsed1; // ticks for one frame
                    elapsed1 = sw.ElapsedTicks;
                    framerate0 = alfa_fr * framerate1 + (1 - alfa_fr) * (Stopwatch.Frequency / (double)elapsed0);
                    framerate1 = framerate0;
                    str = String.Format("fr time = {0}({1}){2:F1}", sw.Elapsed, id, framerate0); //," ", sw.ElapsedMilliseconds);
                    // Show the elapsed time on the label via an anonymous delegate
                    this.Invoke(new dlgSetString(ShowText), new object[] { textBox1, str });

                    //img.ToBitmap(bitmap);
                    //pictureBox1.Refresh();
                }
                Application.DoEvents();
                Thread.Sleep(interval);
            }

            this.States = STOP;
            this.Invoke(new dlgSetColor(SetColor), new object[] { ObsStart, this.States });
            this.Invoke(new dlgSetColor(SetColor), new object[] { ObsEndButton, this.States });
            vi.StopDevice(DeviceID);
            //udpc2.Close();
        }
    }
}
/// <summary>
/// Label the connected parts of a binary image. (cvLabel)
/// </summary>
/// <param name="img">Input binary image (depth=IPL_DEPTH_8U and num. channels=1).</param>
/// <param name="blobs">List of blobs.</param>
/// <returns>Number of pixels that has been labeled.</returns>
public static int Label(IplImage img, CvBlobs blobs)
{
    if (img == null)
        throw new ArgumentNullException("img");
    if (blobs == null)
        throw new ArgumentNullException("blobs");

    return blobs.Label(img);
}
/// <summary>
/// Takes blobs information based on colors in <see cref="hsv"/> list and then sends the info through UDP.
/// </summary>
/// <param name="sourceImage">Image in Mat format.</param>
/// <returns>Image in Mat format.</returns>
private Mat Renderer(Mat sourceImage)
{
    Mat dstNoisy = src;
    Mat dstClear = new Mat();
    Mat dst = new Mat();
    Mat element = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(2 * MorphValue + 1, 2 * MorphValue + 1));

    Cv2.Blur(dstNoisy, dstClear, new Size(9, 9));
    Cv2.CvtColor(dstClear, dst, ColorConversionCodes.BGR2HSV); // Convert BGR to HSV.

    Mat dstThreshed = new Mat();
    Mat dstPreview = new Mat();

    if (hsv.Count > 0)
    {
        int blobCount = 1;
        bool theFirst = true;

        foreach (int[] scal in hsv)
        {
            if (theFirst)
            {
                Cv2.InRange(dst, new Scalar(scal[0] - 10, scal[1], scal[3]), new Scalar(scal[0] + 10, scal[2], scal[4]), dstPreview);
                theFirst = false;
            }
            else
            {
                Mat dstPreview2 = new Mat();
                Cv2.InRange(dst, new Scalar(scal[0] - 10, scal[1], scal[3]), new Scalar(scal[0] + 10, scal[2], scal[4]), dstPreview2);
                Cv2.AddWeighted(dstThreshed, 1.0, dstPreview2, 1.0, 0.0, dstPreview);
            }
            Cv2.InRange(dst, new Scalar(scal[0] - 10, scal[1], scal[3]), new Scalar(scal[0] + 10, scal[2], scal[4]), dstThreshed);

            // Morphologic transformation to close the gaps inside the blob.
            Cv2.MorphologyEx(src: dstThreshed, dst: dstThreshed, op: MorphTypes.Close, element: element);

            blobDetection.Label(dstThreshed);
            blobDetection.FilterByArea(MinBlobArea, MaxBlobArea);
            blobDetection.RenderBlobs(dstThreshed, src);

            CircleSegment[] circles = Cv2.HoughCircles(dstThreshed, HoughMethods.Gradient, 1, dstThreshed.Rows / 8);

            // Creates all udp datagrams----------------------------------------------------
            if (blobDetection.Count != 0)
            {
                for (int i = 0; i < blobDetection.Count; i++)
                {
                    int processKey = blobDetection.ElementAt(i).Key;

                    udpDatagram_1 = "[$]tracking|id=" + data_id + "|label=" + blobCount + "|[$$]" + deviceName + ",[$$$]mesh,sample,";
                    for (int j = 0; j < blobDetection[processKey].Contour.ConvertToPolygon().Simplify(1).Count; j++)
                    {
                        if (orientation)
                        {
                            udpDatagram_1 += Math.Round((float)blobDetection[processKey].Contour.ConvertToPolygon().Simplify(1).ElementAt(j).X / dst.Cols, 4).ToString().Replace(',', '.');
                            udpDatagram_1 += "," + (1 - Math.Round((float)blobDetection[processKey].Contour.ConvertToPolygon().Simplify(1).ElementAt(j).Y / dst.Rows, 4)).ToString().Replace(',', '.');
                            udpDatagram_1 += ",";
                        }
                        else
                        {
                            udpDatagram_1 += (1 - Math.Round((float)blobDetection[processKey].Contour.ConvertToPolygon().Simplify(1).ElementAt(j).X / dst.Cols, 4)).ToString().Replace(',', '.');
                            udpDatagram_1 += "," + Math.Round((float)blobDetection[processKey].Contour.ConvertToPolygon().Simplify(1).ElementAt(j).Y / dst.Rows, 4).ToString().Replace(',', '.');
                            udpDatagram_1 += ",";
                        }
                    }
                    udpDatagram_1 += ";";

                    udpDatagram_2 = "[$]tracking|id=" + data_id + "|label=" + blobCount + "|[$$]" + deviceName + ",[$$$]area,";
                    udpDatagram_2 += "value," + blobDetection[processKey].Contour.ConvertToPolygon().Simplify(1).Area().ToString().Replace(',', '.') + ";";

                    udpDatagram_3 = "[$]tracking|id=" + data_id + "|label=" + blobCount + "|[$$]" + deviceName + ",[$$$]place,";
                    if (orientation)
                    {
                        udpDatagram_3 += "position," + (Math.Round(blobDetection[processKey].Centroid.X / dst.Cols, 3)).ToString().Replace(',', '.') + "," + (Math.Round(1 - (blobDetection[processKey].Centroid.Y / dst.Rows), 3)).ToString().Replace(',', '.') + ";";
                    }
                    else
                    {
                        udpDatagram_3 += "position," + (Math.Round(1 - (blobDetection[processKey].Centroid.X / dst.Cols), 3)).ToString().Replace(',', '.') + "," + (Math.Round(blobDetection[processKey].Centroid.Y / dst.Rows, 3)).ToString().Replace(',', '.') + ";";
                    }

                    udpDatagram_4 = "[$]tracking|id=" + data_id + "|label=" + blobCount + "|[$$]" + deviceName + ",[$$$]color,";
                    udpDatagram_4 += "hsv," + scal[0] + "-" + (scal[1] + scal[2]) / 2 + "-" + (scal[3] + scal[4]) / 2 + ";";

                    // Geometry
                    udpDatagram_5 = "[$]tracking|id=" + data_id + "|label=" + blobCount + "|[$$]" + deviceName + ",[$$$]form,geometry,";
                    CvContourPolygon poly = blobDetection[processKey].Contour.ConvertToPolygon();
                    double epsilon = 0.04 * Cv2.ArcLength(poly, true);
                    Point[] counterResult = Cv2.ApproxPolyDP(poly, epsilon, closed: true);
                    int contourSimple_counter = counterResult.Length;
                    string geometry = "";
                    switch (contourSimple_counter)
                    {
                        case 3:
                            geometry = "triangle";
                            break;
                        case 4:
                            Rect rect = Cv2.BoundingRect(poly);
                            float aspectRatio = 0;
                            if (rect.Height != 0)
                            {
                                aspectRatio = (float)rect.Width / rect.Height; // width/height ratio of the bounding box
                            }
                            if (aspectRatio >= 0.95 && aspectRatio <= 1.05)
                            {
                                geometry = "square";
                            }
                            else
                            {
                                geometry = "rectangle";
                            }
                            break;
                        default:
                            geometry = "unidentified" + contourSimple_counter;
                            break;
                    }
                    udpDatagram_5 += geometry + ";";

                    if (BlobLabel)
                    {
                        Cv2.PutText(src, geometry, blobDetection[processKey].Centroid, HersheyFonts.HersheySimplex, 0.5, new Scalar(0, 255, 0), 2);
                        Cv2.PutText(src, "[" + scal[0] + ", " + ((scal[1] + scal[2]) / 2) + ", " + ((scal[3] + scal[4]) / 2) + "]", new Point(blobDetection[processKey].Centroid.X, blobDetection[processKey].Centroid.Y + 20), HersheyFonts.HersheySimplex, 0.45, new Scalar(0, 255, 0), 2);
                    }

                    udpDatagram_6 = "[$]tracking|id=" + data_id + "|label=" + blobCount + "|[$$]" + deviceName + ",[$$$]perimeter,value,";
                    udpDatagram_6 += blobDetection[processKey].Contour.Perimeter().ToString().Replace(',', '.') + ";";

                    // UDP sender---------------------------------------------------------------------
                    try
                    {
                        byte[] sendBytes_1 = Encoding.ASCII.GetBytes(udpDatagram_1);
                        byte[] sendBytes_2 = Encoding.ASCII.GetBytes(udpDatagram_2);
                        byte[] sendBytes_3 = Encoding.ASCII.GetBytes(udpDatagram_3);
                        byte[] sendBytes_4 = Encoding.ASCII.GetBytes(udpDatagram_4);
                        byte[] sendBytes_5 = Encoding.ASCII.GetBytes(udpDatagram_5);
                        byte[] sendBytes_6 = Encoding.ASCII.GetBytes(udpDatagram_6);

                        udpClient.Send(sendBytes_1, sendBytes_1.Length, IP_udp, Port_udp);
                        udpClient.Send(sendBytes_2, sendBytes_2.Length, IP_udp, Port_udp);
                        udpClient.Send(sendBytes_3, sendBytes_3.Length, IP_udp, Port_udp);
                        udpClient.Send(sendBytes_4, sendBytes_4.Length, IP_udp, Port_udp);
                        udpClient.Send(sendBytes_5, sendBytes_5.Length, IP_udp, Port_udp);
                        udpClient.Send(sendBytes_6, sendBytes_6.Length, IP_udp, Port_udp);
                    }
                    catch (Exception e)
                    {
                        Console.WriteLine(e.ToString());
                    }

                    udpDatagram_1 = "";
                    udpDatagram_2 = "";
                    udpDatagram_3 = "";
                    udpDatagram_4 = "";
                    blobCount++;
                }
            }
        }
        blobCount = 1;
    }

    // Same morphologic transformation but this time for the output image.
    Cv2.MorphologyEx(src: dstPreview, dst: dstPreview, op: MorphTypes.Close, element: element);

    return dstPreview;
}
/// <summary>
/// Keep only the region with the largest area, using blob labeling
/// </summary>
/// <param name="imgSrc">Input binary image</param>
/// <param name="imgDst">Output image containing only the largest blob</param>
private void FilterByMaximalBlob(IplImage imgSrc, IplImage imgDst)
{
    CvBlobs blobs = new CvBlobs();
    imgDst.Zero();
    blobs.Label(imgSrc);

    CvBlob max = blobs.GreaterBlob();
    if (max == null)
        return;

    blobs.FilterByArea(max.Area, max.Area);
    blobs.FilterLabels(imgDst);
}
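For context, a minimal sketch of how the helper above might be called from within the same class. The input file name, output file name, and the 128 threshold are placeholder assumptions for illustration only.

// Sketch only: "blobs.png", "largest_blob.png", and the 128 threshold are assumed placeholders.
using (IplImage src = new IplImage("blobs.png", LoadMode.GrayScale))
using (IplImage bin = new IplImage(src.Size, BitDepth.U8, 1))
using (IplImage largest = new IplImage(src.Size, BitDepth.U8, 1))
{
    Cv.Threshold(src, bin, 128, 255, ThresholdType.Binary); // cvLabel needs an 8-bit, single-channel binary image
    FilterByMaximalBlob(bin, largest);                       // largest now holds only the biggest connected component
    Cv.SaveImage("largest_blob.png", largest);
}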
private void OnFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    double utcTime = (DateTime.UtcNow - new DateTime(1970, 1, 1)).TotalSeconds;
    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
    {
        using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
        {
            if (colorFrame != null)
            {
                colorFrame.CopyConvertedFrameDataToArray(this.colorArray, ColorImageFormat.Bgra);
                // System.Buffer.BlockCopy(this.colorArray, 0, this.byteColorArray, 0, (this.kinect.ColorFrameSource.FrameDescription.Height * this.kinect.ColorFrameSource.FrameDescription.Width * BYTES_PER_COLOR_PIXEL));
                // System.Buffer.BlockCopy(BitConverter.GetBytes(utcTime), 0, this.byteColorArray, (this.kinect.ColorFrameSource.FrameDescription.Height * this.kinect.ColorFrameSource.FrameDescription.Width * BYTES_PER_COLOR_PIXEL), sizeof(double));
                // this.colorConnector.Broadcast(this.byteColorArray);

                FrameDescription colorFrameDescription = colorFrame.FrameDescription;
                using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                {
                    this.colorBitmap.Lock();

                    // verify data and write the new color frame data to the display bitmap
                    if ((colorFrameDescription.Width == this.colorBitmap.PixelWidth) && (colorFrameDescription.Height == this.colorBitmap.PixelHeight))
                    {
                        colorFrame.CopyConvertedFrameDataToIntPtr(
                            this.colorBitmap.BackBuffer,
                            (uint)(colorFrameDescription.Width * colorFrameDescription.Height * 4),
                            ColorImageFormat.Bgra);
                        this.colorBitmap.AddDirtyRect(new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight));
                    }

                    this.colorBitmap.Unlock();
                }

                IplImage imgSrc = this.colorBitmap.ToIplImage();                 // Source, BGR image.
                IplImage imgGray = new IplImage(imgSrc.Size, BitDepth.U8, 1);    // Binary image that has the blobs.
                IplImage imghsv = new IplImage(imgSrc.Size, BitDepth.U8, 3);     // HSV image for thresholding.

                Cv.CvtColor(imgSrc, imghsv, ColorConversion.BgrToHsv);
                Cv.InRange(imghsv, lower, upper, imgGray);

                CvBlobs blobs = new CvBlobs();
                blobs.Label(imgGray);

                int min_area = 1500;
                foreach (KeyValuePair<int, CvBlob> item in blobs)
                {
                    int label = item.Key;
                    CvBlob blob = item.Value;
                    blob.CalcCentroid();
                    int val = blob.Area;
                    if (val > min_area)
                    {
                        min_area = val;
                        float x = (float)blob.Centroid.X;
                        float y = (float)blob.Centroid.Y;
                        target_x = (int)x;
                        target_y = (int)y;
                    }
                    // Debug.Print( "Coordinates" + (blob.Centroid.ToString()) + "Area" + val.ToString());
                }

                /*
                IplImage render = new IplImage(imgSrc.Size, BitDepth.U8, 3);
                blobs.RenderBlobs(imgSrc, render);

                using (new CvWindow("Orange Blob Detection", WindowMode.AutoSize, render))
                {
                    CvWindow.WaitKey(0);
                }
                */
            }

            if (depthFrame != null)
            {
                // Debug.Print("HELLO");
                depthFrame.CopyFrameDataToArray(this.depthArray);
                System.Buffer.BlockCopy(this.depthArray, 0, this.byteDepthArray, 0, this.kinect.DepthFrameSource.FrameDescription.Height * this.kinect.DepthFrameSource.FrameDescription.Width * BYTES_PER_DEPTH_PIXEL);
                System.Buffer.BlockCopy(BitConverter.GetBytes(utcTime), 0, this.byteDepthArray, this.kinect.DepthFrameSource.FrameDescription.Height * this.kinect.DepthFrameSource.FrameDescription.Width * BYTES_PER_DEPTH_PIXEL, sizeof(double));
                // Console.WriteLine("depth " + this.byteDepthArray.Length);

                calculateScanFromDepth(this.depthArray);
                System.Buffer.BlockCopy(this.scan2DArray, 0, this.byteScan2DArray, 0, 6 * this.kinect.DepthFrameSource.FrameDescription.Width * 4 + 12);
                // System.Buffer.BlockCopy(BitConverter.GetBytes(utcTime), 0, this.byteScan2DArray, this.kinect.DepthFrameSource.FrameDescription.Height * this.kinect.DepthFrameSource.FrameDescription.Width * 4, sizeof(double));

                this.scan2DConnector.Broadcast(this.byteScan2DArray);
                this.depthConnector.Broadcast(this.byteDepthArray);
            }
        }
    }

    /*
    using (InfraredFrame irFrame = multiSourceFrame.InfraredFrameReference.AcquireFrame())
    {
        if (irFrame != null)
        {
            irFrame.CopyFrameDataToArray(this.irArray);
            System.Buffer.BlockCopy(this.irArray, 0, this.byteIRArray, 0, this.kinect.InfraredFrameSource.FrameDescription.Height * this.kinect.InfraredFrameSource.FrameDescription.Width * BYTES_PER_IR_PIXEL);
            System.Buffer.BlockCopy(BitConverter.GetBytes(utcTime), 0, this.byteIRArray, this.kinect.InfraredFrameSource.FrameDescription.Height * this.kinect.InfraredFrameSource.FrameDescription.Width * BYTES_PER_IR_PIXEL, sizeof(double));
            this.irConnector.Broadcast(this.byteIRArray);
        }
    }

    using (BodyFrame bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            bodyFrame.GetAndRefreshBodyData(this.bodyArray);
            string jsonString = JsonConvert.SerializeObject(this.bodyArray);
            int diff = 28000 - jsonString.Length;
            for (int i = 0; i < diff; i++)
            {
                jsonString += " ";
            }
            byte[] bodyByteArray = new byte[jsonString.Length * sizeof(char) + sizeof(double)];
            System.Buffer.BlockCopy(jsonString.ToCharArray(), 0, bodyByteArray, 0, jsonString.Length * sizeof(char));
            System.Buffer.BlockCopy(BitConverter.GetBytes(utcTime), 0, bodyByteArray, jsonString.Length * sizeof(char), sizeof(double));
            this.bodyConnector.Broadcast(bodyByteArray);
        }
    }
    */
}
private static void Main(string[] args)
{
    //new ConvexityDefect();

    using (IplImage imgSrc = new IplImage(@"img\shapes.png", LoadMode.Color))
    using (IplImage imgBinary = new IplImage(imgSrc.Size, BitDepth.U8, 1))
    using (IplImage imgRender = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    using (IplImage imgContour = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    using (IplImage imgPolygon = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    {
        Cv.CvtColor(imgSrc, imgBinary, ColorConversion.BgrToGray);
        Cv.Threshold(imgBinary, imgBinary, 100, 255, ThresholdType.Binary);

        CvBlobs blobs = new CvBlobs();
        blobs.Label(imgBinary);

        foreach (KeyValuePair<int, CvBlob> item in blobs)
        {
            CvBlob b = item.Value;
            Console.WriteLine("{0} | Centroid:{1} Area:{2}", item.Key, b.Centroid, b.Area);

            CvContourChainCode cc = b.Contour;
            cc.Render(imgContour);
            double perimeter = cc.Perimeter();

            CvContourPolygon polygon = cc.ConvertToPolygon();
            foreach (CvPoint p in polygon)
            {
                //imgPolygon.Rectangle(p.X, p.Y, p.X + 1, p.Y + 1, CvColor.Red);
            }

            CvContourPolygon convexHull = polygon.ContourConvexHull();
            //imgPolygon.Zero();
            convexHull.Render(imgPolygon);
        }

        blobs.RenderBlobs(imgSrc, imgRender);

        using (new CvWindow("src", imgSrc))
        using (new CvWindow("binary", imgBinary))
        using (new CvWindow("render", imgRender))
        using (new CvWindow("contour", imgContour))
        using (new CvWindow("polygon vertices", imgPolygon))
        {
            Cv.WaitKey(0);
        }
    }
}