/// <summary>
/// Binarizes <paramref name="src"/> (threshold 50), labels its blobs and draws
/// every blob contour: the chain code is rendered directly, exterior polygon
/// vertices in blue and interior (hole) polygon vertices in red.
/// </summary>
/// <param name="src">Source image; binarized into the <c>bin</c> field.</param>
/// <returns>The 3-channel <c>blobcontour</c> field holding the rendered contours.</returns>
public IplImage BlobContourImage(IplImage src)
{
    blobcontour = new IplImage(src.Size, BitDepth.U8, 3);
    bin = this.Binary(src, 50);

    CvBlobs blobs = new CvBlobs();
    blobs.Label(bin);

    foreach (KeyValuePair<int, CvBlob> entry in blobs)
    {
        CvBlob current = entry.Value;
        CvContourChainCode chain = current.Contour;
        chain.Render(blobcontour);

        // Exterior contour vertices in blue.
        foreach (CvPoint vertex in chain.ConvertToPolygon())
        {
            Cv.DrawCircle(blobcontour, vertex, 1, CvColor.Blue, -1);
        }

        // Interior (hole) contour vertices in red.
        foreach (var inner in current.InternalContours)
        {
            foreach (CvPoint vertex in inner.ConvertToPolygon())
            {
                Cv.DrawCircle(blobcontour, vertex, 1, CvColor.Red, -1);
            }
        }
    }

    return blobcontour;
}
// 輪郭抽出して中心座標取得 (extract contours and get the centre coordinate)
/// <summary>
/// Labels the blobs in <paramref name="grayImage"/>, takes the largest one and
/// returns its centroid; the labelled blobs are rendered into
/// <c>CalibrationImage</c> for inspection.
/// </summary>
/// <param name="grayImage">Binary/grayscale input image.</param>
/// <returns>Centroid of the largest blob, or (0, 0) when no blob is found.</returns>
Point GetCenterPointofLED(Mat grayImage)
{
    OpenCvSharp.CPlusPlus.Point centerPoint = new OpenCvSharp.CPlusPlus.Point();
    // NOTE(review): grayIpl and calibIpl are never disposed — potential native
    // memory leak; verify whether new Mat(calibIpl) shares the buffer before
    // adding disposal.
    IplImage grayIpl = grayImage.ToIplImage().Clone();
    IplImage calibIpl = new IplImage(grayIpl.Size, BitDepth.U8, 3);

    // 中心の検出: label connected components and keep the largest one.
    CvBlobs blobs = new CvBlobs();
    blobs.Label(grayIpl);
    //blobs.FilterByArea(20, 1500);
    CvBlob blob = blobs.LargestBlob();
    try
    {
        if (blob != null)
        {
            centerPoint = new Point(blob.Centroid.X, blob.Centroid.Y);
            blobs.RenderBlobs(grayIpl, calibIpl);
        }
    }
    catch
    {
        // Rendering is diagnostic only; log and fall through with the
        // centroid we already have. (Fix: message was the garbled "eroor:counter".)
        Console.WriteLine("error: contour");
    }
    this.CalibrationImage = new Mat(calibIpl);
    Console.WriteLine(centerPoint);
    return centerPoint;
}
/// <summary>
/// Per-frame pipeline: smooth the camera frame, extract the foreground mask
/// with the background subtractor, detect blobs on the mask, drop blobs
/// smaller than 100 px and draw the survivors' bounding boxes onto the frame.
/// </summary>
void ProcessFrame(object sender, EventArgs e)
{
    Mat frame = _cameraCapture.QueryFrame();

    // Gaussian blur to suppress pixel noise before background subtraction.
    Image<Bgr, Byte> smoothedFrame = new Image<Bgr, byte>(frame.Size);
    CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(3, 3), 1);

    // Foreground mask from the BG/FG detector.
    Mat forgroundMask = new Mat();
    _fgDetector.Apply(smoothedFrame, forgroundMask);

    // Blob detection on the mask; keep only blobs of at least 100 px.
    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(forgroundMask.ToImage<Gray, byte>(), blobs);
    blobs.FilterByArea(100, int.MaxValue);

    foreach (CvBlob detected in blobs.Values)
    {
        CvInvoke.Rectangle(frame, detected.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
    }

    imageBox1.Image = frame;
    imageBox2.Image = forgroundMask;
}
/// <summary>
/// Locates the body in <paramref name="frame"/>: grayscale, median filter,
/// binary threshold, background removal, then the centroid of the largest
/// remaining blob.
/// </summary>
/// <param name="frame">Colour input frame.</param>
/// <param name="centroid">
/// Centroid of the largest blob; <c>PointF.Empty</c> when no blob is found.
/// </param>
public void GetBody(Image<Bgr, Byte> frame, out PointF centroid)
{
    using (Image<Gray, Byte> origGray = frame.Convert<Gray, Byte>())
    using (Image<Gray, Byte> filteredImage = origGray.SmoothMedian(13))
    using (Image<Gray, Byte> binary = filteredImage.ThresholdBinary(new Gray(ThresholdValue), new Gray(255)))
    using (Image<Gray, Byte> backgroundNot = BinaryBackground.Not())
    using (Image<Gray, Byte> finalImage = binary.Add(backgroundNot))
    using (Image<Gray, Byte> subbed = finalImage.Not())
    {
        centroid = PointF.Empty;

        CvBlobs blobs = new CvBlobs();
        BlobDetector.Detect(subbed, blobs);

        // Pick the blob with the largest area.
        CvBlob largest = null;
        double largestArea = -1;
        foreach (CvBlob candidate in blobs.Values)
        {
            if (candidate.Area > largestArea)
            {
                largestArea = candidate.Area;
                largest = candidate;
            }
        }

        if (largest != null)
        {
            centroid = largest.Centroid;
        }
    }
}
/// <summary>
/// Thresholds <paramref name="filteredImage"/>, removes the static background
/// and locates the body as the largest remaining blob.
/// </summary>
/// <param name="filteredImage">Pre-filtered grayscale frame.</param>
/// <param name="waistLength">Not computed here; always -1.</param>
/// <param name="waistVolume">Not computed here; always -1.</param>
/// <param name="waistVolume2">Not computed here; always -1.</param>
/// <param name="waistVolume3">Not computed here; always -1.</param>
/// <param name="waistVolume4">Not computed here; always -1.</param>
/// <param name="centroid">
/// Centroid of the largest blob; <c>PointF.Empty</c> when no blob is found
/// (fix: previously dereferenced a null blob and threw NullReferenceException).
/// </param>
public void FindBody(Image<Gray, Byte> filteredImage, out double waistLength, out double waistVolume, out double waistVolume2, out double waistVolume3, out double waistVolume4, out PointF centroid)
{
    using (Image<Gray, Byte> binary = filteredImage.ThresholdBinary(new Gray(ThresholdValue), new Gray(255)))
    using (Image<Gray, Byte> backgroundNot = BinaryBackground.Not())
    using (Image<Gray, Byte> finalImage = binary.Add(backgroundNot))
    using (Image<Gray, Byte> subbed = finalImage.Not())
    {
        CvBlobs blobs = new CvBlobs();
        BlobDetector.Detect(subbed, blobs);

        // Find the largest blob.
        CvBlob mouseBlob = null;
        double maxArea = -1;
        foreach (var blob in blobs.Values)
        {
            if (blob.Area > maxArea)
            {
                mouseBlob = blob;
                maxArea = blob.Area;
            }
        }

        double gapDistance = 50;
        RBSK.Settings.GapDistance = gapDistance;

        // Fix: guard against an empty blob set — the original dereferenced
        // mouseBlob unconditionally.
        centroid = mouseBlob != null ? mouseBlob.Centroid : PointF.Empty;

        waistLength = -1;
        waistVolume = -1;
        waistVolume2 = -1;
        waistVolume3 = -1;
        waistVolume4 = -1;
    }
}
/// <summary>
/// Idle-loop frame handler: grabs a camera frame, throttles to the source
/// frame rate, runs an in-range colour segmentation (BGR bounds), detects
/// blobs on the mask and draws their bounding boxes into a copy of the frame.
/// </summary>
private void Application_Idle(object sender, EventArgs e)
{
    Mat a = cap.QueryFrame();
    if (a != null)
    {
        // Rough throttle to the source frame rate.
        System.Threading.Thread.Sleep((int)(1000.0 / fps - 5));
        imageBox1.Image = a;
        GC.Collect(); // NOTE(review): forcing a GC every frame is expensive; consider removing.
    }

    // 讀取彩色影像 — read the colour image back from the image box.
    Image<Bgr, byte> image01 = new Image<Bgr, byte>(imageBox1.Image.Bitmap);
    imageBox1.Image = image01;
    var image2 = image01.InRange(new Bgr(10, 1, 13), new Bgr(76, 144, 240));
    var image2_not = image2.Not();
    using (CvBlobs blobs = new CvBlobs())
    {
        blobDetector.Detect(image2, blobs);
        var image3 = image01.Copy();
        foreach (var pair in blobs)
        {
            CvBlob b = pair.Value;
            // Fix: colour was (255.255, 255, 0) — a stray decimal point where a
            // comma was intended; (255, 255, 0) restores the intended channel values.
            CvInvoke.Rectangle(image3, b.BoundingBox, new MCvScalar(255, 255, 0), 5);
        }
        imageBox1.Image = image01;
        imageBox2.Image = image2;
        imageBox3.Image = image3;
        imageBox4.Image = image2_not;
    }
}
/// <summary>
/// Tracks the largest colour blob from camera 0: each frame is converted to
/// HSV, masked against the configured range, cleaned with dilate/erode
/// morphology, labelled, and a green cross is drawn at the largest blob's
/// centroid. Runs until any key is pressed.
/// </summary>
static void Main(string[] args)
{
    CvScalar hsv_min = Cv.RGB(150, 70, 70);   // lower bound of the HSV region to extract
    CvScalar hsv_max = Cv.RGB(360, 255, 255); // upper bound of the HSV region to extract

    var cap = Cv.CreateCameraCapture(0);                               // camera capture
    IplImage im = new IplImage();                                      // holds the current frame
    IplImage hsv = Cv.CreateImage(new CvSize(640, 480), BitDepth.U8, 3);  // HSV image
    IplImage mask = Cv.CreateImage(new CvSize(640, 480), BitDepth.U8, 1); // mask image

    // Show camera frames until any key is pressed.
    while (Cv.WaitKey(1) == -1)
    {
        im = Cv.QueryFrame(cap);
        Cv.CvtColor(im, hsv, ColorConversion.BgrToHsv);
        Cv.InRangeS(hsv, hsv_min, hsv_max, mask);

        // Dilate/erode then erode/dilate to remove speckle noise.
        Cv.Dilate(mask, mask, null, 1);
        Cv.Erode(mask, mask, null, 1);
        Cv.Erode(mask, mask, null, 1);
        Cv.Dilate(mask, mask, null, 1);

        // Guard: labelling fails when the mask has no white pixels at all,
        // so stamp a tiny white ellipse in the corner.
        Cv.Ellipse(mask, new CvPoint(0, 0), new CvSize(1, 1), 0, 0, 360, CvColor.White, -1);

        CvBlobs blobs = new CvBlobs(mask);
        CvBlob maxBlob = blobs.LargestBlob();
        CvPoint pt = maxBlob.Centroid;

        // Draw a cross centred on the largest blob.
        Cv.Line(im, new CvPoint(pt.X, pt.Y - 50), new CvPoint(pt.X, pt.Y + 50), new CvColor(0, 255, 0), 5);
        Cv.Line(im, new CvPoint(pt.X - 50, pt.Y), new CvPoint(pt.X + 50, pt.Y), new CvColor(0, 255, 0), 5);

        Cv.ShowImage("Frame", im);
        Cv.ShowImage("Mask", mask);
    }
}
/// <summary>
/// Isolates pixels with intensity 100..120 inside <paramref name="rcGame"/>
/// (everything else is zeroed in place), detects blobs of at least 100 px on
/// the result and returns the bounding box with the largest area, or
/// <c>Rectangle.Empty</c> when none is found.
/// </summary>
private Rectangle GetBlueGameBoxRegion(Rectangle rcGame, Image<Gray, Byte> imgGray)
{
    byte[,,] pData = imgGray.Data;
    int nWid = imgGray.Width;
    int nHei = imgGray.Height;

    // Binarize in place: 255 for the target intensity band inside the game
    // rectangle, 0 everywhere else.
    for (int y = 0; y < nHei; y++)
    {
        for (int x = 0; x < nWid; x++)
        {
            if (!rcGame.Contains(x, y))
            {
                pData[y, x, 0] = 0;
                continue;
            }
            byte c = pData[y, x, 0];
            pData[y, x, 0] = (byte)((c >= 100 && c <= 120) ? 255 : 0);
        }
    }

    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(imgGray, blobs);
    blobs.FilterByArea(100, int.MaxValue);
    if (blobs.Count < 1)
    {
        return Rectangle.Empty;
    }

    // Keep the bounding box covering the largest area.
    Rectangle best = Rectangle.Empty;
    int bestSize = 0;
    foreach (CvBlob candidate in blobs.Values)
    {
        int size = candidate.BoundingBox.Width * candidate.BoundingBox.Height;
        if (size > bestSize)
        {
            best = candidate.BoundingBox;
            bestSize = size;
        }
    }
    return best;
}
/// <summary>
/// 最適blobの選定 — selects the best blob using area and the distance from the
/// previously tracked position, then updates the tracking state
/// (<c>pos_pre</c>, <c>distance_pre</c>).
/// </summary>
/// <param name="blobs">Labelled blobs of the current frame.</param>
/// <returns>The label of the selected blob, or 0 when there are no blobs.</returns>
public int mesure(CvBlobs blobs)
{
    if (blobs.Count == 0)
    {
        return 0;
    }
    // Fix: the original repeated this Count==0 check (dead code) and called
    // GreaterBlob() — the obsolete alias of LargestBlob() — for the label it
    // already had; both redundancies removed.

    // Default answer: the largest blob.
    CvBlob maxBlob = blobs.LargestBlob();
    int max_label = maxBlob.Label;
    CvPoint2D64f pos_ans = maxBlob.Centroid;

    distance0 = Cal_distance_const(distance_pre);

    if (blobs.Count > 1)
    {
        // Score every blob (area + distance from the previous position) and
        // keep the best-scoring one.
        double eval, eval_max = 0;
        foreach (var item in blobs)
        {
            eval = position_mesure.Cal_Evaluate(item.Value.Centroid, item.Value.Area, pos_pre, distance0);
            if (eval > eval_max)
            {
                eval_max = eval;
                max_label = item.Key;
                pos_ans = item.Value.Centroid;
                Console.WriteLine("{0} | Centroid:{1} Area:{2} eval:{3}", item.Key, item.Value.Centroid, item.Value.Area, eval);
                //w.WriteLine("{0} {1} {2} {3} {4}", dis, dv, i, item.Key, item.Value.Area);
            }
            //sw.Stop();
            t5 = 1000.0 * sw.ElapsedTicks / Stopwatch.Frequency;
            sw.Reset();
            sw.Start();
            Console.WriteLine(" pos_ans:{0}", pos_ans);
        }
    }

    // Smooth the tracked distance: exponential moving average while it is
    // shrinking, hard reset when the blob moved further away.
    double dis = Cal_distance(pos_ans, pos_pre);
    if (distance_pre > dis)
    {
        distance_pre = (1 - dist_alpha) * distance_pre + dist_alpha * dis;
    }
    else
    {
        distance_pre = dis;
    }
    pos_pre = pos_ans;
    return max_label;
}
// Returns (0,0,0,0) when there is no object in the image; otherwise returns
// the bounding box of the largest object.
/// <summary>
/// Detects blobs in <paramref name="img"/> and returns the bounding box of the
/// largest one, provided its area is at least 100 px.
/// </summary>
public Rectangle getBoundingBox(Image<Gray, Byte> img)
{
    CvBlobs blobs = GetBlobs(img);
    if (blobs.Count > 0)
    {
        CvBlob blob = GetLargest(blobs);
        // Ignore specks below the minimum area.
        if (blob.Area >= 100)
        {
            return blob.BoundingBox;
        }
    }
    return new Rectangle(0, 0, 0, 0);
}
/// <summary>
/// Keeps only the largest blob of <paramref name="imgSrc"/> and renders its
/// label mask into <paramref name="imgDst"/> (cleared first). No-op when the
/// source contains no blobs.
/// </summary>
/// <param name="imgSrc">Binary source image to label.</param>
/// <param name="imgDst">Destination image receiving the largest blob's mask.</param>
private void FilterByMaximumBlob(IplImage imgSrc, IplImage imgDst)
{
    // Fix: the XML doc named the second parameter "imgRender" although the
    // actual parameter is imgDst.
    CvBlobs blobs = new CvBlobs();
    imgDst.Zero();
    blobs.Label(imgSrc);
    CvBlob max = blobs.GreaterBlob();
    if (max == null)
    {
        return;
    }
    // Restrict the blob set to exactly the maximum area, then rasterize it.
    blobs.FilterByArea(max.Area, max.Area);
    blobs.FilterLabels(imgDst);
}
/// <summary>
/// Returns the blob with the largest area, or null when the collection is empty.
/// </summary>
private static CvBlob GetLargest(CvBlobs blobs)
{
    CvBlob best = null;
    int bestArea = 0;
    foreach (CvBlob candidate in blobs.Values)
    {
        if (candidate.Area > bestArea)
        {
            bestArea = candidate.Area;
            best = candidate;
        }
    }
    return best;
}
// Update is called once per frame.
// Grabs a camera frame, binarizes it, labels blobs, renders each blob's
// contour chain code and polygon vertices into separate images, then copies
// the rendered labelling into the Unity texture pixel by pixel.
void Update()
{
    IplImage frame = Cv.QueryFrame(capture);

    imgBinary = new IplImage(frame.Size, BitDepth.U8, 1);
    imgLabel = new IplImage(frame.Size, BitDepth.F32, 1);  // per-pixel float label image
    imgRender = new IplImage(frame.Size, BitDepth.U8, 3);
    imgContour = new IplImage(frame.Size, BitDepth.U8, 3);
    imgPolygon = new IplImage(frame.Size, BitDepth.U8, 3);
    Color[] cols = new Color[texture.width * texture.height];

    // Binarize: grayscale then fixed threshold at 100.
    Cv.CvtColor(frame, imgBinary, ColorConversion.BgrToGray);
    Cv.Threshold(imgBinary, imgBinary, 100, 255, ThresholdType.Binary);

    CvBlobs blobs = new CvBlobs();
    uint result = blobs.Label(imgBinary, imgLabel);

    foreach (KeyValuePair<uint, CvBlob> item in blobs)
    {
        CvBlob b = item.Value;
        //Console.WriteLine ("{0} | Centroid:{1} Area:{2}", item.Key, b.Centroid, b.Area);

        // Render the raw contour chain code, then its polygon vertices in red.
        CvContourChainCode cc = b.Contour;
        cc.RenderContourChainCode(imgContour);
        CvContourPolygon polygon = cc.ConvertChainCodesToPolygon();
        foreach (CvPoint p in polygon)
        {
            imgPolygon.Circle(p, 1, CvColor.Red, -1);
        }
    }
    blobs.RenderBlobs(imgLabel, frame, imgRender);

    // Copy the rendered image into the texture colour array.
    // NOTE(review): rows are copied top-to-bottom with no vertical flip —
    // confirm the texture orientation is as intended.
    for (int y = 0; y < texture.height; y++)
    {
        for (int x = 0; x < texture.width; x++)
        {
            CvColor col = imgRender.Get2D(y, x);
            cols[y * texture.width + x] = new Color(col.R / 255.0f, col.G / 255.0f, col.B / 255.0f, 1.0f);
        }
    }
    // int t2 = System.Environment.TickCount;
    texture.SetPixels(cols);
    //int t3 = System.Environment.TickCount;
    //Debug.Log("t2-t1=" + (t2 - t1) + " t3-t2=" + (t3 - t2));
    texture.Apply();
}
/// <summary>
/// Demo: loads the shapes sample image, labels its blobs, prints every blob's
/// centroid and area, renders contours and polygon vertices, and shows the
/// three result images until a key is pressed.
/// </summary>
public Blob()
{
    using (var imgSrc = new IplImage(FilePath.Image.Shapes, LoadMode.Color))
    using (var imgBinary = new IplImage(imgSrc.Size, BitDepth.U8, 1))
    using (var imgRender = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    using (var imgContour = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    using (var imgPolygon = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    {
        // Binary input for labelling: grayscale + fixed threshold at 100.
        Cv.CvtColor(imgSrc, imgBinary, ColorConversion.BgrToGray);
        Cv.Threshold(imgBinary, imgBinary, 100, 255, ThresholdType.Binary);

        CvBlobs blobs = new CvBlobs();
        blobs.Label(imgBinary);

        foreach (KeyValuePair<int, CvBlob> item in blobs)
        {
            CvBlob b = item.Value;
            Console.WriteLine("{0} | Centroid:{1} Area:{2}", item.Key, b.Centroid, b.Area);

            // Contour as chain code, then its polygon vertices in red.
            CvContourChainCode cc = b.Contour;
            cc.Render(imgContour);
            CvContourPolygon polygon = cc.ConvertToPolygon();
            foreach (CvPoint p in polygon)
            {
                imgPolygon.Circle(p, 1, CvColor.Red, -1);
            }

            /*
             * CvPoint2D32f circleCenter;
             * float circleRadius;
             * GetEnclosingCircle(polygon, out circleCenter, out circleRadius);
             * imgPolygon.Circle(circleCenter, (int) circleRadius, CvColor.Green, 2);
             */
        }

        blobs.RenderBlobs(imgSrc, imgRender);

        using (new CvWindow("render", imgRender))
        using (new CvWindow("contour", imgContour))
        using (new CvWindow("polygon vertices", imgPolygon))
        {
            Cv.WaitKey(0);
        }
    }
}
/// <summary>
/// Returns a mask containing only the largest blob of <paramref name="target"/>.
/// When the image contains no blobs, the original image is returned unchanged.
/// </summary>
/// <param name="target">Binary input image to label.</param>
/// <returns>Single-channel mask of the largest blob, or <paramref name="target"/>.</returns>
public static IplImage test(IplImage target)
{
    // Fix: the original also allocated a label image ("lableImg") that was
    // never used and never disposed — a native memory leak. Removed.
    CvBlobs blobs = new CvBlobs();
    IplImage retImg = new IplImage(target.Size, BitDepth.U8, 1);
    blobs.Label(target);
    CvBlob max = blobs.GreaterBlob();
    if (max == null)
    {
        return target;
    }
    // Keep only the maximum-area blob and rasterize it into the result.
    blobs.FilterByArea(max.Area, max.Area);
    blobs.FilterLabels(retImg);
    return retImg;
}
/// <summary>
/// Builds a mask of near-black pixels (intensity &lt;= 5) in place, detects
/// blobs of at least 100 px and returns the bounding box of the first one
/// found, or <c>Rectangle.Empty</c> when there is none.
/// </summary>
private Rectangle GetBlackGameBoxRegion(Image<Gray, Byte> imgGray)
{
    int nWid = imgGray.Width;
    int nHei = imgGray.Height;
    byte[,,] pData = imgGray.Data;

    // Inverse threshold in place: near-black -> 255, everything else -> 0.
    for (int y = 0; y < nHei; y++)
    {
        for (int x = 0; x < nWid; x++)
        {
            pData[y, x, 0] = (byte)(pData[y, x, 0] > 5 ? 0 : 255);
        }
    }

    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(imgGray, blobs);
    blobs.FilterByArea(100, int.MaxValue);
    if (blobs.Count < 1)
    {
        return Rectangle.Empty;
    }

    // The first surviving blob is taken as the game box.
    foreach (CvBlob candidate in blobs.Values)
    {
        return candidate.BoundingBox;
    }
    return Rectangle.Empty;
}
/// <summary>
/// Binarizes <paramref name="src"/> (threshold 50), labels its blobs, renders
/// them over the source image and stamps each blob's numeric label in red at
/// its centroid.
/// </summary>
/// <param name="src">Source image; binarized into the <c>bin</c> field.</param>
/// <returns>The 3-channel <c>blob</c> field holding the rendered result.</returns>
public IplImage BlobImage(IplImage src)
{
    blob = new IplImage(src.Size, BitDepth.U8, 3);
    bin = this.Binary(src, 50);

    CvBlobs blobs = new CvBlobs();
    blobs.Label(bin);
    blobs.RenderBlobs(src, blob);

    // Annotate every blob with its label number at the centroid.
    foreach (KeyValuePair<int, CvBlob> entry in blobs)
    {
        CvBlob current = entry.Value;
        Cv.PutText(blob, Convert.ToString(current.Label), current.Centroid,
                   new CvFont(FontFace.HersheyComplex, 1, 1), CvColor.Red);
    }
    return blob;
}
/// <summary>
/// Demo constructor: loads the shapes sample image, labels its blobs, prints
/// each blob's centroid and area, renders contours and polygon vertices, and
/// displays the results until a key is pressed.
/// </summary>
public Blob()
{
    using (IplImage imgSrc = new IplImage(Const.ImageShapes, LoadMode.Color))
    using (IplImage imgBinary = new IplImage(imgSrc.Size, BitDepth.U8, 1))
    using (IplImage imgLabel = new IplImage(imgSrc.Size, BitDepth.F32, 1))  // NOTE(review): imgLabel is allocated but never used here
    using (IplImage imgRender = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    using (IplImage imgContour = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    using (IplImage imgPolygon = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    {
        // Binary input for labelling: grayscale + fixed threshold at 100.
        Cv.CvtColor(imgSrc, imgBinary, ColorConversion.BgrToGray);
        Cv.Threshold(imgBinary, imgBinary, 100, 255, ThresholdType.Binary);

        CvBlobs blobs = new CvBlobs();
        blobs.Label(imgBinary);

        foreach (KeyValuePair<int, CvBlob> item in blobs)
        {
            CvBlob b = item.Value;
            Console.WriteLine("{0} | Centroid:{1} Area:{2}", item.Key, b.Centroid, b.Area);

            // Contour as chain code, then its polygon vertices in red.
            CvContourChainCode cc = b.Contour;
            cc.Render(imgContour);
            CvContourPolygon polygon = cc.ConvertToPolygon();
            foreach (CvPoint p in polygon)
            {
                imgPolygon.Circle(p, 1, CvColor.Red, -1);
            }
        }
        blobs.RenderBlobs(imgSrc, imgRender);

        using (new CvWindow("render", imgRender))
        using (new CvWindow("contour", imgContour))
        using (new CvWindow("polygon vertices", imgPolygon))
        {
            Cv.WaitKey(0);
        }
    }
}
/// <summary>
/// Allocates all per-camera tracking state (detection flags, blob arrays,
/// normalized blob positions, debug textures) and, in debug mode, creates one
/// debug texture per debugger material and binds it to "_BaseMap".
/// </summary>
private void InitVariables()
{
    camCount = camsColor32.Length;

    detectRoutines = new Coroutine[camCount];
    detecting = new bool[camCount];
    newblobs = new bool[camCount];
    blobspercams = new int[camCount];
    debugTex = new Texture2D[camCount];
    blobs = new CvBlob[camCount][];
    blobsPositionNormal = new Vector2[camCount][];

    // One fixed-capacity blob slot array per camera.
    for (int cam = 0; cam < blobs.Length; cam++)
    {
        blobs[cam] = new CvBlob[CVParams.maxBlobCount];
        blobsPositionNormal[cam] = new Vector2[CVParams.maxBlobCount];
    }

    if (debug)
    {
        for (int cam = 0; cam < debuggerMat.Length; cam++)
        {
            debugTex[cam] = new Texture2D(
                (int)WCHandle.WebcamDimensions[cam].x,
                (int)WCHandle.WebcamDimensions[cam].y,
                UnityEngine.Experimental.Rendering.DefaultFormat.LDR,
                UnityEngine.Experimental.Rendering.TextureCreationFlags.None);
            debuggerMat[cam].SetTexture("_BaseMap", debugTex[cam]);
        }
    }
}
/// <summary>
/// Runs blob detection on <c>MatBinary</c> and rebuilds <c>BlobList</c> with
/// one <c>BlobInfo</c> per detected blob; rectangle locations are translated
/// from ROI-local to absolute image coordinates.
/// </summary>
public void BlobDetect()
{
    BlobList.Clear();

    CvBlobs blobs = new CvBlobs();
    CvBlobDetector _blobDetector = new CvBlobDetector();
    _blobDetector.Detect(MatBinary.ToImage<Gray, byte>(), blobs);

    foreach (KeyValuePair<uint, CvBlob> entry in blobs)
    {
        CvBlob detected = entry.Value;
        BlobInfo blobinfo = new BlobInfo
        {
            Area = detected.Area,
            Width = detected.BoundingBox.Right - detected.BoundingBox.Left,
            Height = detected.BoundingBox.Bottom - detected.BoundingBox.Top,
        };
        // Shift from ROI-local to absolute image coordinates.
        blobinfo.Rect.Location = new Point(ROI.Left + detected.BoundingBox.Left, ROI.Top + detected.BoundingBox.Top);
        blobinfo.Rect.Size = new Size(blobinfo.Width, blobinfo.Height);
        BlobList.Add(blobinfo);
    }
}
// Per-frame handler for colour-marker mouse control.
// Builds per-colour "dominance" images (channel minus grayscale), thresholds
// each into a binary mask, runs background subtraction + blob detection per
// colour, then uses the green marker's centroid to position the Windows mouse
// cursor. The red/blue click-and-scroll gestures are currently commented out.
private void ProcessFrame(object sender, EventArgs arg)
{
    Mat frame = new Mat();
    capture.Retrieve(frame, 0);
    Mat grayFrame = new Mat();
    CvInvoke.CvtColor(frame, grayFrame, ColorConversion.Bgr2Gray);
    //Mat smallGrayFrame = new Mat();
    //CvInvoke.PyrDown(grayFrame, smallGrayFrame);
    //Mat smoothedGrayFrame = new Mat();
    //CvInvoke.PyrUp(smallGrayFrame, smoothedGrayFrame);
    //Image<Gray, Byte> smallGrayFrame = grayFrame.PyrDown();
    //Image<Gray, Byte> smoothedGrayFrame = smallGrayFrame.PyrUp();
    //Mat cannyFrame = new Mat();
    //CvInvoke.Canny(smoothedGrayFrame, cannyFrame, 100, 60);
    //Image<Gray, Byte> cannyFrame = smoothedGrayFrame.Canny(100, 60);

    Image<Bgra, Byte> _frame = frame.ToImage<Bgra, Byte>();
    Image<Gray, Byte> _grayFrame = grayFrame.ToImage<Gray, Byte>();
    Image<Gray, Byte>[] rgb_frame = _frame.Split();    //components of rgb image

    // Colour dominance: channel minus grayscale, then binary threshold.
    Image<Gray, Byte> red_com = rgb_frame[2] - _grayFrame;
    var red_bi = red_com.Convert<Gray, byte>().ThresholdBinary(new Gray(redThres), new Gray(255));
    Image<Gray, Byte> blue_com = rgb_frame[0] - _grayFrame;
    var blue_bi = blue_com.Convert<Gray, byte>().ThresholdBinary(new Gray(blueThres), new Gray(255));
    Image<Gray, Byte> green_com = rgb_frame[1] - _grayFrame;
    var green_bi = green_com.Convert<Gray, byte>().ThresholdBinary(new Gray(greenThres), new Gray(255));
    //System.Windows.Forms.MessageBox.Show("");

    ///////////////////////////////////////////////////////////////////////////////////
    //Blob detection

    //Red Blob detection
    Image<Bgr, Byte> smoothedFrame_r = new Image<Bgr, byte>(red_com.Size);
    // NOTE(review): red_bi is single-channel but smoothedFrame_r is Bgr —
    // confirm GaussianBlur accepts this src/dst channel mismatch at runtime.
    CvInvoke.GaussianBlur(red_bi, smoothedFrame_r, new Size(3, 3), 1); //filter out noises
    Mat forgroundMask_r = new Mat();
    fgDetector.Apply(smoothedFrame_r, forgroundMask_r);
    CvBlobs blobs_r = new CvBlobs();
    blobDetector.Detect(forgroundMask_r.ToImage<Gray, byte>(), blobs_r);
    blobs_r.FilterByArea(minarea, maxarea);

    //blue Blob Detection
    Image<Bgr, Byte> smoothedFrame_b = new Image<Bgr, byte>(red_com.Size);
    CvInvoke.GaussianBlur(blue_bi, smoothedFrame_b, new Size(3, 3), 1); //filter out noises
    Mat forgroundMask_b = new Mat();
    fgDetector.Apply(smoothedFrame_b, forgroundMask_b);
    CvBlobs blobs_b = new CvBlobs();
    blobDetector.Detect(forgroundMask_b.ToImage<Gray, byte>(), blobs_b);
    blobs_b.FilterByArea(minarea, maxarea);

    //Green blob detection
    Image<Bgr, Byte> smoothedFrame_g = new Image<Bgr, byte>(red_com.Size);
    CvInvoke.GaussianBlur(green_bi, smoothedFrame_g, new Size(3, 3), 1); //filter out noises
    Mat forgroundMask_g = new Mat();
    fgDetector.Apply(smoothedFrame_g, forgroundMask_g);
    CvBlobs blobs_g = new CvBlobs();
    blobDetector.Detect(forgroundMask_g.ToImage<Gray, byte>(), blobs_g);
    blobs_g.FilterByArea(minarea, maxarea);

    //Mouse Interpretition
    float[] cent_r = new float[2];
    float[] cent_g = new float[2];
    float[] cent_b = new float[2];

    //Corsor control with Green Marker — the last green blob wins.
    foreach (var pair in blobs_g)
    {
        CvBlob b = pair.Value;
        CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
        cent_g[0] = b.Centroid.X;
        cent_g[1] = b.Centroid.Y;
    }

    if (blobs_g.Count == 1 || mouseflag != 0)
    {
        //Cursor Movement Controlled
        //Primary Screem
        //if (Screen.AllScreens.Length == 1)
        {
            // Map the marker centroid (capture coordinates) onto the primary
            // screen; the X axis is mirrored.
            Cursor.Position = new Point(
                Screen.PrimaryScreen.Bounds.Width - (int)(cursor_mul * (int)cent_g[0] * Screen.PrimaryScreen.Bounds.Width / capture.Width),
                (int)(cursor_mul * (int)cent_g[1]) * Screen.PrimaryScreen.Bounds.Height / capture.Height);
        }
        //Secondary Screen
        //Cursor.Position = new Point((int)(cursor_mul * (int)cent_g[0] * Screen.AllScreens[1].Bounds.Width / capture.Width), (int)(cursor_mul * (int)cent_g[1]) * Screen.AllScreens[1].Bounds.Height / capture.Height);

        // Commented-out variants (removed for brevity) scaled the cursor
        // across the combined bounds of 2- and 3-monitor setups.

        // Commented-out gesture logic (removed for brevity):
        //  - red marker without green  -> left click / double click with a
        //    ccount debounce via mouse_event(MOUSEEVENTF_LEFTDOWN/UP, ...);
        //  - blue + green markers      -> right click
        //    (MOUSEEVENTF_RIGHTDOWN/UP);
        //  - blue marker without green -> vertical/horizontal scrolling via
        //    MOUSEEVENTF_VWHEEL / MOUSEEVENTF_HWHEEL, tracking scroll_x/scroll_y.
    }

    captureImageBox.Image = frame;
    grayscaleImageBox.Image = red_bi;
    smoothedGrayscaleImageBox.Image = green_bi;
    cannyImageBox.Image = blue_bi;
}
// Kinect multi-source frame handler.
// Colour path: copies the frame into colorBitmap for display, converts it to
// HSV, masks the configured [lower, upper] colour range and records the
// centroid of the largest blob above the minimum area in (target_x, target_y).
// Depth path: broadcasts the raw depth frame and a derived 2-D scan, each
// stamped with the current UTC time.
private void OnFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Seconds since the Unix epoch; appended to the broadcast payloads.
    double utcTime = (DateTime.UtcNow - new DateTime(1970, 1, 1)).TotalSeconds;
    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
    using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
    {
        using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
        {
            if (colorFrame != null)
            {
                colorFrame.CopyConvertedFrameDataToArray(this.colorArray, ColorImageFormat.Bgra);
                // (commented-out code that broadcast the raw colour frame +
                //  timestamp over colorConnector — removed for brevity)
                FrameDescription colorFrameDescription = colorFrame.FrameDescription;
                using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                {
                    this.colorBitmap.Lock();
                    // verify data and write the new color frame data to the display bitmap
                    if ((colorFrameDescription.Width == this.colorBitmap.PixelWidth) && (colorFrameDescription.Height == this.colorBitmap.PixelHeight))
                    {
                        colorFrame.CopyConvertedFrameDataToIntPtr(
                            this.colorBitmap.BackBuffer,
                            (uint)(colorFrameDescription.Width * colorFrameDescription.Height * 4),
                            ColorImageFormat.Bgra);
                        this.colorBitmap.AddDirtyRect(new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight));
                    }
                    this.colorBitmap.Unlock();
                }

                IplImage imgSrc = this.colorBitmap.ToIplImage();                // Source, BGR image.
                IplImage imgGray = new IplImage(imgSrc.Size, BitDepth.U8, 1);   // Binary image that has the blobs.
                IplImage imghsv = new IplImage(imgSrc.Size, BitDepth.U8, 3);    // HSV image for thresholding.
                Cv.CvtColor(imgSrc, imghsv, ColorConversion.BgrToHsv);
                Cv.InRange(imghsv, lower, upper, imgGray);

                CvBlobs blobs = new CvBlobs();
                blobs.Label(imgGray);
                // Track the largest blob above the minimum area: min_area is
                // raised to each accepted blob's area, so the final target is
                // the centroid of the largest qualifying blob.
                int min_area = 1500;
                foreach (KeyValuePair<int, CvBlob> item in blobs)
                {
                    int label = item.Key;
                    CvBlob blob = item.Value;
                    blob.CalcCentroid();
                    int val = blob.Area;
                    if (val > min_area)
                    {
                        min_area = val;
                        float x = (float)blob.Centroid.X;
                        float y = (float)blob.Centroid.Y;
                        target_x = (int)x;
                        target_y = (int)y;
                    }
                    // Debug.Print( "Coordinates" + (blob.Centroid.ToString()) +"Area" + val.ToString());
                }
                // (commented-out code that rendered the blobs into an
                //  "Orange Blob Detection" debug window — removed for brevity)
            }
            if (depthFrame != null)
            {
                // Raw depth frame + timestamp -> byteDepthArray.
                depthFrame.CopyFrameDataToArray(this.depthArray);
                System.Buffer.BlockCopy(this.depthArray, 0, this.byteDepthArray, 0, this.kinect.DepthFrameSource.FrameDescription.Height * this.kinect.DepthFrameSource.FrameDescription.Width * BYTES_PER_DEPTH_PIXEL);
                System.Buffer.BlockCopy(BitConverter.GetBytes(utcTime), 0, this.byteDepthArray, this.kinect.DepthFrameSource.FrameDescription.Height * this.kinect.DepthFrameSource.FrameDescription.Width * BYTES_PER_DEPTH_PIXEL, sizeof(double));
                // Derived 2-D scan -> byteScan2DArray.
                calculateScanFromDepth(this.depthArray);
                System.Buffer.BlockCopy(this.scan2DArray, 0, this.byteScan2DArray, 0, 6 * this.kinect.DepthFrameSource.FrameDescription.Width * 4 + 12);
                this.scan2DConnector.Broadcast(this.byteScan2DArray);
                this.depthConnector.Broadcast(this.byteDepthArray);
            }
        }
    }
    // (commented-out blocks that broadcast infrared frames over irConnector and
    //  padded JSON-serialized body frames over bodyConnector — removed for brevity)
}
// Camera-capture worker: loops forever, mirroring each frame and either
// (mode 1) detecting faces with a Haar cascade or (any other mode) detecting
// blue-ish blobs in HSV space; in both branches LifeBox is repositioned on the
// UI thread to follow the first detection of the frame.
private void CaptureMotion()
{
    try
    {
        // Scale factors from the capture preview to the whole form.
        float wFactor = (float)this.Width / (float)CaptureBox.Width;
        float hFactor = (float)this.Height / (float)CaptureBox.Height;
        CvArr array = null;
        CvCapture cap = CvCapture.FromCamera(CaptureDevice.Any);
        this.Invoke(new Action(() =>
        {
            lblLoading.Visible = false;
            radioButton1.Visible = true;
            radioButton2.Visible = true;
        }));
        while (true)
        {
            IplImage img = cap.QueryFrame();
            if (img == null)
            {
                continue;
            }
            img.Flip(array, FlipMode.Y); // mirror horizontally
            if (mode == 1)
            {
                // Face-tracking mode.
                // NOTE(review): the cascade file and memory storage are
                // re-created on every frame — consider hoisting out of the loop.
                string filepath = "haarcascade_frontalface_alt2.xml";
                CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile(filepath);
                CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(img, cascade, Cv.CreateMemStorage(), 3.0, 1, HaarDetectionType.Zero, new CvSize(70, 70), new CvSize(500, 500));
                foreach (CvAvgComp face in faces)
                {
                    // (commented-out code that cropped the detected face region
                    //  into its own bitmap — removed for brevity)
                    img.DrawRect(face.Rect, CvColor.Red, 3);
                    Bitmap FaceImage = BitmapConverter.ToBitmap(img);
                    FaceImage.SetResolution(240, 180);
                    CaptureBox.Image = FaceImage;
                    this.Invoke(new Action(() =>
                    {
                        // Centre LifeBox on the face, clamped inside the form.
                        LifeBox.Left = (int)(face.Rect.Left * wFactor - (float)(LifeBox.Width / 2.0) - (float)(this.Width / 2.0));
                        LifeBox.Top = (int)(face.Rect.Top * hFactor - (float)(LifeBox.Height / 2.0) - (float)(this.Height / 2.0));
                        if (LifeBox.Left > (this.Width - LifeBox.Width - 12)) { LifeBox.Left = (this.Width - LifeBox.Width - 24); }
                        if (LifeBox.Top > (this.Height - LifeBox.Height - 48)) { LifeBox.Top = (this.Height - LifeBox.Height - 48); }
                        if (LifeBox.Left < 12) { LifeBox.Left = 12; }
                        if (LifeBox.Top < 12) { LifeBox.Top = 12; }
                        Thread.Sleep(30);
                    }));
                    break; // only the first face of the frame is used
                }
            }
            else
            {
                // Colour-blob mode: threshold blue-ish hues in HSV space.
                int AllBlobs = 0;
                CvBlobs blobs = null;
                IplImage imgHSVsrc = Cv.CreateImage(Cv.GetSize(img), BitDepth.U8, 3);
                IplImage imgHSVdst = Cv.CreateImage(Cv.GetSize(img), BitDepth.U8, 1);
                Cv.CvtColor(img, imgHSVsrc, ColorConversion.BgrToHsv);
                Cv.InRangeS(imgHSVsrc, new CvScalar(86, 80, 30), new CvScalar(115, 250, 250), imgHSVdst);
                Cv.ReleaseImage(imgHSVsrc);
                blobs = new CvBlobs(imgHSVdst);
                blobs.FilterByArea(7000, 40000);
                AllBlobs = blobs.Count;
                foreach (KeyValuePair<int, CvBlob> blob in blobs)
                {
                    CvBlob CurrentBlob = blob.Value;
                    CvRect BlobRect = CurrentBlob.Rect;
                    CvPoint Point1, Point2;
                    Point1.X = BlobRect.X;
                    Point1.Y = BlobRect.Y;
                    Point2.X = BlobRect.X + BlobRect.Width;
                    Point2.Y = BlobRect.Y + BlobRect.Height;
                    img.DrawRect(Point1, Point2, CvColor.LightGreen, 3, LineType.AntiAlias);
                    this.Invoke(new Action(() =>
                    {
                        // Centre LifeBox on the blob, clamped inside the form.
                        LifeBox.Left = (int)(BlobRect.Left * wFactor - (float)(LifeBox.Width / 2.0) - (float)(this.Width / 2.0));
                        LifeBox.Top = (int)(BlobRect.Top * hFactor - (float)(LifeBox.Height / 2.0) - (float)(this.Height / 2.0));
                        if (LifeBox.Left > (this.Width - LifeBox.Width - 12)) { LifeBox.Left = (this.Width - LifeBox.Width - 24); }
                        if (LifeBox.Top > (this.Height - LifeBox.Height - 48)) { LifeBox.Top = (this.Height - LifeBox.Height - 48); }
                        if (LifeBox.Left < 12) { LifeBox.Left = 12; }
                        if (LifeBox.Top < 12) { LifeBox.Top = 12; }
                        Thread.Sleep(30);
                    }));
                    break; // only the first blob of the frame is used
                }
                Bitmap Item = BitmapConverter.ToBitmap(img);
                Item.SetResolution(240, 180);
                CaptureBox.Image = Item;
                Bitmap HSVItem = BitmapConverter.ToBitmap(imgHSVdst);
                HSVItem.SetResolution(240, 180);
                HSVCaptureBox.Image = HSVItem;
                Cv.ReleaseImage(imgHSVdst);
            }
        }
    }
    catch (Exception e)
    {
        Console.WriteLine("ERROR: " + e.Message + "DETAILS: " + e.StackTrace);
    }
}
// Per-frame handler: detects a hand in a fixed 300x300 ROI via HSV thresholding,
// counts raised fingers from convexity defects of the hand contour, and maps the
// approximate count to a media-player command (1 = play, 5 = pause, 2 = volume up).
private void ProcessFrame(object sender, EventArgs args)
{
    //Get frame
    Mat frame = camera.QueryFrame();
    //Process frame
    Image<Bgr, Byte> img = frame.ToImage<Bgr, Byte>();
    img.ROI = new Rectangle(100, 100, 300, 300); // fixed detection window
    Image<Hsv, Byte> HSVimg = img.Convert<Hsv, Byte>();
    // Skin segmentation with user-tunable HSV bounds, then morphological clean-up.
    Image<Gray, Byte> binary = HSVimg.InRange(new Hsv(minH, minS, minV), new Hsv(maxH, maxS, maxV));
    Image<Gray, Byte> eroded = binary.Erode(erosions);
    Image<Gray, Byte> dilated = eroded.Dilate(dilations);
    //Detect largest blob
    CvBlobDetector blobDetector = new CvBlobDetector();
    CvBlobs blobs = new CvBlobs();
    blobDetector.Detect(dilated, blobs);
    int maxBlobArea = 0;
    CvBlob largestBlob = null;
    foreach (CvBlob blob in blobs.Values)
    {
        if (blob.Area > maxBlobArea)
        {
            maxBlobArea = blob.Area;
            largestBlob = blob;
        }
    }
    // Only treat the blob as a hand if it is reasonably large (>= 10000 px).
    if (largestBlob != null && largestBlob.Area >= 10000)
    {
        handContour = largestBlob.GetContour();
        VectorOfInt convexHullIndices = new VectorOfInt();
        VectorOfPoint convexHull = new VectorOfPoint();
        // Hull is computed twice: once as points (for drawing), once as indices
        // (required input for ConvexityDefects).
        CvInvoke.ConvexHull(new VectorOfPoint(handContour), convexHull);
        CvInvoke.ConvexHull(new VectorOfPoint(handContour), convexHullIndices);
        Mat defects = new Mat();
        //img.Draw(handContour, new Bgr(0, 0, 255),3);
        img.Draw(convexHull.ToArray(), new Bgr(255, 0, 0), 3);
        try
        {
            CvInvoke.ConvexityDefects(new VectorOfPoint(handContour), convexHullIndices, defects);
        }
        catch (CvException exc)
        {
            MessageBox.Show(exc.Message);
        }
        if (!defects.IsEmpty)
        {
            // Each defect row is (startIdx, endIdx, farthestIdx, fixpointDepth).
            Matrix<int> defectsInt = new Matrix<int>(defects.Rows, defects.Cols, defects.NumberOfChannels);
            defects.CopyTo(defectsInt);
            int countFingers = 0;
            for (int i = 0; i < defectsInt.Rows; i++)
            {
                int startIdx = defectsInt.Data[i, 0];
                int endIdx = defectsInt.Data[i, 1];
                int farthestIdx = defectsInt.Data[i, 2];
                float distance = defectsInt.Data[i, 3];
                // Deep defects only: shallow ones are contour noise, not finger gaps.
                if (distance >= 15000)
                {
                    //distances.Add(distance);
                    Point startPoint = handContour[startIdx];
                    Point endPoint = handContour[endIdx];
                    Point farthestPoint = handContour[farthestIdx];
                    img.Draw(new CircleF(startPoint, 2.0f), new Bgr(0, 255, 0), 2);
                    img.Draw(new CircleF(endPoint, 2.0f), new Bgr(255, 0, 0), 2);
                    img.Draw(new CircleF(farthestPoint, 2.0f), new Bgr(0, 0, 255), 2);
                    CvInvoke.Line(img, startPoint, farthestPoint, new MCvScalar(255, 255, 0));
                    countFingers++;
                }
            }
            // Approximate the finger count and classify by nearest target:
            // 1 finger = play, 5 fingers = pause, 2 fingers = volume up.
            if (Math.Abs(countFingers - 1) < Math.Abs(countFingers - 5) && Math.Abs(countFingers - 1) < Math.Abs(countFingers - 2))
            {
                label10.Text = "Play";
                axWindowsMediaPlayer1.Ctlcontrols.play();
            }
            else if (Math.Abs(countFingers - 5) < Math.Abs(countFingers - 1) && Math.Abs(countFingers - 5) < Math.Abs(countFingers - 2))
            {
                label10.Text = "Pause";
                axWindowsMediaPlayer1.Ctlcontrols.pause();
            }
            else if (Math.Abs(countFingers - 2) < Math.Abs(countFingers - 1) && Math.Abs(countFingers - 2) < Math.Abs(countFingers - 5))
            {
                label10.Text = "Volume Up";
                // NOTE(review): this branch pauses playback before raising the volume -
                // looks unintentional for a "Volume Up" gesture; confirm desired behaviour.
                axWindowsMediaPlayer1.Ctlcontrols.pause();
                axWindowsMediaPlayer1.settings.volume++;
            }
        }
    }
    pictureBox1.Image = binary.Bitmap;
}
// Per-frame handler for the four-marker "air click" camera UI: segments four
// coloured finger markers in HSV, finds their blobs, and when both finger/thumb
// pairs pinch together (centroids close enough) treats it as a "click": plays a
// sound, crops the region framed by the markers and saves both images to disk.
private void ProcessFrame(object sender, EventArgs arg)
{
    Mat frame = new Mat();
    capture.Retrieve(frame, 0);
    Mat frame_crop = frame; // alias; used later for the crop source
    Image<Hsv, Byte> currenthsvFrame = (frame.ToImage<Bgr, Byte>()).Convert<Hsv, Byte>();
    Image<Gray, Byte> color_one = new Image<Gray, Byte>(frame.Width, frame.Height);
    Image<Gray, Byte> color_two = new Image<Gray, Byte>(frame.Width, frame.Height);
    Image<Gray, Byte> color_three = new Image<Gray, Byte>(frame.Width, frame.Height);
    Image<Gray, Byte> color_four = new Image<Gray, Byte>(frame.Width, frame.Height);
    /*
     * Color one is Red
     * Color two is Blue
     * Color three is Green
     * Color four is Yellow
     * Green is on the right index finger
     * Blue is on the left index finger
     * Red is on the right thumb
     * Yellow is on the left thumb
     */
    // Empirically tuned HSV bounds for each marker colour.
    Hsv hsv_min_color_one = new Hsv(0, 135, 110);
    //Hsv hsv_max_color_one = new Hsv(6, 255, 255);
    Hsv hsv_max_color_one = new Hsv(8, 255, 255);
    Hsv hsv_min_color_two = new Hsv(112, 53, 10);
    Hsv hsv_max_color_two = new Hsv(119, 255, 255);
    /*
     * Hsv hsv_min_color_three = new Hsv(68, 59, 80);
     * Hsv hsv_max_color_three = new Hsv(85, 255, 255);
     * Hsv hsv_min_color_four = new Hsv(20, 165, 165);
     * Hsv hsv_max_color_four = new Hsv(36, 255, 255);
     */
    Hsv hsv_min_color_three = new Hsv(83, 109, 105);
    Hsv hsv_max_color_three = new Hsv(109, 255, 255);
    Hsv hsv_min_color_four = new Hsv(18, 155, 155);
    Hsv hsv_max_color_four = new Hsv(35, 255, 255);
    color_one = currenthsvFrame.InRange(hsv_min_color_one, hsv_max_color_one);
    color_two = currenthsvFrame.InRange(hsv_min_color_two, hsv_max_color_two);
    color_three = currenthsvFrame.InRange(hsv_min_color_three, hsv_max_color_three);
    color_four = currenthsvFrame.InRange(hsv_min_color_four, hsv_max_color_four);
    //Blob detection
    #region Blob Detection
    //Color one detection
    // Same pipeline per colour: blur the mask, run the foreground detector, label
    // blobs and keep only those within [minarea, maxarea].
    Image<Bgr, Byte> smoothedFrame_cone = new Image<Bgr, byte>(currenthsvFrame.Size);
    CvInvoke.GaussianBlur(color_one, smoothedFrame_cone, new Size(3, 3), 1); //filter out noises
    Mat forgroundMask_cone = new Mat();
    fgDetector.Apply(smoothedFrame_cone, forgroundMask_cone);
    CvBlobs blobs_color_one = new CvBlobs();
    blobDetector.Detect(forgroundMask_cone.ToImage<Gray, byte>(), blobs_color_one);
    blobs_color_one.FilterByArea(minarea, maxarea);
    //Color two Blob Detection
    Image<Bgr, Byte> smoothedFrame_ctwo = new Image<Bgr, byte>(currenthsvFrame.Size);
    CvInvoke.GaussianBlur(color_two, smoothedFrame_ctwo, new Size(3, 3), 1); //filter out noises
    Mat forgroundMask_ctwo = new Mat();
    fgDetector.Apply(smoothedFrame_ctwo, forgroundMask_ctwo);
    CvBlobs blobs_color_two = new CvBlobs();
    blobDetector.Detect(forgroundMask_ctwo.ToImage<Gray, byte>(), blobs_color_two);
    blobs_color_two.FilterByArea(minarea, maxarea);
    //Color three blob detection
    Image<Bgr, Byte> smoothedFrame_cthree = new Image<Bgr, byte>(currenthsvFrame.Size);
    CvInvoke.GaussianBlur(color_three, smoothedFrame_cthree, new Size(3, 3), 1); //filter out noises
    Mat forgroundMask_cthree = new Mat();
    fgDetector.Apply(smoothedFrame_cthree, forgroundMask_cthree);
    CvBlobs blobs_color_three = new CvBlobs();
    blobDetector.Detect(forgroundMask_cthree.ToImage<Gray, byte>(), blobs_color_three);
    blobs_color_three.FilterByArea(minarea, maxarea);
    //Color four detection
    Image<Bgr, Byte> smoothedFrame_cfour = new Image<Bgr, byte>(currenthsvFrame.Size);
    CvInvoke.GaussianBlur(color_four, smoothedFrame_cfour, new Size(3, 3), 1); //filter out noises
    Mat forgroundMask_cfour = new Mat();
    fgDetector.Apply(smoothedFrame_cfour, forgroundMask_cfour);
    CvBlobs blobs_color_four = new CvBlobs();
    blobDetector.Detect(forgroundMask_cfour.ToImage<Gray, byte>(), blobs_color_four);
    blobs_color_four.FilterByArea(minarea, maxarea);
    //Markers interpretation
    float[] cent_color_one = new float[2];
    float[] cent_color_two = new float[2];
    float[] cent_color_three = new float[2];
    float[] cent_color_four = new float[2];
    //Centroids of markers (last blob of each colour wins; rectangles drawn for all)
    foreach (var pair in blobs_color_one)
    {
        CvBlob b = pair.Value;
        CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
        cent_color_one[0] = b.Centroid.X;
        cent_color_one[1] = b.Centroid.Y;
    }
    foreach (var pair in blobs_color_two)
    {
        CvBlob b = pair.Value;
        CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
        cent_color_two[0] = b.Centroid.X;
        cent_color_two[1] = b.Centroid.Y;
    }
    foreach (var pair in blobs_color_three)
    {
        CvBlob b = pair.Value;
        CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
        cent_color_three[0] = b.Centroid.X;
        cent_color_three[1] = b.Centroid.Y;
    }
    foreach (var pair in blobs_color_four)
    {
        CvBlob b = pair.Value;
        CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
        cent_color_four[0] = b.Centroid.X;
        cent_color_four[1] = b.Centroid.Y;
    }
    #endregion
    #region Calculation
    // A "click" requires BOTH pinches in the same frame:
    //   colour one + colour two centroids within sqrt(5000) px, AND
    //   colour three + colour four centroids within sqrt(10000) px.
    int click_flag = 0;
    int[] x_cor = new int[4];
    int[] y_cor = new int[4];
    if (blobs_color_one.Count != 0 && blobs_color_two.Count != 0 && blobs_color_three.Count != 0 && blobs_color_four.Count != 0)
    {
        foreach (var pair in blobs_color_one)
        {
            CvBlob b = pair.Value;
            foreach (var pairr in blobs_color_two)
            {
                CvBlob c = pairr.Value;
                // Squared Euclidean distance between the two centroids.
                if ((b.Centroid.X - c.Centroid.X) * (b.Centroid.X - c.Centroid.X) + (b.Centroid.Y - c.Centroid.Y) * (b.Centroid.Y - c.Centroid.Y) <= 5000)
                {
                    click_flag = 1;
                    x_cor[0] = ((int)b.Centroid.X);
                    x_cor[1] = ((int)c.Centroid.X);
                    y_cor[0] = ((int)b.Centroid.Y);
                    y_cor[1] = ((int)c.Centroid.Y);
                    break;
                }
            }
            if (click_flag == 1)
            {
                break;
            }
        }
        if (click_flag == 1)
        {
            click_flag = 0; // re-arm: only confirmed if the second pinch also matches
            foreach (var pair in blobs_color_three)
            {
                CvBlob b = pair.Value;
                foreach (var pairr in blobs_color_four)
                {
                    CvBlob c = pairr.Value;
                    if ((b.Centroid.X - c.Centroid.X) * (b.Centroid.X - c.Centroid.X) + (b.Centroid.Y - c.Centroid.Y) * (b.Centroid.Y - c.Centroid.Y) <= 10000)
                    {
                        click_flag = 1;
                        x_cor[2] = ((int)b.Centroid.X);
                        x_cor[3] = ((int)c.Centroid.X);
                        y_cor[2] = ((int)b.Centroid.Y);
                        y_cor[3] = ((int)c.Centroid.Y);
                        break;
                    }
                }
                if (click_flag == 1)
                {
                    break;
                }
            }
        }
    }
    if (click_flag == 1)
    {
        //MessageBox.Show("clicked");
        SoundPlayer simpleSound = new SoundPlayer(@"click_sound.wav");
        simpleSound.Play();
        // Sort the four x/y marker coordinates; the middle two ([1]..[2]) frame the
        // region between the two pinch points, which becomes the crop rectangle.
        Array.Sort(x_cor);
        Array.Sort(y_cor);
        Bitmap ori_image = frame_crop.ToImage<Bgr, Byte>().ToBitmap();
        Bitmap crop_image = new Bitmap(x_cor[2] - x_cor[1], y_cor[2] - y_cor[1]);
        Graphics g = Graphics.FromImage(crop_image);
        g.DrawImage(ori_image, -x_cor[1], -y_cor[1]); // shift so the crop origin lands at (0,0)
        //string name = string.Format("SAP_{0:ddMMyyyy_hh_mm_ss}.jpg",DateTime.Now);
        frame.Save(@"C:\Users\Shubhankar\Pictures\Camera Roll\" + string.Format("SAP_{0:ddMMyyyy_hh_mm_ss}_original.jpg", DateTime.Now));
        crop_image.Save(@"C:\Users\Shubhankar\Pictures\Camera Roll\" + string.Format("SAP_{0:ddMMyyyy_hh_mm_ss}.jpg", DateTime.Now));
        Thread.Sleep(500); // debounce: avoid registering the same pinch repeatedly
    }
    #endregion
    #region Click Gesture
    #endregion
    captureImageBox.Image = frame;
    grayscaleImageBox.Image = color_one;
    smoothedGrayscaleImageBox.Image = color_two;
    cannyImageBox.Image = color_three;
    Color4ImageBox.Image = color_four;
}
// Per-frame handler for MP4 playback: background-subtracts the frame, cleans the
// mask with morphology, tracks vehicle blobs, and counts each track the first time
// its centre crosses the px-py counting line. Also logs counts to JSON, throttles
// to the source FPS, snapshots + emails at the countBrd threshold, and records
// video when enabled.
private void ProcessFrameMP4(object sender, EventArgs e)
{
    // Counting-line endpoints from the user-configured coordinates.
    px = new Point(px1, px2);
    py = new Point(py1, py2);
    if (cap != null)
    {
        cap.Retrieve(frame, 0);
        currentframe = frame.ToImage<Bgr, byte>();
        // Foreground mask via background subtraction.
        Mat mask = new Mat();
        sub.Apply(currentframe, mask);
        Mat kernelOp = new Mat();
        Mat kernelCl = new Mat();
        Mat kernelEl = new Mat(); // NOTE(review): unused
        Mat Dilate = new Mat();   // NOTE(review): unused
        kernelOp = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(3, 3), new Point(-1, -1));
        kernelCl = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(11, 11), new Point(-1, -1));
        var element = CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(3, 3), new Point(-1, -1));
        // Blur, open (remove specks), close (fill holes), dilate, then binarise.
        CvInvoke.GaussianBlur(mask, mask, new Size(13, 13), 1.5);
        CvInvoke.MorphologyEx(mask, mask, MorphOp.Open, kernelOp, new Point(-1, -1), 1, BorderType.Default, new MCvScalar());
        CvInvoke.MorphologyEx(mask, mask, MorphOp.Close, kernelCl, new Point(-1, -1), 1, BorderType.Default, new MCvScalar());
        CvInvoke.Dilate(mask, mask, element, new Point(-1, -1), 1, BorderType.Reflect, default(MCvScalar));
        CvInvoke.Threshold(mask, mask, 127, 255, ThresholdType.Binary);
        detect.Detect(mask.ToImage<Gray, byte>(), blobs);
        blobs.FilterByArea(500, 20000); // keep vehicle-sized blobs only
        tracks.Update(blobs, 20.0, 1, 10);
        Image<Bgr, byte> result = new Image<Bgr, byte>(currentframe.Size);
        using (Image<Gray, Byte> blobMask = detect.DrawBlobsMask(blobs))
        {
            frame.CopyTo(result, blobMask);
        }
        CvInvoke.Line(currentframe, px, py, new MCvScalar(0, 0, 255), 2); // counting line (red)
        foreach (KeyValuePair<uint, CvTrack> pair in tracks)
        {
            if (pair.Value.Inactive == 0) //only draw the active tracks.
            {
                int cx = Convert.ToInt32(pair.Value.Centroid.X);
                int cy = Convert.ToInt32(pair.Value.Centroid.Y);
                CvBlob b = blobs[pair.Value.BlobLabel];
                Bgr color = detect.MeanColor(b, frame.ToImage<Bgr, Byte>());
                result.Draw(pair.Key.ToString(), pair.Value.BoundingBox.Location, FontFace.HersheySimplex, 0.5, color);
                currentframe.Draw(pair.Value.BoundingBox, new Bgr(0, 0, 255), 1);
                Point[] contour = b.GetContour();
                //result.Draw(contour, new Bgr(0, 0, 255), 1);
                Point center = new Point(cx, cy);
                CvInvoke.Circle(currentframe, center, 1, new MCvScalar(255, 0, 0), 2);
                // Track centre inside the +/-10 px band around the counting line?
                if (center.Y <= px.Y + 10 && center.Y > py.Y - 10 && center.X <= py.X && center.X > px.X)
                {
                    if (pair.Key.ToString() != "")
                    {
                        // Count each track ID at most once; the recent-ID list is
                        // capped at 20 entries to bound memory.
                        if (!carid.Contains(pair.Key.ToString()))
                        {
                            carid.Add(pair.Key.ToString());
                            if (carid.Count == 20)
                            {
                                carid.Clear();
                            }
                            carcount++;
                            // Skip JSON logging for the 5 counts right after the threshold.
                            if (carcount != countBrd + 1 && carcount != countBrd + 2 && carcount != countBrd + 3 && carcount != countBrd + 4 && carcount != countBrd + 5)
                            {
                                //Json Logger
                                Logs log = new Logs() { Date = DateTime.Now.ToString(), Id = carcount };
                                string strResultJson = JsonConvert.SerializeObject(log);
                                File.AppendAllText(cfg.LogSavePath + @"\log.json", strResultJson + Environment.NewLine);
                            }
                        }
                    }
                    CvInvoke.Line(currentframe, px, py, new MCvScalar(0, 255, 0), 2); // flash green on hit
                }
            }
        }
        CvInvoke.PutText(currentframe, "Count :" + carcount.ToString(), new Point(10, 25), FontFace.HersheySimplex, 1, new MCvScalar(255, 0, 255), 2, LineType.AntiAlias);
        //Frame rate: sleep one frame interval so playback runs at source speed.
        double framerate = cap.GetCaptureProperty(CapProp.Fps);
        Thread.Sleep((int)(1000.0 / framerate));
        // First time the count reaches the threshold: save a snapshot and email it.
        if (firstCount == false && carcount == countBrd)
        {
            Image_Name = cfg.PhotoSavePath + @"\" + "Car" + DateTime.Now.ToString("dd-mm-yyyy-hh-mm-ss") + ".jpg";
            currentframe.Save(Image_Name);
            sendMail = new Thread(SendMail);
            sendMail.Start();
            firstCount = true;
        }
        if (isRecording)
        {
            // First recorded frame establishes the time base; later frames are
            // written with their offset from it.
            if (firstFrameTime != null)
            {
                writer.WriteVideoFrame(currentframe.Bitmap, DateTime.Now - firstFrameTime.Value);
            }
            else
            {
                writer.WriteVideoFrame(currentframe.Bitmap);
                firstFrameTime = DateTime.Now;
            }
        }
        //pictureBox1.SizeMode = PictureBoxSizeMode.StretchImage;
        pictureBox1.Image = currentframe.Bitmap;
    }
}
// Per-frame handler for the RTSP camera stream: background-subtracts the frame,
// cleans the mask with morphology, tracks vehicle blobs, and counts each track
// the first time its centre crosses the px-py counting line. Sends a mail per
// new vehicle, records video when enabled, and snapshots at count 15.
// Mirrors ProcessFrameMP4 with RTSP-specific counting/notification rules.
private void ProcessFrameRTSP(object sender, EventArgs e)
{
    // Counting-line endpoints from the user-configured coordinates.
    Point px = new Point(px1, px2);
    Point py = new Point(py1, py2);
    if (cap != null)
    {
        cap.Retrieve(frame, 0);
        currentframe = frame.ToImage<Bgr, byte>();
        // Foreground mask via background subtraction.
        Mat mask = new Mat();
        sub.Apply(currentframe, mask);
        // (Removed two unused Mat locals that were allocated and never read.)
        Mat kernelOp = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(3, 3), new Point(-1, -1));
        Mat kernelCl = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(11, 11), new Point(-1, -1));
        var element = CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(3, 3), new Point(-1, -1));
        // Blur, open (remove specks), close (fill holes), dilate, then binarise.
        CvInvoke.GaussianBlur(mask, mask, new Size(13, 13), 1.5);
        CvInvoke.MorphologyEx(mask, mask, MorphOp.Open, kernelOp, new Point(-1, -1), 1, BorderType.Default, new MCvScalar());
        CvInvoke.MorphologyEx(mask, mask, MorphOp.Close, kernelCl, new Point(-1, -1), 1, BorderType.Default, new MCvScalar());
        CvInvoke.Dilate(mask, mask, element, new Point(-1, -1), 1, BorderType.Reflect, default(MCvScalar));
        CvInvoke.Threshold(mask, mask, 127, 255, ThresholdType.Binary);
        detect.Detect(mask.ToImage<Gray, byte>(), blobs);
        blobs.FilterByArea(100, int.MaxValue);
        tracks.Update(blobs, 20.0, 1, 10);
        Image<Bgr, byte> result = new Image<Bgr, byte>(currentframe.Size);
        using (Image<Gray, Byte> blobMask = detect.DrawBlobsMask(blobs))
        {
            frame.CopyTo(result, blobMask);
        }
        CvInvoke.Line(currentframe, px, py, new MCvScalar(0, 0, 255), 1); // counting line (red)
        foreach (KeyValuePair<uint, CvTrack> pair in tracks)
        {
            if (pair.Value.Inactive == 0) //only draw the active tracks.
            {
                int cx = Convert.ToInt32(pair.Value.Centroid.X);
                int cy = Convert.ToInt32(pair.Value.Centroid.Y);
                CvBlob b = blobs[pair.Value.BlobLabel];
                Bgr color = detect.MeanColor(b, frame.ToImage<Bgr, Byte>());
                currentframe.Draw(pair.Value.BoundingBox, new Bgr(0, 0, 255), 1);
                //Point[] contour = b.GetContour();
                // result.Draw(contour, new Bgr(0, 0, 255), 1);
                Point center = new Point(cx, cy);
                CvInvoke.Circle(currentframe, center, 1, new MCvScalar(255, 0, 0), 2);
                // Track centre inside the +/-10 px band around the counting line?
                if (center.Y <= px.Y + 10 && center.Y > py.Y - 10 && center.X <= py.X && center.X > px.X)
                {
                    if (pair.Key.ToString() != "")
                    {
                        // Count each track ID at most once; the recent-ID list is
                        // capped at 20 entries to bound memory.
                        if (!carid.Contains(pair.Key.ToString()))
                        {
                            carid.Add(pair.Key.ToString());
                            if (carid.Count == 20)
                            {
                                carid.Clear();
                            }
                            carcount++;
                            Thread logTh = new Thread(SendMail);
                            logTh.Start();
                        }
                    }
                    CvInvoke.Line(currentframe, px, py, new MCvScalar(0, 255, 0), 2); // flash green on hit
                }
            }
        }
        if (isRecording)
        {
            // First recorded frame establishes the time base; later frames are
            // written with their offset from it.
            // BUGFIX: the timestamp was previously dropped on the non-first-frame
            // path, so firstFrameTime was set but never used (cf. ProcessFrameMP4).
            if (firstFrameTime != null)
            {
                writer.WriteVideoFrame(currentframe.Bitmap, DateTime.Now - firstFrameTime.Value);
            }
            else
            {
                writer.WriteVideoFrame(currentframe.Bitmap);
                firstFrameTime = DateTime.Now;
            }
        }
        CvInvoke.PutText(currentframe, "Count :" + carcount.ToString(), new Point(10, 25), FontFace.HersheySimplex, 1, new MCvScalar(0, 255, 255), 2, LineType.AntiAlias);
        pictureBox1.Image = currentframe.Bitmap;
        // First time the count reaches 15: save a snapshot to disk.
        if (firstCount == false && carcount == 15)
        {
            Image_Name = cfg.PhotoSavePath + @"\" + "Car" + DateTime.Now.ToString("dd-mm-yyyy-hh-mm-ss") + ".jpg";
            currentframe.Save(Image_Name);
            firstCount = true;
        }
        // Thread th = new Thread(currentframepicBoxRtsp);
        // th.Start();
    }
}
// Timer tick: screen-scrapes the primary display, locates the game window by a
// near-black blob then a blue sub-region, derives the game-board and marks ROIs
// from hard-coded offsets scaled by Global ratios, classifies each of the 8x8
// board cells, shows the grid in the list box, and hands the parsed board to
// MovementDecision. The timer is disabled during processing and re-enabled on
// every exit path.
private void timer_process_Tick(object sender, EventArgs e)
{
    timer_process.Enabled = false; // prevent re-entrancy while processing
    // Capture the whole primary screen to 4.png, then re-load it for OpenCV.
    Bitmap bitmap = new Bitmap(Screen.PrimaryScreen.Bounds.Width, Screen.PrimaryScreen.Bounds.Height);
    Graphics graphics = Graphics.FromImage(bitmap as Image);
    graphics.CopyFromScreen(0, 0, 0, 0, bitmap.Size);
    bitmap.Save("4.png");
    long Ticks = DateTime.Now.Millisecond; // crude timing start (ms component only)
    Mat mat = new Mat("4.png", Emgu.CV.CvEnum.LoadImageType.Color);
    Image<Bgr, Byte> imgBgr = mat.ToImage<Bgr, Byte>();
    Image<Gray, Byte> imgGray = mat.ToImage<Gray, Byte>();
    int nWid = imgGray.Width;
    int nHei = imgGray.Height;
    // Invert-threshold in place: near-black pixels (<= 5) become white, all else black.
    byte[,,] pData = imgGray.Data;
    for (int y = 0; y < nHei; y++)
    {
        for (int x = 0; x < nWid; x++)
        {
            byte c = pData[y, x, 0];
            if (c > 5)
            {
                pData[y, x, 0] = 0;
            }
            else
            {
                pData[y, x, 0] = 255;
            }
        }
    }
    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(imgGray, blobs);
    blobs.FilterByArea(100, int.MaxValue);
    //_tracker.Process(smoothedFrame, forgroundMask);
    if (blobs.Count < 1)
    {
        timer_process.Enabled = true; // nothing found - try again next tick
        return;
    }
    //-------------------------------
    // Take the first dark blob's bounding box as the candidate window region.
    Rectangle rc = Rectangle.Empty;
    foreach (var pair in blobs)
    {
        CvBlob b = pair.Value;
        rc = b.BoundingBox;
        //CvInvoke.Rectangle(imgBgr, b.BoundingBox, new MCvScalar(255.0, 0, 0), 2);
        break;
    }
    // -------Detect Blue Region ---- /
    // Second pass: inside rc only, keep pixels whose grey level is in [100, 120].
    imgGray = imgBgr.Convert<Gray, Byte>();
    pData = imgGray.Data;
    for (int y = 0; y < nHei; y++)
    {
        for (int x = 0; x < nWid; x++)
        {
            if (!rc.Contains(x, y))
            {
                pData[y, x, 0] = 0;
                continue;
            }
            byte c = pData[y, x, 0];
            if (c >= 100 && c <= 120)
            {
                pData[y, x, 0] = 255;
            }
            else
            {
                pData[y, x, 0] = 0;
            }
        }
    }
    blobs.Clear();
    _blobDetector.Detect(imgGray, blobs);
    blobs.FilterByArea(100, int.MaxValue);
    //_tracker.Process(smoothedFrame, forgroundMask);
    if (blobs.Count < 1)
    {
        timer_process.Enabled = true;
        return;
    }
    //-------------------------------
    // Pick the largest blue blob by bounding-box area: this is the game ROI.
    rc = Rectangle.Empty;
    int nSizeMax = 0;
    foreach (var pair in blobs)
    {
        CvBlob b = pair.Value;
        if (b.BoundingBox.Width * b.BoundingBox.Height > nSizeMax)
        {
            rc = b.BoundingBox;
            nSizeMax = rc.Width * rc.Height;
        }
        //break;
    }
    CvInvoke.Rectangle(imgBgr, rc, new MCvScalar(255, 255, 0), 2);
    Global.g_rcROI = rc;
    // Hard-coded board offsets (relative to the ROI), then ratio-scaled to the
    // actual ROI size via Global.GetRatioCalcedValues.
    Global.DEF_MAIN_BOARD_X = 238;
    Global.DEF_MAIN_BOARD_Y = 42;
    Global.DEF_MAIN_BOARD_W = 570;
    Global.DEF_MAIN_BOARD_H = 570;
    int nGameBoardX = Global.DEF_MAIN_BOARD_X + rc.X;
    int nGameBoardY = Global.DEF_MAIN_BOARD_Y + rc.Y;
    Global.GetRatioCalcedValues(rc.Width, rc.Height, ref nGameBoardX, ref nGameBoardY);
    Global.GetRatioCalcedValues(rc.Width, rc.Height, ref Global.DEF_MAIN_BOARD_W, ref Global.DEF_MAIN_BOARD_H);
    CvInvoke.Rectangle(imgBgr, new Rectangle(nGameBoardX, nGameBoardY, Global.DEF_MAIN_BOARD_W, Global.DEF_MAIN_BOARD_H), new MCvScalar(255, 255, 0), 2);
    // Same treatment for the "marks" panel region.
    Global.DEF_MARKS_X = 15;
    Global.DEF_MARKS_Y = 204;
    Global.DEF_MARKS_W = 189;
    Global.DEF_MARKS_H = 69;
    int nMarksX = Global.DEF_MARKS_X + rc.X;
    int nMarksY = Global.DEF_MARKS_Y + rc.Y;
    Global.GetRatioCalcedValues(rc.Width, rc.Height, ref nMarksX, ref nMarksY);
    Global.GetRatioCalcedValues(rc.Width, rc.Height, ref Global.DEF_MARKS_W, ref Global.DEF_MARKS_H);
    CvInvoke.Rectangle(imgBgr, new Rectangle(nMarksX, nMarksY, Global.DEF_MARKS_W, Global.DEF_MARKS_H), new MCvScalar(255, 255, 0), 2);
    // Slice the board into 8x8 cells and copy each cell image out via ROI.
    int nStepX = Global.DEF_MAIN_BOARD_W / 8;
    int nStepY = Global.DEF_MAIN_BOARD_H / 8;
    var rois = new List<Rectangle>();                 // List of rois
    var imageparts = new List<Image<Bgr, byte>>();    // List of extracted image parts
    for (int i = 0; i < 8; i++)
    {
        for (int j = 0; j < 8; j++)
        {
            Rectangle roi = new Rectangle(nGameBoardX + j * nStepX, nGameBoardY + i * nStepY, Global.DEF_ITEM_W, Global.DEF_ITEM_H);
            rois.Add(roi);
            imgBgr.ROI = roi;
            imageparts.Add(imgBgr.Copy());
        }
    }
    imgBgr.ROI = Rectangle.Empty; // reset ROI so later draws affect the full image
    // Classify every cell; 0 means "unrecognised" and blocks processing this tick.
    m_LstCharacter.Clear();
    bool bCanProcess = true;
    int k = 0, nRow = 0, nCol = 0;
    foreach (Image<Bgr, Byte> img in imageparts)
    {
        int nCharac = (int)ImageMatcher.DetermineCharacter(img);
        m_LstCharacter.Add(nCharac);
        MovementDecision.g_AllocCharacters[nRow, nCol] = nCharac;
        nCol++;
        if (nCol >= 8)
        {
            nRow++;
            nCol = 0;
        }
        //if (nCharac != 0)
        CvInvoke.Rectangle(imgBgr, rois[k], new MCvScalar(255, 255, 0), 2);
        //CvInvoke.Rectangle(imgBgr, rois[k], cols[nCharac - 1], 2);
        if (nCharac == 0)
        {
            bCanProcess = false;
        }
        k++;
    }
    // Dump the parsed 8x8 character grid to the list box for inspection.
    string szLine = "";
    lstBox.Items.Clear();
    for (int i = 0; i < 8; i++)
    {
        szLine = "";
        for (int j = 0; j < 8; j++)
        {
            szLine += "" + MovementDecision.g_AllocCharacters[i, j] + " ";
        }
        lstBox.Items.Add(szLine);
    }
    //imgBgr.Save("processed.png");
    picScr.Image = imgBgr.Bitmap;
    if (!bCanProcess)
    {
        timer_process.Enabled = true; // incomplete board - retry next tick
        return;
    }
    MovementDecision.Process();
    long Ticks2 = DateTime.Now.Millisecond;
    lbProcessTime.Text = "" + (Ticks2 - Ticks); // NOTE(review): wraps at second boundaries
    timer_process.Enabled = true;
}
// Re-analyses the video frame selected by SliderValue: thresholds it against the
// binary background, takes the largest blob as the mouse, then derives head,
// tail, spine and body measurements, drawing intermediate results into the
// Image1/3/4/5/6 bitmap sources as it goes. Also updates SkelImage with a clone
// of the frame's skeleton.
private void UpdateFrameNumber()
{
    Video.SetFrame(SliderValue);
    using (Image<Bgr, Byte> orig = Video.GetFrameImage())
    using (Image<Gray, Byte> origGray = orig.Convert<Gray, Byte>())
    using (Image<Gray, Byte> binary = origGray.ThresholdBinary(new Gray(ThresholdValue), new Gray(255)))
    using (Image<Gray, Byte> subbed = BinaryBackground.AbsDiff(binary))
    {
        CvBlobs blobs = new CvBlobs();
        BlobDetector.Detect(subbed, blobs);
        // Largest blob is assumed to be the mouse.
        // NOTE(review): if no blobs are found, mouseBlob stays null and the
        // .Centroid access below throws - confirm the caller guarantees a blob.
        CvBlob mouseBlob = null;
        double maxArea = -1;
        foreach (var blob in blobs.Values)
        {
            if (blob.Area > maxArea)
            {
                mouseBlob = blob;
                maxArea = blob.Area;
            }
        }
        //double gapDistance = GetBestGapDistance(rbsk);
        double gapDistance = 50;
        RBSK.Settings.GapDistance = gapDistance;
        //PointF[] headPoints = ProcessFrame(orig, RBSK);
        PointF center = mouseBlob.Centroid;
        //LineSegment2DF[] targetPoints = null;
        Point[] mouseContour = mouseBlob.GetContour();
        orig.DrawPolyline(mouseContour, true, new Bgr(Color.Cyan));
        Image1 = ImageService.ToBitmapSource(orig);
        // Head points: use the precomputed set when available, otherwise run the
        // rule-based head finder on this frame's contour.
        PointF[] result;
        if (HeadPoints != null)
        {
            result = HeadPoints[SliderValue].HeadPoints;
        }
        else
        {
            double prob = 0;
            RBSK headRbsk = MouseService.GetStandardMouseRules();
            headRbsk.Settings.GapDistance = 65;
            headRbsk.Settings.BinaryThreshold = 20;
            List<List<PointF>> allKeyPoints = headRbsk.FindKeyPoints(mouseContour, headRbsk.Settings.NumberOfSlides, false);
            result = headRbsk.FindPointsFromRules(allKeyPoints[0], binary, ref prob);
        }
        if (result != null)
        {
            // Preview the head points over a clone of the original frame.
            using (Image<Bgr, Byte> test = orig.Clone())
            {
                foreach (var point in result)
                {
                    test.Draw(new CircleF(point, 3), new Bgr(Color.Red), 3);
                }
                Image1 = ImageService.ToBitmapSource(test);
            }
        }
        else
        {
            return; // no head found - nothing more to measure
        }
        RotatedRect rotatedRect = CvInvoke.MinAreaRect(mouseContour.Select(x => new PointF(x.X, x.Y)).ToArray());
        //Console.WriteLine("Size: " + rotatedRect.Size);
        ISkeleton skel = ModelResolver.Resolve<ISkeleton>();
        Image<Gray, Byte> tempBinary = binary.Clone();
        System.Drawing.Rectangle rect = mouseBlob.BoundingBox;
        Image<Gray, Byte> binaryRoi = tempBinary.GetSubRect(rect);
        using (Image<Bgr, Byte> displayImage = subbed.Convert<Bgr, Byte>())
        using (Image<Gray, Byte> skelImage = skel.GetSkeleton(binaryRoi))
        using (Image<Bgr, Byte> drawImage = orig.Clone())
        using (Image<Bgr, Byte> tempImage2 = new Image<Bgr, byte>(drawImage.Size))
        {
            //-----------------------------------------
            // Keep a persistent copy of the skeleton; dispose the previous one.
            if (SkelImage != null)
            {
                SkelImage.Dispose();
            }
            SkelImage = skelImage.Clone();
            //--------------------------------------------
            tempImage2.SetValue(new Bgr(Color.Black));
            ISpineFinding spineFinder = ModelResolver.Resolve<ISpineFinding>();
            spineFinder.NumberOfCycles = 3;
            spineFinder.NumberOfIterations = 1;
            spineFinder.SkeletonImage = skelImage;
            //spineFinder.RotatedRectangle = rotatedRect;
            Image5 = ImageService.ToBitmapSource(skelImage);
            // Tail tip search: for each contour point, measure the angle between the
            // vectors to points `delta` steps either side; the sharpest spike in
            // (9, 30) degrees is taken as the tail tip.
            const int delta = 20;
            double smallestAngle = double.MaxValue;
            Point tailPoint = Point.Empty;
            for (int i = 0; i < mouseContour.Length; i++)
            {
                int leftDelta = i - delta;
                int rightDelta = i + delta;
                if (leftDelta < 0)
                {
                    leftDelta += mouseContour.Length; // wrap around the closed contour
                }
                if (rightDelta >= mouseContour.Length)
                {
                    rightDelta -= mouseContour.Length;
                }
                Point testPoint = mouseContour[i];
                Point leftPoint = mouseContour[leftDelta];
                Point rightPoint = mouseContour[rightDelta];
                Vector v1 = new Vector(leftPoint.X - testPoint.X, leftPoint.Y - testPoint.Y);
                Vector v2 = new Vector(rightPoint.X - testPoint.X, rightPoint.Y - testPoint.Y);
                double angle = Math.Abs(Vector.AngleBetween(v1, v2));
                if (angle < 30 && angle > 9)
                {
                    if (angle < smallestAngle)
                    {
                        smallestAngle = angle;
                        tailPoint = testPoint;
                    }
                }
            }
            // Head/tail moved into blob-local (ROI) coordinates for the spine finder.
            PointF headCornerCorrect = new PointF(result[2].X - rect.X, result[2].Y - rect.Y);
            PointF tailCornerCorrect = new PointF(tailPoint.X - rect.X, tailPoint.Y - rect.Y);
            PointF[] spine = spineFinder.GenerateSpine(headCornerCorrect, tailCornerCorrect);
            Point topCorner = mouseBlob.BoundingBox.Location;
            // Spine back in full-image coordinates.
            PointF[] spineCornerCorrected = new PointF[spine.Length];
            for (int i = 0; i < spine.Length; i++)
            {
                spineCornerCorrected[i] = new PointF(spine[i].X + topCorner.X, spine[i].Y + topCorner.Y);
            }
            ITailFinding tailFinding = ModelResolver.Resolve<ITailFinding>();
            double rotatedWidth = rotatedRect.Size.Width < rotatedRect.Size.Height ? rotatedRect.Size.Width : rotatedRect.Size.Height;
            List<Point> bodyPoints;
            if (result != null)
            {
                // Orient the spine so index 0 is the head end.
                double firstDist = result[2].DistanceSquared(spineCornerCorrected.First());
                double lastDist = result[2].DistanceSquared(spineCornerCorrected.Last());
                if (firstDist < lastDist)
                {
                    spineCornerCorrected = spineCornerCorrected.Reverse().ToArray();
                }
            }
            double waistLength;
            double pelvicArea1, pelvicArea2;
            tailFinding.FindTail(mouseContour, spineCornerCorrected, displayImage, rotatedWidth, mouseBlob.Centroid, out bodyPoints, out waistLength, out pelvicArea1, out pelvicArea2);
            Console.WriteLine(smallestAngle);
            if (!tailPoint.IsEmpty)
            {
                drawImage.Draw(new CircleF(tailPoint, 4), new Bgr(Color.Red), 3);
            }
            if (bodyPoints != null && bodyPoints.Count > 0)
            {
                // Body area both by polygon area and (for comparison) a fitted ellipse.
                Point[] bPoints = bodyPoints.ToArray();
                double volume = MathExtension.PolygonArea(bPoints);
                Emgu.CV.Structure.Ellipse fittedEllipse = PointCollection.EllipseLeastSquareFitting(bPoints.Select(x => x.ToPointF()).ToArray());
                //CvInvoke.Ellipse(drawImage, fittedEllipse.RotatedRect, new MCvScalar(0, 0, 255), 2);
                Console.WriteLine("Volume: " + volume + " - " + (fittedEllipse.RotatedRect.Size.Width * fittedEllipse.RotatedRect.Size.Height) + ", Waist Length: " + waistLength);
                //Alter this to something better
                // NOTE(review): "|| true" disables the area filter - the branch always runs.
                if (MathExtension.PolygonArea(bPoints) > (rotatedRect.Size.Height * rotatedRect.Size.Width) / 6 || true)
                {
                    //tempImage2.FillConvexPoly(bPoints, new Bgr(Color.White));
                    tempImage2.DrawPolyline(bPoints, true, new Bgr(Color.White));
                    PointF centroid = MathExtension.FindCentroid(bPoints);
                    System.Drawing.Rectangle minRect;
                    // Flood fill from the centroid to get a filled body region
                    // (mask must be 2 px larger than the image per OpenCV's contract).
                    Image<Gray, Byte> temp2 = new Image<Gray, byte>(tempImage2.Width + 2, tempImage2.Height + 2);
                    CvInvoke.FloodFill(tempImage2, temp2, centroid.ToPoint(), new MCvScalar(255, 255, 255), out minRect, new MCvScalar(5, 5, 5), new MCvScalar(5, 5, 5));
                    using (Image<Gray, Byte> nonZeroImage = tempImage2.Convert<Gray, Byte>())
                    {
                        int[] volume2 = nonZeroImage.CountNonzero();
                        Console.WriteLine("Volume2: " + volume2[0]);
                        //int tester = 9;
                        //using (Image<Gray, Byte> t1 = nonZeroImage.Erode(tester))
                        //using (Image<Gray, Byte> t2 = t1.Dilate(tester))
                        //using (Image<Gray, Byte> t3 = t2.Erode(tester))
                        //using (Image<Gray, Byte> t4 = t3.Dilate(tester))
                        //using (Image<Gray, Byte> t5 = t4.Erode(tester))
                        //using (Image<Gray, Byte> t6 = t5.Dilate(tester))
                        //using (Image<Gray, Byte> t7 = t6.Erode(tester))
                        //{
                        //    Image6 = ImageService.ToBitmapSource(t7);
                        //}
                    }
                    tempImage2.Draw(new CircleF(centroid, 2), new Bgr(Color.Blue), 2);
                    // Signed distance from the body centroid to the nearest spine segment
                    // (negative when the centroid lies below the segment).
                    double distanceToSpine = double.MaxValue;
                    PointF p11 = PointF.Empty, p22 = PointF.Empty;
                    for (int i = 1; i < spineCornerCorrected.Length; i++)
                    {
                        PointF point1 = spineCornerCorrected[i - 1];
                        PointF point2 = spineCornerCorrected[i];
                        double cDist = MathExtension.MinDistanceFromLineToPoint(point1, point2, centroid);
                        if (cDist < distanceToSpine)
                        {
                            p11 = point1;
                            p22 = point2;
                            distanceToSpine = cDist;
                        }
                    }
                    PointSideVector psv = MathExtension.FindSide(p11, p22, centroid);
                    if (psv == PointSideVector.Below)
                    {
                        distanceToSpine *= -1;
                    }
                    Console.WriteLine(distanceToSpine + ",");
                }
            }
            // Draw the spine (shifted back to full-image coordinates) on both overlays.
            for (int i = 1; i < spine.Length; i++)
            {
                PointF point1 = spine[i - 1];
                PointF point2 = spine[i];
                point1.X += topCorner.X;
                point1.Y += topCorner.Y;
                point2.X += topCorner.X;
                point2.Y += topCorner.Y;
                LineSegment2D line = new LineSegment2D(new Point((int)point1.X, (int)point1.Y), new Point((int)point2.X, (int)point2.Y));
                drawImage.Draw(line, new Bgr(Color.Aqua), 2);
                tempImage2.Draw(line, new Bgr(Color.Cyan), 2);
            }
            drawImage.Draw(new CircleF(mouseBlob.Centroid, 2), new Bgr(Color.Blue), 2);
            Image3 = ImageService.ToBitmapSource(drawImage);
            Image6 = ImageService.ToBitmapSource(tempImage2);
            double rotatedRectArea = rotatedRect.Size.Width * rotatedRect.Size.Height;
            if (rotatedRectArea < 75000)
            {
                //Console.WriteLine(rotatedRectArea);
                //return;
            }
            else
            {
                //Console.WriteLine(rotatedRectArea);
            }
            // Split the min-area rect into head half / rear half along its short axis.
            double height = rotatedRect.Size.Height;
            double width = rotatedRect.Size.Width;
            //double angle = rotatedRect.Angle;
            bool heightLong = height > width;
            double halfLength;
            PointF[] vertices = rotatedRect.GetVertices();
            if (heightLong)
            {
                halfLength = height;
            }
            else
            {
                halfLength = width;
            }
            halfLength /= 2; // half of the rect's long dimension
            PointF[] sidePoints1 = new PointF[4], midPoints = new PointF[2];
            PointF p1 = vertices[0], p2 = vertices[1], p3 = vertices[2], p4 = vertices[3];
            double d1 = p1.DistanceSquared(p2);
            double d2 = p2.DistanceSquared(p3);
            if (d1 < d2)
            {
                //p1 and p2, p3 and p4 are side points
                sidePoints1[0] = p1;
                sidePoints1[1] = p2;
                sidePoints1[2] = p4;
                sidePoints1[3] = p3;
                midPoints[0] = p1.MidPoint(p4);
                midPoints[1] = p2.MidPoint(p3);
            }
            else
            {
                //p2 and p3, p1 and p4 are side points
                sidePoints1[0] = p1;
                sidePoints1[1] = p4;
                sidePoints1[2] = p2;
                sidePoints1[3] = p3;
                midPoints[0] = p1.MidPoint(p2);
                midPoints[1] = p3.MidPoint(p4);
            }
            PointF intersection1 = PointF.Empty;
            PointF intersection2 = PointF.Empty;
            using (Image<Gray, Byte> halfTest1 = origGray.CopyBlank())
            using (Image<Gray, Byte> halfTest2 = origGray.CopyBlank())
            {
                Point[] rect1 = new Point[] { new Point((int)sidePoints1[0].X, (int)sidePoints1[0].Y), new Point((int)midPoints[0].X, (int)midPoints[0].Y), new Point((int)midPoints[1].X, (int)midPoints[1].Y), new Point((int)sidePoints1[1].X, (int)sidePoints1[1].Y) };
                Point[] rect2 = new Point[] { new Point((int)sidePoints1[2].X, (int)sidePoints1[2].Y), new Point((int)midPoints[0].X, (int)midPoints[0].Y), new Point((int)midPoints[1].X, (int)midPoints[1].Y), new Point((int)sidePoints1[3].X, (int)sidePoints1[3].Y) };
                if (MathExtension.PolygonContainsPoint(rect1, center))
                {
                    //Rect 1 is head, look for line in r2
                }
                else if (MathExtension.PolygonContainsPoint(rect2, center))
                {
                    //Rect 2 is head, look for line in r1
                }
                else
                {
                    //Something has gone wrong
                }
                halfTest1.FillConvexPoly(rect1, new Gray(255));
                halfTest2.FillConvexPoly(rect2, new Gray(255));
                //Image5 = ImageService.ToBitmapSource(halfTest1);
                //Image6 = ImageService.ToBitmapSource(halfTest2);
                //binary.Copy(holder1, halfTest1);
                //binary.Copy(holder2, halfTest2);
                // Count foreground pixels in each half; the fuller half is the head end.
                int count1, count2;
                //using (Image<Gray, Byte> binaryInverse = subbed.Not())
                using (Image<Gray, Byte> holder1 = subbed.Copy(halfTest1))
                using (Image<Gray, Byte> holder2 = subbed.Copy(halfTest2))
                {
                    //Image4 = ImageService.ToBitmapSource(subbed);
                    //Image5 = ImageService.ToBitmapSource(holder1);
                    //Image6 = ImageService.ToBitmapSource(holder2);
                    count1 = holder1.CountNonzero()[0];
                    count2 = holder2.CountNonzero()[0];
                }
                // Quarter-rect points on the rear half; qr2-qr3 is the "fat line"
                // across the hips.
                PointF qr1 = PointF.Empty, qr2 = PointF.Empty, qr3 = PointF.Empty, qr4 = PointF.Empty;
                if (count1 > count2)
                {
                    //holder 1 is head, holder 2 is rear
                    qr1 = sidePoints1[2];
                    qr2 = sidePoints1[2].MidPoint(midPoints[0]);
                    qr3 = sidePoints1[3].MidPoint(midPoints[1]);
                    qr4 = sidePoints1[3];
                }
                else if (count1 < count2)
                {
                    //holder 2 is head, holder 1 is rear
                    qr1 = sidePoints1[0];
                    qr2 = sidePoints1[0].MidPoint(midPoints[0]);
                    qr3 = sidePoints1[1].MidPoint(midPoints[1]);
                    qr4 = sidePoints1[1];
                }
                //fat line is qr2, qr3
                PointF centerPoint = qr2.MidPoint(qr3);
                PointF i1 = qr2;
                PointF i2 = qr3;
                intersection1 = MathExtension.PolygonLineIntersectionPoint(centerPoint, i1, mouseContour);
                intersection2 = MathExtension.PolygonLineIntersectionPoint(centerPoint, i2, mouseContour);
            }
            // Point a quarter-length behind the centroid along the rect's axis.
            double deltaX = halfLength * Math.Cos(rotatedRect.Angle * MathExtension.Deg2Rad);
            double deltaY = halfLength * Math.Sin(rotatedRect.Angle * MathExtension.Deg2Rad);
            const double scaleFactor = 0.25;
            PointF newPoint = new PointF((float)(center.X - (deltaX * scaleFactor)), (float)(center.Y - (deltaY * scaleFactor)));
            PointF intersectionPoint1 = PointF.Empty;
            PointF intersectionPoint2 = PointF.Empty;
            Point[] temp = null;
            PointF[] headPoints = RBSKService.RBSKParallel(binary, MouseService.GetStandardMouseRules(), ref temp);
            if (headPoints != null)
            {
                // Perpendicular to the nose direction through newPoint, intersected
                // with the contour on either side.
                PointF tip = headPoints[2];
                //targetPoints = new LineSegment2DF[3];
                Point centerInt = new Point((int)newPoint.X, (int)newPoint.Y);
                //targetPoints[0] = new LineSegment2DF(centerInt, new PointF(tip.X, tip.Y));
                Vector forwardVec = new Vector(tip.X - newPoint.X, tip.Y - newPoint.Y);
                Vector rotatedVec = new Vector(-forwardVec.Y, forwardVec.X);
                PointF i1 = new PointF((float)(newPoint.X + (rotatedVec.X * 1)), (float)(newPoint.Y + (rotatedVec.Y * 1)));
                PointF i2 = new PointF((float)(newPoint.X - (rotatedVec.X * 1)), (float)(newPoint.Y - (rotatedVec.Y * 1)));
                //targetPoints[1] = new LineSegment2DF(centerInt, i1);
                //targetPoints[2] = new LineSegment2DF(centerInt, i2);
                intersectionPoint1 = MathExtension.PolygonLineIntersectionPoint(newPoint, i1, mouseContour);
                intersectionPoint2 = MathExtension.PolygonLineIntersectionPoint(newPoint, i2, mouseContour);
            }
            //displayImage.Draw(mouseBlob.BoundingBox, new Bgr(Color.Red), 2);
            displayImage.Draw(new CircleF(mouseBlob.Centroid, 3), new Bgr(Color.Blue), 2);
            displayImage.Draw(rotatedRect, new Bgr(Color.Yellow), 3);
            //displayImage.Draw(mouseContour, new Bgr(Color.Aqua), 2);
            //displayImage.FillConvexPoly(new Point[] { new Point((int)sidePoints1[0].X, (int)sidePoints1[0].Y), new Point((int)midPoints[0].X, (int)midPoints[0].Y), new Point((int)midPoints[1].X, (int)midPoints[1].Y), new Point((int)sidePoints1[1].X, (int)sidePoints1[1].Y) }, new Bgr(Color.Blue));
            //if (targetPoints != null)
            //{
            //    displayImage.Draw(targetPoints[0], new Bgr(Color.Green), 2);
            //    displayImage.Draw(targetPoints[1], new Bgr(Color.Green), 2);
            //    displayImage.Draw(targetPoints[2], new Bgr(Color.Green), 2);
            //}
            //if (!intersection1.IsEmpty && !intersection2.IsEmpty)
            //{
            //    LineSegment2DF lineSegment = new LineSegment2DF(intersection1, intersection2);
            //    displayImage.Draw(lineSegment, new Bgr(Color.MediumPurple), 4);
            //    //Console.WriteLine(lineSegment.Length);
            //}
            //displayImage.Draw(new CircleF(newPoint, 4), new Bgr(Color.MediumPurple), 3);
            //Console.WriteLine(rotatedRect.Angle);
            Image4 = ImageService.ToBitmapSource(displayImage);
        }
    }
}
// Finds the largest skin-coloured blob (area 30k-150k px) in the mask, crops it
// to a square bitmap anchored at the blob's bounding box, classifies the gesture
// via fow_prop, and updates the preview image boxes and result label.
// Parameter `skin`: binary skin mask of the current frame.
private void ExtractBlobAndCrop(Image<Gray, byte> skin)
{
    using (MemStorage storage = new MemStorage())
    {
        Image<Gray, Byte> smoothedFrame = new Image<Gray, byte>(skin.Size);
        CvInvoke.GaussianBlur(skin, smoothedFrame, new Size(3, 3), 1); //filter out noises
        imageBoxFrameGrabber.Image = skin;
        Mat forgroundMask = new Mat();
        // Feed the raw mask into the foreground detector to keep its model updated
        // (blob detection itself runs on `skin` directly, below).
        Mat ss = skin.Mat;
        fgDetector.Apply(ss, forgroundMask);
        CvBlobs blobs = new CvBlobs();
        blobDetector.Detect(skin, blobs);
        blobs.FilterByArea(30000, 150000); // hand-sized blobs only
        // Pick the largest remaining blob.
        CvBlob b = null;
        int area = 0;
        foreach (var pair in blobs)
        {
            CvBlob btemp = pair.Value;
            if (area < btemp.Area)
            {
                b = pair.Value;
                area = btemp.Area;
            }
        }
        // area != 0 implies b was assigned above.
        if (area != 0)
        {
            CvInvoke.Rectangle(currentFrame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
            // Square crop sized to the larger side of the bounding box so the
            // resized classifier inputs keep the hand's aspect ratio.
            int side = b.BoundingBox.Width > b.BoundingBox.Height ? b.BoundingBox.Width : b.BoundingBox.Height;
            // BUGFIX: skin_bit previously leaked when no blob was found, and the
            // Graphics object was never disposed - both are scoped/using'd now.
            Bitmap skin_bit = skin.ToBitmap();
            Bitmap crop_image = new Bitmap(side, side);
            using (Graphics g = Graphics.FromImage(crop_image))
            {
                // Shift the source so the blob's top-left lands at (0,0).
                g.DrawImage(skin_bit, -b.BoundingBox.X, -b.BoundingBox.Y);
            }
            croped = new Image<Gray, Byte>(crop_image).Resize(350, 350, Inter.Cubic);
            croped1 = new Image<Gray, Byte>(crop_image).Resize(100, 100, Inter.Cubic);
            croped2 = new Image<Gray, Byte>(crop_image).Resize(50, 50, Inter.Cubic);
            int gesture_number = fow_prop.image(croped2);
            label1.Text = "" + gesture_number;
            imageBoxSkin.Image = croped;
            crop_image.Dispose();
            skin_bit.Dispose();
        }
    }
}