void ProcessFrame(object sender, EventArgs e)
{
    Mat frame = _cameraCapture.QueryFrame();
    Mat smoothedFrame = new Mat();
    CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(3, 3), 1); // filter out noise
    //frame._SmoothGaussian(3);

    #region use the BG/FG detector to find the foreground mask
    Mat foregroundMask = new Mat();
    _fgDetector.Apply(smoothedFrame, foregroundMask);
    #endregion

    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(foregroundMask.ToImage<Gray, byte>(), blobs);
    blobs.FilterByArea(100, int.MaxValue);

    // mean of frame width and height (the original summed Width twice)
    float scale = (frame.Width + frame.Height) / 2.0f;
    _tracker.Update(blobs, 0.01 * scale, 5, 5);

    foreach (var pair in _tracker)
    {
        CvTrack b = pair.Value;
        CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
        CvInvoke.PutText(frame, b.Id.ToString(),
            new Point((int)Math.Round(b.Centroid.X), (int)Math.Round(b.Centroid.Y)),
            FontFace.HersheyPlain, 1.0, new MCvScalar(255.0, 255.0, 255.0));
    }

    imageBox1.Image = frame;
    imageBox2.Image = foregroundMask;
}
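The fields this handler relies on (_cameraCapture, _fgDetector, _blobDetector, _tracker) are declared elsewhere. A minimal setup sketch, assuming Emgu CV 3.x where these types live in Emgu.CV, Emgu.CV.VideoSurveillance, and Emgu.CV.Cvb; the camera index and constructor defaults are illustrative, not taken from the snippet:

    using Emgu.CV;
    using Emgu.CV.Cvb;
    using Emgu.CV.VideoSurveillance;

    VideoCapture _cameraCapture;
    BackgroundSubtractorMOG2 _fgDetector;
    CvBlobDetector _blobDetector;
    CvTracks _tracker;

    void InitTracking()
    {
        _cameraCapture = new VideoCapture(0);         // device 0 (assumed)
        _fgDetector = new BackgroundSubtractorMOG2(); // default history / variance threshold
        _blobDetector = new CvBlobDetector();         // connected-component labeling
        _tracker = new CvTracks();                    // blob-to-track association
    }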
static void Main(string[] args)
{
    CvScalar hsv_min = Cv.RGB(150, 70, 70);   // lower bound of the HSV range to extract
    CvScalar hsv_max = Cv.RGB(360, 255, 255); // upper bound of the HSV range to extract
    var cap = Cv.CreateCameraCapture(0);      // capture from the camera
    IplImage im = new IplImage();             // holds the camera frame
    IplImage hsv = Cv.CreateImage(new CvSize(640, 480), BitDepth.U8, 3);  // holds the HSV image
    IplImage mask = Cv.CreateImage(new CvSize(640, 480), BitDepth.U8, 1); // holds the mask image

    while (Cv.WaitKey(1) == -1) // show the camera feed until a key is pressed
    {
        im = Cv.QueryFrame(cap);                        // grab a frame from the camera
        Cv.CvtColor(im, hsv, ColorConversion.BgrToHsv); // convert BGR to HSV color space
        Cv.InRangeS(hsv, hsv_min, hsv_max, mask);       // extract colors in the range (build the mask)
        Cv.Dilate(mask, mask, null, 1);                 // dilation
        Cv.Erode(mask, mask, null, 1);                  // erosion
        Cv.Erode(mask, mask, null, 1);                  // erosion
        Cv.Dilate(mask, mask, null, 1);                 // dilation

        // Guard: labeling fails if the mask contains no white region at all
        Cv.Ellipse(mask, new CvPoint(0, 0), new CvSize(1, 1), 0, 0, 360, CvColor.White, -1);

        CvBlobs blobs = new CvBlobs(mask);    // label the mask image
        CvBlob maxBlob = blobs.LargestBlob(); // take the blob with the largest area
        CvPoint pt = maxBlob.Centroid;        // centroid of that largest blob

        // draw a crosshair at the centroid
        Cv.Line(im, new CvPoint(pt.X, pt.Y - 50), new CvPoint(pt.X, pt.Y + 50), new CvColor(0, 255, 0), 5);
        Cv.Line(im, new CvPoint(pt.X - 50, pt.Y), new CvPoint(pt.X + 50, pt.Y), new CvColor(0, 255, 0), 5);

        Cv.ShowImage("Frame", im);  // show the frame
        Cv.ShowImage("Mask", mask); // show the mask image
    }
}
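One caveat when adapting this snippet: Cv.RGB(r, g, b) packs its arguments in BGR scalar order, so the two bounds above do not read as (H, S, V) directly, and OpenCV's 8-bit HSV hue only runs 0-179, so a hue of 360 simply saturates. A sketch of the more explicit form, with what appears to be the intended meaning:

    // Explicit HSV bounds (assumed intent; 8-bit hue wraps at 180)
    CvScalar hsvMin = new CvScalar(150, 70, 70);   // H >= 150, S >= 70, V >= 70
    CvScalar hsvMax = new CvScalar(179, 255, 255); // H capped at the 8-bit maximum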
public int FindPlates()
{
    IplImage labelImg = new IplImage(src.Size, CvBlobLib.DepthLabel, 1);
    blobs = new CvBlobs();
    plate.Clear();
    CvBlobLib.Label(timg, labelImg, blobs);
    CvBlobLib.FilterByArea(blobs, x, y);
    IplImage srctemp = src.Clone();
    CvBlobLib.RenderBlobs(labelImg, blobs, src, srctemp, RenderBlobsMode.BoundingBox | RenderBlobsMode.Angle);

    foreach (var item in blobs)
    {
        item.Value.SetImageROItoBlob(pimg);

        // aspect ratio of a plate lies between 3.5 and 5.4
        double ratio = (double)item.Value.Rect.Width / item.Value.Rect.Height;
        double angle = (double)item.Value.CalcAngle();
        //if (ratio > 3.5 && ratio < 5.4 && angle > -15 && angle < 15)
        if (ratio > 1 && ratio < 6 && angle > -15 && angle < 15)
        {
            // IplImage platetemp = new IplImage(new CvSize(pimg.ROI.Width, pimg.ROI.Height), pimg.Depth, pimg.NChannels);
            IplImage platetemp = new IplImage(new CvSize(140, 27), pimg.Depth, pimg.NChannels);
            Cv.Resize(pimg, platetemp);
            // Cv.Copy(pimg, platetemp);
            plate.Add(platetemp);
            src.Rectangle(item.Value.Rect, new CvScalar(0, 0, 255), 2, LineType.Link4);
        }
    }

    // CvBlobLib.RenderBlobs(labelImg, blobs, src, src, RenderBlobsMode.BoundingBox);
    src.ResetROI();
    return plate.Count;
}
private CvBlobs GetBlobs(Image<Gray, Byte> img)
{
    Image<Gray, Byte> im = img.Clone();
    im._Dilate(2);
    im._Erode(2);
    CvBlobs blobs = new CvBlobs();
    detector.Detect(im, blobs);
    return blobs;
}
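A possible call site for GetBlobs, assuming detector is a CvBlobDetector field and binaryFrame is a thresholded Image<Gray, byte>; the area bound is illustrative:

    CvBlobs blobs = GetBlobs(binaryFrame);
    blobs.FilterByArea(100, int.MaxValue); // drop specks under 100 px
    foreach (CvBlob b in blobs.Values)
        Console.WriteLine("blob at {0}, area {1}", b.Centroid, b.Area);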
public ObjectTracker(TrackerSettings settings)
{
    _settings = settings;
    _foregroundDetector = new BackgroundSubtractorMOG2(
        _settings.BackgroundSubtractorHistory.Value,
        _settings.BackgroundSubtractorMaxComponents.Value,
        false);
    _blobDetector = new CvBlobDetector();
    _blobs = new CvBlobs();
    _tracks = new CvTracks();
    _trackedObjectIdentities = new Dictionary<uint, TrackedObject>();
}
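TrackerSettings itself is not shown. A minimal sketch consistent with the nullable accesses above; the property names are copied from the snippet, the defaults are assumptions only:

    public class TrackerSettings
    {
        // Nullable so callers can tell "unset" apart from an explicit value.
        public int? BackgroundSubtractorHistory { get; set; } = 500;      // assumed default
        public int? BackgroundSubtractorMaxComponents { get; set; } = 16; // assumed default
    }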
private static void CvBlobsSample()
{
    var src = new Mat("data/shapes.png", ImreadModes.GrayScale);
    var bin = src.Threshold(0, 255, ThresholdTypes.Otsu);
    var view = bin.CvtColor(ColorConversionCodes.GRAY2BGR);

    var blobs = new CvBlobs(bin);
    blobs.RenderBlobs(bin, view, RenderBlobsMode.Angle | RenderBlobsMode.BoundingBox | RenderBlobsMode.Color);

    Window.ShowImages(bin, view);
}
private static CvBlob GetLargest(CvBlobs blobs)
{
    CvBlob mb = null;
    int max = 0;
    foreach (CvBlob blob in blobs.Values)
    {
        int size = blob.Area;
        if (size > max)
        {
            max = size;
            mb = blob;
        }
    }
    return mb;
}
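The same scan can be written more compactly with LINQ (and OpenCvSharp's CvBlobs also exposes LargestBlob(), used in other snippets here); a sketch:

    using System.Linq;

    // Equivalent to GetLargest; returns null for an empty collection.
    private static CvBlob GetLargestLinq(CvBlobs blobs) =>
        blobs.Values.OrderByDescending(b => b.Area).FirstOrDefault();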
public void SimpleTest()
{
    using (var src = new IplImage(@"Image\Blob\shapes2.png", LoadMode.GrayScale))
    using (var binary = new IplImage(src.Size, BitDepth.U8, 1))
    using (var render = new IplImage(src.Size, BitDepth.U8, 3))
    {
        Cv.Threshold(src, binary, 0, 255, ThresholdType.Otsu);
        var blobs = new CvBlobs(binary);
        blobs.RenderBlobs(src, render);
        using (new CvWindow(render))
        {
            Cv.WaitKey();
        }
    }
}
private static void FilterByMaximalBlob(IplImage imgSrc, IplImage imgDst)
{
    using (CvBlobs blobs = new CvBlobs())
    using (IplImage imgLabelData = new IplImage(imgSrc.Size, CvBlobLib.DepthLabel, 1))
    {
        imgDst.Zero();
        blobs.Label(imgSrc, imgLabelData);
        CvBlob max = blobs[blobs.GreaterBlob()];
        if (max == null)
        {
            return;
        }
        blobs.FilterByArea(max.Area, max.Area);
        blobs.FilterLabels(imgLabelData, imgDst);
    }
}
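A possible caller, assuming mask is a binary single-channel image; the file name is illustrative:

    using (var mask = new IplImage("mask.png", LoadMode.GrayScale))
    using (var largestOnly = new IplImage(mask.Size, BitDepth.U8, 1))
    {
        FilterByMaximalBlob(mask, largestOnly);
        // largestOnly is now white only where the biggest blob was.
    }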
/// <summary>
/// Detect blobs from the input image.
/// </summary>
/// <param name="img">The input image</param>
/// <param name="blobs">The storage for the detected blobs</param>
/// <returns>Number of pixels that have been labeled.</returns>
public uint Detect(Image<Gray, Byte> img, CvBlobs blobs)
{
    Size size = img.Size;
    if (_data == null || _data.GetLength(0) != size.Height || _data.GetLength(1) != size.Width)
    {
        DisposeObject();
        _data = new UInt32[size.Height, size.Width];
        _dataHandle = GCHandle.Alloc(_data, GCHandleType.Pinned);
        _ptr = CvInvoke.cvCreateImageHeader(size, (CvEnum.IplDepth)(_sizeOfUInt32 * 8), 1);
        CvInvoke.cvSetData(_ptr, _dataHandle.AddrOfPinnedObject(), _sizeOfUInt32 * size.Width);
    }
    return cvbCvLabel(img, _ptr, blobs);
}
public Blob()
{
    using (var imgSrc = new IplImage(FilePath.Image.Shapes, LoadMode.Color))
    using (var imgBinary = new IplImage(imgSrc.Size, BitDepth.U8, 1))
    using (var imgRender = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    using (var imgContour = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    using (var imgPolygon = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    {
        Cv.CvtColor(imgSrc, imgBinary, ColorConversion.BgrToGray);
        Cv.Threshold(imgBinary, imgBinary, 100, 255, ThresholdType.Binary);

        CvBlobs blobs = new CvBlobs();
        blobs.Label(imgBinary);

        foreach (KeyValuePair<int, CvBlob> item in blobs)
        {
            CvBlob b = item.Value;
            Console.WriteLine("{0} | Centroid:{1} Area:{2}", item.Key, b.Centroid, b.Area);

            CvContourChainCode cc = b.Contour;
            cc.Render(imgContour);

            CvContourPolygon polygon = cc.ConvertToPolygon();
            foreach (CvPoint p in polygon)
            {
                imgPolygon.Circle(p, 1, CvColor.Red, -1);
            }
            /*
            CvPoint2D32f circleCenter;
            float circleRadius;
            GetEnclosingCircle(polygon, out circleCenter, out circleRadius);
            imgPolygon.Circle(circleCenter, (int) circleRadius, CvColor.Green, 2);
            */
        }

        blobs.RenderBlobs(imgSrc, imgRender);

        using (new CvWindow("render", imgRender))
        using (new CvWindow("contour", imgContour))
        using (new CvWindow("polygon vertices", imgPolygon))
        {
            Cv.WaitKey(0);
        }
    }
}
public void Dispose()
{
    if (_foregroundDetector == null)
        return;

    try
    {
        _blobDetector.Dispose();
        _blobs.Dispose();
        _tracks.Dispose();
        ((IDisposable)_foregroundDetector).Dispose();
    }
    catch (Exception ex)
    {
        Log.Error("Exception disposing foreground detector", ex);
    }

    _blobDetector = null;
    _blobs = null;
    _tracks = null;
    _foregroundDetector = null;
}
public BlobOld()
{
    using (IplImage imgSrc = new IplImage(Const.ImageShapes, LoadMode.Color))
    using (IplImage imgBinary = new IplImage(imgSrc.Size, BitDepth.U8, 1))
    using (IplImage imgLabel = new IplImage(imgSrc.Size, BitDepth.F32, 1))
    using (IplImage imgRender = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    using (IplImage imgContour = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    using (IplImage imgPolygon = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    {
        Cv.CvtColor(imgSrc, imgBinary, ColorConversion.BgrToGray);
        Cv.Threshold(imgBinary, imgBinary, 100, 255, ThresholdType.Binary);

        using (CvBlobs blobs = new CvBlobs())
        {
            uint result = blobs.Label(imgBinary, imgLabel);

            foreach (KeyValuePair<uint, CvBlob> item in blobs)
            {
                CvBlob b = item.Value;
                Console.WriteLine("{0} | Centroid:{1} Area:{2}", item.Key, b.Centroid, b.Area);

                CvContourChainCode cc = b.Contour;
                cc.RenderContourChainCode(imgContour);

                CvContourPolygon polygon = cc.ConvertChainCodesToPolygon();
                foreach (CvPoint p in polygon)
                {
                    imgPolygon.Circle(p, 1, CvColor.Red, -1);
                }
            }

            blobs.RenderBlobs(imgLabel, imgSrc, imgRender);

            using (new CvWindow("render", imgRender))
            using (new CvWindow("contour", imgContour))
            using (new CvWindow("polygon vertices", imgPolygon))
            {
                Cv.WaitKey(0);
            }
        }
    }
}
protected override void processarImagem(bool mapeamento)
{
    preencherImagemPlanoDeFundo();
    preencherImagemBinariaSemPlanoDeFundo();
    Mat imagemCinza = ConverterParaCinzas(mImagemBinariaSemPlanoDeFundo);

    if (mapeamento)
    {
        //Image<Bgr, byte> mCopiaMenorPlanoFundo = mCopiaImagemPlanoDeFundo.Resize(0.7, Inter.Area);
        Mat mCopiaMenorPlanoFundo = new Mat();
        CvInvoke.Resize(mCopiaImagemPlanoDeFundo, mCopiaMenorPlanoFundo, mCopiaImagemPlanoDeFundo.Size, 0.7, 0.7);
        // Image<Gray, byte> mCopiaMenorImagemCinza = imagemCinza.Resize(0.7, Inter.Area);
        // Image<Gray, byte> mCopiaMenorImagemCinza = imagemCinza.Clone();
        //imagemCinza = imagemCinza.Resize(0.7, Inter.Area);
        mJanelaCalibracao.PlanoDeFundo.Image = mCopiaMenorPlanoFundo;
        //mJanelaCalibracao.Objetos.Image = imagemCinza;
    }

    //mTracker.Process(mImagemColorida, ConverterParaCinzas(mImagemBinariaSemPlanoDeFundo));
    mblobs = new CvBlobs();
    mBlobDetector.Detect(imagemCinza.ToImage<Gray, byte>(), mblobs);
    mblobs.FilterByArea(100, int.MaxValue);
}
private void ProcessFrame(object sender, EventArgs arg)
{
    Mat frame = new Mat();
    capture.Retrieve(frame, 0);
    Mat frame_crop = frame;
    Image<Hsv, Byte> currenthsvFrame = (frame.ToImage<Bgr, Byte>()).Convert<Hsv, Byte>();
    Image<Gray, Byte> color_one = new Image<Gray, Byte>(frame.Width, frame.Height);
    Image<Gray, Byte> color_two = new Image<Gray, Byte>(frame.Width, frame.Height);
    Image<Gray, Byte> color_three = new Image<Gray, Byte>(frame.Width, frame.Height);
    Image<Gray, Byte> color_four = new Image<Gray, Byte>(frame.Width, frame.Height);

    /*
     * Color one is red, color two is blue, color three is green, color four is yellow.
     * Green is on the right index finger, blue on the left index finger,
     * red on the right thumb, yellow on the left thumb.
     */
    Hsv hsv_min_color_one = new Hsv(0, 135, 110);
    //Hsv hsv_max_color_one = new Hsv(6, 255, 255);
    Hsv hsv_max_color_one = new Hsv(8, 255, 255);
    Hsv hsv_min_color_two = new Hsv(112, 53, 10);
    Hsv hsv_max_color_two = new Hsv(119, 255, 255);
    /*
     * Hsv hsv_min_color_three = new Hsv(68, 59, 80);
     * Hsv hsv_max_color_three = new Hsv(85, 255, 255);
     * Hsv hsv_min_color_four = new Hsv(20, 165, 165);
     * Hsv hsv_max_color_four = new Hsv(36, 255, 255);
     */
    Hsv hsv_min_color_three = new Hsv(83, 109, 105);
    Hsv hsv_max_color_three = new Hsv(109, 255, 255);
    Hsv hsv_min_color_four = new Hsv(18, 155, 155);
    Hsv hsv_max_color_four = new Hsv(35, 255, 255);

    color_one = currenthsvFrame.InRange(hsv_min_color_one, hsv_max_color_one);
    color_two = currenthsvFrame.InRange(hsv_min_color_two, hsv_max_color_two);
    color_three = currenthsvFrame.InRange(hsv_min_color_three, hsv_max_color_three);
    color_four = currenthsvFrame.InRange(hsv_min_color_four, hsv_max_color_four);

    //Blob detection
    #region Blob Detection
    //Color one detection
    Image<Bgr, Byte> smoothedFrame_cone = new Image<Bgr, byte>(currenthsvFrame.Size);
    CvInvoke.GaussianBlur(color_one, smoothedFrame_cone, new Size(3, 3), 1); // filter out noise
    Mat forgroundMask_cone = new Mat();
    fgDetector.Apply(smoothedFrame_cone, forgroundMask_cone);
    CvBlobs blobs_color_one = new CvBlobs();
    blobDetector.Detect(forgroundMask_cone.ToImage<Gray, byte>(), blobs_color_one);
    blobs_color_one.FilterByArea(minarea, maxarea);

    //Color two blob detection
    Image<Bgr, Byte> smoothedFrame_ctwo = new Image<Bgr, byte>(currenthsvFrame.Size);
    CvInvoke.GaussianBlur(color_two, smoothedFrame_ctwo, new Size(3, 3), 1); // filter out noise
    Mat forgroundMask_ctwo = new Mat();
    fgDetector.Apply(smoothedFrame_ctwo, forgroundMask_ctwo);
    CvBlobs blobs_color_two = new CvBlobs();
    blobDetector.Detect(forgroundMask_ctwo.ToImage<Gray, byte>(), blobs_color_two);
    blobs_color_two.FilterByArea(minarea, maxarea);

    //Color three blob detection
    Image<Bgr, Byte> smoothedFrame_cthree = new Image<Bgr, byte>(currenthsvFrame.Size);
    CvInvoke.GaussianBlur(color_three, smoothedFrame_cthree, new Size(3, 3), 1); // filter out noise
    Mat forgroundMask_cthree = new Mat();
    fgDetector.Apply(smoothedFrame_cthree, forgroundMask_cthree);
    CvBlobs blobs_color_three = new CvBlobs();
    blobDetector.Detect(forgroundMask_cthree.ToImage<Gray, byte>(), blobs_color_three);
    blobs_color_three.FilterByArea(minarea, maxarea);

    //Color four detection
    Image<Bgr, Byte> smoothedFrame_cfour = new Image<Bgr, byte>(currenthsvFrame.Size);
    CvInvoke.GaussianBlur(color_four, smoothedFrame_cfour, new Size(3, 3), 1); // filter out noise
    Mat forgroundMask_cfour = new Mat();
    fgDetector.Apply(smoothedFrame_cfour, forgroundMask_cfour);
    CvBlobs blobs_color_four = new CvBlobs();
    blobDetector.Detect(forgroundMask_cfour.ToImage<Gray, byte>(), blobs_color_four);
    blobs_color_four.FilterByArea(minarea, maxarea);

    //Marker interpretation
    float[] cent_color_one = new float[2];
    float[] cent_color_two = new float[2];
    float[] cent_color_three = new float[2];
    float[] cent_color_four = new float[2];

    //Centroids of markers
    foreach (var pair in blobs_color_one)
    {
        CvBlob b = pair.Value;
        CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
        cent_color_one[0] = b.Centroid.X;
        cent_color_one[1] = b.Centroid.Y;
    }
    foreach (var pair in blobs_color_two)
    {
        CvBlob b = pair.Value;
        CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
        cent_color_two[0] = b.Centroid.X;
        cent_color_two[1] = b.Centroid.Y;
    }
    foreach (var pair in blobs_color_three)
    {
        CvBlob b = pair.Value;
        CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
        cent_color_three[0] = b.Centroid.X;
        cent_color_three[1] = b.Centroid.Y;
    }
    foreach (var pair in blobs_color_four)
    {
        CvBlob b = pair.Value;
        CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
        cent_color_four[0] = b.Centroid.X;
        cent_color_four[1] = b.Centroid.Y;
    }
    #endregion

    #region Calculation
    int click_flag = 0;
    int[] x_cor = new int[4];
    int[] y_cor = new int[4];

    if (blobs_color_one.Count != 0 && blobs_color_two.Count != 0 && blobs_color_three.Count != 0 && blobs_color_four.Count != 0)
    {
        foreach (var pair in blobs_color_one)
        {
            CvBlob b = pair.Value;
            foreach (var pairr in blobs_color_two)
            {
                CvBlob c = pairr.Value;
                if ((b.Centroid.X - c.Centroid.X) * (b.Centroid.X - c.Centroid.X) + (b.Centroid.Y - c.Centroid.Y) * (b.Centroid.Y - c.Centroid.Y) <= 5000)
                {
                    click_flag = 1;
                    x_cor[0] = ((int)b.Centroid.X);
                    x_cor[1] = ((int)c.Centroid.X);
                    y_cor[0] = ((int)b.Centroid.Y);
                    y_cor[1] = ((int)c.Centroid.Y);
                    break;
                }
            }
            if (click_flag == 1)
            {
                break;
            }
        }
        if (click_flag == 1)
        {
            click_flag = 0;
            foreach (var pair in blobs_color_three)
            {
                CvBlob b = pair.Value;
                foreach (var pairr in blobs_color_four)
                {
                    CvBlob c = pairr.Value;
                    if ((b.Centroid.X - c.Centroid.X) * (b.Centroid.X - c.Centroid.X) + (b.Centroid.Y - c.Centroid.Y) * (b.Centroid.Y - c.Centroid.Y) <= 10000)
                    {
                        click_flag = 1;
                        x_cor[2] = ((int)b.Centroid.X);
                        x_cor[3] = ((int)c.Centroid.X);
                        y_cor[2] = ((int)b.Centroid.Y);
                        y_cor[3] = ((int)c.Centroid.Y);
                        break;
                    }
                }
                if (click_flag == 1)
                {
                    break;
                }
            }
        }
    }

    if (click_flag == 1)
    {
        //MessageBox.Show("clicked");
        SoundPlayer simpleSound = new SoundPlayer(@"click_sound.wav");
        simpleSound.Play();
        Array.Sort(x_cor);
        Array.Sort(y_cor);
        Bitmap ori_image = frame_crop.ToImage<Bgr, Byte>().ToBitmap();
        Bitmap crop_image = new Bitmap(x_cor[2] - x_cor[1], y_cor[2] - y_cor[1]);
        Graphics g = Graphics.FromImage(crop_image);
        g.DrawImage(ori_image, -x_cor[1], -y_cor[1]);
        //string name = string.Format("SAP_{0:ddMMyyyy_hh_mm_ss}.jpg", DateTime.Now);
        frame.Save(@"C:\Users\Shubhankar\Pictures\Camera Roll\" + string.Format("SAP_{0:ddMMyyyy_hh_mm_ss}_original.jpg", DateTime.Now));
        crop_image.Save(@"C:\Users\Shubhankar\Pictures\Camera Roll\" + string.Format("SAP_{0:ddMMyyyy_hh_mm_ss}.jpg", DateTime.Now));
        Thread.Sleep(500);
    }
    #endregion

    #region Click Gesture
    #endregion

    captureImageBox.Image = frame;
    grayscaleImageBox.Image = color_one;
    smoothedGrayscaleImageBox.Image = color_two;
    cannyImageBox.Image = color_three;
    Color4ImageBox.Image = color_four;
}
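The click tests above compare squared centroid distances against 5000 and 10000, i.e. roughly 71 px and 100 px. A small helper would make that intent explicit; a sketch (the helper name and its threshold parameter are not part of the original):

    // True when two blob centroids lie within maxDistPx pixels of each other.
    static bool CentroidsNear(CvBlob a, CvBlob b, double maxDistPx)
    {
        double dx = a.Centroid.X - b.Centroid.X;
        double dy = a.Centroid.Y - b.Centroid.Y;
        return dx * dx + dy * dy <= maxDistPx * maxDistPx; // squared compare, no sqrt needed
    }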
private void UpdateFrameNumber()
{
    Video.SetFrame(SliderValue);
    using (Image<Bgr, Byte> orig = Video.GetFrameImage())
    using (Image<Gray, Byte> origGray = orig.Convert<Gray, Byte>())
    using (Image<Gray, Byte> binary = origGray.ThresholdBinary(new Gray(ThresholdValue), new Gray(255)))
    using (Image<Gray, Byte> subbed = BinaryBackground.AbsDiff(binary))
    {
        CvBlobs blobs = new CvBlobs();
        BlobDetector.Detect(subbed, blobs);

        // Pick the largest blob as the mouse
        CvBlob mouseBlob = null;
        double maxArea = -1;
        foreach (var blob in blobs.Values)
        {
            if (blob.Area > maxArea)
            {
                mouseBlob = blob;
                maxArea = blob.Area;
            }
        }

        //double gapDistance = GetBestGapDistance(rbsk);
        double gapDistance = 50;
        RBSK.Settings.GapDistance = gapDistance;

        //PointF[] headPoints = ProcessFrame(orig, RBSK);
        PointF center = mouseBlob.Centroid;
        //LineSegment2DF[] targetPoints = null;
        Point[] mouseContour = mouseBlob.GetContour();
        orig.DrawPolyline(mouseContour, true, new Bgr(Color.Cyan));
        Image1 = ImageService.ToBitmapSource(orig);

        PointF[] result;
        if (HeadPoints != null)
        {
            result = HeadPoints[SliderValue].HeadPoints;
        }
        else
        {
            double prob = 0;
            RBSK headRbsk = MouseService.GetStandardMouseRules();
            headRbsk.Settings.GapDistance = 65;
            headRbsk.Settings.BinaryThreshold = 20;
            List<List<PointF>> allKeyPoints = headRbsk.FindKeyPoints(mouseContour, headRbsk.Settings.NumberOfSlides, false);
            result = headRbsk.FindPointsFromRules(allKeyPoints[0], binary, ref prob);
        }

        if (result != null)
        {
            using (Image<Bgr, Byte> test = orig.Clone())
            {
                foreach (var point in result)
                {
                    test.Draw(new CircleF(point, 3), new Bgr(Color.Red), 3);
                }
                Image1 = ImageService.ToBitmapSource(test);
            }
        }
        else
        {
            return;
        }

        RotatedRect rotatedRect = CvInvoke.MinAreaRect(mouseContour.Select(x => new PointF(x.X, x.Y)).ToArray());
        //Console.WriteLine("Size: " + rotatedRect.Size);

        ISkeleton skel = ModelResolver.Resolve<ISkeleton>();
        Image<Gray, Byte> tempBinary = binary.Clone();
        System.Drawing.Rectangle rect = mouseBlob.BoundingBox;
        Image<Gray, Byte> binaryRoi = tempBinary.GetSubRect(rect);
        using (Image<Bgr, Byte> displayImage = subbed.Convert<Bgr, Byte>())
        using (Image<Gray, Byte> skelImage = skel.GetSkeleton(binaryRoi))
        using (Image<Bgr, Byte> drawImage = orig.Clone())
        using (Image<Bgr, Byte> tempImage2 = new Image<Bgr, byte>(drawImage.Size))
        {
            //-----------------------------------------
            if (SkelImage != null)
            {
                SkelImage.Dispose();
            }
            SkelImage = skelImage.Clone();
            //--------------------------------------------

            tempImage2.SetValue(new Bgr(Color.Black));
            ISpineFinding spineFinder = ModelResolver.Resolve<ISpineFinding>();
            spineFinder.NumberOfCycles = 3;
            spineFinder.NumberOfIterations = 1;
            spineFinder.SkeletonImage = skelImage;
            //spineFinder.RotatedRectangle = rotatedRect;
            Image5 = ImageService.ToBitmapSource(skelImage);

            // Find the tail as the sharpest angle along the contour
            const int delta = 20;
            double smallestAngle = double.MaxValue;
            Point tailPoint = Point.Empty;
            for (int i = 0; i < mouseContour.Length; i++)
            {
                int leftDelta = i - delta;
                int rightDelta = i + delta;
                if (leftDelta < 0)
                {
                    leftDelta += mouseContour.Length;
                }
                if (rightDelta >= mouseContour.Length)
                {
                    rightDelta -= mouseContour.Length;
                }
                Point testPoint = mouseContour[i];
                Point leftPoint = mouseContour[leftDelta];
                Point rightPoint = mouseContour[rightDelta];
                Vector v1 = new Vector(leftPoint.X - testPoint.X, leftPoint.Y - testPoint.Y);
                Vector v2 = new Vector(rightPoint.X - testPoint.X, rightPoint.Y - testPoint.Y);
                double angle = Math.Abs(Vector.AngleBetween(v1, v2));
                if (angle < 30 && angle > 9)
                {
                    if (angle < smallestAngle)
                    {
                        smallestAngle = angle;
                        tailPoint = testPoint;
                    }
                }
            }

            PointF headCornerCorrect = new PointF(result[2].X - rect.X, result[2].Y - rect.Y);
            PointF tailCornerCorrect = new PointF(tailPoint.X - rect.X, tailPoint.Y - rect.Y);
            PointF[] spine = spineFinder.GenerateSpine(headCornerCorrect, tailCornerCorrect);
            Point topCorner = mouseBlob.BoundingBox.Location;
            PointF[] spineCornerCorrected = new PointF[spine.Length];
            for (int i = 0; i < spine.Length; i++)
            {
                spineCornerCorrected[i] = new PointF(spine[i].X + topCorner.X, spine[i].Y + topCorner.Y);
            }

            ITailFinding tailFinding = ModelResolver.Resolve<ITailFinding>();
            double rotatedWidth = rotatedRect.Size.Width < rotatedRect.Size.Height ? rotatedRect.Size.Width : rotatedRect.Size.Height;
            List<Point> bodyPoints;
            if (result != null)
            {
                double firstDist = result[2].DistanceSquared(spineCornerCorrected.First());
                double lastDist = result[2].DistanceSquared(spineCornerCorrected.Last());
                if (firstDist < lastDist)
                {
                    spineCornerCorrected = spineCornerCorrected.Reverse().ToArray();
                }
            }
            double waistLength;
            double pelvicArea1, pelvicArea2;
            tailFinding.FindTail(mouseContour, spineCornerCorrected, displayImage, rotatedWidth, mouseBlob.Centroid, out bodyPoints, out waistLength, out pelvicArea1, out pelvicArea2);
            Console.WriteLine(smallestAngle);
            if (!tailPoint.IsEmpty)
            {
                drawImage.Draw(new CircleF(tailPoint, 4), new Bgr(Color.Red), 3);
            }

            if (bodyPoints != null && bodyPoints.Count > 0)
            {
                Point[] bPoints = bodyPoints.ToArray();
                double volume = MathExtension.PolygonArea(bPoints);
                Emgu.CV.Structure.Ellipse fittedEllipse = PointCollection.EllipseLeastSquareFitting(bPoints.Select(x => x.ToPointF()).ToArray());
                //CvInvoke.Ellipse(drawImage, fittedEllipse.RotatedRect, new MCvScalar(0, 0, 255), 2);
                Console.WriteLine("Volume: " + volume + " - " + (fittedEllipse.RotatedRect.Size.Width * fittedEllipse.RotatedRect.Size.Height) + ", Waist Length: " + waistLength);

                //Alter this to something better
                if (MathExtension.PolygonArea(bPoints) > (rotatedRect.Size.Height * rotatedRect.Size.Width) / 6 || true)
                {
                    //tempImage2.FillConvexPoly(bPoints, new Bgr(Color.White));
                    tempImage2.DrawPolyline(bPoints, true, new Bgr(Color.White));
                    PointF centroid = MathExtension.FindCentroid(bPoints);
                    System.Drawing.Rectangle minRect;
                    Image<Gray, Byte> temp2 = new Image<Gray, byte>(tempImage2.Width + 2, tempImage2.Height + 2);
                    CvInvoke.FloodFill(tempImage2, temp2, centroid.ToPoint(), new MCvScalar(255, 255, 255), out minRect, new MCvScalar(5, 5, 5), new MCvScalar(5, 5, 5));
                    using (Image<Gray, Byte> nonZeroImage = tempImage2.Convert<Gray, Byte>())
                    {
                        int[] volume2 = nonZeroImage.CountNonzero();
                        Console.WriteLine("Volume2: " + volume2[0]);
                        //int tester = 9;
                        //using (Image<Gray, Byte> t1 = nonZeroImage.Erode(tester))
                        //using (Image<Gray, Byte> t2 = t1.Dilate(tester))
                        //using (Image<Gray, Byte> t3 = t2.Erode(tester))
                        //using (Image<Gray, Byte> t4 = t3.Dilate(tester))
                        //using (Image<Gray, Byte> t5 = t4.Erode(tester))
                        //using (Image<Gray, Byte> t6 = t5.Dilate(tester))
                        //using (Image<Gray, Byte> t7 = t6.Erode(tester))
                        //{
                        //    Image6 = ImageService.ToBitmapSource(t7);
                        //}
                    }
                    tempImage2.Draw(new CircleF(centroid, 2), new Bgr(Color.Blue), 2);

                    double distanceToSpine = double.MaxValue;
                    PointF p11 = PointF.Empty, p22 = PointF.Empty;
                    for (int i = 1; i < spineCornerCorrected.Length; i++)
                    {
                        PointF point1 = spineCornerCorrected[i - 1];
                        PointF point2 = spineCornerCorrected[i];
                        double cDist = MathExtension.MinDistanceFromLineToPoint(point1, point2, centroid);
                        if (cDist < distanceToSpine)
                        {
                            p11 = point1;
                            p22 = point2;
                            distanceToSpine = cDist;
                        }
                    }
                    PointSideVector psv = MathExtension.FindSide(p11, p22, centroid);
                    if (psv == PointSideVector.Below)
                    {
                        distanceToSpine *= -1;
                    }
                    Console.WriteLine(distanceToSpine + ",");
                }
            }

            for (int i = 1; i < spine.Length; i++)
            {
                PointF point1 = spine[i - 1];
                PointF point2 = spine[i];
                point1.X += topCorner.X;
                point1.Y += topCorner.Y;
                point2.X += topCorner.X;
                point2.Y += topCorner.Y;
                LineSegment2D line = new LineSegment2D(new Point((int)point1.X, (int)point1.Y), new Point((int)point2.X, (int)point2.Y));
                drawImage.Draw(line, new Bgr(Color.Aqua), 2);
                tempImage2.Draw(line, new Bgr(Color.Cyan), 2);
            }
            drawImage.Draw(new CircleF(mouseBlob.Centroid, 2), new Bgr(Color.Blue), 2);
            Image3 = ImageService.ToBitmapSource(drawImage);
            Image6 = ImageService.ToBitmapSource(tempImage2);

            double rotatedRectArea = rotatedRect.Size.Width * rotatedRect.Size.Height;
            if (rotatedRectArea < 75000)
            {
                //Console.WriteLine(rotatedRectArea);
                //return;
            }
            else
            {
                //Console.WriteLine(rotatedRectArea);
            }

            double height = rotatedRect.Size.Height;
            double width = rotatedRect.Size.Width;
            //double angle = rotatedRect.Angle;
            bool heightLong = height > width;
            double halfLength;
            PointF[] vertices = rotatedRect.GetVertices();
            if (heightLong)
            {
                halfLength = height;
            }
            else
            {
                halfLength = width;
            }
            halfLength /= 2;

            PointF[] sidePoints1 = new PointF[4], midPoints = new PointF[2];
            PointF p1 = vertices[0], p2 = vertices[1], p3 = vertices[2], p4 = vertices[3];
            double d1 = p1.DistanceSquared(p2);
            double d2 = p2.DistanceSquared(p3);
            if (d1 < d2)
            {
                //p1 and p2, p3 and p4 are side points
                sidePoints1[0] = p1;
                sidePoints1[1] = p2;
                sidePoints1[2] = p4;
                sidePoints1[3] = p3;
                midPoints[0] = p1.MidPoint(p4);
                midPoints[1] = p2.MidPoint(p3);
            }
            else
            {
                //p2 and p3, p1 and p4 are side points
                sidePoints1[0] = p1;
                sidePoints1[1] = p4;
                sidePoints1[2] = p2;
                sidePoints1[3] = p3;
                midPoints[0] = p1.MidPoint(p2);
                midPoints[1] = p3.MidPoint(p4);
            }

            PointF intersection1 = PointF.Empty;
            PointF intersection2 = PointF.Empty;
            using (Image<Gray, Byte> halfTest1 = origGray.CopyBlank())
            using (Image<Gray, Byte> halfTest2 = origGray.CopyBlank())
            {
                Point[] rect1 = new Point[] { new Point((int)sidePoints1[0].X, (int)sidePoints1[0].Y), new Point((int)midPoints[0].X, (int)midPoints[0].Y), new Point((int)midPoints[1].X, (int)midPoints[1].Y), new Point((int)sidePoints1[1].X, (int)sidePoints1[1].Y) };
                Point[] rect2 = new Point[] { new Point((int)sidePoints1[2].X, (int)sidePoints1[2].Y), new Point((int)midPoints[0].X, (int)midPoints[0].Y), new Point((int)midPoints[1].X, (int)midPoints[1].Y), new Point((int)sidePoints1[3].X, (int)sidePoints1[3].Y) };
                if (MathExtension.PolygonContainsPoint(rect1, center))
                {
                    //Rect 1 is head, look for line in r2
                }
                else if (MathExtension.PolygonContainsPoint(rect2, center))
                {
                    //Rect 2 is head, look for line in r1
                }
                else
                {
                    //Something has gone wrong
                }
                halfTest1.FillConvexPoly(rect1, new Gray(255));
                halfTest2.FillConvexPoly(rect2, new Gray(255));
                //Image5 = ImageService.ToBitmapSource(halfTest1);
                //Image6 = ImageService.ToBitmapSource(halfTest2);
                //binary.Copy(holder1, halfTest1);
                //binary.Copy(holder2, halfTest2);

                int count1, count2;
                //using (Image<Gray, Byte> binaryInverse = subbed.Not())
                using (Image<Gray, Byte> holder1 = subbed.Copy(halfTest1))
                using (Image<Gray, Byte> holder2 = subbed.Copy(halfTest2))
                {
                    //Image4 = ImageService.ToBitmapSource(subbed);
                    //Image5 = ImageService.ToBitmapSource(holder1);
                    //Image6 = ImageService.ToBitmapSource(holder2);
                    count1 = holder1.CountNonzero()[0];
                    count2 = holder2.CountNonzero()[0];
                }

                PointF qr1 = PointF.Empty, qr2 = PointF.Empty, qr3 = PointF.Empty, qr4 = PointF.Empty;
                if (count1 > count2)
                {
                    //holder 1 is head, holder 2 is rear
                    qr1 = sidePoints1[2];
                    qr2 = sidePoints1[2].MidPoint(midPoints[0]);
                    qr3 = sidePoints1[3].MidPoint(midPoints[1]);
                    qr4 = sidePoints1[3];
                }
                else if (count1 < count2)
                {
                    //holder 2 is head, holder 1 is rear
                    qr1 = sidePoints1[0];
                    qr2 = sidePoints1[0].MidPoint(midPoints[0]);
                    qr3 = sidePoints1[1].MidPoint(midPoints[1]);
                    qr4 = sidePoints1[1];
                }

                //fat line is qr2, qr3
                PointF centerPoint = qr2.MidPoint(qr3);
                PointF i1 = qr2;
                PointF i2 = qr3;
                intersection1 = MathExtension.PolygonLineIntersectionPoint(centerPoint, i1, mouseContour);
                intersection2 = MathExtension.PolygonLineIntersectionPoint(centerPoint, i2, mouseContour);
            }

            double deltaX = halfLength * Math.Cos(rotatedRect.Angle * MathExtension.Deg2Rad);
            double deltaY = halfLength * Math.Sin(rotatedRect.Angle * MathExtension.Deg2Rad);
            const double scaleFactor = 0.25;
            PointF newPoint = new PointF((float)(center.X - (deltaX * scaleFactor)), (float)(center.Y - (deltaY * scaleFactor)));
            PointF intersectionPoint1 = PointF.Empty;
            PointF intersectionPoint2 = PointF.Empty;
            Point[] temp = null;
            PointF[] headPoints = RBSKService.RBSKParallel(binary, MouseService.GetStandardMouseRules(), ref temp);
            if (headPoints != null)
            {
                PointF tip = headPoints[2];
                //targetPoints = new LineSegment2DF[3];
                Point centerInt = new Point((int)newPoint.X, (int)newPoint.Y);
                //targetPoints[0] = new LineSegment2DF(centerInt, new PointF(tip.X, tip.Y));
                Vector forwardVec = new Vector(tip.X - newPoint.X, tip.Y - newPoint.Y);
                Vector rotatedVec = new Vector(-forwardVec.Y, forwardVec.X);
                PointF i1 = new PointF((float)(newPoint.X + (rotatedVec.X * 1)), (float)(newPoint.Y + (rotatedVec.Y * 1)));
                PointF i2 = new PointF((float)(newPoint.X - (rotatedVec.X * 1)), (float)(newPoint.Y - (rotatedVec.Y * 1)));
                //targetPoints[1] = new LineSegment2DF(centerInt, i1);
                //targetPoints[2] = new LineSegment2DF(centerInt, i2);
                intersectionPoint1 = MathExtension.PolygonLineIntersectionPoint(newPoint, i1, mouseContour);
                intersectionPoint2 = MathExtension.PolygonLineIntersectionPoint(newPoint, i2, mouseContour);
            }

            //displayImage.Draw(mouseBlob.BoundingBox, new Bgr(Color.Red), 2);
            displayImage.Draw(new CircleF(mouseBlob.Centroid, 3), new Bgr(Color.Blue), 2);
            displayImage.Draw(rotatedRect, new Bgr(Color.Yellow), 3);
            //displayImage.Draw(mouseContour, new Bgr(Color.Aqua), 2);
            //displayImage.FillConvexPoly(new Point[] { new Point((int)sidePoints1[0].X, (int)sidePoints1[0].Y), new Point((int)midPoints[0].X, (int)midPoints[0].Y), new Point((int)midPoints[1].X, (int)midPoints[1].Y), new Point((int)sidePoints1[1].X, (int)sidePoints1[1].Y) }, new Bgr(Color.Blue));
            //if (targetPoints != null)
            //{
            //    displayImage.Draw(targetPoints[0], new Bgr(Color.Green), 2);
            //    displayImage.Draw(targetPoints[1], new Bgr(Color.Green), 2);
            //    displayImage.Draw(targetPoints[2], new Bgr(Color.Green), 2);
            //}
            //if (!intersection1.IsEmpty && !intersection2.IsEmpty)
            //{
            //    LineSegment2DF lineSegment = new LineSegment2DF(intersection1, intersection2);
            //    displayImage.Draw(lineSegment, new Bgr(Color.MediumPurple), 4);
            //    //Console.WriteLine(lineSegment.Length);
            //}
            //displayImage.Draw(new CircleF(newPoint, 4), new Bgr(Color.MediumPurple), 3);
            //Console.WriteLine(rotatedRect.Angle);
            Image4 = ImageService.ToBitmapSource(displayImage);
        }
    }
}
/// <summary>
/// This is the method that actually does the work.
/// </summary>
/// <param name="DA">The DA object can be used to retrieve data from input parameters and
/// to store data in output parameters.</param>
protected override void SolveInstance(IGH_DataAccess DA)
{
    string path = "";
    int size = 0;
    bool debug = false;
    int maxNgon = 3;
    double tol = -1;
    if (!DA.GetData(0, ref path)) { return; }
    if (!DA.GetData(1, ref size)) { return; }
    DA.GetData(2, ref maxNgon);
    DA.GetData(3, ref tol);

    IplImage img = new IplImage(path, LoadMode.Color);
    IplImage imgHSV = new IplImage(img.Size, BitDepth.U8, 3);
    Cv.CvtColor(img, imgHSV, ColorConversion.RgbToHsv);
    var channels = imgHSV.Split();
    IplImage hue = channels[0];
    IplImage Render = new IplImage(img.Size, BitDepth.U8, 3);

    // declare the images for the color-extraction masks
    IplImage imgB1 = new IplImage(img.Size, BitDepth.U8, 1);
    IplImage imgB2 = new IplImage(img.Size, BitDepth.U8, 1);
    IplImage imgR = new IplImage(img.Size, BitDepth.U8, 1);
    IplImage imgG = new IplImage(img.Size, BitDepth.U8, 1);
    IplImage imgB = new IplImage(img.Size, BitDepth.U8, 1);

    int RG = 30;
    int GB = 90;
    int BR = 150;
    int off = 1;
    int smin = 30;
    int bmin = 30;

    // build the thresholds for color extraction
    CvScalar Bmin1 = new CvScalar(0, smin, bmin);
    CvScalar Bmax1 = new CvScalar(RG - off, 255, 255);
    CvScalar Bmin2 = new CvScalar(BR + off, smin, bmin);
    CvScalar Bmax2 = new CvScalar(180, 255, 255);
    CvScalar Gmin = new CvScalar(RG + off, smin, bmin);
    CvScalar Gmax = new CvScalar(GB - off, 255, 255);
    CvScalar Rmin = new CvScalar(GB + off, smin, bmin);
    CvScalar Rmax = new CvScalar(BR - off, 255, 255);

    // extract each color using the thresholds
    Cv.InRangeS(imgHSV, Bmin1, Bmax1, imgB1);
    Cv.InRangeS(imgHSV, Bmin2, Bmax2, imgB2);
    Cv.Add(imgB1, imgB2, imgB);
    Cv.InRangeS(imgHSV, Gmin, Gmax, imgG);
    Cv.InRangeS(imgHSV, Rmin, Rmax, imgR);

    // label blobs in each mask
    CvBlobs Rs = new CvBlobs(imgR);
    CvBlobs Gs = new CvBlobs(imgG);
    CvBlobs Bs = new CvBlobs(imgB);
    int minArea = img.Width * img.Height / 20000;
    int maxArea = img.Width * img.Height;
    Bs.FilterByArea(minArea, maxArea);
    Rs.FilterByArea(minArea, maxArea);
    Gs.FilterByArea(minArea, maxArea);

    // copy the blobs into arrays
    CvBlob[] Rblobs = new CvBlob[Rs.Count];
    CvBlob[] Bblobs = new CvBlob[Bs.Count];
    CvBlob[] Gblobs = new CvBlob[Gs.Count];
    Rs.Values.CopyTo(Rblobs, 0);
    Bs.Values.CopyTo(Bblobs, 0);
    Gs.Values.CopyTo(Gblobs, 0);

    if (!debug)
    {
        string deb = "";
        foreach (var bbbb in Rblobs)
        {
            deb += bbbb.Area + "\r\n";
        }

        // build lines from the blobs
        List<Line> Rlines = ExtractLinesFromBlobs(Rblobs);
        List<Line> Blines = ExtractLinesFromBlobs(Bblobs);
        List<Line> Glines = ExtractLinesFromBlobs(Gblobs);

        // scale
        double MinSize = Math.Min(img.Width, img.Height);
        double ScaleFactor = (double)size / MinSize;
        var scale = Transform.Scale(new Point3d(0, 0, 0), ScaleFactor);

        Network network = new Network();
        // add the lines to the network, labeled by color
        foreach (var l in Rlines) { l.Transform(scale); network.Add(l, 0); }
        foreach (var l in Blines) { l.Transform(scale); network.Add(l, 1); }
        foreach (var l in Glines) { l.Transform(scale); network.Add(l, 2); }

        double t = network.SearchWeldToleranceBinary(0, (double)size / 10, 0, 10);
        if (tol != -1)
        {
            network.weld(tol * size);
        }
        else
        {
            network.weld(t);
        }
        deb += "tolerance: " + t + "\r\n\r\n";

        // extract the edges after welding
        Rlines = network.ExtractLines(0);
        Blines = network.ExtractLines(1);
        Glines = network.ExtractLines(2);
        List<List<int>> faces = network.detectCycles(maxNgon);
        deb += "B: " + Bs.Count.ToString() + "\r\n";
        deb += "R: " + Rs.Count.ToString() + "\r\n";
        deb += "G: " + Gs.Count.ToString() + "\r\n";
        Mesh mesh = GenerateMesh(network.verts, faces);
        mesh.Normals.ComputeNormals();

        DA.SetDataList(0, network.verts);
        DA.SetDataList(1, Rlines);
        DA.SetDataList(2, Blines);
        DA.SetDataList(3, Glines);
        DA.SetData(4, mesh);
    }
    else
    {
        // render the red blobs
        Rs.RenderBlobs(img, Render, RenderBlobsMode.Angle);
        Rs.RenderBlobs(img, Render, RenderBlobsMode.BoundingBox);
        Rs.RenderBlobs(img, Render, RenderBlobsMode.Centroid);
        // render the blue blobs
        Bs.RenderBlobs(img, Render, RenderBlobsMode.Angle);
        Bs.RenderBlobs(img, Render, RenderBlobsMode.BoundingBox);
        Bs.RenderBlobs(img, Render, RenderBlobsMode.Centroid);
        // render the green blobs
        Gs.RenderBlobs(img, Render, RenderBlobsMode.Angle);
        Gs.RenderBlobs(img, Render, RenderBlobsMode.BoundingBox);
        Gs.RenderBlobs(img, Render, RenderBlobsMode.Centroid);

        Cv.NamedWindow("test");
        IplImage Render2 = new IplImage(img.Size.Width / 4, img.Size.Height / 4, BitDepth.U8, 3);
        string deb = "";
        deb += "B: " + Bs.Count.ToString() + "\r\n";
        deb += "R: " + Rs.Count.ToString() + "\r\n";
        deb += "G: " + Gs.Count.ToString() + "\r\n";
        Cv.Resize(Render, Render2);
        Cv.ShowImage("test", Render2);
        Cv.WaitKey();
        Cv.DestroyWindow("test");
    }

    Cv.ReleaseImage(img);
    Cv.ReleaseImage(imgHSV);
}
private void ProcessFrame(object sender, EventArgs arg)
{
    Mat frame = new Mat();
    capture.Retrieve(frame, 0);
    Mat grayFrame = new Mat();
    CvInvoke.CvtColor(frame, grayFrame, ColorConversion.Bgr2Gray);
    //Mat smallGrayFrame = new Mat();
    //CvInvoke.PyrDown(grayFrame, smallGrayFrame);
    //Mat smoothedGrayFrame = new Mat();
    //CvInvoke.PyrUp(smallGrayFrame, smoothedGrayFrame);
    //Image<Gray, Byte> smallGrayFrame = grayFrame.PyrDown();
    //Image<Gray, Byte> smoothedGrayFrame = smallGrayFrame.PyrUp();
    //Mat cannyFrame = new Mat();
    //CvInvoke.Canny(smoothedGrayFrame, cannyFrame, 100, 60);
    //Image<Gray, Byte> cannyFrame = smoothedGrayFrame.Canny(100, 60);

    Image<Bgra, Byte> _frame = frame.ToImage<Bgra, Byte>();
    Image<Gray, Byte> _grayFrame = grayFrame.ToImage<Gray, Byte>();
    Image<Gray, Byte>[] rgb_frame = _frame.Split(); // channels of the color image

    Image<Gray, Byte> red_com = rgb_frame[2] - _grayFrame;
    var red_bi = red_com.Convert<Gray, byte>().ThresholdBinary(new Gray(redThres), new Gray(255));
    Image<Gray, Byte> blue_com = rgb_frame[0] - _grayFrame;
    var blue_bi = blue_com.Convert<Gray, byte>().ThresholdBinary(new Gray(blueThres), new Gray(255));
    Image<Gray, Byte> green_com = rgb_frame[1] - _grayFrame;
    var green_bi = green_com.Convert<Gray, byte>().ThresholdBinary(new Gray(greenThres), new Gray(255));
    //System.Windows.Forms.MessageBox.Show("");

    ///////////////////////////////////////////////////////////////////////////////////
    //Blob detection

    //Red blob detection
    Image<Bgr, Byte> smoothedFrame_r = new Image<Bgr, byte>(red_com.Size);
    CvInvoke.GaussianBlur(red_bi, smoothedFrame_r, new Size(3, 3), 1); // filter out noise
    Mat forgroundMask_r = new Mat();
    fgDetector.Apply(smoothedFrame_r, forgroundMask_r);
    CvBlobs blobs_r = new CvBlobs();
    blobDetector.Detect(forgroundMask_r.ToImage<Gray, byte>(), blobs_r);
    blobs_r.FilterByArea(minarea, maxarea);

    //Blue blob detection
    Image<Bgr, Byte> smoothedFrame_b = new Image<Bgr, byte>(red_com.Size);
    CvInvoke.GaussianBlur(blue_bi, smoothedFrame_b, new Size(3, 3), 1); // filter out noise
    Mat forgroundMask_b = new Mat();
    fgDetector.Apply(smoothedFrame_b, forgroundMask_b);
    CvBlobs blobs_b = new CvBlobs();
    blobDetector.Detect(forgroundMask_b.ToImage<Gray, byte>(), blobs_b);
    blobs_b.FilterByArea(minarea, maxarea);

    //Green blob detection
    Image<Bgr, Byte> smoothedFrame_g = new Image<Bgr, byte>(red_com.Size);
    CvInvoke.GaussianBlur(green_bi, smoothedFrame_g, new Size(3, 3), 1); // filter out noise
    Mat forgroundMask_g = new Mat();
    fgDetector.Apply(smoothedFrame_g, forgroundMask_g);
    CvBlobs blobs_g = new CvBlobs();
    blobDetector.Detect(forgroundMask_g.ToImage<Gray, byte>(), blobs_g);
    blobs_g.FilterByArea(minarea, maxarea);

    //Mouse interpretation
    float[] cent_r = new float[2];
    float[] cent_g = new float[2];
    float[] cent_b = new float[2];

    //Cursor control with the green marker
    foreach (var pair in blobs_g)
    {
        CvBlob b = pair.Value;
        CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
        cent_g[0] = b.Centroid.X;
        cent_g[1] = b.Centroid.Y;
    }

    if (blobs_g.Count == 1 || mouseflag != 0)
    {
        //Cursor movement, primary screen
        //if (Screen.AllScreens.Length == 1)
        {
            Cursor.Position = new Point(
                Screen.PrimaryScreen.Bounds.Width - (int)(cursor_mul * (int)cent_g[0] * Screen.PrimaryScreen.Bounds.Width / capture.Width),
                (int)(cursor_mul * (int)cent_g[1]) * Screen.PrimaryScreen.Bounds.Height / capture.Height);
        }

        //Secondary screen
        //Cursor.Position = new Point((int)(cursor_mul * (int)cent_g[0] * Screen.AllScreens[1].Bounds.Width / capture.Width),
        //    (int)(cursor_mul * (int)cent_g[1]) * Screen.AllScreens[1].Bounds.Height / capture.Height);

        //Two screens at the same time
        /* if (Screen.AllScreens.Length == 2)
         * {
         *     Cursor.Position = new Point((int)(cursor_mul * (int)cent_g[0] * (Screen.AllScreens[1].Bounds.Width + Screen.AllScreens[0].Bounds.Width) / capture.Width),
         *         (int)(cursor_mul * (int)cent_g[1]) * (Screen.AllScreens[1].Bounds.Height + Screen.AllScreens[0].Bounds.Height) / capture.Height);
         * }
         * //Three screens at the same time
         * if (Screen.AllScreens.Length == 3)
         * {
         *     Cursor.Position = new Point((int)(cursor_mul * (int)cent_g[0] * (Screen.AllScreens[1].Bounds.Width + Screen.AllScreens[0].Bounds.Width + Screen.AllScreens[2].Bounds.Width) / capture.Width),
         *         (int)(cursor_mul * (int)cent_g[1]) * (Screen.AllScreens[1].Bounds.Height + Screen.AllScreens[0].Bounds.Height + Screen.AllScreens[0].Bounds.Height) / capture.Height);
         * }
         */

        /*
         * //Check for clicks
         * if (blobs_r.Count == 1)
         * {
         *     if (blobs_g.Count == 0)
         *     {
         *         if (ccount == 1)
         *         {
         *             //double click
         *             mouse_event(MOUSEEVENTF_LEFTDOWN, (int)cent_g[0], (int)cent_g[1], 0, 0);
         *             mouse_event(MOUSEEVENTF_LEFTUP, (int)cent_g[0], (int)cent_g[1], 0, 0);
         *             Thread.Sleep(150);
         *             mouse_event(MOUSEEVENTF_LEFTDOWN, (int)cent_g[0], (int)cent_g[1], 0, 0);
         *             mouse_event(MOUSEEVENTF_LEFTUP, (int)cent_g[0], (int)cent_g[1], 0, 0);
         *         }
         *         else
         *         {
         *             ccount--;
         *         }
         *     }
         *     else if ((cent_g[0] - cent_r[0] >= 10 || cent_r[0] - cent_g[0] <= 10) && (cent_g[1] - cent_r[1] >= 10 || cent_r[1] - cent_g[1] <= 10))
         *     {
         *         ccount = safevalue;
         *         mouseflag = 1;
         *         //single click
         *         mouse_event(MOUSEEVENTF_LEFTDOWN, (int)cent_g[0], (int)cent_g[1], 0, 0);
         *         mouse_event(MOUSEEVENTF_LEFTUP, (int)cent_g[0], (int)cent_g[1], 0, 0);
         *     }
         * }
         * else
         * {
         *     ccount = 0;
         * }
         *
         * if (blobs_b.Count == 1)
         * {
         *     foreach (var pair in blobs_b)
         *     {
         *         CvBlob b = pair.Value;
         *         CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
         *         cent_b[0] = b.Centroid.X;
         *         cent_b[1] = b.Centroid.Y;
         *     }
         *
         *     if (blobs_g.Count == 1 && (cent_g[0] - cent_b[0] >= 10 || cent_b[0] - cent_g[0] <= 10) && (cent_g[1] - cent_b[1] >= 10 || cent_b[1] - cent_g[1] <= 10))
         *     {
         *         //right click
         *         mouse_event(MOUSEEVENTF_RIGHTDOWN, (int)cent_g[0], (int)cent_g[1], 0, 0);
         *         mouse_event(MOUSEEVENTF_RIGHTUP, (int)cent_g[0], (int)cent_g[1], 0, 0);
         *     }
         *     else if (blobs_g.Count == 0)
         *     {
         *         mouse_event(MOUSEEVENTF_VWHEEL, 0, 0, (scroll_y - (int)cent_b[1]) * scroll_mul_v, 0);
         *         mouse_event(MOUSEEVENTF_HWHEEL, 0, 0, (scroll_x - (int)cent_b[0]) * scroll_mul_h, 0);
         *         scroll_y = (int)cent_b[1];
         *         scroll_x = (int)cent_b[0];
         *     }
         * }
         */
    }

    captureImageBox.Image = frame;
    grayscaleImageBox.Image = red_bi;
    smoothedGrayscaleImageBox.Image = green_bi;
    cannyImageBox.Image = blue_bi;
}
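The mouse_event calls in the commented-out block depend on a Win32 P/Invoke declaration the snippet does not show. A sketch of the usual declaration and the flag constants used here (values from the Windows SDK; Microsoft has since superseded mouse_event with SendInput):

    using System.Runtime.InteropServices;

    static class NativeMouse
    {
        [DllImport("user32.dll")]
        public static extern void mouse_event(uint dwFlags, uint dx, uint dy, uint dwData, int dwExtraInfo);

        public const uint MOUSEEVENTF_LEFTDOWN  = 0x0002;
        public const uint MOUSEEVENTF_LEFTUP    = 0x0004;
        public const uint MOUSEEVENTF_RIGHTDOWN = 0x0008;
        public const uint MOUSEEVENTF_RIGHTUP   = 0x0010;
        public const uint MOUSEEVENTF_WHEEL     = 0x0800; // vertical wheel ("VWHEEL" in the snippet)
        public const uint MOUSEEVENTF_HWHEEL    = 0x1000; // horizontal wheel
        public const uint MOUSEEVENTF_ABSOLUTE  = 0x8000;
    }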
public CameraTrackingFindSubjectsReturnModel FindSubjects()
{
    double largestW = 0;
    double largestH = 0;
    double centerX = 0;
    double centerY = 0;
    bool foundSubject = false;
    Rectangle subject = new Rectangle();

    // get detection 'blobs' or regions
    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(this.lastMask.ToImage<Gray, byte>(), blobs);
    blobs.FilterByArea(100, int.MaxValue);

    // mean of frame width and height (the original summed Width twice)
    float scale = (this.lastFrame.Width + this.lastFrame.Height) / 2.0f;
    _tracker.Update(blobs, 0.01 * scale, 5, 5);

    FrameWidth = this.lastFrame.Width;
    FrameHeight = this.lastFrame.Height;

    foreach (var pair in _tracker)
    {
        CvTrack b = pair.Value;

        // limit the largest and smallest size boxes we care about.
        if (b.BoundingBox.Width < (this.lastFrame.Width / this.smallestDetectionWidthSizeDivisor) ||
            b.BoundingBox.Height < (this.lastFrame.Height / this.smallestDetectionHeightSizeDivisor) ||
            (b.BoundingBox.Width > (this.lastFrame.Width / this.largestDetectionWidthSizeDivisor) &&
             b.BoundingBox.Height > (this.lastFrame.Height / this.largestDetectionHeightSizeDivisor)))
        {
            continue;
        }

        // keep track of the largest regions as we only care to track the largest
        if (b.BoundingBox.Width > largestW)
        {
            subject = b.BoundingBox;
            largestW = b.BoundingBox.Width;
            largestH = b.BoundingBox.Height;
            centerX = b.Centroid.X;
            centerY = b.Centroid.Y;
            CvInvoke.Rectangle(this.lastFrame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 20);
            CvInvoke.PutText(this.lastFrame, b.Id.ToString(),
                new Point((int)Math.Round(b.Centroid.X), (int)Math.Round(b.Centroid.Y)),
                FontFace.HersheyPlain, 1.0, new MCvScalar(255.0, 255.0, 255.0));
            foundSubject = true;
        }
        else
        {
            CvInvoke.Rectangle(this.lastFrame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 1);
            CvInvoke.PutText(this.lastFrame, b.Id.ToString(),
                new Point((int)Math.Round(b.Centroid.X), (int)Math.Round(b.Centroid.Y)),
                FontFace.HersheyPlain, 1.0, new MCvScalar(255.0, 255.0, 255.0));
        }
    }

    return new CameraTrackingFindSubjectsReturnModel()
    {
        CenterX = centerX,
        CenterY = centerY,
        BoundingBox = subject,
        FoundSubject = foundSubject
    };
}
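A possible caller, assuming the surrounding class is instantiated as tracker and is fed a fresh frame and mask before each call; the steering math is illustrative:

    var result = tracker.FindSubjects();
    if (result.FoundSubject)
    {
        // Offset of the subject from the frame center; positive X means right of center.
        double offsetX = result.CenterX - tracker.FrameWidth / 2.0;
        double offsetY = result.CenterY - tracker.FrameHeight / 2.0;
        Console.WriteLine("subject offset: ({0:F0}, {1:F0})", offsetX, offsetY);
    }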
private unsafe void ProcessCapture()
{
    try
    {
        Screenshot ssht = _capture.Capture();
        Image<Gray, byte> process;
        fixed (byte* p = ssht.Data)
        {
            IntPtr ptr = (IntPtr)p;
            Screen = new Image<Bgra, byte>(ssht.Width, ssht.Height, ssht.Stride, ptr)
            {
                ROI = BOX_SIZE
            };
            process = Screen.Convert<Gray, byte>();
        }
        ssht.Dispose();

        _force = new Vec2();

        // Read player position from memory
        _playerPos = GetPlayerPosition();

        Rectangle powerDetectionROI = new Rectangle(
            (int)Math.Max(_playerPos.X - 100, 0),
            (int)Math.Max(_playerPos.Y - 75, 0),
            (int)Math.Min(BOX_SIZE.Width - (_playerPos.X - 100), 200),
            (int)Math.Min(BOX_SIZE.Height - (_playerPos.Y - 75), 100));

        // Look for power
        process.ROI = powerDetectionROI;
        using (Image<Gray, float> result = process.MatchTemplate(_imgPower, TemplateMatchingType.SqdiffNormed))
        {
            double minDistSq = double.MaxValue;
            int minX = 0, minY = 0;
            for (int y = 0; y < result.Height; y++)
            {
                for (int x = 0; x < result.Width; x++)
                {
                    if (result.Data[y, x, 0] < 0.20)
                    {
                        double dist = (x - _playerPos.X) * (x - _playerPos.X) + (y - _playerPos.Y) * (y - _playerPos.Y);
                        if (dist < minDistSq)
                        {
                            minDistSq = dist;
                            minX = x;
                            minY = y;
                        }
                    }
                }
            }
            if (minDistSq != double.MaxValue)
            {
                Rectangle match = new Rectangle(minX + process.ROI.X, minY + process.ROI.Y, _imgPower.Width, _imgPower.Height);
                if (DO_DRAWING)
                {
                    Screen.Draw(match, new Bgra(0, 255, 255, 255), 2);
                    Screen.Draw(new LineSegment2DF(match.Location, _playerPos), new Bgra(0, 255, 255, 255), 1);
                }
                Vec2 acc = Vec2.CalculateForce(
                    new Vec2(match.X + _imgPower.Width / 2.0, match.Y + _imgPower.Height / 2.0),
                    new Vec2(_playerPos.X, _playerPos.Y), 4000);
                _force += acc;
            }
        }

        // Processing bounding box
        Rectangle bulletDetectionROI = new Rectangle(
            (int)Math.Max(_playerPos.X - INITIAL_DETECTION_RADIUS, 0),
            (int)Math.Max(_playerPos.Y - INITIAL_DETECTION_RADIUS, 0),
            (int)Math.Min(BOX_SIZE.Width - ((int)_playerPos.X - INITIAL_DETECTION_RADIUS), INITIAL_DETECTION_RADIUS * 2),
            (int)Math.Min(BOX_SIZE.Height - ((int)_playerPos.Y - INITIAL_DETECTION_RADIUS), INITIAL_DETECTION_RADIUS * 2));
        process.ROI = bulletDetectionROI;

        if (TEST_MODE)
        {
            return;
        }

        Vec2 _playerVec = new Vec2(_playerPos.X, _playerPos.Y);
        var binthresh = process.SmoothBlur(3, 3).ThresholdBinary(new Gray(240), new Gray(255)); //220

        // Detect blobs (bullets) on screen
        CvBlobs resultingImgBlobs = new CvBlobs();
        uint noBlobs = _bDetect.Detect(binthresh, resultingImgBlobs);
        int blobCount = 0;
        resultingImgBlobs.FilterByArea(10, 500);
        foreach (CvBlob targetBlob in resultingImgBlobs.Values)
        {
            if (DO_DRAWING)
            {
                Screen.ROI = new Rectangle(process.ROI.X + BOX_SIZE.X, process.ROI.Y + BOX_SIZE.Y, process.ROI.Width, process.ROI.Height);
                Screen.FillConvexPoly(targetBlob.GetContour(), new Bgra(0, 0, 255, 255));
            }

            // Find closest point on blob contour to player
            Point minPoint = targetBlob.GetContour()[0];
            double minDist = double.MaxValue;
            foreach (var point in targetBlob.GetContour())
            {
                Point adj = new Point(point.X + process.ROI.X, point.Y + process.ROI.Y);
                double dist = (adj.X - _playerPos.X) * (adj.X - _playerPos.X) + (adj.Y - _playerPos.Y) * (adj.Y - _playerPos.Y);
                if (dist < minDist)
                {
                    minPoint = adj;
                    minDist = dist;
                }
            }

            // Ensure the bullet is in the correct range
            if (minDist < _detectionRadius * _detectionRadius)
            {
                // Calculate forces
                Vec2 acc = Vec2.CalculateForce(new Vec2(minPoint.X, minPoint.Y), _playerVec, -5000);
                _force += acc;
                if (DO_DRAWING)
                {
                    Screen.ROI = BOX_SIZE;
                    Screen.Draw(new LineSegment2DF(_playerPos, minPoint), new Bgra(0, 255, 128, 255), 1);
                }
                blobCount++;
            }
        }
        Screen.ROI = BOX_SIZE;
        process.ROI = Rectangle.Empty;

        // Calculate new detection orb radius
        //float nRad = Math.Max(20.0f, INITIAL_DETECTION_RADIUS/(1 + blobCount*0.3f));
        if (blobCount >= 1)
        {
            _detectionRadius = (_detectionRadius * 29 + 5.0f) / 30.0f;
        }
        else
        {
            _detectionRadius = (_detectionRadius * 59 + INITIAL_DETECTION_RADIUS) / 60.0f;
        }

        // Account for border force, to prevent cornering
        //if (BOX_SIZE.Width - _playerPos.X < 120) _force += new Vec2(Vec2.CalculateForce(BOX_SIZE.Width - _playerPos.X, -4000), 0);
        //if (_playerPos.X < 120) _force += new Vec2(Vec2.CalculateForce(_playerPos.X, 4000), 0);
        if (BOX_SIZE.Height - _playerPos.Y < 50)
        {
            _force += new Vec2(0, Vec2.CalculateForce(BOX_SIZE.Height - _playerPos.Y, -2000));
        }
        if (_playerPos.Y < 200)
        {
            _force += new Vec2(0, Vec2.CalculateForce(_playerPos.Y, 2000));
        }

        // Corners are the devil
        _force += Vec2.CalculateForce(new Vec2(BOX_SIZE.Width, BOX_SIZE.Height), _playerVec, -2000);
        _force += Vec2.CalculateForce(new Vec2(0, BOX_SIZE.Height), _playerVec, -2000);
        _force += Vec2.CalculateForce(new Vec2(0, 0), _playerVec, -2000);
        _force += Vec2.CalculateForce(new Vec2(BOX_SIZE.Width, 0), _playerVec, -2000);

        // Assist force
        if (ShouldAssist)
        {
            Vec2 sub = new Vec2(AssistPoint.X, AssistPoint.Y) - _playerVec;
            double dist = sub.Length();
            _force += new Vec2(sub.X / dist * 2, sub.Y / dist * 2);
        }

        //imageToShow.Draw("BLOB_AREA: " + percBlob, new Point(10, 20), FontFace.HersheyPlain, 1, new Bgra(255, 255, 255, 255), 1);
        if (DO_DRAWING)
        {
            Screen.Draw(new Rectangle((int)(_playerPos.X - 3), (int)(_playerPos.Y - 3), 6, 6), new Bgra(0, 255, 0, 255), 2);
            Screen.Draw(new CircleF(_playerPos, _detectionRadius), new Bgra(0, 255, 255, 255), 1);
            if (ShouldAssist)
            {
                Screen.Draw(new LineSegment2DF(_playerPos, AssistPoint), new Bgra(128, 0, 255, 255), 2);
                Screen.Draw("ASSIST", new Point(10, 40), FontFace.HersheyPlain, 1, new Bgra(0, 255, 0, 255), 1);
            }
            // Draw force vector
            Screen.Draw(
                new LineSegment2DF(_playerPos, new PointF((float)(_playerPos.X + _force.X), (float)(_playerPos.Y + _force.Y))),
                new Bgra(0, 128, 255, 255), 5);
            Screen.Draw(powerDetectionROI, new Bgra(255, 255, 0, 255), 1);
            Screen.Draw(bulletDetectionROI, new Bgra(0, 0, 255, 255), 1);
            if (DoMovement)
            {
                Screen.Draw("DO_MOVEMENT", new Point(10, 20), FontFace.HersheyPlain, 1, new Bgra(0, 255, 0, 255), 1);
            }
            _form.imageBox.Image = Screen;
        }
        process.Dispose();
    }
    catch (Exception e)
    {
        Console.WriteLine(e.ToString());
    }
}
/// <summary>
/// Updates the list of tracks based on the current blobs.
/// </summary>
/// <param name="blobs">List of blobs.</param>
/// <param name="tracks">List of tracks.</param>
/// <param name="thDistance">Max distance to determine when a track and a blob match.</param>
/// <param name="thInactive">Max number of frames a track can be inactive.</param>
/// <param name="thActive">If a track becomes inactive but has been active for fewer than thActive frames, the track will be deleted.</param>
/// <remarks>
/// Tracking based on:
/// A. Senior, A. Hampapur, Y-L Tian, L. Brown, S. Pankanti, R. Bolle. Appearance Models for
/// Occlusion Handling. Second International Workshop on Performance Evaluation of Tracking and
/// Surveillance Systems &amp; CVPR'01. December, 2001.
/// (http://www.research.ibm.com/peoplevision/PETS2001.pdf)
/// </remarks>
public static void UpdateTracks(CvBlobs blobs, CvTracks tracks, double thDistance, int thInactive, int thActive)
{
    if (blobs == null)
        throw new ArgumentNullException(nameof(blobs));
    blobs.UpdateTracks(tracks, thDistance, thInactive, thActive);
}
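A typical per-frame call pattern for this wrapper, assuming the blobs come from labeling the current foreground mask; the thresholds below are illustrative, not tuned values from the source:

    CvTracks tracks = new CvTracks();
    // ... per frame:
    CvBlobs blobs = new CvBlobs(foregroundMask);   // label the current binary mask (assumed input)
    blobs.FilterByArea(100, int.MaxValue);
    UpdateTracks(blobs, tracks, 30.0, 15, 5);      // thDistance, thInactive, thActive
    foreach (var kv in tracks)
        Console.WriteLine("track {0} at {1}", kv.Key, kv.Value.Centroid);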
/// <summary>
/// Finds the fingertip position.
/// </summary>
/// <param name="targetName"></param>
/// <returns></returns>
public System.Drawing.Point FingerFinder(string targetName = "")
{
    //To find the finger point which is on the graphics
    //camera parameters
    capFlag = true;
    Mat tempImg, flipImg;
    Mat grayImg, renderImg;
    Mat srcImgbyCam = new Mat();
    double centerX = 0.0, centerY = 0.0;
    ColorRecognition iRo = new ColorRecognition();

    var capture = new VideoCapture(CaptureDevice.Any)
    {
        //set the capture frame size and frame rate
        FrameHeight = 480, FrameWidth = 320,
        //FrameHeight = 640, FrameWidth = 480,
    };

    using (capture)
    {
        while (capFlag)
        {
            //capture an image from the camera
            capFlag = capture.Read(srcImgbyCam);
            if (srcImgbyCam.Empty())
            {
                break;
            }

            //camera test window
            Cv2.ImShow("srcImgbyCam", srcImgbyCam);

            flipImg = srcImgbyCam.Clone();
            flipImg = flipImg.Flip(FlipMode.XY);
            tempImg = Mat.Zeros(srcImgbyCam.Size(), srcImgbyCam.Type());
            grayImg = new Mat(srcImgbyCam.Size(), MatType.CV_8UC1);

            //finger detection
            iRo.FindColor(ref flipImg, ref tempImg);
            Cv2.CvtColor(tempImg, grayImg, ColorConversionCodes.BGR2GRAY);
            Cv2.Threshold(grayImg, grayImg, 100, 255, ThresholdTypes.Binary);

            //labeling
            //CvBlobs blobs = new CvBlobs(grayImg2Ipl);
            CvBlobs blobs = new CvBlobs(grayImg);
            renderImg = new Mat(srcImgbyCam.Size(), MatType.CV_8UC3);
            //draw the labeling result
            blobs.RenderBlobs(srcImgbyCam, renderImg);

            //take the green blob with the largest area
            CvBlob maxblob = blobs.LargestBlob();
            if (maxblob != null)
            {
                centerX = Math.Round(maxblob.Centroid.X, 2);
                centerY = Math.Round(maxblob.Centroid.Y, 2);
                //manual calibration
                centerX = (int)((centerX - 12) / 12.87);
                centerY = (int)((centerY - 43) / 12.40);
                //manual calibration II
                centerX = (int)((centerX - 2) * 2);
                centerY = (int)((centerY - 1) * 2);
                //for debugging
                textBox_CenterCoordinates.Text = centerX.ToString() + " , " + centerY.ToString();
            }

            int keyValue = Cv2.WaitKey(100);
            if (keyValue == 27)
            {
                Window.DestroyAllWindows();
                //release the resources
                tempImg.Release();
                flipImg.Release();
                grayImg.Release();
                renderImg.Release();
                srcImgbyCam.Release();
                capFlag = false;
                break; //close on the ESC key
            }
        }
    }
    return new System.Drawing.Point(Convert.ToInt32(centerX + movement.X), Convert.ToInt32(centerY + movement.Y));
}
private void ProcessFrame(object sender, EventArgs arg)
{
    Mat frame = new Mat();
    capture.Retrieve(frame, 0);
    Image<Hsv, Byte> currenthsvFrame = (frame.ToImage<Bgr, Byte>()).Convert<Hsv, Byte>();
    Image<Gray, Byte> color_one = new Image<Gray, Byte>(frame.Width, frame.Height);
    Image<Gray, Byte> color_two = new Image<Gray, Byte>(frame.Width, frame.Height);
    Image<Gray, Byte> color_three = new Image<Gray, Byte>(frame.Width, frame.Height);
    Image<Gray, Byte> color_four = new Image<Gray, Byte>(frame.Width, frame.Height);

    /*
     * Color one is red, color two is blue, color three is green, color four is yellow.
     * Green is on the right index finger, blue on the left index finger,
     * red on the right thumb, yellow on the left thumb.
     */
    /* Hsv hsv_min_color_one = new Hsv(0, 135, 110);
     * Hsv hsv_max_color_one = new Hsv(6, 255, 255);
     * Hsv hsv_min_color_two = new Hsv(112, 53, 10);
     * Hsv hsv_max_color_two = new Hsv(119, 255, 255);
     * Hsv hsv_min_color_three = new Hsv(68, 59, 80);
     * Hsv hsv_max_color_three = new Hsv(85, 255, 255);
     * Hsv hsv_min_color_four = new Hsv(20, 165, 165);
     * Hsv hsv_max_color_four = new Hsv(36, 255, 255);*/

    Hsv hsv_min_color_one = new Hsv(0, 135, 50);
    //Hsv hsv_max_color_one = new Hsv(6, 255, 255);
    Hsv hsv_max_color_one = new Hsv(8, 255, 255);
    Hsv hsv_min_color_two = new Hsv(112, 53, 10);
    Hsv hsv_max_color_two = new Hsv(119, 255, 255);
    /*
     * Hsv hsv_min_color_three = new Hsv(68, 59, 80);
     * Hsv hsv_max_color_three = new Hsv(85, 255, 255);
     * Hsv hsv_min_color_four = new Hsv(20, 165, 165);
     * Hsv hsv_max_color_four = new Hsv(36, 255, 255);
     */
    Hsv hsv_min_color_three = new Hsv(65, 70, 0);
    Hsv hsv_max_color_three = new Hsv(109, 255, 255);
    Hsv hsv_min_color_four = new Hsv(18, 155, 155);
    Hsv hsv_max_color_four = new Hsv(35, 255, 255);

    color_one = currenthsvFrame.InRange(hsv_min_color_one, hsv_max_color_one);
    color_two = currenthsvFrame.InRange(hsv_min_color_two, hsv_max_color_two);
    color_three = currenthsvFrame.InRange(hsv_min_color_three, hsv_max_color_three);
    color_four = currenthsvFrame.InRange(hsv_min_color_four, hsv_max_color_four);

    //Blob detection
    #region Blob Detection
    //Color one detection
    Image<Bgr, Byte> smoothedFrame_cone = new Image<Bgr, byte>(currenthsvFrame.Size);
    CvInvoke.GaussianBlur(color_one, smoothedFrame_cone, new Size(3, 3), 1); // filter out noise
    Mat forgroundMask_cone = new Mat();
    fgDetector.Apply(smoothedFrame_cone, forgroundMask_cone);
    CvBlobs blobs_color_one = new CvBlobs();
    blobDetector.Detect(forgroundMask_cone.ToImage<Gray, byte>(), blobs_color_one);
    blobs_color_one.FilterByArea(minarea, maxarea);

    //Color two blob detection
    Image<Bgr, Byte> smoothedFrame_ctwo = new Image<Bgr, byte>(currenthsvFrame.Size);
    CvInvoke.GaussianBlur(color_two, smoothedFrame_ctwo, new Size(3, 3), 1); // filter out noise
    Mat forgroundMask_ctwo = new Mat();
    fgDetector.Apply(smoothedFrame_ctwo, forgroundMask_ctwo);
    CvBlobs blobs_color_two = new CvBlobs();
    blobDetector.Detect(forgroundMask_ctwo.ToImage<Gray, byte>(), blobs_color_two);
    blobs_color_two.FilterByArea(minarea, maxarea);

    //Color three blob detection
    Image<Bgr, Byte> smoothedFrame_cthree = new Image<Bgr, byte>(currenthsvFrame.Size);
    CvInvoke.GaussianBlur(color_three, smoothedFrame_cthree, new Size(3, 3), 1); // filter out noise
    Mat forgroundMask_cthree = new Mat();
    fgDetector.Apply(smoothedFrame_cthree, forgroundMask_cthree);
    CvBlobs blobs_color_three = new CvBlobs();
    blobDetector.Detect(forgroundMask_cthree.ToImage<Gray, byte>(), blobs_color_three);
    blobs_color_three.FilterByArea(minarea, maxarea);

    //Color four detection
    Image<Bgr, Byte> smoothedFrame_cfour = new Image<Bgr, byte>(currenthsvFrame.Size);
    CvInvoke.GaussianBlur(color_four, smoothedFrame_cfour, new Size(3, 3), 1); // filter out noise
    Mat forgroundMask_cfour = new Mat();
    fgDetector.Apply(smoothedFrame_cfour, forgroundMask_cfour);
    CvBlobs blobs_color_four = new CvBlobs();
    blobDetector.Detect(forgroundMask_cfour.ToImage<Gray, byte>(), blobs_color_four);
    blobs_color_four.FilterByArea(minarea, maxarea);
    #endregion

    //Marker interpretation
    float[] cent_color_one = new float[2];
    float[] cent_color_two = new float[2];
    float[] cent_color_three = new float[2];
    float[] cent_color_four = new float[2];
    cent_color_one[0] = 0;
    cent_color_one[1] = 0;
    cent_color_two[0] = 0;
    cent_color_two[1] = 0;
    cent_color_three[0] = green_history_x;
    cent_color_three[1] = green_history_y;
    cent_color_four[0] = 0;
    cent_color_four[1] = 0;

    //Cursor control with the green marker
    if (blobs_color_three.Count == 1 || mouseflag != 0)
    {
        foreach (var pair in blobs_color_three)
        {
            CvBlob b = pair.Value;
            CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
            cursor_history_x.Enqueue((int)b.Centroid.X);
            cursor_history_y.Enqueue((int)b.Centroid.Y);
            cursor_history_x.Dequeue();
            cursor_history_y.Dequeue();
            cent_color_three[0] = (int)b.Centroid.X;
            cent_color_three[1] = (int)b.Centroid.Y;
            /*int temp_sum = 0;
             * int[] temp = cursor_history_x.ToArray();
             * for (int i = 0; i < queue_cursor_length; i++)
             *     temp_sum += temp[i];
             * cent_color_three[0] = temp_sum / queue_cursor_length;
             *
             * temp_sum = 0;
             * temp = cursor_history_y.ToArray();
             * for (int i = 0; i < queue_cursor_length; i++)
             *     temp_sum += temp[i];
             * cent_color_three[1] = temp_sum / queue_cursor_length;
             *
             * green_history_x = (int)cent_color_three[0];
             * green_history_y = (int)cent_color_three[1];*/
        }

        //Cursor movement, primary screen
        // if (Screen.AllScreens.Length == 1)
        {
            //Cursor.Position = new Point(Screen.PrimaryScreen.Bounds.Width - (int)(cursor_mul * (int)cent_color_three[0] * Screen.PrimaryScreen.Bounds.Width / capture.Width), (int)(cursor_mul * (int)cent_color_three[1]) * Screen.PrimaryScreen.Bounds.Height / capture.Height);
            Cursor.Position = new Point(
                (int)((cursor_mul_x * (int)cent_color_three[0]) * (Screen.PrimaryScreen.Bounds.Width) / capture.Width) + cursor_add_x,
                (((int)cursor_mul_y * (int)cent_color_three[1]) * Screen.PrimaryScreen.Bounds.Height / capture.Height) + cursor_add_y);
            //mouse_event(MOUSEEVENTF_MOVE, ((-(int)cent_color_three[0] + green_history_x)), ((-(int)cent_color_three[1] + green_history_y)), 0, 0);
            //mouse_event(MOUSEEVENTF_ABSOLUTE, 0, 0, 0, 0);
        }

        //Secondary screen
        //Cursor.Position = new Point((int)(cursor_mul * (int)cent_color_three[0] * Screen.AllScreens[1].Bounds.Width / capture.Width), (int)(cursor_mul * (int)cent_color_three[1]) * Screen.AllScreens[1].Bounds.Height / capture.Height);

        //Two screens at the same time
        /* if (Screen.AllScreens.Length == 2)
         * {
         *     Cursor.Position = new Point((int)(cursor_mul * (int)cent_color_three[0] * (Screen.AllScreens[1].Bounds.Width + Screen.AllScreens[0].Bounds.Width) / capture.Width),
         *         (int)(cursor_mul * (int)cent_color_three[1]) * (Screen.AllScreens[1].Bounds.Height + Screen.AllScreens[0].Bounds.Height) / capture.Height);
         * }
         * //Three screens at the same time
         * if (Screen.AllScreens.Length == 3)
         * {
         *     Cursor.Position = new Point((int)(cursor_mul * (int)cent_color_three[0] * (Screen.AllScreens[1].Bounds.Width + Screen.AllScreens[0].Bounds.Width + Screen.AllScreens[2].Bounds.Width) / capture.Width),
         *         (int)(cursor_mul * (int)cent_color_three[1]) * (Screen.AllScreens[1].Bounds.Height + Screen.AllScreens[0].Bounds.Height + Screen.AllScreens[0].Bounds.Height) / capture.Height);
         * }
         */

        //Check for clicks
        if (blobs_color_one.Count == 1)
        {
            foreach (var pair in blobs_color_one)
            {
                CvBlob b = pair.Value;
                CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
                cent_color_one[0] = b.Centroid.X;
                cent_color_one[1] = b.Centroid.Y;
            }
            if (blobs_color_three.Count == 0)
            {
                if (ccount == 1)
                {
                    //double click
                    mouse_event(MOUSEEVENTF_ABSOLUTE | MOUSEEVENTF_LEFTDOWN, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0);
                    mouse_event(MOUSEEVENTF_ABSOLUTE | MOUSEEVENTF_LEFTUP, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0);
                    Thread.Sleep(150);
                    mouse_event(MOUSEEVENTF_ABSOLUTE | MOUSEEVENTF_LEFTDOWN, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0);
                    mouse_event(MOUSEEVENTF_ABSOLUTE | MOUSEEVENTF_LEFTUP, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0);
                }
                else
                {
                    ccount--;
                }
            }
            else if ((cent_color_one[0] - cent_color_three[0]) * (cent_color_one[0] - cent_color_three[0]) + (cent_color_one[1] - cent_color_three[1]) * (cent_color_one[1] - cent_color_three[1]) <= 5000)
            {
                ccount = safevalue;
                mouseflag = 1;
                //single click
                mouse_event(MOUSEEVENTF_LEFTDOWN, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0);
                mouse_event(MOUSEEVENTF_LEFTUP, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0);
                mouse_event(MOUSEEVENTF_ABSOLUTE, 0, 0, 0, 0);
            }
        }
        else
        {
            ccount = 0;
        }
    }

    if (blobs_color_two.Count == 1)
    {
        foreach (var pair in blobs_color_two)
        {
            CvBlob b = pair.Value;
            CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
            cent_color_two[0] = b.Centroid.X;
            cent_color_two[1] = b.Centroid.Y;
        }
        if (blobs_color_three.Count == 1 && ((cent_color_three[0] - cent_color_two[0]) * (cent_color_three[0] - cent_color_two[0]) + (cent_color_three[1] - cent_color_two[1]) * (cent_color_three[1] - cent_color_two[1]) <= 5000))
        {
            //right click
            mouse_event(MOUSEEVENTF_ABSOLUTE | MOUSEEVENTF_RIGHTDOWN, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0);
            mouse_event(MOUSEEVENTF_ABSOLUTE | MOUSEEVENTF_RIGHTUP, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0);
        }
        else //if(blobs_g.Count == 0)
        {
            //MessageBox.Show("d");
            //Cursor.Position = new Point(Screen.PrimaryScreen.Bounds.Width - (int)(cursor_mul * green_history_x * Screen.PrimaryScreen.Bounds.Width / capture.Width), (int)(cursor_mul * green_history_y) * Screen.PrimaryScreen.Bounds.Height / capture.Height);
            //mouse_event(MOUSEEVENTF_VWHEEL, 0, 0, (scroll_y - (int)cent_color_two[1]) * scroll_mul_v, 0);
            mouse_event(MOUSEEVENTF_HWHEEL, 0, 0, (uint)((scroll_x - (int)cent_color_two[0]) * scroll_mul_h), 0);
            mouse_event(MOUSEEVENTF_VWHEEL, (uint)Cursor.Position.X, (uint)Cursor.Position.Y, 50, 0);
            //mouse_event(MOUSEEVENTF_HWHEEL, 0, 0, 50, 0);
            scroll_y = (int)cent_color_two[1];
            scroll_x = (int)cent_color_two[0];
        }
    }

    captureImageBox.Image = frame;
    grayscaleImageBox.Image = color_one;
    smoothedGrayscaleImageBox.Image = color_two;
    cannyImageBox.Image = color_three;
    Color4ImageBox.Image = color_four;
}
private void ProcessFrame(object sender, EventArgs arg) { Mat frame = _capture.QueryFrame(); if (frame == null) { if (mEtapa == 1) { preencherParametrosMapeamento(); } _capture.Dispose(); return; } mContadorDeFrames++; if (mEtapa == 0) { verificarEatualizarParametrosCalibracao(); } _capture.Retrieve(frame, 0); Image <Bgr, Byte> smoothedFrame = new Image <Bgr, byte>(frame.Size); CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(parametros.AlphaMediaMovel, parametros.AlphaMediaMovel), parametros.AlphaMediaMovel); //filter out noises // use the BG/FG detector to find the forground mask Mat forgroundMask = new Mat(); mDetector.Apply(smoothedFrame, forgroundMask); //CvInvoke.AbsDiff(smoothedFrame, forgroundMask.ToImage<Bgr, byte>(), vPlanoFundo); mblobs = new CvBlobs(); mBlobDetector.Detect(forgroundMask.ToImage <Gray, byte>(), mblobs); mblobs.FilterByArea(100, int.MaxValue); if (mEtapa == 0) { mJanelaCalibracao.Imagem.Image = frame; Mat vCopiaMenorBinaria = new Mat(); CvInvoke.Resize(forgroundMask, vCopiaMenorBinaria, new Size(0, 0), 0.7, 0.7, Inter.Area); mJanelaCalibracao.PlanoDeFundo.Image = smoothedFrame; mJanelaCalibracao.Objetos.Image = vCopiaMenorBinaria; } if (mEtapa == 1) { mJanelaAreaRestrita.Imagem.Image = frame; } if (mEtapa == 2) { mJanelaMonitoramento.ImagemMonitorada.Image = frame; } mImagemColorida = frame; if (mEtapa == 0) { desenharParametroTamanhoPessoa(); desenharRetanguloPessoa(); } if (mEtapa == 1) { desenharEMapear(); } if (mEtapa == 2) { atualizarParametros(parametros); desenharEprocessar(); } }
//private void CountBlobs(WriteableBitmap writableBitmap)
private void CountBlobs(WriteableBitmap writeableBitmap)
{
    Mat imgIR = writeableBitmap.ToMat(); // CV_16UC1
    imgIR.ConvertTo(imgIR, MatType.CV_8UC1, 1.0 / 256.0);
    Mat imgIRbin = new Mat(imgIR.Rows, imgIR.Cols, MatType.CV_8UC1);
    Cv2.Threshold(imgIR, imgIRbin, 225, 255, ThresholdTypes.Binary);
    //imgIR.SaveImage("D:/imgIR.png");
    CvBlobs blobs = new CvBlobs(imgIRbin);
    blobs.FilterByArea(30, 2000);
    //label_sample.Content = blobs.Count().ToString();

    // Add the blob rectangles to the Canvas
    canvas_blob.Children.Clear();
    list_arr_index.Clear();
    //label_sample.Content = blobs.Count.ToString();
    if (blobs.Count > 0)
    {
        foreach (KeyValuePair<int, CvBlob> item in blobs)
        {
            int labelValue = item.Key;
            CvBlob blob = item.Value;
            Rectangle blob_rect = new Rectangle
            {
                Width = blob.Rect.Width,
                Height = blob.Rect.Height,
                Stroke = Brushes.Red,
                StrokeThickness = 2
            };
            canvas_blob.Children.Add(blob_rect);
            Canvas.SetLeft(blob_rect, blob.Rect.Left);
            Canvas.SetTop(blob_rect, blob.Rect.Top);
        }

        // Extract the pixel indices of each blob from blobs ////////////////////
        // Get the label data from blobs
        LabelData labelBlobs = blobs.Labels;
        // Flatten the label data into a 1-D array
        int[] label_blobs_vector = new int[labelBlobs.Rows * labelBlobs.Cols];
        int ii = 0;
        //for (int i_col = 0; i_col < labelBlobs.Cols; i_col++)
        for (int i_row = 0; i_row < labelBlobs.Rows; i_row++)
        {
            //for (int i_row = 0; i_row < labelBlobs.Rows; i_row++)
            for (int i_col = 0; i_col < labelBlobs.Cols; i_col++)
            {
                label_blobs_vector[ii] = labelBlobs[i_row, i_col];
                ii += 1;
            }
        }

        // For each blob, build the array of indices whose label matches the
        // blob's key, and store it in list_arr_index
        //int count_blobs = blobs.Count;
        //label_sample.Content = list_arr_index.Count().ToString();
        foreach (KeyValuePair<int, CvBlob> item in blobs)
        {
            int count_blobs = blobs.Count();
            int labelvalue = item.Key;
            // Build the index array whose labels match labelvalue
            int area_blob = item.Value.Area;
            int[] arr_idx_label = new int[area_blob];
            ii = 0;
            for (int i_lab = 0; i_lab < label_blobs_vector.Length; i_lab++)
            {
                if (label_blobs_vector[i_lab] == labelvalue)
                {
                    arr_idx_label[ii] = i_lab;
                    ii += 1;
                }
            }
            //int[] arr_idx_label = label_blobs_vector.FindIndex<int>(label => label == labelvalue);
            list_arr_index.Add(arr_idx_label);
        }
        //label_sample.Content = list_arr_index.Count().ToString();
        Console.WriteLine("hoge"); // for setting a breakpoint
    }
}
private void ProcessFrame(object sender, EventArgs e) { DateTime methodStart = DateTime.Now; Image <Bgr, Byte> redFiltered = null; Image <Bgr, Byte> ycbcrFiltered = null; Image <Gray, Byte> blobImage = null; Image <Bgr, Byte> rawFrame = videoCapture.QueryFrame().ToImage <Bgr, Byte>(); rawFrame = rawFrame.Resize(320, 240, Emgu.CV.CvEnum.Inter.Cubic); rawFrame._EqualizeHist(); if (detectFire) { redFiltered = redTreshhold(rawFrame); ycbcrFiltered = yCbCrThreshold(redFiltered); blobImage = binaryTreshold(ycbcrFiltered); CvBlobs blobs = new CvBlobs(); CvBlobDetector blobDetector = new CvBlobDetector(); uint blobCount = blobDetector.Detect(blobImage, blobs); int minArea = (int)(rawFrame.Width * rawFrame.Height * 0.002); foreach (KeyValuePair <uint, CvBlob> blobPair in blobs) { if (blobPair.Value.Area > minArea) { Rectangle rect = blobPair.Value.BoundingBox; rawFrame.Draw(rect, new Bgr(0, 255, 0), 5); } } } picPreview.Image = rawFrame.Bitmap; if (detectFire) { picRedFilter.Image = redFiltered.Bitmap; picFinal.Image = blobImage.Bitmap; } else { picRedFilter.Image = null; picFinal.Image = null; } if (frameSkip) { int timePassed = (DateTime.Now - methodStart).Milliseconds; int framesToSkip = timePassed / frameInterval; for (int i = 0; i < framesToSkip; i++) { videoCapture.QueryFrame(); } } int currentFrame = (int)videoCapture.GetCaptureProperty(CapProp.PosFrames); int frameCount = (int)videoCapture.GetCaptureProperty(CapProp.FrameCount); if (currentFrame != -1 && frameCount != -1) { trbSeek.Value = currentFrame; if (currentFrame == frameCount) { CloseVideo(); } } }
public void detectMotion()
{
    if (motionZONE != null) { motionDetector.MotionZones = motionZONE; }
    motionRatio = motionDetector.ProcessFrame(img.Bitmap);
    if (motionRatio > 0.4f)
    {
        taINCIDENTS.Insert("NA", "+ Quick motion detected or more than 2% of frame changed", "INCIDENT TYPE 1", DateTime.Now.ToString());
        taINCIDENTS.Fill(ds.INCIDENTS);
    }

    // FREEZE DETECTION
    if (!isSIDECAM)
    {
        Mat forgroundMask = new Mat();
        fgDetector.Apply(img, forgroundMask);
        CvBlobs blobs = new CvBlobs(); // note: never populated; the counting below uses AForge's BlobCounter instead
        blobs.FilterByArea(100, int.MaxValue);
        BlobCounter blobcounter = new BlobCounter();
        blobcounter.MinHeight = 100;
        blobcounter.MaxWidth = 20;
        blobcounter.ObjectsOrder = ObjectsOrder.Size;
        blobcounter.ProcessImage(forgroundMask.Bitmap);
        Rectangle[] rect = blobcounter.GetObjectsRectangles();
        if (motionRatio == 0 && rect.Length == 0)
        {
            taINCIDENTS.Insert("NA", "+ FREEZE DETECTED IN ATM", "INCIDENT TYPE 3", DateTime.Now.ToString());
            taINCIDENTS.Fill(ds.INCIDENTS);
        }
    }
    if (isSIDECAM)
    {
        // PROCESS BLOBS
        Mat forgroundMask = new Mat();
        fgDetector.Apply(img, forgroundMask);
        CvBlobs blobs = new CvBlobs(); // note: never populated; see BlobCounter below
        blobs.FilterByArea(100, int.MaxValue);
        BlobCounter blobcounter = new BlobCounter();
        blobcounter.MinHeight = 100;
        blobcounter.MaxWidth = 20;
        blobcounter.ObjectsOrder = ObjectsOrder.Size;
        blobcounter.ProcessImage(forgroundMask.Bitmap);
        Rectangle[] rect = blobcounter.GetObjectsRectangles();
        Graphics g = Graphics.FromImage(img.Bitmap);
        BLOBS = new Rectangle[10];
        int BlobCPT = 0;
        if (motionRatio == 0 && rect.Length == 0)
        {
            taINCIDENTS.Insert("NA", "+ FREEZE DETECTED IN SIDE CAMERA", "INCIDENT TYPE 3", DateTime.Now.ToString());
            taINCIDENTS.Fill(ds.INCIDENTS);
        }
        if (rect.Length > 0)
        {
            Rectangle objec = rect[0];
            Graphics graphic = Graphics.FromImage(img.Bitmap);
            using (Pen pen = new Pen(Color.Red, 2))
                foreach (Rectangle rec in rect)
                {
                    if ((rec.Width > 50) && (rec.Height > 100))
                    {
                        BlobCPT++;
                        if (rec.Height > 200)
                        {
                            g.DrawRectangle(pen, rec);
                            BLOBS[0] = rec;
                        }
                        else
                        {
                            g.DrawRectangle(new Pen(Color.Black, 2), rec);
                        }
                    }
                }
            g.Dispose();
        }
    }
}
/// <summary>
/// Label the connected parts of a binary image. (cvLabel)
/// </summary>
/// <param name="img">Input binary image (depth=IPL_DEPTH_8U and num. channels=1).</param>
/// <param name="blobs">List of blobs.</param>
/// <returns>Number of pixels that have been labeled.</returns>
public static int Label(Mat img, CvBlobs blobs)
{
    if (img == null)
        throw new ArgumentNullException(nameof(img));
    if (blobs == null)
        throw new ArgumentNullException(nameof(blobs));
    return blobs.Label(img);
}
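A minimal usage sketch for the wrapper above; binaryImg is a placeholder for an 8-bit single-channel Mat prepared elsewhere, not a name from the original source:

// Label the connected components of a binary image and list their areas.
CvBlobs blobs = new CvBlobs();
int labeledPixels = Label(binaryImg, blobs); // forwards to blobs.Label(binaryImg)
Console.WriteLine("{0} pixels across {1} blobs", labeledPixels, blobs.Count);
foreach (KeyValuePair<int, CvBlob> item in blobs)
    Console.WriteLine("blob {0}: area = {1}", item.Key, item.Value.Area);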
/// <summary> /// Draws or prints information about blobs. (cvRenderBlobs) /// </summary> /// <param name="blobs">List of blobs.</param> /// <param name="imgSource">Input image (depth=IPL_DEPTH_8U and num. channels=3).</param> /// <param name="imgDest">Output image (depth=IPL_DEPTH_8U and num. channels=3).</param> public static void RenderBlobs(CvBlobs blobs, Mat imgSource, Mat imgDest) { RenderBlobs(blobs, imgSource, imgDest, (RenderBlobsMode) 0x000f, 1.0); }
// Use this for initialization
void Start()
{
    using (var video = new VideoCapture(0))
    {
        // matrices that hold the captured frames
        var frame = new Mat();
        var gray = new Mat();
        // show the camera video until a key is pressed
        while (Cv2.WaitKey(1) == -1)
        {
            // read a frame
            video.Read(frame);
            // convert to grayscale
            Cv2.CvtColor(frame, gray, ColorConversionCodes.BGR2GRAY);
            // binarize
            using (var binary = gray.Threshold(100, 255, ThresholdTypes.Otsu | ThresholdTypes.Binary))
            {
                // run the labeling
                CvBlobs blobs = new CvBlobs(binary);
                // create images with the same size and channel count as the input
                using (var render = new Mat(frame.Rows, frame.Cols, MatType.CV_8UC3, 3))
                using (var img = new Mat(frame.Rows, frame.Cols, MatType.CV_8UC3, 3))
                {
                    // render the labeling result
                    blobs.RenderBlobs(frame, render);
                    CvBlob maxBlob = blobs.LargestBlob();
                    Debug.Log(maxBlob.Rect);
                    Debug.Log(maxBlob.Centroid);
                    Debug.Log(maxBlob.Area);
                    // read out each blob's (contour element's) information
                    //foreach (KeyValuePair<int, CvBlob> item in blobs)
                    //{
                    //    int labelValue = item.Key;
                    //    CvBlob blob = item.Value;
                    //    // bounding rectangle (top-left coordinates (x,y), width and height)
                    //    Console.WriteLine("Bounding rect: {0}", blob.Rect);
                    //    // area
                    //    Console.WriteLine("Area: {0}", blob.Area);
                    //    // centroid
                    //    Console.WriteLine("Centroid: {0}", blob.Centroid);
                    //    Debug.Log(blob.Centroid);
                    //    // angle
                    //    Console.WriteLine("Angle: {0}", blob.Angle());
                    //    // label value
                    //    Console.WriteLine("Label: {0}", blob.Label);
                    //    // get the contour (keys only; nothing special here)
                    //    Console.WriteLine("Contour: {0}", blob.Contour);
                    //    // get the contour chain code
                    //    CvContourChainCode cc = blob.Contour;
                    //    // render it
                    //    cc.Render(img);
                    //    // get and print the perimeter
                    //    double perimeter = cc.Perimeter();
                    //    Console.WriteLine("Perimeter: {0}", perimeter);
                    //    // spacer (no particular meaning)
                    //    Console.WriteLine("");
                    //}
                    //using (new Window("frame", frame))
                    //using (new Window("binary", binary))
                    using (new Window("render", render)) ;
                    //using (new Window("img", img))
                    //{
                    //    Cv2.WaitKey();
                    //}
                }
            }
        }
    }
}
/// <summary> /// Draw a binary image with the blobs that have been given. (cvFilterLabels) /// </summary> /// <param name="imgIn">Input image (depth=IPL_DEPTH_LABEL and num. channels=1).</param> /// <param name="imgOut">Output binary image (depth=IPL_DEPTH_8U and num. channels=1).</param> /// <param name="blobs">List of blobs to be drawn.</param> public static void FilterLabels(IplImage imgIn, IplImage imgOut, CvBlobs blobs) { if (imgIn == null) throw new ArgumentNullException("imgIn"); if (imgOut == null) throw new ArgumentNullException("imgOut"); if (blobs == null) throw new ArgumentNullException("blobs"); CvBlobInvoke.cvb_cvFilterLabels(imgIn.CvPtr, imgOut.CvPtr, blobs.CvPtr); }
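A usage sketch under the assumption that binaryImg is an existing 8-bit binary IplImage; it keeps only mid-sized blobs and redraws them as a new binary image:

IplImage labelImg = new IplImage(binaryImg.Size, CvBlobLib.DepthLabel, 1);
IplImage filtered = new IplImage(binaryImg.Size, BitDepth.U8, 1);
CvBlobs blobs = new CvBlobs();
CvBlobLib.Label(binaryImg, labelImg, blobs);   // label all connected parts
CvBlobLib.FilterByArea(blobs, 100, 10000);     // drop very small and very large blobs
FilterLabels(labelImg, filtered, blobs);       // binary image of the surviving blobs only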
public IplImage ImageTreatment(IplImage img, out IplImage[] plateImages)
{
    IplImage tgray = null;
    IplImage gray = null;
    IplImage mainSubImage = null;
    IplImage tmpImage = null;
    IplImage tmpImage2 = null;
    CvBlobs blobs1 = null;
    CvBlobs blobs2 = null;
    List<IplImage> plates = null;
    CvRect subImageRect;
    plateImages = null;
    try
    {
        plates = new List<IplImage>();
        mainSubImage = ExtractSubImage(img, out subImageRect);
        tgray = new IplImage(mainSubImage.Size, BitDepth.U8, 1);
        mainSubImage.CvtColor(tgray, ColorConversion.RgbaToGray);
        blobs1 = PreProcessImage1(mainSubImage, tgray);
        blobs2 = PreProcessImage2(mainSubImage, tgray);
        tmpImage = img.Clone();
        tmpImage2 = mainSubImage.Clone();
        tmpImage.SetROI(subImageRect);
        if (null != blobs1 && blobs1.Count > 0)
        {
            IplImage[] plateImage = GetPlates(tmpImage, tmpImage2, blobs1, 2.4);
            if (null != plateImage) { plates.AddRange(plateImage); }
        }
        if (null != blobs2 && blobs2.Count > 0)
        {
            IplImage[] plateImage = GetPlates(tmpImage, tmpImage2, blobs2, 3.5);
            if (null != plateImage) { plates.AddRange(plateImage); }
        }
        tmpImage.ResetROI();
        if (gray != null) { Cv.ReleaseImage(gray); } // guard against releasing a null image
        gray = tmpImage;
    }
    finally
    {
        if (null != tmpImage2) { Cv.ReleaseImage(tmpImage2); }
        if (null != tgray) { Cv.ReleaseImage(tgray); }
        if (null != mainSubImage) { Cv.ReleaseImage(mainSubImage); }
    }
    if (plates.Count > 0) { plateImages = plates.ToArray(); }
    return (gray);
}
public Mat DoBinary()
{
    if (!SetImageReady) { return (null); }
    if (!ManualBinary) { DoAutoBinary(); } else { DoManualBinary(); }
    if (FilterMask != 0)
    {
        Mat structElement = CvInvoke.GetStructuringElement(ElementShape.Ellipse, new Size(FilterMask, FilterMask), new Point(-1, -1));
        CvInvoke.MorphologyEx(MatBinary, MatBinary, MorphOp.Close, structElement, new Point(-1, -1), 1, BorderType.Default, new MCvScalar(0, 0, 0));
        CvInvoke.MorphologyEx(MatBinary, MatBinary, MorphOp.Open, structElement, new Point(-1, -1), 1, BorderType.Default, new MCvScalar(0, 0, 0));
    }
    CvBlobs blobs = new CvBlobs();
    CvBlobDetector _blobDetector = new CvBlobDetector();
    _blobDetector.Detect(MatBinary.ToImage<Gray, byte>(), blobs);
    int maxwidth = 0;
    int width = 0;
    foreach (CvBlob item in blobs.Values)
    {
        width = item.BoundingBox.Right - item.BoundingBox.Left;
        if (width > maxwidth) { maxwidth = width; }
    }
    Image<Gray, byte> img1 = MatBinary.ToImage<Gray, byte>();
    if (!FillWafer)
    {
        // if there is too much noise, skip the fill-in
        if (blobs.Values.Count < 50)
        {
            int[] TopSide = new int[3840];
            for (int x = 0; x < ROI.Width; x++)
            {
                for (int y = 0; y < ROI.Height; y++)
                {
                    if (img1.Data[y, x, 0] == 0xFF)
                    {
                        TopSide[x] = y; // record the row of the first white pixel from the top
                        break;
                    }
                }
            }
            foreach (CvBlob item in blobs.Values)
            {
                // blobs that are too small are treated as noise
                if (item.BoundingBox.Right - item.BoundingBox.Left < maxwidth / 3)
                {
                    for (int y = item.BoundingBox.Top; y < item.BoundingBox.Bottom; y++)
                    {
                        for (int x = item.BoundingBox.Left; x < item.BoundingBox.Right; x++)
                        {
                            if (y > TopSide[x]) { img1.Data[y, x, 0] = 0x00; }
                        }
                    }
                }
            }
        }
    }
    MatBinary = img1.Mat;
    if (FillWafer)
    {
        Image<Gray, byte> img = MatBinary.ToImage<Gray, byte>();
        for (int x = 0; x < ROI.Width; x++)
        {
            for (int y = 0; y < ROI.Height; y++)
            {
                if (0xFF == img.Data[y, x, 0])
                {
                    for (int k = y; k < ROI.Height; k++) { img.Data[k, x, 0] = 0xFF; }
                    break;
                }
            }
        }
        MatBinary = img.Mat;
    }
    return (MatBinary);
}
private void ProcessFrame(object sender, EventArgs e)
{
    //String str = String.Format("withBall.jpg");
    //originalImg = CvInvoke.Imread(str)
    originalImg = capture.QueryFrame();
    Image<Bgr, Byte> outputImg = originalImg.ToImage<Bgr, Byte>();
    int imgWidth = originalImg.Width;
    int imgHeight = originalImg.Height;
    UMat grayImg = new UMat();
    //Convert BGR to gray
    CvInvoke.CvtColor(originalImg, grayImg, ColorConversion.Bgr2Gray);
    //use an image pyramid to remove noise
    UMat pyrDown = new UMat();
    CvInvoke.PyrDown(grayImg, pyrDown);
    CvInvoke.PyrUp(pyrDown, grayImg);
    UMat binaryImg = new UMat();
    //Find the potential plate region
    CvInvoke.Threshold(grayImg, binaryImg, 200, 255, ThresholdType.BinaryInv);
    Image<Gray, Byte> binaryImgG = binaryImg.ToImage<Gray, Byte>();
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    int[,] hierachy = CvInvoke.FindContourTree(binaryImgG, contours, ChainApproxMethod.ChainApproxNone);
    int maxArea = 0;
    int maxAreaContourIndex = 0;
    for (int idx = 0; idx < contours.Size; idx++)
    {
        //bool isChild = isChildContour(hierachy, idx);
        int numberOfChildren = GetNumberOfChildren(hierachy, idx);
        using (VectorOfPoint contour = contours[idx])
        {
            if (numberOfChildren > 3)
            {
                int area = (int)CvInvoke.ContourArea(contour);
                if (area > maxArea)
                {
                    maxArea = area; // remember the running maximum so later contours compare against it
                    maxAreaContourIndex = idx;
                }
            }
        }
    }
    Image<Gray, Byte> mask1 = new Image<Gray, Byte>(imgWidth, imgHeight);
    CvInvoke.DrawContours(mask1, contours, maxAreaContourIndex, new MCvScalar(255), -1);
    int openingFactor1 = 100;
    Image<Gray, Byte> plateMask = new Image<Gray, Byte>(imgWidth, imgHeight);
    plateMask = mask1.Erode(openingFactor1);
    plateMask = plateMask.Dilate(openingFactor1);
    CvBlobs blobs = new CvBlobs();
    CvBlobDetector blob_detector = new CvBlobDetector();
    //blobs.FilterByArea(10000, 1000000);
    blob_detector.Detect(plateMask, blobs);
    foreach (CvBlob blob in blobs.Values)
    {
        Rectangle r = blob.BoundingBox;
        outputImg.Draw(r, new Bgr(0, 255, 255), 4);
    }
    Image<Gray, Byte> invBinaryImgG = binaryImg.ToImage<Gray, Byte>();
    CvInvoke.BitwiseNot(invBinaryImgG, invBinaryImgG);
    Image<Gray, Byte> mask3 = plateMask.Clone();
    CvInvoke.BitwiseAnd(plateMask, invBinaryImgG, mask3);
    blob_detector.Detect(mask3, blobs);
    int patternSize = 20;
    int ballSize = 60;
    int tolerance = 10;
    int patternHigh = patternSize + tolerance;
    int patternLow = patternSize - tolerance;
    int ballHigh = ballSize + tolerance * 2;
    int ballLow = ballSize - tolerance * 2;
    blobs.FilterByArea(patternLow * patternLow, ballHigh * ballHigh);
    List<PointF> patternPoints = new List<PointF>();
    PointF ballPoint = new PointF();
    int numberOfPatternPointFound = 0;
    foreach (CvBlob blob in blobs.Values)
    {
        Rectangle r = blob.BoundingBox;
        if ((r.Height > patternLow) && (r.Height < patternHigh) && (r.Width > patternLow) && (r.Width < patternHigh))
        {
            outputImg.Draw(new CircleF(blob.Centroid, 2), new Bgr(0, 0, 255), 2);
            patternPoints.Add(blob.Centroid);
            numberOfPatternPointFound++;
        }
        if ((r.Height > ballLow) && (r.Height < ballHigh) && (r.Width > ballLow) && (r.Width < ballHigh))
        {
            outputImg.Draw(new CircleF(blob.Centroid, 5), new Bgr(0, 0, 255), 5);
            ballPoint = blob.Centroid;
        }
    }
    label14.Text = String.Format("{0}", numberOfPatternPointFound);
    List<PointF> sortedPatternPoints = new List<PointF>();
    // 1 for top-left, 2 for top-right, 3 for bottom-right, 4 for bottom-left
    List<int> pointType = new List<int>();
    PointF centerPoint = new PointF();
    foreach (PointF patternPoint in patternPoints)
    {
        centerPoint.X += patternPoint.X;
        centerPoint.Y += patternPoint.Y;
    }
    centerPoint.X /= numberOfPatternPointFound;
    centerPoint.Y /= numberOfPatternPointFound;
    x_position.Text = ballPoint.X.ToString();
    y_position.Text = ballPoint.Y.ToString();
    foreach (PointF patternPoint in patternPoints)
    {
        if ((patternPoint.X < centerPoint.X) && (patternPoint.Y < centerPoint.Y)) { sortedPatternPoints.Add(patternPoint); pointType.Add(1); }
        else if ((patternPoint.X > centerPoint.X) && (patternPoint.Y < centerPoint.Y)) { sortedPatternPoints.Add(patternPoint); pointType.Add(2); }
        else if ((patternPoint.X > centerPoint.X) && (patternPoint.Y > centerPoint.Y)) { sortedPatternPoints.Add(patternPoint); pointType.Add(3); }
        else if ((patternPoint.X < centerPoint.X) && (patternPoint.Y > centerPoint.Y)) { sortedPatternPoints.Add(patternPoint); pointType.Add(4); }
    }
    int id = 0;
    foreach (PointF patternPoint in sortedPatternPoints)
    {
        CvInvoke.PutText(outputImg, String.Format("{0}", pointType[id++]), new System.Drawing.Point((int)patternPoint.X, (int)patternPoint.Y), FontFace.HersheyComplex, 1.0, new Bgr(0, 255, 0).MCvScalar);
    }
    imageBox1.Image = outputImg;
}
private void capture_ImageGrabbed(object sender, EventArgs e)
{
    try
    {
        Mat iMat = new Mat();
        capture.Retrieve(iMat);
        img = iMat.ToImage<Bgr, byte>();
        FrameOld = img.Clone();
        Bitmap imgB = FrameOld.Bitmap;
        Gray_Frame = img.Convert<Gray, Byte>().Clone();
        // When enabled, EqualizeHist and GammaCorrect adjust contrast and brightness
        if (EqualizeHist) { Gray_Frame._EqualizeHist(); } // Equalize histogram
        if (GammaCorrect) { Gray_Frame._GammaCorrect(valCorrectGamma); } // Correct gamma
        if (GaussianBlure) { CvInvoke.GaussianBlur(Gray_Frame, Gray_Frame, new Size(3, 3), 1); }
        grayImg.Image = Gray_Frame;
        faces = _face.DetectMultiScale(Gray_Frame, 1.3, 5, new System.Drawing.Size(24, 24), System.Drawing.Size.Empty);
        lblDetectedFaces.BeginInvoke((Action)(() =>
        {
            // Update the detected-faces count label
            int nbrFaces = faces.Length;
            lblDetectedFaces.Text = nbrFaces + "";
        }));
        foreach (Rectangle face in faces) { img.Draw(face, new Bgr(System.Drawing.Color.Blue), 2); }
        // Detecting bodies
        if (bodyDetection)
        {
            _bodies = _body.DetectMultiScale(Gray_Frame, 1.3, 5, new System.Drawing.Size(24, 24), System.Drawing.Size.Empty);
            foreach (Rectangle body in _bodies) { img.Draw(body, new Bgr(System.Drawing.Color.Red), 2); }
            lblBodies.BeginInvoke((Action)(() =>
            {
                // Update the detected-bodies count label
                lblBodies.Text = _bodies.Length + "";
            }));
        }
        // captureImageBox.Image = img;
        // ##### Histogram
        Histo.Clear();
        Histo.Calculate(new Image<Gray, Byte>[] { Gray_Frame }, true, null);
        Mat hMat = new Mat();
        Histo.CopyTo(hMat);
        myHistogram.BeginInvoke((Action)(() =>
        {
            myHistogram.ClearHistogram();
            myHistogram.AddHistogram("Gray Histogram ", Color.Red, hMat, 256, new float[] { 0, 256 });
            myHistogram.Refresh();
        }));
        //########################
        //######## Frame differences: detectMotionAnoramlities
        MoveTowards moveTowardsFilter = new MoveTowards();
        if (detectMotionAnoramlities)
        {
            #region Contour
            VectorOfVectorOfPoint polygon = new VectorOfVectorOfPoint(); // to draw the perimeter
            Image<Gray, byte> gray = img.Convert<Gray, byte>(); // convert source to gray
            Image<Gray, byte> thresh = gray.PyrDown().PyrUp(); // attempt to make edges more distinct
            Image<Gray, byte> cannyImg = thresh.Canny(new Gray(10).Intensity, new Gray(50).Intensity);
            Mat tmpMat = new Mat();
            Image<Gray, byte> imOut = new Image<Gray, byte>(img.Width, img.Height, new Gray(0));
            CvInvoke.FindContours(cannyImg, polygon, tmpMat, RetrType.External, ChainApproxMethod.ChainApproxTc89Kcos);
            CvInvoke.DrawContours(imOut, polygon, -1, new MCvScalar(255, 0, 0), 2);
            pBoxContours.Image = imOut.Bitmap;
            #endregion
            Mat forgroundMask = new Mat();
            fgDetector.Apply(img, forgroundMask);
            pBoxBackground.Image = forgroundMask.Bitmap;
            f = motionDetector.ProcessFrame(imgB);
            lblDetectedMotions.BeginInvoke((Action)(() =>
            {
                // Update the detected-motion label
                lblDetectedMotions.Text = f + "";
            }));
            if (f > 0.2f)
            {
                txtAlerts.BeginInvoke((Action)(() =>
                {
                    // Update the alerts box with the change percentage
                    txtAlerts.Text += "+ Quick motion detected or more than 2% of frame changed" + DateTime.Now + Environment.NewLine;
                }));
                taINCIDENTS.Insert("NA", "+ Quick motion detected or more than 2% of frame changed", "INCIDENT TYPE 1", DateTime.Now.ToString());
            }
            // Blob counter
            CvBlobs blobs = new CvBlobs(); // note: never populated, so blobs.Count is always 0 below
            blobs.FilterByArea(100, int.MaxValue);
            BlobCounter blobcounter = new BlobCounter();
            blobcounter.MinHeight = 100;
            blobcounter.MaxWidth = 20;
            blobcounter.ObjectsOrder = ObjectsOrder.Size;
            blobcounter.ProcessImage(forgroundMask.Bitmap);
            Rectangle[] rect = blobcounter.GetObjectsRectangles();
            Graphics g = Graphics.FromImage(imgB);
            int BlobCPT = 0;
            if (rect.Length > 0)
            {
                Rectangle objec = rect[0];
                Graphics graphic = Graphics.FromImage(imgB);
                using (Pen pen = new Pen(Color.Red, 2))
                    foreach (Rectangle rec in rect)
                    {
                        if ((rec.Width > 50) && (rec.Height > 50))
                        {
                            g.DrawRectangle(pen, rec);
                            BlobCPT++;
                        }
                    }
                g.Dispose();
            }
            if (f == 0.00f && blobs.Count == 0 && faces.Length == 0)
            {
                if (!wasInFreez && freezTriggered)
                {
                    freezTime = 0;
                    wasInFreez = true;
                    // We treat 0.5% or less as a freeze
                    txtAlerts.BeginInvoke((Action)(() =>
                    {
                        txtAlerts.Text += "+ Freeze detected - Time: " + DateTime.Now.ToString() + Environment.NewLine;
                    }));
                    taEVENT.Insert("+ Freeze detected - Time:", "FREEZE", DateTime.Now.ToString());
                }
                if (!wasInFreez && !freezTriggered && !TimerStarted)
                {
                    TimerStarted = true;
                    freezTimer.Start();
                }
            }
            else if (BlobCPT > 0 && f != 0.00f)
            {
                if (wasInFreez) // We need to stop the timer and send the notification message
                {
                    txtAlerts.BeginInvoke((Action)(() =>
                    {
                        txtAlerts.Text += "+ Freeze stopped after " + freezTime + " seconds - Time: " + DateTime.Now.ToString() + Environment.NewLine;
                    }));
                    taEVENT.Insert("+ Freeze stopped after " + freezTime + " seconds", "FREEZE STOP", DateTime.Now.ToString());
                    freezTimer.Stop();
                    TimerStarted = false;
                    wasInFreez = false;
                    freezTriggered = false;
                }
            }
            pBox.Image = imgB;
        }
        //#######################
        Application.DoEvents(); // Allow the app to run other threads
}
//############################################################################################################################################################################################## /// <summary> /// Extract all pieces from the source image. /// </summary> private void extract_pieces() { try { CurrentSolverState = PuzzleSolverState.INIT_PIECES; Piece.NextPieceID = 0; CurrentSolverStepPercentageFinished = 0; _logHandle.Report(new LogEventInfo("Extracting Pieces")); NumberPuzzlePieces = 0; Pieces.Clear(); InputImages.Clear(); List <string> imageExtensions = new List <string>() { ".jpg", ".png", ".bmp", ".tiff" }; FileAttributes attr = File.GetAttributes(PuzzlePiecesFolderPath); List <FileInfo> imageFilesInfo = new List <FileInfo>(); if (attr.HasFlag(FileAttributes.Directory)) //detect whether its a directory or file { DirectoryInfo folderInfo = new DirectoryInfo(PuzzlePiecesFolderPath); imageFilesInfo = folderInfo.GetFiles().ToList(); } else { FileInfo fileInfo = new FileInfo(PuzzlePiecesFolderPath); imageFilesInfo.Add(fileInfo); } imageFilesInfo = imageFilesInfo.Where(f => imageExtensions.Contains(f.Extension)).ToList(); int loopCount = 0; ParallelOptions parallelOptions = new ParallelOptions { CancellationToken = _cancelToken, MaxDegreeOfParallelism = (PluginFactory.GetGeneralSettingsPlugin().UseParallelLoops ? Environment.ProcessorCount : 1) }; //For each input image Parallel.For(0, imageFilesInfo.Count, parallelOptions, (i) => { using (Image <Rgba, byte> sourceImg = new Image <Rgba, byte>(imageFilesInfo[i].FullName)) //.LimitImageSize(1000, 1000)) { CvInvoke.MedianBlur(sourceImg, sourceImg, 5); // Get the (first) enabled Plugin for input image mask generation PluginGroupInputImageMask pluginInputImageMask = PluginFactory.GetEnabledPluginsOfGroupType <PluginGroupInputImageMask>().FirstOrDefault(); using (Image <Gray, byte> mask = pluginInputImageMask.GetMask(sourceImg)) { _logHandle.Report(new LogEventInfo("Extracting Pieces from source image " + i.ToString())); if (PluginFactory.GetGeneralSettingsPlugin().SolverShowDebugResults) { _logHandle.Report(new LogEventImage("Source image " + i.ToString(), sourceImg.Bitmap)); _logHandle.Report(new LogEventImage("Mask " + i.ToString(), mask.Bitmap)); } CvBlobDetector blobDetector = new CvBlobDetector(); // Find all blobs in the mask image, extract them and add them to the list of pieces CvBlobs blobs = new CvBlobs(); blobDetector.Detect(mask, blobs); foreach (CvBlob blob in blobs.Values.Where(b => b.BoundingBox.Width >= PluginFactory.GetGeneralSettingsPlugin().PuzzleMinPieceSize&& b.BoundingBox.Height >= PluginFactory.GetGeneralSettingsPlugin().PuzzleMinPieceSize)) { if (_cancelToken.IsCancellationRequested) { _cancelToken.ThrowIfCancellationRequested(); } Rectangle roi = blob.BoundingBox; Image <Rgba, byte> pieceSourceImg; Image <Gray, byte> pieceMask; try { if (sourceImg.Height > roi.Height + 4 && sourceImg.Width > roi.Width + 4) { roi.Inflate(2, 2); } pieceSourceImg = sourceImg.Copy(roi); pieceMask = mask.Copy(roi); } catch (Exception) { roi = blob.BoundingBox; pieceSourceImg = sourceImg.Copy(roi); pieceMask = mask.Copy(roi); } // Mask out background of piece Image <Rgba, byte> pieceSourceImageForeground = new Image <Rgba, byte>(pieceSourceImg.Size); CvInvoke.BitwiseOr(pieceSourceImg, pieceSourceImg, pieceSourceImageForeground, pieceMask); Image <Gray, byte> pieceMaskInverted = pieceMask.Copy(pieceMask); pieceMaskInverted._Not(); Image <Rgba, byte> background = new Image <Rgba, byte>(pieceSourceImg.Size); background.SetValue(new 
Rgba(255, 255, 255, 0)); Image <Rgba, byte> pieceSourceImageBackground = new Image <Rgba, byte>(pieceSourceImg.Size); CvInvoke.BitwiseOr(background, background, pieceSourceImageBackground, pieceMaskInverted); Image <Rgba, byte> pieceSourceImgMasked = new Image <Rgba, byte>(pieceSourceImg.Size); CvInvoke.BitwiseOr(pieceSourceImageForeground, pieceSourceImageBackground, pieceSourceImgMasked); Piece p = new Piece(pieceSourceImgMasked, pieceMask, imageFilesInfo[i].FullName, roi.Location, _logHandle, _cancelToken); lock (_piecesLock) { Pieces.Add(p); } sourceImg.Draw(roi, new Rgba(255, 0, 0, 1), 2); int baseLine = 0; Size textSize = CvInvoke.GetTextSize(p.PieceID.Replace("Piece", ""), FontFace.HersheyDuplex, 3, 2, ref baseLine); CvInvoke.PutText(sourceImg, p.PieceID.Replace("Piece", ""), Point.Add(roi.Location, new Size(0, textSize.Height + 10)), FontFace.HersheyDuplex, 3, new MCvScalar(255, 0, 0), 2); NumberPuzzlePieces++; pieceSourceImg.Dispose(); pieceMask.Dispose(); pieceSourceImageForeground.Dispose(); pieceMaskInverted.Dispose(); background.Dispose(); pieceSourceImageBackground.Dispose(); pieceSourceImgMasked.Dispose(); GC.Collect(); } Interlocked.Add(ref loopCount, 1); CurrentSolverStepPercentageFinished = (loopCount / (double)imageFilesInfo.Count) * 100; if (PluginFactory.GetGeneralSettingsPlugin().SolverShowDebugResults) { _logHandle.Report(new LogEventImage("Source Img " + i.ToString() + " Pieces", sourceImg.Bitmap)); } InputImages.Add(new ImageDescribedLight(Path.GetFileName(imageFilesInfo[i].FullName), PuzzlePiecesFolderPath + @"\Results\InputImagesMarked\" + Path.GetFileName(imageFilesInfo[i].FullName), sourceImg.Bitmap)); //sourceImg.LimitImageSize(1000, 1000).Bitmap)); blobs.Dispose(); blobDetector.Dispose(); GC.Collect(); } } GC.Collect(); GC.WaitForPendingFinalizers(); GC.Collect(); }); Pieces.Sort(p => ((Piece)p).PieceIndex, null); } catch (OperationCanceledException) { _logHandle.Report(new LogEventWarning("The operation was canceled. Step: " + CurrentSolverState.ToString())); CurrentSolverState = PuzzleSolverState.UNSOLVED; } catch (Exception ex) { _logHandle.Report(new LogEventError("The following error occured in step " + CurrentSolverState.ToString() + ":\n" + ex.Message)); CurrentSolverState = PuzzleSolverState.ERROR; CurrentSolverStepPercentageFinished = 100; } }
static void Main(string[] args)
{
    CvCapture videoCapture = null;
    try
    {
        videoCapture = CvCapture.FromFile(FILE_NAME);
    }
    catch (Exception e)
    {
        Console.WriteLine("Unable to open file {0}", FILE_NAME);
        Console.WriteLine(e.ToString());
        Console.ReadKey();
    }
    Cv.NamedWindow(MAIN_WINDOW_NAME, WindowMode.AutoSize);
    double rate = videoCapture.GetCaptureProperty(CvConst.CV_CAP_PROP_FPS);
    int delay = (int)(1000 / rate);
    //int counter = 0;
    IplImage backgroundImage = IplImage.FromFile(BACKDROUND_IMAGE_FILE_NAME);
    IplImage grayBackgroundImage = Cv.CreateImage(backgroundImage.Size, backgroundImage.Depth, 1);
    Cv.CvtColor(backgroundImage, grayBackgroundImage, ColorConversion.RgbToGray);
    Console.WriteLine("NChannels = " + backgroundImage.NChannels);
    Console.ReadKey();
    //IplImage grayBackgroundImage = Cv.CreateImage(backgroundImage.Size, backgroundImage.Depth, 1);
    //Cv.CvtColor(backgroundImage, grayBackgroundImage, ColorConversion.RgbToGray);
    while (true)
    {
        var currentOriginalFrame = videoCapture.QueryFrame();
        if (currentOriginalFrame == null) { return; }
        //counter++;
        //if (counter % 3 != 0)
        //    continue;
        IplImage grayOriginalFrame = Cv.CreateImage(currentOriginalFrame.Size, currentOriginalFrame.Depth, 1);
        Cv.CvtColor(currentOriginalFrame, grayOriginalFrame, ColorConversion.RgbToGray);
        IplImage differenceBetweenFrames = Cv.CreateImage(grayOriginalFrame.Size, grayOriginalFrame.Depth, 1);
        Cv.AbsDiff(grayOriginalFrame, grayBackgroundImage, differenceBetweenFrames);
        //Cv.Smooth(differenceBetweenFrames, differenceBetweenFrames, SmoothType.Blur);
        //IplImage graydifferenceBetweenFrames = Cv.CreateImage(differenceBetweenFrames.Size, differenceBetweenFrames.Depth, 1);
        //Cv.CvtColor(differenceBetweenFrames, graydifferenceBetweenFrames, ColorConversion.RgbToGray);
        //Cv.ShowImage("differenceBetweenFrames", differenceBetweenFrames);
        Cv.Threshold(differenceBetweenFrames, differenceBetweenFrames, 50, 255, ThresholdType.Binary);
        Cv.Erode(differenceBetweenFrames, differenceBetweenFrames);
        //finding blobs
        CvBlobs blobs = new CvBlobs(differenceBetweenFrames);
        blobs.FilterByArea(300, 10000);
        //blobs.Label(differenceBetweenFrames);
        var currentFrameWithRedRects = Cv.CreateImage(currentOriginalFrame.Size, currentOriginalFrame.Depth, currentOriginalFrame.NChannels);
        currentOriginalFrame.Copy(currentFrameWithRedRects);
        foreach (var cvBlob in blobs)
        {
            Cv.Rectangle(currentFrameWithRedRects, cvBlob.Value.Rect, CvColor.Red, 4);
        }
        Console.WriteLine(blobs.Count);
        Cv.ShowImage(MAIN_WINDOW_NAME, currentFrameWithRedRects);
        Cv.ShowImage("Result", differenceBetweenFrames);
        //Cv.ShowImage("backgroundImage", backgroundImage);
        Cv.WaitKey(delay);
        //currentOriginalFrame.Copy(differenceBetweenFrames);
    }
}
private CvBlobs PreProcessImage2_old(IplImage img) { CvBlobs blobs = null; IplConvKernel element = null; IplImage temp = null; IplImage dest = null; IplImage tmpImage = null; IplImage tmpImage2 = null; IplImage labelImg = null; try { element = Cv.CreateStructuringElementEx(180, 5, 90, 1, ElementShape.Rect, null); tmpImage = new IplImage(img.Size, BitDepth.U8, 1); temp = tmpImage.Clone(); dest = tmpImage.Clone(); img.CvtColor(tmpImage, ColorConversion.RgbaToGray); tmpImage.Rectangle(new CvPoint(0, 0), new CvPoint((Int32)(tmpImage.Size.Width), (Int32)((tmpImage.Size.Height / 9) * 3)), new CvScalar(255, 255, 255), -1); tmpImage.Rectangle(new CvPoint(0, (Int32)((tmpImage.Size.Height / 5) * 4)), new CvPoint((Int32)(tmpImage.Size.Width), (Int32)(tmpImage.Size.Height)), new CvScalar(255, 255, 255), -1); tmpImage.Rectangle(new CvPoint((Int32)((tmpImage.Size.Width / 9) * 7), 0), new CvPoint((Int32)((tmpImage.Size.Width)), (Int32)(tmpImage.Size.Height)), new CvScalar(255, 255, 255), -1); Cv.Smooth(tmpImage, tmpImage, SmoothType.Gaussian); Cv.MorphologyEx(tmpImage, dest, temp, element, MorphologyOperation.TopHat, 1); Cv.Threshold(dest, tmpImage, 128, 255, ThresholdType.Binary | ThresholdType.Otsu); Cv.Smooth(tmpImage, dest, SmoothType.Median); labelImg = new IplImage(img.Size, CvBlobLib.DepthLabel, 1); blobs = new CvBlobs(); tmpImage2 = tmpImage.Clone(); CvBlobLib.Label(tmpImage2, labelImg, blobs); //Cv.ReleaseImage(tmpImage); //tmpImage = img.Clone(); //blobs.RenderBlobs(labelImg, img, tmpImage); //tmpImage.SaveImage(@"c:\temp\newImages\RenderBlobsNOFiltered.png"); CvBlobLib.FilterByArea(blobs, 850, 4850); Cv.ReleaseImage(tmpImage); tmpImage = img.Clone(); //CvTracks tracks = new CvTracks(); //CvBlobLib.UpdateTracks(blobs, tracks, 200.0, 5); //CvBlobLib.RenderTracks(tracks, tmpImage, tmpImage, RenderTracksMode.ID); blobs.RenderBlobs(labelImg, img, tmpImage, RenderBlobsMode.BoundingBox | RenderBlobsMode.Angle); //tmpImage.SaveImage(@"c:\temp\newImages\RenderBlobsFiltered.png"); } finally { if (null != temp) { Cv.ReleaseImage(temp); } if (null != dest) { Cv.ReleaseImage(dest); } if (null != tmpImage) { Cv.ReleaseImage(tmpImage); } if (null != tmpImage2) { Cv.ReleaseImage(tmpImage2); } if (null != labelImg) { Cv.ReleaseImage(labelImg); } } return(blobs); }
void ProcessFrame(object sender, EventArgs e)
{
    Mat frame = _cameraCapture.QueryFrame();
    Mat smoothedFrame = new Mat();
    CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(3, 3), 1); //filter out noises
    //frame._SmoothGaussian(3);

    #region use the BG/FG detector to find the forground mask
    Mat forgroundMask = new Mat();
    _fgDetector.Apply(smoothedFrame, forgroundMask);
    #endregion

    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(forgroundMask.ToImage<Gray, byte>(), blobs);
    blobs.FilterByArea(100, int.MaxValue);
    float scale = (frame.Width + frame.Width) / 2.0f;
    _tracker.Update(blobs, 0.01 * scale, 5, 5);

    long detectionTime;
    List<Rectangle> faces = new List<Rectangle>();
    List<Rectangle> eyes = new List<Rectangle>();
    IImage image = (IImage)frame; // this step is the key point
    faceImage = frame.Bitmap;
    DetectFace.Detect(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes, out detectionTime);

    #region Multi-person recognition: error rate is fairly high (recognition works well when the gallery images are high-resolution)
    //Graphics g1 = Graphics.FromImage(frame.Bitmap);
    //List<FaceIdentifyModel> tempList = new List<FaceIdentifyModel>();
    //foreach (Rectangle face in faces)
    //{
    //    Image rectImage1 = ImageHelper.CaptureImage(frame.Bitmap, face);
    //    FaceIdentifyModel MoreIdentifyInfo = FaceAPI.FaceIdentify(rectImage1, tb_Group.Text.Trim(), 1, 1); // face identification; works best for a single person
    //    MoreIdentifyInfo.rect = face;
    //    tempList.Add(MoreIdentifyInfo);
    //}
    //Color color_of_pen1 = Color.Gray;
    //color_of_pen1 = Color.Yellow;
    //Pen pen1 = new Pen(color_of_pen1, 2.0f);
    //Font font1 = new Font("微软雅黑", 16, GraphicsUnit.Pixel);
    //SolidBrush drawBrush1 = new SolidBrush(Color.Yellow);
    //tb_Identify.Text = tempList.ToJson();
    //foreach (var t in tempList)
    //{
    //    g1.DrawRectangle(pen1, t.rect);
    //    if (t.result != null)
    //    {
    //        g1.DrawString(t.result[0].user_info.Replace(",", "\r\n"), font1, drawBrush1, new Point(t.rect.X + 20, t.rect.Y - 20));
    //    }
    //}
    #endregion

    #region Single-person recognition
    // single-person face identification; results for multiple people are much worse
    foreach (Rectangle face in faces)
    {
        #region Draw with GDI+ to show our own text boxes
        Graphics g = Graphics.FromImage(frame.Bitmap);
        ImageModel tempImage = new ImageModel();
        tempImage.Rect = face;
        tempImage.Image = frame.Bitmap;
        // querying the API is slow
        //string faceInfo = FaceAPI.FaceDetect(ImageHelper.CaptureImage(frame.Bitmap, face)); // face detection
        Image rectImage = ImageHelper.CaptureImage(frame.Bitmap, face);
        FaceIdentifyModel IdentifyInfo = FaceAPI.FaceIdentify(rectImage, tb_Group.Text.Trim(), 1, 1); // face identification; works best for a single person
        // tb_Result.Text = faceInfo;
        tb_Identify.Text = IdentifyInfo.ToJson().ToString();
        // draw on the canvas
        Color color_of_pen = Color.Gray;
        color_of_pen = Color.Yellow;
        Pen pen = new Pen(color_of_pen, 2.0f);
        Rectangle rect = face;
        g.DrawRectangle(pen, rect);
        Font font = new Font("微软雅黑", 16, GraphicsUnit.Pixel);
        SolidBrush drawBrush = new SolidBrush(Color.Yellow);
        if (IdentifyInfo != null)
        {
            if (IdentifyInfo.result != null)
            {
                for (int i = 0; i < IdentifyInfo.result.Count; i++)
                {
                    string faceInfo = "";
                    faceInfo = IdentifyInfo.result[i].user_info.Replace(",", "\r\n");
                    // show the user info
                    g.DrawString(faceInfo, font, drawBrush, new Point(face.X + 20, face.Y - 20));
                }
            }
        }
        //CvInvoke.Rectangle(frame, face, new MCvScalar(255.0, 255.0, 255.0), 2);
        //CvInvoke.PutText(frame, faceInfo, new Point(face.X + 20, face.Y - 20), FontFace.HersheyPlain, 1.0, new MCvScalar(255.0, 255.0, 255.0));
        // save the original screenshot
        //System.Drawing.Image ResourceImage = frame.Bitmap;
        //ResourceImage.Save(saveDir + saveFileName);
        // thread queue: save the face-recognition screenshots
        QueueHelper.WriteImage(tempImage);
        //t1 = new Thread(new ThreadStart(() =>
        //{
        //    faceInfo = FaceAPI.FaceDetect(ImageHelper.CaptureImage(frame.Bitmap, face));
        //    this.Invoke(new Action(() =>
        //    {
        //        g.DrawString(faceInfo, font, drawBrush, new Point(face.X + 20, face.Y - 20));
        //    }));
        //}));
        //t1.IsBackground = true;
        //t1.Start();
        #endregion
    }
    #endregion

    #region The original OpenCV drawing calls used for the video do not support Chinese text
    //foreach (var pair in _tracker)
    //{
    //    CvTrack b = pair.Value;
    //    #region draw the text box directly on the video with OpenCV
    //    CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
    //    CvInvoke.PutText(frame, "man,show", new Point((int)Math.Round(b.Centroid.X), (int)Math.Round(b.Centroid.Y)), FontFace.HersheyPlain, 1.0, new MCvScalar(255.0, 255.0, 255.0));
    //    if (b.BoundingBox.Width < 100 || b.BoundingBox.Height < 50)
    //    {
    //        continue;
    //    }
    //    #endregion
    //}
    #endregion

    imageBox1.Image = frame;
    imageBox2.Image = forgroundMask;
}
/// <summary>
/// Label the connected parts of a binary image. (cvLabel)
/// </summary>
/// <param name="img">Input binary image (depth=IPL_DEPTH_8U and num. channels=1).</param>
/// <param name="imgOut">Output image (depth=IPL_DEPTH_LABEL and num. channels=1).</param>
/// <param name="blobs">List of blobs.</param>
/// <returns>Number of pixels that have been labeled.</returns>
public static UInt32 Label(IplImage img, IplImage imgOut, CvBlobs blobs)
{
    if (img == null)
        throw new ArgumentNullException("img");
    if (imgOut == null)
        throw new ArgumentNullException("imgOut");
    if (blobs == null)
        throw new ArgumentNullException("blobs");
    return CvBlobInvoke.cvb_cvLabel(img.CvPtr, imgOut.CvPtr, blobs.CvPtr);
}
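A sketch of the classic cvLabel flow using the overload above; binaryImg stands in for an existing 8U single-channel input:

IplImage labelImg = new IplImage(binaryImg.Size, CvBlobLib.DepthLabel, 1);
CvBlobs blobs = new CvBlobs();
UInt32 labeledPixels = Label(binaryImg, labelImg, blobs);
Console.WriteLine("Labeled {0} pixels in {1} blobs", labeledPixels, blobs.Count);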
/// <summary> /// Draws or prints information about blobs. (cvRenderBlobs) /// </summary> /// <param name="imgLabel">Label image (depth=IPL_DEPTH_LABEL and num. channels=1).</param> /// <param name="blobs">List of blobs.</param> /// <param name="imgSource">Input image (depth=IPL_DEPTH_8U and num. channels=3).</param> /// <param name="imgDest">Output image (depth=IPL_DEPTH_8U and num. channels=3).</param> public static void RenderBlobs(IplImage imgLabel, CvBlobs blobs, IplImage imgSource, IplImage imgDest) { RenderBlobs(imgLabel, blobs, imgSource, imgDest, (RenderBlobsMode)0x000f, 1.0); }
/// <summary> /// Draws or prints information about blobs. (cvRenderBlobs) /// </summary> /// <param name="imgLabel">Label image (depth=IPL_DEPTH_LABEL and num. channels=1).</param> /// <param name="blobs">List of blobs.</param> /// <param name="imgSource">Input image (depth=IPL_DEPTH_8U and num. channels=3).</param> /// <param name="imgDest">Output image (depth=IPL_DEPTH_8U and num. channels=3).</param> /// <param name="mode">Render mode. By default is CV_BLOB_RENDER_COLOR|CV_BLOB_RENDER_CENTROID|CV_BLOB_RENDER_BOUNDING_BOX|CV_BLOB_RENDER_ANGLE.</param> /// <param name="alpha">If mode CV_BLOB_RENDER_COLOR is used. 1.0 indicates opaque and 0.0 translucent (1.0 by default).</param> public static void RenderBlobs(IplImage imgLabel, CvBlobs blobs, IplImage imgSource, IplImage imgDest, RenderBlobsMode mode, double alpha) { if (imgLabel == null) throw new ArgumentNullException("imgLabel"); if (blobs == null) throw new ArgumentNullException("blobs"); if (imgSource == null) throw new ArgumentNullException("imgSource"); if (imgDest == null) throw new ArgumentNullException("imgDest"); CvBlobInvoke.cvb_cvRenderBlobs(imgLabel.CvPtr, blobs.CvPtr, imgSource.CvPtr, imgDest.CvPtr, mode, alpha); }
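A sketch showing how the mode and alpha parameters might be used, assuming labelImg, blobs, and src come from a prior Label call; only bounding boxes and angles are drawn, fully opaque:

IplImage rendered = src.Clone();
RenderBlobs(labelImg, blobs, src, rendered, RenderBlobsMode.BoundingBox | RenderBlobsMode.Angle, 1.0);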
private void ProcessFrame(object sender, EventArgs args)
{
    //Get frame
    Mat frame = camera.QueryFrame();
    //Process frame
    Image<Bgr, Byte> img = frame.ToImage<Bgr, Byte>();
    img.ROI = new Rectangle(100, 100, 300, 300);
    Image<Hsv, Byte> HSVimg = img.Convert<Hsv, Byte>();
    Image<Gray, Byte> binary = HSVimg.InRange(new Hsv(minH, minS, minV), new Hsv(maxH, maxS, maxV));
    Image<Gray, Byte> eroded = binary.Erode(erosions);
    Image<Gray, Byte> dilated = eroded.Dilate(dilations);
    //Detect largest blob
    CvBlobDetector blobDetector = new CvBlobDetector();
    CvBlobs blobs = new CvBlobs();
    blobDetector.Detect(dilated, blobs);
    int maxBlobArea = 0;
    CvBlob largestBlob = null;
    foreach (CvBlob blob in blobs.Values)
    {
        if (blob.Area > maxBlobArea)
        {
            maxBlobArea = blob.Area;
            largestBlob = blob;
        }
    }
    if (largestBlob != null && largestBlob.Area >= 10000)
    {
        handContour = largestBlob.GetContour();
        VectorOfInt convexHullIndices = new VectorOfInt();
        VectorOfPoint convexHull = new VectorOfPoint();
        CvInvoke.ConvexHull(new VectorOfPoint(handContour), convexHull);
        CvInvoke.ConvexHull(new VectorOfPoint(handContour), convexHullIndices);
        Mat defects = new Mat();
        //img.Draw(handContour, new Bgr(0, 0, 255), 3);
        img.Draw(convexHull.ToArray(), new Bgr(255, 0, 0), 3);
        try
        {
            CvInvoke.ConvexityDefects(new VectorOfPoint(handContour), convexHullIndices, defects);
        }
        catch (CvException exc)
        {
            MessageBox.Show(exc.Message);
        }
        if (!defects.IsEmpty)
        {
            Matrix<int> defectsInt = new Matrix<int>(defects.Rows, defects.Cols, defects.NumberOfChannels);
            defects.CopyTo(defectsInt);
            int countFingers = 0;
            for (int i = 0; i < defectsInt.Rows; i++)
            {
                int startIdx = defectsInt.Data[i, 0];
                int endIdx = defectsInt.Data[i, 1];
                int farthestIdx = defectsInt.Data[i, 2];
                float distance = defectsInt.Data[i, 3];
                if (distance >= 15000)
                {
                    //distances.Add(distance);
                    Point startPoint = handContour[startIdx];
                    Point endPoint = handContour[endIdx];
                    Point farthestPoint = handContour[farthestIdx];
                    img.Draw(new CircleF(startPoint, 2.0f), new Bgr(0, 255, 0), 2);
                    img.Draw(new CircleF(endPoint, 2.0f), new Bgr(255, 0, 0), 2);
                    img.Draw(new CircleF(farthestPoint, 2.0f), new Bgr(0, 0, 255), 2);
                    CvInvoke.Line(img, startPoint, farthestPoint, new MCvScalar(255, 255, 0));
                    countFingers++;
                }
            }
            // Approximate the finger count and classify: 1 finger = play, 5 fingers = pause
            if (Math.Abs(countFingers - 1) < Math.Abs(countFingers - 5) && Math.Abs(countFingers - 1) < Math.Abs(countFingers - 2))
            {
                label10.Text = "Play";
                axWindowsMediaPlayer1.Ctlcontrols.play();
            }
            else if (Math.Abs(countFingers - 5) < Math.Abs(countFingers - 1) && Math.Abs(countFingers - 5) < Math.Abs(countFingers - 2))
            {
                label10.Text = "Pause";
                axWindowsMediaPlayer1.Ctlcontrols.pause();
            }
            else if (Math.Abs(countFingers - 2) < Math.Abs(countFingers - 1) && Math.Abs(countFingers - 2) < Math.Abs(countFingers - 5))
            {
                label10.Text = "Volume Up";
                axWindowsMediaPlayer1.Ctlcontrols.pause();
                axWindowsMediaPlayer1.settings.volume++;
            }
        }
    }
    pictureBox1.Image = binary.Bitmap;
}
/// <summary>
/// Keeps only the largest-area region found by labeling.
/// </summary>
/// <param name="imgSrc">Input binary image.</param>
/// <param name="imgDst">Output image containing only the largest blob.</param>
private void FilterByMaximalBlob(IplImage imgSrc, IplImage imgDst)
{
    CvBlobs blobs = new CvBlobs();
    imgDst.Zero();
    blobs.Label(imgSrc);
    CvBlob max = blobs.GreaterBlob();
    if (max == null)
        return;
    blobs.FilterByArea(max.Area, max.Area);
    blobs.FilterLabels(imgDst);
}
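A usage sketch for the helper above, assuming imgBin is an existing binary IplImage of the same size as the output:

IplImage largestOnly = new IplImage(imgBin.Size, BitDepth.U8, 1);
FilterByMaximalBlob(imgBin, largestOnly); // largestOnly now holds only the biggest connected region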
/// <summary> /// Calculates mean color of a blob in an image. /// </summary> /// <param name="blobs">Blob list</param> /// <param name="targetBlob">The target blob</param> /// <param name="originalImage">Original image.</param> /// <returns>Average color.</returns> public static Scalar BlobMeanColor(CvBlobs blobs, CvBlob targetBlob, Mat originalImage) { if (blobs == null) throw new ArgumentNullException(nameof(blobs)); return blobs.BlobMeanColor(targetBlob, originalImage); }
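A sketch of BlobMeanColor, assuming binaryMat is a binary Mat ready for labeling and originalMat is the matching color frame:

CvBlobs blobs = new CvBlobs(binaryMat); // label directly from the binary image
CvBlob biggest = blobs.LargestBlob();
if (biggest != null)
{
    Scalar mean = BlobMeanColor(blobs, biggest, originalMat);
    Console.WriteLine("Mean color of the largest blob: {0}", mean);
}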
void ProcessFrame(object sender, EventArgs e) { Mat frame = _cameraCapture.QueryFrame(); Image <Bgr, Byte> ImageFrame = frame.ToImage <Bgr, Byte>(); long matchTime; Mat result = new Mat(); Mat mask = new Mat(); Mat homography = new Mat(); Rectangle personROI = new Rectangle(); Mat modelImage = new Mat(); Mat observedImage = new Mat(); Rectangle roi = new Rectangle(213, 0, 213, 480); // set the roi image.ROI = new Rectangle(x, Y, Width, Height); //ImageFrame.ROI = roi; ImageFrame.Draw(roi, new Bgr(Color.Green), 5); if (ImageFrame != null) // confirm that image is valid { Rectangle[] results = FindPedestrian.Find(frame, true, out processingTime); if (people.Count == 0) { foreach (Rectangle rect in results) { if (rect.Width >= 150) { personROI.X = rect.X; personROI.Y = rect.Y; personROI.Width = rect.Width; personROI.Height = rect.Height; Mat person = new Mat(frame, personROI); people.Add(person); ImageFrame.Draw(rect, new Bgr(Color.Red), 5); //Console.WriteLine(index); } } } else { foreach (Rectangle rect in results) { ImageFrame.Draw(rect, new Bgr(Color.Red), 5); var check = false; var temp = new List <Mat>(people); foreach (Mat aperson in people) { Mat img = new Mat(frame, rect); observedImage = aperson; modelImage = img; result = Draw(modelImage, observedImage, out matchTime, out check); if (!check) { temp.Add(img); ++counter; Console.WriteLine("Counter: " + counter); break; } } people = new List <Mat>(temp); Console.WriteLine("End for frame processing"); } } Mat smoothedFrame = new Mat(); CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(3, 3), 1); //filter out noises //frame._SmoothGaussian(3); #region use the BG/FG detector to find the forground mask Mat forgroundMask = new Mat(); //_fgDetector.Apply(smoothedFrame, forgroundMask); #endregion CvBlobs blobs = new CvBlobs(); _blobDetector.Detect(forgroundMask.ToImage <Gray, byte>(), blobs); blobs.FilterByArea(1000, int.MaxValue); //_tracker.Process(smoothedFrame, forgroundMask); foreach (var pair in blobs) { CvBlob b = pair.Value; CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2); //CvInvoke.PutText(frame, blob.ID.ToString(), Point.Round(blob.Center), FontFace.HersheyPlain, 1.0, new MCvScalar(255.0, 255.0, 255.0)); } imageBox1.Image = ImageFrame; //Console.WriteLine(ImageFrame.Size); imageBox2.Image = result; //imageBox2.Image = forgroundMask; } //people.Clear(); }
/// <summary> /// Draws or prints information about blobs. (cvRenderBlobs) /// </summary> /// <param name="blobs">List of blobs.</param> /// <param name="imgSource">Input image (depth=IPL_DEPTH_8U and num. channels=3).</param> /// <param name="imgDest">Output image (depth=IPL_DEPTH_8U and num. channels=3).</param> /// <param name="mode">Render mode. By default is CV_BLOB_RENDER_COLOR|CV_BLOB_RENDER_CENTROID|CV_BLOB_RENDER_BOUNDING_BOX|CV_BLOB_RENDER_ANGLE.</param> /// <param name="alpha">If mode CV_BLOB_RENDER_COLOR is used. 1.0 indicates opaque and 0.0 translucent (1.0 by default).</param> public static void RenderBlobs(CvBlobs blobs, Mat imgSource, Mat imgDest, RenderBlobsMode mode, double alpha = 1.0) { if (blobs == null) throw new ArgumentNullException(nameof(blobs)); if (imgSource == null) throw new ArgumentNullException(nameof(imgSource)); if (imgDest == null) throw new ArgumentNullException(nameof(imgDest)); BlobRenderer.PerformMany(blobs, imgSource, imgDest, mode, alpha); }
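A minimal sketch for the Mat-based overload above, assuming blobs was produced from src by an earlier labeling step:

Mat canvas = src.Clone();
RenderBlobs(blobs, src, canvas, RenderBlobsMode.BoundingBox, 1.0); // bounding boxes only, opaque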
public List <Box> DetectObjects(DateTime timestamp, Mat image, int frameIndex, out Mat fg) { if (regionOfInterest != null) { bgs.SetRegionOfInterest(regionOfInterest); } Cv2.GaussianBlur(image, blurredFrame, Size.Zero, PRE_BGS_BLUR_SIGMA); // fgMask is the original foreground bitmap returned by opencv MOG2 fgMask = bgs.DetectForeground(blurredFrame, frameIndex); fg = fgMask; if (fgMask == null) { return(null); } // pre-processing Cv2.Threshold(fgMask, fgWOShadows, 200, 255, ThresholdTypes.Binary); Cv2.MedianBlur(fgWOShadows, fgSmoothedMask2, MEDIAN_BLUR_SIZE); Cv2.GaussianBlur(fgSmoothedMask2, fgSmoothedMask3, Size.Zero, GAUSSIAN_BLUR_SIGMA); Cv2.Threshold(fgSmoothedMask3, fgSmoothedMask4, GAUSSIAN_BLUR_THRESHOLD, 255, ThresholdTypes.Binary); fg = fgSmoothedMask4; CvBlobs blobs = new CvBlobs(); KeyPoint[] points = _blobDetector.Detect(fgSmoothedMask4); //blobs.FilterByArea(MIN_BLOB_SIZE, int.MaxValue); //// filter overlapping blobs //HashSet<uint> blobIdsToRemove = new HashSet<uint>(); //foreach (var b0 in blobs) // foreach (var b1 in blobs) // { // if (b0.Key == b1.Key) continue; // if (b0.Value.BoundingBox.Contains(b1.Value.BoundingBox)) // blobIdsToRemove.Add(b1.Key); // } //foreach (uint blobid in blobIdsToRemove) // blobs.Remove(blobid); // adding text to boxes and foreground frame List <Box> newBlobs = new List <Box>(); uint id = 0; foreach (var point in points) { int x = (int)point.Pt.X; int y = (int)point.Pt.Y; int size = (int)point.Size; Box box = new Box("", x - size, x + size, y - size, y + size, frameIndex, id); id++; newBlobs.Add(box); Cv2.Rectangle(fgSmoothedMask4, new OpenCvSharp.Point(x - size, y - size), new OpenCvSharp.Point(x + size, y + size), new Scalar(255), 1); Cv2.PutText(fgSmoothedMask4, box.ID.ToString(), new OpenCvSharp.Point(x, y - size), HersheyFonts.HersheyPlain, 1.0, new Scalar(255.0, 255.0, 255.0)); } Cv2.PutText(fgSmoothedMask4, "frame: " + frameIndex, new OpenCvSharp.Point(10, 10), HersheyFonts.HersheyPlain, 1, new Scalar(255, 255, 255)); newBlobs.ForEach(b => b.Time = timestamp); newBlobs.ForEach(b => b.Timestamp = frameIndex); return(newBlobs); }
/// <summary>
/// Filter blobs by area.
/// Those blobs whose areas are not in range will be erased from the input list of blobs. (cvFilterByArea)
/// </summary>
/// <param name="blobs">List of blobs.</param>
/// <param name="minArea">Minimum area.</param>
/// <param name="maxArea">Maximum area.</param>
public static void FilterByArea(CvBlobs blobs, uint minArea, uint maxArea)
{
    if (blobs == null)
    {
        throw new ArgumentNullException("blobs");
    }
    CvBlobInvoke.cvb_cvFilterByArea(blobs.CvPtr, minArea, maxArea);
}
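A short sketch, assuming blobs was filled by a prior cvLabel call; everything outside the area range is erased in place:

FilterByArea(blobs, 50, 5000);
Console.WriteLine("{0} blobs remain after area filtering", blobs.Count);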
private CvBlobs PreProcessImage1(IplImage mainSubImage, IplImage imgGray) { CvBlobs blobs = null; IplImage tmpImage = null; IplImage gray = null; IplImage tgray = null; IplImage labelImg = null; IplImage temp = null; try { tgray = imgGray.Clone(); gray = new IplImage(tgray.Size, tgray.Depth, 1); Cv.Smooth(tgray, tgray, SmoothType.Gaussian); Cv.Canny(tgray, gray, 500, 2, ApertureSize.Size5); temp = gray.Clone(); //IplConvKernel element = Cv.CreateStructuringElementEx(5, 1, 3, 0, ElementShape.Rect, null); IplConvKernel element = Cv.CreateStructuringElementEx(7, 1, 3, 0, ElementShape.Rect, null); Cv.MorphologyEx(gray, gray, temp, element, MorphologyOperation.BlackHat, 1); Cv.Threshold(gray, gray, 100, 255, ThresholdType.Binary | ThresholdType.Otsu); Cv.Smooth(gray, gray, SmoothType.Gaussian); labelImg = new IplImage(mainSubImage.Size, CvBlobLib.DepthLabel, 1); blobs = new CvBlobs(); CvBlobLib.Label(gray, labelImg, blobs); CvBlobLib.FilterByArea(blobs, 1550, 4850); tmpImage = mainSubImage.Clone(); //CvTracks tracks = new CvTracks(); //CvBlobLib.UpdateTracks(blobs, tracks, 200.0, 5); //CvBlobLib.RenderTracks(tracks, tmpImage, tmpImage, RenderTracksMode.ID); blobs.RenderBlobs(labelImg, mainSubImage, tmpImage, RenderBlobsMode.BoundingBox | RenderBlobsMode.Angle); /* * img.SetROI(subImageRect); * Cv.Copy(tmpImage, img); * img.ResetROI(); * Cv.ReleaseImage(tmpImage); * */ } finally { if (null != temp) { Cv.ReleaseImage(temp); } if (null != tgray) { Cv.ReleaseImage(tgray); } if (null != gray) { Cv.ReleaseImage(gray); } if (null != labelImg) { Cv.ReleaseImage(labelImg); } if (null != tmpImage) { Cv.ReleaseImage(tmpImage); } } return(blobs); }
/// <summary>
/// Find the largest blob. (cvGreaterBlob)
/// </summary>
/// <param name="blobs">List of blobs.</param>
/// <returns>Label of the largest blob.</returns>
public static UInt32 GreaterBlob(CvBlobs blobs)
{
    if (blobs == null)
        throw new ArgumentNullException("blobs");
    return CvBlobInvoke.cvb_cvGreaterBlob(blobs.CvPtr);
}
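A sketch for GreaterBlob; note the return value is the blob's label, which is presumably 0 when the list is empty (an assumption, not confirmed by the source):

UInt32 maxLabel = GreaterBlob(blobs);
Console.WriteLine("Largest blob has label {0}", maxLabel);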
public void Start()
{
    if (canExecute)
    {
        // Already running: do nothing. Calling Start() again without Stop()
        // would spin up a second capture loop and misbehave.
        return;
    }
    IsActive = true;
    canExecute = true;

    var im = new IplImage(); // holds the current camera frame
    WriteableBitmap buff = new WriteableBitmap(WIDTH, HEIGHT, 96, 96, PixelFormats.Bgr24, null);
    WriteableBitmap grayBuff = new WriteableBitmap(WIDTH, HEIGHT, 96, 96, PixelFormats.Gray8, null);
    IplImage _mapX, _mapY;
    var dst = new IplImage();

    Task.Run(() =>
    {
        try
        {
            cap = Cv.CreateCameraCapture(CameraNumber); // open the camera
            cap.SetCaptureProperty(CaptureProperty.FrameWidth, WIDTH);
            cap.SetCaptureProperty(CaptureProperty.FrameHeight, HEIGHT);
            SetWb();
            var dis = App.Current.Dispatcher;

            while (canExecute) // keep showing camera frames until stopped
            {
                try
                {
                    Thread.Sleep(100);

                    if (FlagPropChange)
                    {
                        cap.SetCaptureProperty(CaptureProperty.FrameWidth, WIDTH);
                        cap.SetCaptureProperty(CaptureProperty.FrameHeight, HEIGHT);
                        cap.SetCaptureProperty(CaptureProperty.Brightness, Brightness);
                        cap.SetCaptureProperty(CaptureProperty.Contrast, Contrast);
                        cap.SetCaptureProperty(CaptureProperty.Hue, Hue);
                        cap.SetCaptureProperty(CaptureProperty.Saturation, Saturation);
                        cap.SetCaptureProperty(CaptureProperty.Sharpness, Sharpness);
                        cap.SetCaptureProperty(CaptureProperty.Gamma, Gamma);
                        cap.SetCaptureProperty(CaptureProperty.Gain, Gain);
                        cap.SetCaptureProperty(CaptureProperty.Exposure, Exposure); // exposure
                        //cap.SetCaptureProperty(CaptureProperty.WhiteBalance, White); // not supported by OpenCvSharp 2/3
                        dis.BeginInvoke(new Action(() =>
                        {
                            try { FlagPropChange = false; }
                            catch
                            {
                                MessageBox.Show("カメラ異常"); // "camera error"
                                canExecute = false;
                            }
                        }));
                    }

                    im = Cv.QueryFrame(cap); // grab a frame
                    if (im == null) { continue; }
                    if (IsActive == true) { IsActive = false; }

                    dst = new IplImage(im.Size, im.Depth, im.NChannels);

                    // Build the rectification maps and undistort the frame.
                    _mapX = Cv.CreateImage(im.Size, BitDepth.F32, 1);
                    _mapY = Cv.CreateImage(im.Size, BitDepth.F32, 1);
                    Cv.InitUndistortMap(_fileIntrinsic, _fileDistortion, _mapX, _mapY);
                    Cv.Remap(im, dst, _mapX, _mapY);

                    // Tilt correction: rotate around the frame center by Theta.
                    CvPoint2D32f center = new CvPoint2D32f(WIDTH / 2, HEIGHT / 2);
                    CvMat affineMatrix = Cv.GetRotationMatrix2D(center, Theta, 1.0);
                    Cv.WarpAffine(dst, dst, affineMatrix);

                    if (FlagTestPic)
                    {
                        imageForTest = dst.Clone();
                        FlagTestPic = false;
                    }

                    // Labeling view
                    if (FlagLabeling)
                    {
                        var imageForLabeling = new IplImage(WIDTH, HEIGHT, BitDepth.U8, 3);
                        var imbuff = dst.Clone();
                        var Binbuff = Binary(imbuff);
                        blobs = new CvBlobs(Binbuff);
                        blobs.RenderBlobs(dst, imageForLabeling);
                        dis.BeginInvoke(new Action(() =>
                        {
                            WriteableBitmapConverter.ToWriteableBitmap(imageForLabeling, buff); // push the frame to the UI
                            source = buff;
                            imageForLabeling.Dispose();
                        }));
                        while (FlagNgFrame) { ; } // busy-wait until the NG frame is acknowledged
                        continue;
                    }

                    // Binarized view
                    if (FlagBin)
                    {
                        var imbuff = dst.Clone();
                        var Binbuff = Binary(imbuff);
                        dis.BeginInvoke(new Action(() =>
                        {
                            WriteableBitmapConverter.ToWriteableBitmap(Binbuff, grayBuff);
                            source = grayBuff;
                        }));
                        continue;
                    }

                    // Grid overlay
                    if (FlagGrid)
                    {
                        foreach (var i in Enumerable.Range(0, HEIGHT / 10))
                        {
                            var 行 = i * 10; // row y-coordinate
                            var p1 = new CvPoint(0, 行);
                            var p2 = new CvPoint(WIDTH, 行);
                            dst.Line(p1, p2, CvColor.Aquamarine, 1, LineType.AntiAlias, 0);
                        }
                        foreach (var j in Enumerable.Range(0, WIDTH / 10))
                        {
                            var 列 = j * 10; // column x-coordinate
                            var p1 = new CvPoint(列, 0);
                            var p2 = new CvPoint(列, HEIGHT);
                            dst.Line(p1, p2, CvColor.Aquamarine, 1, LineType.AntiAlias, 0);
                        }
                        dis.BeginInvoke(new Action(() =>
                        {
                            WriteableBitmapConverter.ToWriteableBitmap(dst, buff);
                            source = buff;
                        }));
                        continue;
                    }

                    // Frame overlay
                    if (FlagFrame)
                    {
                        dis.BeginInvoke(new Action(() =>
                        {
                            MakeFrame(dst);
                            WriteableBitmapConverter.ToWriteableBitmap(dst, buff);
                            source = buff;
                        }));
                        continue;
                    }

                    // Test result was NG
                    if (FlagNgFrame)
                    {
                        dis.BeginInvoke(new Action(() =>
                        {
                            MakeNgFrame(imageForTest);
                            WriteableBitmapConverter.ToWriteableBitmap(imageForTest, source);
                        }));
                        while (FlagNgFrame) { ; } // busy-wait until acknowledged
                    }

                    if (FlagHsv) { GetHsv(dst); }

                    // All flags false: show the plain frame.
                    dis.BeginInvoke(new Action(() =>
                    {
                        try
                        {
                            WriteableBitmapConverter.ToWriteableBitmap(dst, buff);
                            source = buff;
                        }
                        catch { CamState = false; canExecute = false; }
                    }));
                }
                catch
                {
                    // Swallow per-frame exceptions and stop the loop.
                    CamState = false;
                    canExecute = false;
                }
            }
        }
        catch { CamState = false; }
        finally
        {
            if (cap != null) { cap.Dispose(); cap = null; }
            IsActive = false;
            Stopped = true;
        }
    });
}
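The Binary() helper called in the labeling and binarization branches is not shown in this snippet. A plausible sketch, assuming it is a plain grayscale conversion followed by a fixed threshold (both the threshold value and the structure are guesses):

// Plausible sketch of the Binary() helper used above; an assumption, since
// its implementation is not shown. Grayscale conversion + fixed threshold.
private IplImage Binary(IplImage src)
{
    var gray = Cv.CreateImage(src.Size, BitDepth.U8, 1);
    Cv.CvtColor(src, gray, ColorConversion.BgrToGray);        // color -> grayscale
    Cv.Threshold(gray, gray, 128, 255, ThresholdType.Binary); // illustrative threshold at 128
    return gray;
}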
/// <summary>
/// Clear blobs structure. (cvReleaseBlobs)
/// </summary>
/// <param name="blobs">List of blobs.</param>
public static void ReleaseBlobs(CvBlobs blobs)
{
    if (blobs == null)
        throw new ArgumentNullException("blobs");
    blobs.Dispose();
}
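Since ReleaseBlobs simply forwards to Dispose(), a try/finally keeps the native blob memory bounded even when processing throws; a minimal sketch:

// Typical lifetime pattern (illustrative).
CvBlobs blobs = new CvBlobs();
try
{
    // ... label, filter, and render the blobs ...
}
finally
{
    ReleaseBlobs(blobs); // frees the native blob structures via Dispose()
}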
private void CaptureMotion()
{
    try
    {
        float wFactor = (float)this.Width / (float)CaptureBox.Width;
        float hFactor = (float)this.Height / (float)CaptureBox.Height;
        CvArr array = null;
        CvCapture cap = CvCapture.FromCamera(CaptureDevice.Any);

        // Load the Haar cascade once, not on every frame.
        CvHaarClassifierCascade cascade =
            CvHaarClassifierCascade.FromFile("haarcascade_frontalface_alt2.xml");

        this.Invoke(new Action(() =>
        {
            lblLoading.Visible = false;
            radioButton1.Visible = true;
            radioButton2.Visible = true;
        }));

        while (true)
        {
            IplImage img = cap.QueryFrame();
            if (img == null) { continue; }
            img.Flip(array, FlipMode.Y); // mirror horizontally

            if (mode == 1)
            {
                // Face tracking via Haar cascade.
                CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(
                    img, cascade, Cv.CreateMemStorage(), 3.0, 1,
                    HaarDetectionType.Zero, new CvSize(70, 70), new CvSize(500, 500));

                foreach (CvAvgComp face in faces)
                {
                    // (The original also carried a commented-out alternative that
                    // cropped just the face ROI and displayed it instead of the full frame.)
                    img.DrawRect(face.Rect, CvColor.Red, 3);
                    Bitmap FaceImage = BitmapConverter.ToBitmap(img);
                    FaceImage.SetResolution(240, 180);
                    CaptureBox.Image = FaceImage;

                    this.Invoke(new Action(() =>
                    {
                        LifeBox.Left = (int)(face.Rect.Left * wFactor - (float)(LifeBox.Width / 2.0) - (float)(this.Width / 2.0));
                        LifeBox.Top = (int)(face.Rect.Top * hFactor - (float)(LifeBox.Height / 2.0) - (float)(this.Height / 2.0));
                        // Clamp LifeBox inside the form.
                        if (LifeBox.Left > (this.Width - LifeBox.Width - 12)) { LifeBox.Left = (this.Width - LifeBox.Width - 24); }
                        if (LifeBox.Top > (this.Height - LifeBox.Height - 48)) { LifeBox.Top = (this.Height - LifeBox.Height - 48); }
                        if (LifeBox.Left < 12) { LifeBox.Left = 12; }
                        if (LifeBox.Top < 12) { LifeBox.Top = 12; }
                        Thread.Sleep(30);
                    }));
                    break; // track only the first detected face
                }
            }
            else
            {
                // Color-blob tracking in HSV space.
                int AllBlobs = 0;
                CvBlobs blobs = null;
                IplImage imgHSVsrc = Cv.CreateImage(Cv.GetSize(img), BitDepth.U8, 3);
                IplImage imgHSVdst = Cv.CreateImage(Cv.GetSize(img), BitDepth.U8, 1);
                Cv.CvtColor(img, imgHSVsrc, ColorConversion.BgrToHsv);
                Cv.InRangeS(imgHSVsrc, new CvScalar(86, 80, 30), new CvScalar(115, 250, 250), imgHSVdst); // keep bluish pixels
                Cv.ReleaseImage(imgHSVsrc);

                blobs = new CvBlobs(imgHSVdst);
                blobs.FilterByArea(7000, 40000);
                AllBlobs = blobs.Count;

                foreach (KeyValuePair<int, CvBlob> blob in blobs)
                {
                    CvBlob CurrentBlob = blob.Value;
                    CvRect BlobRect = CurrentBlob.Rect;
                    CvPoint Point1, Point2;
                    Point1.X = BlobRect.X;
                    Point1.Y = BlobRect.Y;
                    Point2.X = BlobRect.X + BlobRect.Width;
                    Point2.Y = BlobRect.Y + BlobRect.Height;
                    img.DrawRect(Point1, Point2, CvColor.LightGreen, 3, LineType.AntiAlias);

                    this.Invoke(new Action(() =>
                    {
                        LifeBox.Left = (int)(BlobRect.Left * wFactor - (float)(LifeBox.Width / 2.0) - (float)(this.Width / 2.0));
                        LifeBox.Top = (int)(BlobRect.Top * hFactor - (float)(LifeBox.Height / 2.0) - (float)(this.Height / 2.0));
                        if (LifeBox.Left > (this.Width - LifeBox.Width - 12)) { LifeBox.Left = (this.Width - LifeBox.Width - 24); }
                        if (LifeBox.Top > (this.Height - LifeBox.Height - 48)) { LifeBox.Top = (this.Height - LifeBox.Height - 48); }
                        if (LifeBox.Left < 12) { LifeBox.Left = 12; }
                        if (LifeBox.Top < 12) { LifeBox.Top = 12; }
                        Thread.Sleep(30);
                    }));
                    break; // track only the first blob that passed the filter
                }

                Bitmap Item = BitmapConverter.ToBitmap(img);
                Item.SetResolution(240, 180);
                CaptureBox.Image = Item;
                Bitmap HSVItem = BitmapConverter.ToBitmap(imgHSVdst);
                HSVItem.SetResolution(240, 180);
                HSVCaptureBox.Image = HSVItem;
                Cv.ReleaseImage(imgHSVdst);
            }
        }
    }
    catch (Exception e)
    {
        Console.WriteLine("ERROR: " + e.Message + " DETAILS: " + e.StackTrace);
    }
}
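Stripped of the UI plumbing, the color branch above reduces to an HSV gate plus area-filtered labeling. Isolated for clarity, with the same values the snippet uses:

// The HSV gate and labeling from the color branch, in isolation.
IplImage hsv  = Cv.CreateImage(Cv.GetSize(img), BitDepth.U8, 3);
IplImage mask = Cv.CreateImage(Cv.GetSize(img), BitDepth.U8, 1);
Cv.CvtColor(img, hsv, ColorConversion.BgrToHsv);    // BGR -> HSV
Cv.InRangeS(hsv, new CvScalar(86, 80, 30),          // lower H,S,V bound (bluish hues)
            new CvScalar(115, 250, 250), mask);     // upper H,S,V bound
CvBlobs blobs = new CvBlobs(mask);                  // label the mask
blobs.FilterByArea(7000, 40000);                    // keep only plausible object sizes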
/// <summary>
/// Draws or prints information about blobs. (cvRenderBlobs)
/// </summary>
/// <param name="imgLabel">Label image (depth=IPL_DEPTH_LABEL and num. channels=1).</param>
/// <param name="blobs">List of blobs.</param>
/// <param name="imgSource">Input image (depth=IPL_DEPTH_8U and num. channels=3).</param>
/// <param name="imgDest">Output image (depth=IPL_DEPTH_8U and num. channels=3).</param>
/// <param name="mode">Render mode. By default it is CV_BLOB_RENDER_COLOR|CV_BLOB_RENDER_CENTROID|CV_BLOB_RENDER_BOUNDING_BOX|CV_BLOB_RENDER_ANGLE.</param>
public static void RenderBlobs(IplImage imgLabel, CvBlobs blobs, IplImage imgSource, IplImage imgDest, RenderBlobsMode mode)
{
    RenderBlobs(imgLabel, blobs, imgSource, imgDest, mode, 1.0);
}
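This overload forwards to the full version with an opacity of 1.0. A typical call, assuming labelImg and blobs came from a prior labeling pass:

// Illustrative: overlay bounding boxes and blob angles on a copy of the source.
RenderBlobs(labelImg, blobs, src, dst,
            RenderBlobsMode.BoundingBox | RenderBlobsMode.Angle);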
private void timer_process_Tick(object sender, EventArgs e)
{
    timer_process.Enabled = false;

    // Grab a full-screen screenshot and round-trip it through a PNG file.
    Bitmap bitmap = new Bitmap(Screen.PrimaryScreen.Bounds.Width, Screen.PrimaryScreen.Bounds.Height);
    Graphics graphics = Graphics.FromImage(bitmap as Image);
    graphics.CopyFromScreen(0, 0, 0, 0, bitmap.Size);
    bitmap.Save("4.png");

    // DateTime.Now.Millisecond wraps every second and can go negative when
    // subtracted; Stopwatch measures elapsed time correctly.
    var stopwatch = System.Diagnostics.Stopwatch.StartNew();

    Mat mat = new Mat("4.png", Emgu.CV.CvEnum.LoadImageType.Color);
    Image<Bgr, Byte> imgBgr = mat.ToImage<Bgr, Byte>();
    Image<Gray, Byte> imgGray = mat.ToImage<Gray, Byte>();
    int nWid = imgGray.Width;
    int nHei = imgGray.Height;

    // Inverted threshold: near-black pixels become white, everything else black.
    byte[,,] pData = imgGray.Data;
    for (int y = 0; y < nHei; y++)
    {
        for (int x = 0; x < nWid; x++)
        {
            byte c = pData[y, x, 0];
            if (c > 5) { pData[y, x, 0] = 0; }
            else { pData[y, x, 0] = 255; }
        }
    }

    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(imgGray, blobs);
    blobs.FilterByArea(100, int.MaxValue);
    if (blobs.Count < 1) { timer_process.Enabled = true; return; }

    // Take the first blob's bounding box as the candidate window region.
    Rectangle rc = Rectangle.Empty;
    foreach (var pair in blobs)
    {
        CvBlob b = pair.Value;
        rc = b.BoundingBox;
        break;
    }

    // ------- Detect blue region inside the candidate rectangle -------
    imgGray = imgBgr.Convert<Gray, Byte>();
    pData = imgGray.Data;
    for (int y = 0; y < nHei; y++)
    {
        for (int x = 0; x < nWid; x++)
        {
            if (!rc.Contains(x, y)) { pData[y, x, 0] = 0; continue; }
            byte c = pData[y, x, 0];
            if (c >= 100 && c <= 120) { pData[y, x, 0] = 255; }
            else { pData[y, x, 0] = 0; }
        }
    }

    blobs.Clear();
    _blobDetector.Detect(imgGray, blobs);
    blobs.FilterByArea(100, int.MaxValue);
    if (blobs.Count < 1) { timer_process.Enabled = true; return; }

    // Keep the largest blob by bounding-box area.
    rc = Rectangle.Empty;
    int nSizeMax = 0;
    foreach (var pair in blobs)
    {
        CvBlob b = pair.Value;
        if (b.BoundingBox.Width * b.BoundingBox.Height > nSizeMax)
        {
            rc = b.BoundingBox;
            nSizeMax = rc.Width * rc.Height;
        }
    }
    CvInvoke.Rectangle(imgBgr, rc, new MCvScalar(255, 255, 0), 2);
    Global.g_rcROI = rc;

    // Game-board region, offset from the detected window rectangle.
    Global.DEF_MAIN_BOARD_X = 238;
    Global.DEF_MAIN_BOARD_Y = 42;
    Global.DEF_MAIN_BOARD_W = 570;
    Global.DEF_MAIN_BOARD_H = 570;
    int nGameBoardX = Global.DEF_MAIN_BOARD_X + rc.X;
    int nGameBoardY = Global.DEF_MAIN_BOARD_Y + rc.Y;
    Global.GetRatioCalcedValues(rc.Width, rc.Height, ref nGameBoardX, ref nGameBoardY);
    Global.GetRatioCalcedValues(rc.Width, rc.Height, ref Global.DEF_MAIN_BOARD_W, ref Global.DEF_MAIN_BOARD_H);
    CvInvoke.Rectangle(imgBgr, new Rectangle(nGameBoardX, nGameBoardY, Global.DEF_MAIN_BOARD_W, Global.DEF_MAIN_BOARD_H), new MCvScalar(255, 255, 0), 2);

    // Marks region.
    Global.DEF_MARKS_X = 15;
    Global.DEF_MARKS_Y = 204;
    Global.DEF_MARKS_W = 189;
    Global.DEF_MARKS_H = 69;
    int nMarksX = Global.DEF_MARKS_X + rc.X;
    int nMarksY = Global.DEF_MARKS_Y + rc.Y;
    Global.GetRatioCalcedValues(rc.Width, rc.Height, ref nMarksX, ref nMarksY);
    Global.GetRatioCalcedValues(rc.Width, rc.Height, ref Global.DEF_MARKS_W, ref Global.DEF_MARKS_H);
    CvInvoke.Rectangle(imgBgr, new Rectangle(nMarksX, nMarksY, Global.DEF_MARKS_W, Global.DEF_MARKS_H), new MCvScalar(255, 255, 0), 2);

    // Slice the board into an 8x8 grid of cells.
    int nStepX = Global.DEF_MAIN_BOARD_W / 8;
    int nStepY = Global.DEF_MAIN_BOARD_H / 8;
    var rois = new List<Rectangle>();              // cell rectangles
    var imageparts = new List<Image<Bgr, byte>>(); // cropped cell images
    for (int i = 0; i < 8; i++)
    {
        for (int j = 0; j < 8; j++)
        {
            Rectangle roi = new Rectangle(nGameBoardX + j * nStepX, nGameBoardY + i * nStepY, Global.DEF_ITEM_W, Global.DEF_ITEM_H);
            rois.Add(roi);
            imgBgr.ROI = roi;
            imageparts.Add(imgBgr.Copy());
        }
    }
    imgBgr.ROI = Rectangle.Empty;

    // Classify each cell and fill the 8x8 character grid.
    m_LstCharacter.Clear();
    bool bCanProcess = true;
    int k = 0, nRow = 0, nCol = 0;
    foreach (Image<Bgr, Byte> img in imageparts)
    {
        int nCharac = (int)ImageMatcher.DetermineCharacter(img);
        m_LstCharacter.Add(nCharac);
        MovementDecision.g_AllocCharacters[nRow, nCol] = nCharac;
        nCol++;
        if (nCol >= 8) { nRow++; nCol = 0; }
        CvInvoke.Rectangle(imgBgr, rois[k], new MCvScalar(255, 255, 0), 2);
        if (nCharac == 0) { bCanProcess = false; } // unrecognized cell: skip this round
        k++;
    }

    // Dump the grid to the list box.
    string szLine = "";
    lstBox.Items.Clear();
    for (int i = 0; i < 8; i++)
    {
        szLine = "";
        for (int j = 0; j < 8; j++) { szLine += "" + MovementDecision.g_AllocCharacters[i, j] + " "; }
        lstBox.Items.Add(szLine);
    }

    picScr.Image = imgBgr.Bitmap;
    if (!bCanProcess) { timer_process.Enabled = true; return; }

    MovementDecision.Process();
    lbProcessTime.Text = stopwatch.ElapsedMilliseconds.ToString();
    timer_process.Enabled = true;
}
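The nested 8x8 ROI loop above can be factored into a small iterator; a sketch, where the helper name and signature are illustrative and not part of the original code:

// Sketch: enumerate the 64 cell rectangles of the 8x8 board.
// Requires System.Collections.Generic and System.Drawing.
static IEnumerable<Rectangle> BoardCells(int originX, int originY,
                                         int stepX, int stepY,
                                         int cellW, int cellH)
{
    for (int row = 0; row < 8; row++)
        for (int col = 0; col < 8; col++)
            yield return new Rectangle(originX + col * stepX,
                                       originY + row * stepY,
                                       cellW, cellH);
}

With it, the cropping loop becomes foreach (var roi in BoardCells(nGameBoardX, nGameBoardY, nStepX, nStepY, Global.DEF_ITEM_W, Global.DEF_ITEM_H)) { ... }.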
/// <summary>
/// Updates the list of tracks based on the current blobs. (cvUpdateTracks)
/// </summary>
/// <param name="b">List of blobs.</param>
/// <param name="t">List of tracks.</param>
/// <param name="thDistance">Max distance to determine when a track and a blob match.</param>
/// <param name="thInactive">Max number of frames a track can be inactive.</param>
/// <remarks>
/// Tracking based on:
/// A. Senior, A. Hampapur, Y-L Tian, L. Brown, S. Pankanti, R. Bolle. Appearance Models for
/// Occlusion Handling. Second International Workshop on Performance Evaluation of Tracking and
/// Surveillance Systems &amp; CVPR'01. December, 2001.
/// (http://www.research.ibm.com/peoplevision/PETS2001.pdf)
/// </remarks>
public static void UpdateTracks(CvBlobs b, CvTracks t, double thDistance, uint thInactive)
{
    if (b == null)
        throw new ArgumentNullException("b");
    if (t == null)
        throw new ArgumentNullException("t");
    CvBlobInvoke.cvb_cvUpdateTracks(b.CvPtr, t.CvPtr, thDistance, thInactive);
}
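A per-frame sketch of how this wrapper is typically driven. The capture condition and the detection step are placeholders, not API calls defined here, and the threshold values are illustrative:

// Hedged sketch: thDistance = 5 px, thInactive = 10 frames (illustrative values).
CvTracks tracks = new CvTracks();
while (capturing)                   // capturing: hypothetical loop condition
{
    CvBlobs blobs = DetectBlobs();  // DetectBlobs(): hypothetical labeling step
    UpdateTracks(blobs, tracks, 5.0, 10);
    // tracks now maps track IDs to entries with refreshed centroids/lifetimes.
}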
private void ExtractBlobAndCrop(Image<Gray, byte> skin)
{
    using (MemStorage storage = new MemStorage())
    {
        Image<Gray, Byte> smoothedFrame = new Image<Gray, byte>(skin.Size);
        CvInvoke.GaussianBlur(skin, smoothedFrame, new Size(3, 3), 1); // filter out noise
        imageBoxFrameGrabber.Image = skin;

        Mat foregroundMask = new Mat();
        Mat ss = skin.Mat;
        fgDetector.Apply(ss, foregroundMask);

        CvBlobs blobs = new CvBlobs();
        // (Alternative, commented out in the original: detect on the foreground mask
        // instead of the skin image.)
        blobDetector.Detect(skin, blobs);
        blobs.FilterByArea(30000, 150000);

        // Find the largest blob.
        CvBlob b = null;
        CvBlob btemp;
        int area = 0;
        foreach (var pair in blobs)
        {
            btemp = pair.Value;
            if (area < btemp.Area)
            {
                b = pair.Value;
                area = btemp.Area;
            }
        }

        // Crop the largest blob into a square bitmap.
        Bitmap skin_bit = skin.ToBitmap();
        if (area != 0)
        {
            CvInvoke.Rectangle(currentFrame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
            int side = Math.Max(b.BoundingBox.Width, b.BoundingBox.Height);
            Bitmap crop_image = new Bitmap(side, side);
            Graphics g = Graphics.FromImage(crop_image);
            g.DrawImage(skin_bit, -b.BoundingBox.X, -b.BoundingBox.Y); // shift so the blob lands at (0,0)

            croped = new Image<Gray, Byte>(crop_image).Resize(350, 350, Inter.Cubic);
            croped1 = new Image<Gray, Byte>(crop_image).Resize(100, 100, Inter.Cubic);
            croped2 = new Image<Gray, Byte>(crop_image).Resize(50, 50, Inter.Cubic);

            int gesture_number = fow_prop.image(croped2);
            label1.Text = "" + gesture_number;
            imageBoxSkin.Image = croped;

            crop_image.Dispose();
            skin_bit.Dispose();
        }
    }
}
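The manual largest-blob scan in the middle of this method can be written more compactly with LINQ; a behavior-equivalent sketch, assuming Emgu's CvBlobs exposes its blobs through a dictionary-style Values collection:

// Sketch: largest blob, or null when nothing survives FilterByArea.
// Requires System.Linq.
CvBlob largest = blobs.Values
                      .OrderByDescending(blob => blob.Area)
                      .FirstOrDefault();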
private void UpdateTracks(CvBlobs blobs, List<Trak> tracks, double minDist, int maxLife)
{
    ArrayList matched = new ArrayList();
    List<int> enter = new List<int>();
    List<int> end = new List<int>();
    List<int> active = new List<int>();

    foreach (var blob in blobs)
    {
        Vector2 blobPos = TransformKinectToScreenPos(new Vector2((float)blob.Value.Centroid.X, (float)blob.Value.Centroid.Y));
        float distanceFromCenter = Vector2.Distance(blobPos, new Vector2(Screen.width / 2, Screen.height / 2));
        if (distanceFromCenter < ((Screen.height / 2) - radiusRemove)) // centroid must be inside the pond bounds
        {
            bool tracked = false;
            double minFound = 1000.0;
            Trak closest = new Trak();

            // Find the closest existing track within minDist, if any.
            foreach (Trak track in tracks)
            {
                double distance = Vector2.Distance(
                    new Vector2((float)blob.Value.Centroid.X, (float)blob.Value.Centroid.Y),
                    new Vector2((float)track.X, (float)track.Y));
                if (distance < minDist)
                {
                    tracked = true;
                    if (distance < minFound)
                    {
                        closest = track;
                        minFound = distance;
                    }
                }
            }

            if (tracked)
            {
                // Matched: refresh the track with this blob's data.
                closest.Active++;
                closest.Inactive = 0;
                closest.Lifetime++;
                closest.Label = blob.Key;
                closest.X = blob.Value.Centroid.X;
                closest.Y = blob.Value.Centroid.Y;
                closest.Centroid = blob.Value.Centroid;
                closest.MaxX = blob.Value.MaxX;
                closest.MaxY = blob.Value.MaxY;
                closest.MinX = blob.Value.MinX;
                closest.MinY = blob.Value.MinY;
                matched.Add(closest.Id);
                active.Add((int)closest.Id);
            }
            else
            {
                // Not matched: start a new track for this blob.
                trakCount++;
                Trak track = new Trak();
                track.Active = 1;
                track.Inactive = 0;
                track.Lifetime = 1;
                track.Label = blob.Key;
                track.X = blob.Value.Centroid.X;
                track.Y = blob.Value.Centroid.Y;
                track.Centroid = blob.Value.Centroid;
                track.MaxX = blob.Value.MaxX;
                track.MaxY = blob.Value.MaxY;
                track.MinX = blob.Value.MinX;
                track.MinY = blob.Value.MinY;
                track.Id = trakCount;
                tracks.Add(track);
                enter.Add((int)track.Id);
            }
        }
    }

    // Age unmatched tracks; retire the ones inactive for too long.
    for (int i = 0; i < tracks.Count; i++)
    {
        Trak track = (Trak)tracks[i];
        if (matched.IndexOf(track.Id) == -1)
        {
            if (track.Inactive >= maxLife)
            {
                end.Add((int)track.Id); // tracked object left the scene
            }
            else
            {
                // Not matched this frame: wait up to maxLife frames.
                track.Active = 0;
                track.Inactive++;
                track.Lifetime++;
            }
        }
    }

    // Activate stone areas near any active track.
    foreach (StoneArea stoneArea in Spawner.Instance.stoneAreas)
    {
        bool toOpen = false;
        for (int i = 0; i < tracks.Count; i++)
        {
            Trak track = tracks[i];
            if (track.Active > 0)
            {
                Vector2 blobPos = TransformKinectToScreenPos(new Vector2((float)track.X, (float)track.Y));
                if (Vector2.Distance(blobPos, stoneArea.GetPositionOnScreen()) < Screen.height * stoneAreaDist)
                {
                    track.stoneArea = true;
                    toOpen = true;
                }
                else
                {
                    track.stoneArea = false;
                }
            }
        }
        stoneArea.GoActivate(toOpen);
    }

    // Activate food areas near any active track.
    foreach (FoodArea foodArea in Spawner.Instance.foodAreas)
    {
        bool toOpen = false;
        for (int i = 0; i < tracks.Count; i++)
        {
            Trak track = tracks[i];
            if (track.Active > 0)
            {
                Vector2 blobPos = TransformKinectToScreenPos(new Vector2((float)track.X, (float)track.Y));
                if (Vector2.Distance(blobPos, foodArea.GetPositionOnScreen()) < Screen.height * foodAreaDist)
                {
                    track.foodArea = true;
                    toOpen = true;
                }
                else
                {
                    track.foodArea = false;
                }
            }
        }
        foodArea.GoActivate(toOpen);
    }

    // Fire events for active, exited, and newly entered tracks.
    foreach (int id in active) { OnBlobActive(id); }
    foreach (int id in end) { OnBlobExit(id); }
    foreach (int id in enter) { OnBlobEnter(id); }
}
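The matching pass at the top of this method is a greedy nearest-neighbour search. Pulled out on its own it looks like this (the helper name is illustrative; the logic mirrors the loop above):

// Sketch: nearest track within minDist of the blob centroid, else null.
Trak FindClosestTrack(CvBlob blob, List<Trak> tracks, double minDist)
{
    Trak closest = null;
    double best = minDist;
    foreach (Trak track in tracks)
    {
        double d = Vector2.Distance(
            new Vector2((float)blob.Centroid.X, (float)blob.Centroid.Y),
            new Vector2((float)track.X, (float)track.Y));
        if (d < best) { best = d; closest = track; }
    }
    return closest;
}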
void ProcessFrame(object sender, EventArgs e)
{
    Mat frame = _cameraCapture.QueryFrame();
    Mat smoothedFrame = new Mat();
    CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(3, 3), 1); // filter out noise

    #region use the BG/FG detector to find the foreground mask
    Mat foregroundMask = new Mat();
    _fgDetector.Apply(smoothedFrame, foregroundMask);
    #endregion

    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(foregroundMask.ToImage<Gray, byte>(), blobs);
    blobs.FilterByArea(100, int.MaxValue);

    float scale = (frame.Width + frame.Width) / 2.0f; // note: likely intended (Width + Height) / 2
    _tracker.Update(blobs, 0.01 * scale, 5, 5);

    long detectionTime;
    List<Rectangle> faces = new List<Rectangle>();
    List<Rectangle> eyes = new List<Rectangle>();
    IImage image = (IImage)frame; // this cast is the key step
    faceImage = frame.Bitmap;
    DetectFace.Detect(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes, out detectionTime);

    #region multi-face identification
    Graphics g1 = Graphics.FromImage(frame.Bitmap);
    List<FaceIdentifyModel> tempList = new List<FaceIdentifyModel>();
    foreach (Rectangle face in faces)
    {
        // Helper method: crops the face rectangle out of the full frame and returns an Image.
        Image rectImage1 = ImageHelper.CaptureImage(frame.Bitmap, face);
        FaceIdentifyModel MoreIdentifyInfo = FaceAPI.FaceIdentify(rectImage1, tb_Group.Text.Trim(), 1, 1);
        MoreIdentifyInfo.rect = face;
        tempList.Add(MoreIdentifyInfo);
    }

    Pen pen1 = new Pen(Color.Yellow, 2.0f);
    Font font1 = new Font("微软雅黑", 16, GraphicsUnit.Pixel); // Microsoft YaHei
    SolidBrush drawBrush1 = new SolidBrush(Color.Yellow);
    tb_Identify.Text = tempList.ToJson();
    foreach (var t in tempList)
    {
        g1.DrawRectangle(pen1, t.rect);
        if (t.result != null)
        {
            g1.DrawString(t.result[0].user_info.Replace(",", "\r\n"), font1, drawBrush1, new Point(t.rect.X + 20, t.rect.Y - 20));
        }
    }
    #endregion

    imageBox1.Image = frame;
    imageBox2.Image = foregroundMask;
}
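The drawing block above creates Graphics, Pen, Font, and SolidBrush objects every frame without disposing them; a leak-free variant of the same block, as a sketch:

// Sketch: same drawing, with GDI+ resources disposed deterministically.
using (Graphics g1 = Graphics.FromImage(frame.Bitmap))
using (Pen pen1 = new Pen(Color.Yellow, 2.0f))
using (Font font1 = new Font("微软雅黑", 16, GraphicsUnit.Pixel))
using (SolidBrush drawBrush1 = new SolidBrush(Color.Yellow))
{
    foreach (var t in tempList)
    {
        g1.DrawRectangle(pen1, t.rect);
        if (t.result != null)
            g1.DrawString(t.result[0].user_info.Replace(",", "\r\n"),
                          font1, drawBrush1,
                          new Point(t.rect.X + 20, t.rect.Y - 20));
    }
}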