/// <summary>
/// Runs background subtraction on the incoming frame, folds the resulting
/// mask into the motion history, and fills <paramref name="annotatedImage"/>
/// with whichever view <c>_bgSubImageType</c> selects.
/// </summary>
/// <param name="image">Current BGR frame.</param>
/// <param name="annotatedImage">Selected output view (mask, background model, or motion overlay).</param>
/// <param name="data">Motion data produced by <c>DrawMotion</c> in the default case.</param>
public override void Process(Image <Bgr, byte> image, out Image <Bgr, byte> annotatedImage, out List <object> data)
{
    base.Process(image, out annotatedImage, out data);

    // Feed the new frame into the subtractor, then update the motion
    // history from the fresh foreground mask.
    _subtractor.Apply(image, _foregroundMask);
    _motionHistory.Update(_foregroundMask);

    // Choose which image the caller gets back.
    if (_bgSubImageType == BgSubImageType.FgMask)
    {
        // Raw foreground mask, widened to BGR for display.
        annotatedImage = _foregroundMask.ToImage <Bgr, byte>();
    }
    else if (_bgSubImageType == BgSubImageType.Background)
    {
        // The subtractor's current background model.
        _subtractor.GetBackgroundImage(annotatedImage);
    }
    else
    {
        // Default: draw motion annotations and return fresh motion data.
        data = new List <object>();
        DrawMotion(ref annotatedImage, ref data);
    }
}
/// <summary>
/// Per-frame handler: blurs the captured frame, extracts a foreground mask
/// with the BG/FG detector, keeps blobs of at least 100 px and outlines
/// each one in white before publishing frame and mask to the image boxes.
/// </summary>
void ProcessFrame(object sender, EventArgs e)
{
    Mat frame = _cameraCapture.QueryFrame();

    // Light Gaussian blur to suppress sensor noise before subtraction.
    Image <Bgr, Byte> denoised = new Image <Bgr, byte>(frame.Size);
    CvInvoke.GaussianBlur(frame, denoised, new Size(3, 3), 1);

    // Foreground mask from the background/foreground detector.
    Mat foregroundMask = new Mat();
    _fgDetector.Apply(denoised, foregroundMask);

    // Blob extraction; discard anything smaller than 100 px.
    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(foregroundMask.ToImage <Gray, byte>(), blobs);
    blobs.FilterByArea(100, int.MaxValue);

    // Outline each surviving blob in white (thickness 2).
    foreach (var entry in blobs)
    {
        CvInvoke.Rectangle(frame, entry.Value.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
    }

    imageBox1.Image = frame;
    imageBox2.Image = foregroundMask;
}
/// <summary>
/// Frame-capture handler: runs motion detection on the latest frame,
/// draws a rectangle and motion angle for every sufficiently large motion
/// region, then publishes the annotated frame and the raw motion mask.
/// </summary>
void frame(object sender, EventArgs e)
{
    double sum_pix = 0;
    double angle = 0;
    VectorOfRect rects = new VectorOfRect();   // motion-component rectangles
    Mat _segMask = new Mat();                  // segmentation mask from the motion history

    _capture.Retrieve(scr);                    // grab the current frame
    // CvInvoke.Resize(scr, scr, new Size(320, 480));
    _motionDetect.Apply(scr, mask);            // motion/foreground mask
    CvInvoke.MedianBlur(mask, mask, 5);        // median filter to remove speckle noise
    _motionhistory.Update(mask);               // fold the mask into the motion history
    _motionhistory.GetMotionComponents(_segMask, rects); // segmentation mask + motion rects

    // Materialize the rectangles once: VectorOfRect's ToArray() and indexer
    // marshal from native memory, so calling them inside the loop (and in
    // the loop condition, as before) repeated that cost every iteration.
    var motionRects = rects.ToArray();
    for (int j = 0; j < motionRects.Length; j++)
    {
        // Area filter: drop small regions (noise).
        if (motionRects[j].Width * motionRects[j].Height > 1000)
        {
            // Mark the motion region on the frame in red (BGR).
            CvInvoke.Rectangle(scr, motionRects[j], new MCvScalar(0, 0, 255));
            // Motion direction and pixel count for this region.
            _motionhistory.MotionInfo(_segMask, motionRects[j], out angle, out sum_pix);
            // Draw the motion angle next to the region.
            CvInvoke.PutText(scr, "angle : " + (int)angle, motionRects[j].Location,
                Emgu.CV.CvEnum.FontFace.HersheyComplex, 0.5, new MCvScalar(0, 255, 0));
        }
    }
    imageBox1.Image = drar_rect(scr);
    // imageBox1.Image = scr;
    imageBox2.Image = mask;   // show the raw motion-detection output
}
/// <summary>
/// Per-frame handler: background-subtracts the camera frame, tracks the
/// detected blobs with the Kalman-style tracker field and labels each
/// track with its id at its centroid.
/// </summary>
void ProcessFrame(object sender, EventArgs e)
{
    Mat frame = _cameraCapture.QueryFrame();

    // Small Gaussian blur to reduce noise before background subtraction.
    Mat blurred = new Mat();
    CvInvoke.GaussianBlur(frame, blurred, new Size(3, 3), 1);

    // Foreground mask from the BG/FG detector.
    Mat foregroundMask = new Mat();
    _fgDetector.Apply(blurred, foregroundMask);

    // Detect blobs and discard tiny ones.
    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(foregroundMask.ToImage <Gray, byte>(), blobs);
    blobs.FilterByArea(100, int.MaxValue);

    // NOTE(review): (Width + Width) / 2 is just Width — the upstream sample
    // likely intended (Width + Height) / 2; kept as-is to preserve behavior.
    float scale = (frame.Width + frame.Width) / 2.0f;
    _tracker.Update(blobs, 0.01 * scale, 5, 5);

    // Draw each track's bounding box and its id at the centroid.
    foreach (var entry in _tracker)
    {
        CvTrack track = entry.Value;
        CvInvoke.Rectangle(frame, track.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
        Point center = new Point((int)Math.Round(track.Centroid.X), (int)Math.Round(track.Centroid.Y));
        CvInvoke.PutText(frame, track.Id.ToString(), center, FontFace.HersheyPlain, 1.0, new MCvScalar(255.0, 255.0, 255.0));
    }

    imageBox11.Image = frame;
    imageBox12.Image = foregroundMask;
}
/// <summary>
/// Diffs <paramref name="src"/> against the reference <paramref name="srcB"/>
/// using a two-frame MOG2 background model, cleans the change mask with
/// close/open/erode morphology, and returns the mask as a bitmap.
/// </summary>
/// <param name="src">Current image (BGRA); converted to BGR internally.</param>
/// <param name="srcB">Reference/background image (BGRA); converted to BGR internally.</param>
/// <param name="dst">Resulting binary difference mask.</param>
private void ProcImage3(ref System.Drawing.Bitmap src, ref System.Drawing.Bitmap srcB, out System.Drawing.Bitmap dst)
{
    dst = null;
    double threshold = App.appSettings.DarkAreaThreshold;

    // Mats and the subtractor wrap native memory: dispose deterministically
    // instead of leaking them each call (the original never disposed any).
    using (Mat srcImg = BitmapConverter.ToMat(src))
    using (Mat srcImgB = BitmapConverter.ToMat(srcB))
    using (Mat mask = new Mat())
    using (Mat tmp = new Mat())
    using (BackgroundSubtractor backSub = BackgroundSubtractorMOG2.Create(1, threshold, true))
    {
        Cv2.CvtColor(srcImg, srcImg, ColorConversionCodes.BGRA2BGR);
        Cv2.CvtColor(srcImgB, srcImgB, ColorConversionCodes.BGRA2BGR);

        //BackgroundSubtractor backSub = BackgroundSubtractorMOG.Create(1, 5, 0.7, 0);
        //BackgroundSubtractor backSub = BackgroundSubtractorGMG.Create(1, 0.5);

        // Learn the reference frame (learning rate 1), then diff the current
        // frame against it without updating the model (learning rate 0).
        backSub.Apply(srcImgB, mask, 1);
        backSub.Apply(srcImg, mask, 0);

        // Binarize at 180: with detectShadows enabled, MOG2 marks shadow
        // pixels with a lower value than true foreground, so this drops them.
        Cv2.Threshold(mask, mask, 180, 255, ThresholdTypes.Binary);

        // 5x5 rectangular kernel anchored at its center (2 * 2 + 1 = 5).
        using (var element = Cv2.GetStructuringElement(
                   MorphShapes.Rect,
                   new OpenCvSharp.Size(2 * 2 + 1, 2 * 2 + 1),
                   new OpenCvSharp.Point(2, 2)))
        {
            // Close fills small holes, open removes small specks, then a
            // final erode shrinks the remaining regions slightly.
            Cv2.MorphologyEx(mask, tmp, MorphTypes.Close, element, null, App.appSettings.Iterations);
            Cv2.MorphologyEx(tmp, mask, MorphTypes.Open, element, null, App.appSettings.Iterations2);
            Cv2.Erode(mask, tmp, element);
            dst = BitmapConverter.ToBitmap(tmp);
        }
    }
}
/// <summary>
/// Updates motion history with the current image: clones the current frame,
/// runs it through Prepare(), shows it on monitor C, then feeds it to the
/// foreground detector and folds the resulting mask into the motion history.
/// </summary>
private void UpdateMotionHistory()
{
    if (_currentImage != null)
    {
        Image <Gray, byte> image;
        using (image = _currentImage.Clone())
        {
            // NOTE(review): reassigning `image` inside the using does NOT change
            // what gets disposed — the original clone is disposed at block exit.
            // If Prepare() returns a *different* instance, that instance is never
            // disposed (leak) — confirm Prepare's contract.
            image = Prepare(image);
            ComputerVisionMonitors[eComputerVisionMonitor.MonitorC].ShowFrame(image);
            // NOTE(review): the `new Mat()` here is dead — it is immediately
            // overwritten by image.Mat on the next line.
            Mat motion = new Mat();
            motion = image.Mat;
            _forgroundDetector.Apply(motion, _foreground);
            _motionHistory.Update(_foreground);
        }
    }
}
/// <summary>
/// Grabs one camera frame, blurs it with a FrameBlurStrength kernel,
/// computes the foreground mask, caches both in LastFrame/LastMask and
/// returns them in a CameraTrackingUpdateReturnModel.
/// </summary>
/// <returns>The captured frame and its foreground mask.</returns>
public CameraTrackingUpdateReturnModel Update()
{
    // Capture the next frame from the camera.
    Mat frame = _cameraCapture.QueryFrame();

    // Blur to knock down per-pixel noise before background subtraction.
    Mat blurred = new Mat();
    CvInvoke.GaussianBlur(frame, blurred, new Size(FrameBlurStrength, FrameBlurStrength), 1);

    // Foreground mask for callers to preview / detect against.
    Mat mask = new Mat();
    _fgDetector.Apply(blurred, mask);

    // Keep the latest results around for other components.
    LastFrame = frame;
    LastMask = mask;

    CameraTrackingUpdateReturnModel result = new CameraTrackingUpdateReturnModel();
    result.Frame = frame;
    result.Mask = mask;
    return result;
}
/// <summary>
/// Detects four colored fingertip markers (red, blue, green, yellow) in HSV
/// space and blob-detects each. When the red+blue pair and the green+yellow
/// pair are each "pinched" (centroids close together) in the same frame, it
/// is treated as a click: a sound plays, the rectangle framed by the markers
/// is cropped, and both the full frame and the crop are saved to disk.
/// </summary>
private void ProcessFrame(object sender, EventArgs arg)
{
    Mat frame = new Mat();
    capture.Retrieve(frame, 0);
    // NOTE(review): alias, not a copy — frame_crop and frame are the same Mat.
    Mat frame_crop = frame;

    // Work in HSV for robust color segmentation.
    Image <Hsv, Byte> currenthsvFrame = (frame.ToImage <Bgr, Byte>()).Convert <Hsv, Byte>();
    Image <Gray, Byte> color_one = new Image <Gray, Byte>(frame.Width, frame.Height);
    Image <Gray, Byte> color_two = new Image <Gray, Byte>(frame.Width, frame.Height);
    Image <Gray, Byte> color_three = new Image <Gray, Byte>(frame.Width, frame.Height);
    Image <Gray, Byte> color_four = new Image <Gray, Byte>(frame.Width, frame.Height);

    /*
     * Color one is Red
     * Color two is Blue
     * Color three is Green
     * Color Four is Yellow
     * Green is in Right Index Finger
     * Blue is in Left Index Finger
     * Red in Right Thumb
     * Yelloe in Left Thumb
     */

    // Hand-tuned HSV ranges for each marker color.
    Hsv hsv_min_color_one = new Hsv(0, 135, 110);
    //Hsv hsv_max_color_one = new Hsv(6, 255, 255);
    Hsv hsv_max_color_one = new Hsv(8, 255, 255);
    Hsv hsv_min_color_two = new Hsv(112, 53, 10);
    Hsv hsv_max_color_two = new Hsv(119, 255, 255);
    /*
     * Hsv hsv_min_color_three = new Hsv(68, 59, 80);
     * Hsv hsv_max_color_three = new Hsv(85, 255, 255);
     * Hsv hsv_min_color_four = new Hsv(20, 165, 165);
     * Hsv hsv_max_color_four = new Hsv(36, 255, 255);
     */
    Hsv hsv_min_color_three = new Hsv(83, 109, 105);
    Hsv hsv_max_color_three = new Hsv(109, 255, 255);
    Hsv hsv_min_color_four = new Hsv(18, 155, 155);
    Hsv hsv_max_color_four = new Hsv(35, 255, 255);

    // Binary mask per marker color.
    color_one = currenthsvFrame.InRange(hsv_min_color_one, hsv_max_color_one);
    color_two = currenthsvFrame.InRange(hsv_min_color_two, hsv_max_color_two);
    color_three = currenthsvFrame.InRange(hsv_min_color_three, hsv_max_color_three);
    color_four = currenthsvFrame.InRange(hsv_min_color_four, hsv_max_color_four);

    //Blob detection
    #region Blob Detection

    //Color one detection
    Image <Bgr, Byte> smoothedFrame_cone = new Image <Bgr, byte>(currenthsvFrame.Size);
    CvInvoke.GaussianBlur(color_one, smoothedFrame_cone, new Size(3, 3), 1); //filter out noises
    Mat forgroundMask_cone = new Mat();
    fgDetector.Apply(smoothedFrame_cone, forgroundMask_cone);
    CvBlobs blobs_color_one = new CvBlobs();
    blobDetector.Detect(forgroundMask_cone.ToImage <Gray, byte>(), blobs_color_one);
    blobs_color_one.FilterByArea(minarea, maxarea);

    //Color two Blob Detection
    Image <Bgr, Byte> smoothedFrame_ctwo = new Image <Bgr, byte>(currenthsvFrame.Size);
    CvInvoke.GaussianBlur(color_two, smoothedFrame_ctwo, new Size(3, 3), 1); //filter out noises
    Mat forgroundMask_ctwo = new Mat();
    fgDetector.Apply(smoothedFrame_ctwo, forgroundMask_ctwo);
    CvBlobs blobs_color_two = new CvBlobs();
    blobDetector.Detect(forgroundMask_ctwo.ToImage <Gray, byte>(), blobs_color_two);
    blobs_color_two.FilterByArea(minarea, maxarea);

    //Color three blob detection
    Image <Bgr, Byte> smoothedFrame_cthree = new Image <Bgr, byte>(currenthsvFrame.Size);
    CvInvoke.GaussianBlur(color_three, smoothedFrame_cthree, new Size(3, 3), 1); //filter out noises
    Mat forgroundMask_cthree = new Mat();
    fgDetector.Apply(smoothedFrame_cthree, forgroundMask_cthree);
    CvBlobs blobs_color_three = new CvBlobs();
    blobDetector.Detect(forgroundMask_cthree.ToImage <Gray, byte>(), blobs_color_three);
    blobs_color_three.FilterByArea(minarea, maxarea);

    //Color four detection
    Image <Bgr, Byte> smoothedFrame_cfour = new Image <Bgr, byte>(currenthsvFrame.Size);
    CvInvoke.GaussianBlur(color_four, smoothedFrame_cfour, new Size(3, 3), 1); //filter out noises
    Mat forgroundMask_cfour = new Mat();
    fgDetector.Apply(smoothedFrame_cfour, forgroundMask_cfour);
    CvBlobs blobs_color_four = new CvBlobs();
    blobDetector.Detect(forgroundMask_cfour.ToImage <Gray, byte>(), blobs_color_four);
    blobs_color_four.FilterByArea(minarea, maxarea);

    //Makers Interpretition
    // Last-seen centroid of each marker (x, y). If several blobs survive the
    // area filter, the last one enumerated wins.
    float[] cent_color_one = new float[2];
    float[] cent_color_two = new float[2];
    float[] cent_color_three = new float[2];
    float[] cent_color_four = new float[2];

    //Centroids of Markers
    foreach (var pair in blobs_color_one)
    {
        CvBlob b = pair.Value;
        CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
        cent_color_one[0] = b.Centroid.X;
        cent_color_one[1] = b.Centroid.Y;
    }
    foreach (var pair in blobs_color_two)
    {
        CvBlob b = pair.Value;
        CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
        cent_color_two[0] = b.Centroid.X;
        cent_color_two[1] = b.Centroid.Y;
    }
    foreach (var pair in blobs_color_three)
    {
        CvBlob b = pair.Value;
        CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
        cent_color_three[0] = b.Centroid.X;
        cent_color_three[1] = b.Centroid.Y;
    }
    foreach (var pair in blobs_color_four)
    {
        CvBlob b = pair.Value;
        CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
        cent_color_four[0] = b.Centroid.X;
        cent_color_four[1] = b.Centroid.Y;
    }
    #endregion

    #region Calculation
    // A click requires two pinches in the same frame:
    //   red + blue centroids with squared distance <= 5000, and
    //   green + yellow centroids with squared distance <= 10000.
    int click_flag = 0;
    int[] x_cor = new int[4];
    int[] y_cor = new int[4];
    if (blobs_color_one.Count != 0 && blobs_color_two.Count != 0 && blobs_color_three.Count != 0 && blobs_color_four.Count != 0)
    {
        // First pinch: red (one) against blue (two).
        foreach (var pair in blobs_color_one)
        {
            CvBlob b = pair.Value;
            foreach (var pairr in blobs_color_two)
            {
                CvBlob c = pairr.Value;
                // Squared Euclidean distance between the two centroids.
                if ((b.Centroid.X - c.Centroid.X) * (b.Centroid.X - c.Centroid.X) + (b.Centroid.Y - c.Centroid.Y) * (b.Centroid.Y - c.Centroid.Y) <= 5000)
                {
                    click_flag = 1;
                    x_cor[0] = ((int)b.Centroid.X);
                    x_cor[1] = ((int)c.Centroid.X);
                    y_cor[0] = ((int)b.Centroid.Y);
                    y_cor[1] = ((int)c.Centroid.Y);
                    break;
                }
            }
            if (click_flag == 1)
            {
                break;
            }
        }
        // Second pinch: green (three) against yellow (four); only checked
        // when the first pinch was found.
        if (click_flag == 1)
        {
            click_flag = 0;
            foreach (var pair in blobs_color_three)
            {
                CvBlob b = pair.Value;
                foreach (var pairr in blobs_color_four)
                {
                    CvBlob c = pairr.Value;
                    if ((b.Centroid.X - c.Centroid.X) * (b.Centroid.X - c.Centroid.X) + (b.Centroid.Y - c.Centroid.Y) * (b.Centroid.Y - c.Centroid.Y) <= 10000)
                    {
                        click_flag = 1;
                        x_cor[2] = ((int)b.Centroid.X);
                        x_cor[3] = ((int)c.Centroid.X);
                        y_cor[2] = ((int)b.Centroid.Y);
                        y_cor[3] = ((int)c.Centroid.Y);
                        break;
                    }
                }
                if (click_flag == 1)
                {
                    break;
                }
            }
        }
    }
    if (click_flag == 1)
    {
        //MessageBox.Show("clicked");
        SoundPlayer simpleSound = new SoundPlayer(@"click_sound.wav");
        simpleSound.Play();
        // Sort so the middle two coordinates frame the capture region.
        Array.Sort(x_cor);
        Array.Sort(y_cor);
        Bitmap ori_image = frame_crop.ToImage <Bgr, Byte>().ToBitmap();
        Bitmap crop_image = new Bitmap(x_cor[2] - x_cor[1], y_cor[2] - y_cor[1]);
        Graphics g = Graphics.FromImage(crop_image);
        // Shift the source so the crop region lands at the bitmap origin.
        g.DrawImage(ori_image, -x_cor[1], -y_cor[1]);
        //string name = string.Format("SAP_{0:ddMMyyyy_hh_mm_ss}.jpg",DateTime.Now);
        frame.Save(@"C:\Users\Shubhankar\Pictures\Camera Roll\" + string.Format("SAP_{0:ddMMyyyy_hh_mm_ss}_original.jpg", DateTime.Now));
        crop_image.Save(@"C:\Users\Shubhankar\Pictures\Camera Roll\" + string.Format("SAP_{0:ddMMyyyy_hh_mm_ss}.jpg", DateTime.Now));
        // NOTE(review): blocks this thread for half a second to debounce
        // repeated clicks.
        Thread.Sleep(500);
    }
    #endregion

    #region Click Gesture
    #endregion

    captureImageBox.Image = frame;
    grayscaleImageBox.Image = color_one;
    smoothedGrayscaleImageBox.Image = color_two;
    cannyImageBox.Image = color_three;
    Color4ImageBox.Image = color_four;
}
/// <summary>
/// Runs MOG2 background subtraction on nextFrame and scans every rectangle
/// in ListRect for changed ("foreground") pixels that are also near-white
/// in the original frame. Records counts and an RGB/XY profile into
/// curSearchResult when both counts exceed the UI thresholds.
/// </summary>
/// <returns>true when enough changed near-white pixels were found; otherwise false.</returns>
private bool testDiff()
{
    if (_forgroundDetector == null)
    {
        //Emgu.CV.O
        // Lazily create the detector: history = 20 frames, varThreshold = 16,
        // shadow detection enabled.
        _forgroundDetector = new BackgroundSubtractorMOG2(20, 16f, true);
    }
    bool result = false;
    using (Mat _forgroundMask = new Mat())
    {
        if (nextFrame != null)
        {
            _forgroundDetector.Apply(nextFrame, _forgroundMask);
        }
        else
        {
            return (false);
        }

        decimal backGroundCounter = 0;   // changed (non-black mask) pixels
        decimal whiteColorCounter = 0;   // changed pixels that are also near-white
        //double delta = 0.01;
        decimal deltaCounter = this.numUpDown_Sens.Value;               // sensitivity threshold from UI
        decimal whitePixelsSettings = this.numUpDown_WhitePixels.Value; // white-pixel threshold from UI
        int colorLimit = 200;            // per-channel floor for "white"

        // Build the profile strings locally: repeated string += inside a
        // pixel loop is O(n²) in total allocations.
        var profile = new System.Text.StringBuilder();
        var profileXY = new System.Text.StringBuilder();

        using (Bitmap img = _forgroundMask.Bitmap)
        {
            using (Bitmap origImg = nextFrame.Bitmap)
            {
                foreach (Rectangle Rect in ListRect)
                {
                    for (int i = 0; i < Rect.Height; ++i)
                    {
                        for (int j = 0; j < Rect.Width; ++j)
                        {
                            //if (_forgroundMask.Bitmap.GetPixel(Rect.X + j, Rect.Y + i).GetBrightness() > delta)
                            // -16777216 is ToArgb() of opaque black; anything
                            // else means the subtractor flagged this pixel.
                            if (img.GetPixel(Rect.X + j, Rect.Y + i).ToArgb() != -16777216)
                            {
                                backGroundCounter++;

                                // Fetch the source pixel once instead of the
                                // original's six separate GetPixel calls.
                                Color orig = origImg.GetPixel(Rect.X + j, Rect.Y + i);
                                if (orig.R > colorLimit && orig.G > colorLimit && orig.B > colorLimit)
                                {
                                    whiteColorCounter++;
                                    profile.AppendFormat("(R={0},G={1},B={2})|", orig.R, orig.G, orig.B);
                                    profileXY.AppendFormat("(X={0},Y={1})|", Rect.X + j, Rect.Y + i);
                                }
                            }
                        }
                    }
                }
            }
        }

        curSearchResult.RGBprofile = profile.ToString();
        curSearchResult.RGBprofileXY = profileXY.ToString();

        if (backGroundCounter > deltaCounter && whiteColorCounter > whitePixelsSettings)
        {
            result = true;
            if (!b_AutoRun)
            {
                // Interactive mode: show the mask in the preview box.
                pictureBox2.Image = _forgroundMask.Bitmap.Clone() as Bitmap;
                pictureBox2.Refresh();
            }
            curSearchResult.backGroundCounter = backGroundCounter;
            curSearchResult.frameNumber = this.frameNum;
            curSearchResult.whiteColorCounter = whiteColorCounter;
        }
        // No explicit Dispose here: the using block already disposes the mask
        // (the original called Dispose() redundantly inside the using).
    }
    return (result);
}
/// <summary>
/// Frame handler: background-subtracts and tracks blobs, runs Haar-cascade
/// face/eye detection on the frame, then calls the FaceAPI identification
/// service for each detected face and paints the result onto the frame.
/// </summary>
void ProcessFrame(object sender, EventArgs e)
{
    Mat frame = _cameraCapture.QueryFrame();

    // Denoise before background subtraction.
    Mat blurred = new Mat();
    CvInvoke.GaussianBlur(frame, blurred, new Size(3, 3), 1);

    // Foreground mask and blob tracking.
    Mat foregroundMask = new Mat();
    _fgDetector.Apply(blurred, foregroundMask);

    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(foregroundMask.ToImage <Gray, byte>(), blobs);
    blobs.FilterByArea(100, int.MaxValue);

    // NOTE(review): (Width + Width) / 2 is just Width; possibly meant
    // (Width + Height) / 2. Kept verbatim to preserve behavior.
    float scale = (frame.Width + frame.Width) / 2.0f;
    _tracker.Update(blobs, 0.01 * scale, 5, 5);

    // Haar-cascade face/eye detection on the full frame.
    long detectionTime;
    List <Rectangle> faces = new List <Rectangle>();
    List <Rectangle> eyes = new List <Rectangle>();
    IImage image = (IImage)frame;
    faceImage = frame.Bitmap;
    DetectFace.Detect(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes, out detectionTime);

    // Identify each face individually (single-face identification gives
    // better results than batching the whole frame).
    foreach (Rectangle face in faces)
    {
        Graphics canvas = Graphics.FromImage(frame.Bitmap);

        ImageModel capturedFace = new ImageModel();
        capturedFace.Rect = face;
        capturedFace.Image = frame.Bitmap;

        // Crop the face region and send it to the identification service.
        Image faceCrop = ImageHelper.CaptureImage(frame.Bitmap, face);
        FaceIdentifyModel identifyInfo = FaceAPI.FaceIdentify(faceCrop, tb_Group.Text.Trim(), 1, 1);
        tb_Identify.Text = identifyInfo.ToJson().ToString();

        // Draw the bounding box and any returned user info in yellow.
        Pen yellowPen = new Pen(Color.Yellow, 2.0f);
        canvas.DrawRectangle(yellowPen, face);
        Font labelFont = new Font("微软雅黑", 16, GraphicsUnit.Pixel);
        SolidBrush labelBrush = new SolidBrush(Color.Yellow);
        if (identifyInfo != null && identifyInfo.result != null)
        {
            foreach (var match in identifyInfo.result)
            {
                string faceInfo = match.user_info.Replace(",", "\r\n");
                canvas.DrawString(faceInfo, labelFont, labelBrush, new Point(face.X + 20, face.Y - 20));
            }
        }

        // Queue the face snapshot for background persistence.
        QueueHelper.WriteImage(capturedFace);
    }

    imageBox1.Image = frame;
    imageBox2.Image = foregroundMask;
}
/// <summary>
/// Tracks colored markers (red/green/blue) in the camera frame by
/// channel-minus-gray thresholding plus blob detection, and moves the
/// mouse cursor with the green marker. The per-channel binary images are
/// shown in the preview boxes.
/// </summary>
private void ProcessFrame(object sender, EventArgs arg)
{
    Mat frame = new Mat();
    capture.Retrieve(frame, 0);

    Mat grayFrame = new Mat();
    CvInvoke.CvtColor(frame, grayFrame, ColorConversion.Bgr2Gray);

    Image <Bgra, Byte> bgraFrame = frame.ToImage <Bgra, Byte>();
    Image <Gray, Byte> gray = grayFrame.ToImage <Gray, Byte>();

    // Subtracting the gray image from a color channel leaves only pixels
    // where that channel dominates; thresholding binarizes the result.
    Image <Gray, Byte>[] channels = bgraFrame.Split();   // B, G, R, A order
    Image <Gray, Byte> red_com = channels[2] - gray;
    var red_bi = red_com.Convert <Gray, byte>().ThresholdBinary(new Gray(redThres), new Gray(255));
    Image <Gray, Byte> blue_com = channels[0] - gray;
    var blue_bi = blue_com.Convert <Gray, byte>().ThresholdBinary(new Gray(blueThres), new Gray(255));
    Image <Gray, Byte> green_com = channels[1] - gray;
    var green_bi = green_com.Convert <Gray, byte>().ThresholdBinary(new Gray(greenThres), new Gray(255));

    // --- Blob detection per color ---
    // Each binary image is blurred, run through the FG detector and blob
    // detected. All three scratch buffers use red_com.Size, as before.

    // Red marker.
    Image <Bgr, Byte> smoothed_r = new Image <Bgr, byte>(red_com.Size);
    CvInvoke.GaussianBlur(red_bi, smoothed_r, new Size(3, 3), 1);
    Mat mask_r = new Mat();
    fgDetector.Apply(smoothed_r, mask_r);
    CvBlobs blobs_r = new CvBlobs();
    blobDetector.Detect(mask_r.ToImage <Gray, byte>(), blobs_r);
    blobs_r.FilterByArea(minarea, maxarea);

    // Blue marker.
    Image <Bgr, Byte> smoothed_b = new Image <Bgr, byte>(red_com.Size);
    CvInvoke.GaussianBlur(blue_bi, smoothed_b, new Size(3, 3), 1);
    Mat mask_b = new Mat();
    fgDetector.Apply(smoothed_b, mask_b);
    CvBlobs blobs_b = new CvBlobs();
    blobDetector.Detect(mask_b.ToImage <Gray, byte>(), blobs_b);
    blobs_b.FilterByArea(minarea, maxarea);

    // Green marker (drives the cursor).
    Image <Bgr, Byte> smoothed_g = new Image <Bgr, byte>(red_com.Size);
    CvInvoke.GaussianBlur(green_bi, smoothed_g, new Size(3, 3), 1);
    Mat mask_g = new Mat();
    fgDetector.Apply(smoothed_g, mask_g);
    CvBlobs blobs_g = new CvBlobs();
    blobDetector.Detect(mask_g.ToImage <Gray, byte>(), blobs_g);
    blobs_g.FilterByArea(minarea, maxarea);

    // Marker centroids; red/blue are kept for the disabled click/scroll code.
    float[] cent_r = new float[2];
    float[] cent_g = new float[2];
    float[] cent_b = new float[2];

    // Outline every green blob and remember the last centroid.
    foreach (var entry in blobs_g)
    {
        CvBlob blob = entry.Value;
        CvInvoke.Rectangle(frame, blob.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
        cent_g[0] = blob.Centroid.X;
        cent_g[1] = blob.Centroid.Y;
    }

    if (blobs_g.Count == 1 || mouseflag != 0)
    {
        // Map the marker position onto the primary screen (x mirrored).
        // NOTE: the original also carried large blocks of disabled
        // multi-screen, click and scroll prototype code in comments.
        {
            Cursor.Position = new Point(
                Screen.PrimaryScreen.Bounds.Width - (int)(cursor_mul * (int)cent_g[0] * Screen.PrimaryScreen.Bounds.Width / capture.Width),
                (int)(cursor_mul * (int)cent_g[1]) * Screen.PrimaryScreen.Bounds.Height / capture.Height);
        }
    }

    captureImageBox.Image = frame;
    grayscaleImageBox.Image = red_bi;
    smoothedGrayscaleImageBox.Image = green_bi;
    cannyImageBox.Image = blue_bi;
}
/// <summary>
/// Frame handler: background-subtracts and tracks blobs, runs Haar-cascade
/// face detection, then sends every detected face to the FaceAPI
/// identification service and paints the results onto the frame.
/// </summary>
void ProcessFrame(object sender, EventArgs e)
{
    Mat frame = _cameraCapture.QueryFrame();

    // Denoise before background subtraction.
    Mat smoothedFrame = new Mat();
    CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(3, 3), 1);
    //frame._SmoothGaussian(3);

    #region use the BG/FG detector to find the forground mask
    Mat forgroundMask = new Mat();
    _fgDetector.Apply(smoothedFrame, forgroundMask);
    #endregion

    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(forgroundMask.ToImage <Gray, byte>(), blobs);
    blobs.FilterByArea(100, int.MaxValue);

    // NOTE(review): (Width + Width) / 2 is just Width; possibly meant
    // (Width + Height) / 2. Kept verbatim to preserve tracker behavior.
    float scale = (frame.Width + frame.Width) / 2.0f;
    _tracker.Update(blobs, 0.01 * scale, 5, 5);

    // Haar-cascade face/eye detection over the whole frame.
    long detectionTime;
    List <Rectangle> faces = new List <Rectangle>();
    List <Rectangle> eyes = new List <Rectangle>();
    IImage image = (IImage)frame;
    faceImage = frame.Bitmap;
    DetectFace.Detect(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes, out detectionTime);

    #region 多人识别
    // Multi-face identification: crop each face, query the FaceAPI service,
    // collect the results, then draw boxes and user info in yellow.
    Graphics g1 = Graphics.FromImage(frame.Bitmap);
    List <FaceIdentifyModel> tempList = new List <FaceIdentifyModel>();
    foreach (Rectangle face in faces)
    {
        // Crop the face rectangle out of the full frame.
        Image rectImage1 = ImageHelper.CaptureImage(frame.Bitmap, face);
        FaceIdentifyModel MoreIdentifyInfo = FaceAPI.FaceIdentify(rectImage1, tb_Group.Text.Trim(), 1, 1);
        MoreIdentifyInfo.rect = face;
        tempList.Add(MoreIdentifyInfo);
    }
    Color color_of_pen1 = Color.Gray;
    color_of_pen1 = Color.Yellow;
    Pen pen1 = new Pen(color_of_pen1, 2.0f);
    Font font1 = new Font("微软雅黑", 16, GraphicsUnit.Pixel);
    SolidBrush drawBrush1 = new SolidBrush(Color.Yellow);
    tb_Identify.Text = tempList.ToJson();
    foreach (var t in tempList)
    {
        g1.DrawRectangle(pen1, t.rect);
        if (t.result != null)
        {
            // First match's user info, one field per line.
            g1.DrawString(t.result[0].user_info.Replace(",", "\r\n"), font1, drawBrush1, new Point(t.rect.X + 20, t.rect.Y - 20));
        }
    }
    // BUGFIX: removed a stray literal token ("125", a copied page/line
    // number) that sat between the closing braces here and broke compilation.
    #endregion

    imageBox1.Image = frame;
    imageBox2.Image = forgroundMask;
}
private void ProcessFrame(object sender, EventArgs arg) { Mat frame = new Mat(); capture.Retrieve(frame, 0); Image <Hsv, Byte> currenthsvFrame = (frame.ToImage <Bgr, Byte>()).Convert <Hsv, Byte>(); Image <Gray, Byte> color_one = new Image <Gray, Byte>(frame.Width, frame.Height); Image <Gray, Byte> color_two = new Image <Gray, Byte>(frame.Width, frame.Height); Image <Gray, Byte> color_three = new Image <Gray, Byte>(frame.Width, frame.Height); Image <Gray, Byte> color_four = new Image <Gray, Byte>(frame.Width, frame.Height); /* * Color one is Red * Color two is Blue * Color three is Green * Color Four is Yellow * Green is in Right Index Finger * Blue is in Left Index Finger * Red in Right Thumb * Yelloe in Left Thumb */ /* Hsv hsv_min_color_one = new Hsv(0, 135, 110); * Hsv hsv_max_color_one = new Hsv(6, 255, 255); * Hsv hsv_min_color_two = new Hsv(112, 53, 10); * Hsv hsv_max_color_two = new Hsv(119, 255, 255); * Hsv hsv_min_color_three = new Hsv(68, 59, 80); * Hsv hsv_max_color_three = new Hsv(85, 255, 255); * Hsv hsv_min_color_four = new Hsv(20, 165, 165); * Hsv hsv_max_color_four = new Hsv(36, 255, 255);*/ Hsv hsv_min_color_one = new Hsv(0, 135, 50); //Hsv hsv_max_color_one = new Hsv(6, 255, 255); Hsv hsv_max_color_one = new Hsv(8, 255, 255); Hsv hsv_min_color_two = new Hsv(112, 53, 10); Hsv hsv_max_color_two = new Hsv(119, 255, 255); /* * Hsv hsv_min_color_three = new Hsv(68, 59, 80); * Hsv hsv_max_color_three = new Hsv(85, 255, 255); * Hsv hsv_min_color_four = new Hsv(20, 165, 165); * Hsv hsv_max_color_four = new Hsv(36, 255, 255); */ Hsv hsv_min_color_three = new Hsv(65, 70, 0); Hsv hsv_max_color_three = new Hsv(109, 255, 255); Hsv hsv_min_color_four = new Hsv(18, 155, 155); Hsv hsv_max_color_four = new Hsv(35, 255, 255); color_one = currenthsvFrame.InRange(hsv_min_color_one, hsv_max_color_one); color_two = currenthsvFrame.InRange(hsv_min_color_two, hsv_max_color_two); color_three = currenthsvFrame.InRange(hsv_min_color_three, hsv_max_color_three); color_four = 
currenthsvFrame.InRange(hsv_min_color_four, hsv_max_color_four); //Blob detection #region Blob Detection //Color one detection Image <Bgr, Byte> smoothedFrame_cone = new Image <Bgr, byte>(currenthsvFrame.Size); CvInvoke.GaussianBlur(color_one, smoothedFrame_cone, new Size(3, 3), 1); //filter out noises Mat forgroundMask_cone = new Mat(); fgDetector.Apply(smoothedFrame_cone, forgroundMask_cone); CvBlobs blobs_color_one = new CvBlobs(); blobDetector.Detect(forgroundMask_cone.ToImage <Gray, byte>(), blobs_color_one); blobs_color_one.FilterByArea(minarea, maxarea); //Color two Blob Detection Image <Bgr, Byte> smoothedFrame_ctwo = new Image <Bgr, byte>(currenthsvFrame.Size); CvInvoke.GaussianBlur(color_two, smoothedFrame_ctwo, new Size(3, 3), 1); //filter out noises Mat forgroundMask_ctwo = new Mat(); fgDetector.Apply(smoothedFrame_ctwo, forgroundMask_ctwo); CvBlobs blobs_color_two = new CvBlobs(); blobDetector.Detect(forgroundMask_ctwo.ToImage <Gray, byte>(), blobs_color_two); blobs_color_two.FilterByArea(minarea, maxarea); //Color three blob detection Image <Bgr, Byte> smoothedFrame_cthree = new Image <Bgr, byte>(currenthsvFrame.Size); CvInvoke.GaussianBlur(color_three, smoothedFrame_cthree, new Size(3, 3), 1); //filter out noises Mat forgroundMask_cthree = new Mat(); fgDetector.Apply(smoothedFrame_cthree, forgroundMask_cthree); CvBlobs blobs_color_three = new CvBlobs(); blobDetector.Detect(forgroundMask_cthree.ToImage <Gray, byte>(), blobs_color_three); blobs_color_three.FilterByArea(minarea, maxarea); //Color four detection Image <Bgr, Byte> smoothedFrame_cfour = new Image <Bgr, byte>(currenthsvFrame.Size); CvInvoke.GaussianBlur(color_four, smoothedFrame_cfour, new Size(3, 3), 1); //filter out noises Mat forgroundMask_cfour = new Mat(); fgDetector.Apply(smoothedFrame_cfour, forgroundMask_cfour); CvBlobs blobs_color_four = new CvBlobs(); blobDetector.Detect(forgroundMask_cfour.ToImage <Gray, byte>(), blobs_color_four); blobs_color_four.FilterByArea(minarea, maxarea); 
#endregion //Makers Interpretition float[] cent_color_one = new float[2]; float[] cent_color_two = new float[2]; float[] cent_color_three = new float[2]; float[] cent_color_four = new float[2]; cent_color_one[0] = 0; cent_color_one[1] = 0; cent_color_two[0] = 0; cent_color_two[1] = 0; cent_color_three[0] = green_history_x; cent_color_three[1] = green_history_y; cent_color_four[0] = 0; cent_color_four[1] = 0; //Corsor control with Green Marker if (blobs_color_three.Count == 1 || mouseflag != 0) { foreach (var pair in blobs_color_three) { CvBlob b = pair.Value; CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2); cursor_history_x.Enqueue((int)b.Centroid.X); cursor_history_y.Enqueue((int)b.Centroid.Y); cursor_history_x.Dequeue(); cursor_history_y.Dequeue(); cent_color_three[0] = (int)b.Centroid.X; cent_color_three[1] = (int)b.Centroid.Y; /*int temp_sum = 0; * int[] temp = cursor_history_x.ToArray(); * for (int i = 0; i < queue_cursor_length; i++) * temp_sum += temp[i]; * cent_color_three[0] = temp_sum / queue_cursor_length; * * temp_sum = 0; * temp = cursor_history_y.ToArray(); * for (int i = 0; i < queue_cursor_length; i++) * temp_sum += temp[i]; * cent_color_three[1] = temp_sum / queue_cursor_length; * * green_history_x = (int)cent_color_three[0]; * green_history_y = (int)cent_color_three[1];*/ } //Cursor Movement Controlled //Primary Screem // if (Screen.AllScreens.Length == 1) { //Cursor.Position = new Point(Screen.PrimaryScreen.Bounds.Width - (int)(cursor_mul * (int)cent_color_three[0] * Screen.PrimaryScreen.Bounds.Width / capture.Width), (int)(cursor_mul * (int)cent_color_three[1]) * Screen.PrimaryScreen.Bounds.Height / capture.Height); Cursor.Position = new Point((int)((cursor_mul_x * (int)cent_color_three[0]) * (Screen.PrimaryScreen.Bounds.Width) / capture.Width) + cursor_add_x, (((int)cursor_mul_y * (int)cent_color_three[1]) * Screen.PrimaryScreen.Bounds.Height / capture.Height) + cursor_add_y); //mouse_event(MOUSEEVENTF_MOVE, ( 
(-(int)cent_color_three[0] + green_history_x)), ( (-(int)cent_color_three[1] + green_history_y)),0,0); //mouse_event(MOUSEEVENTF_ABSOLUTE, 0, 0, 0, 0); } //Secondary Screen //Cursor.Position = new Point((int)(cursor_mul * (int)cent_color_three[0] * Screen.AllScreens[1].Bounds.Width / capture.Width), (int)(cursor_mul * (int)cent_color_three[1]) * Screen.AllScreens[1].Bounds.Height / capture.Height); //Number of Screen = 2 and both a same time /* if (Screen.AllScreens.Length == 2) * { * * Cursor.Position = new Point((int)(cursor_mul * (int)cent_color_three[0] * (Screen.AllScreens[1].Bounds.Width + Screen.AllScreens[0].Bounds.Width) / capture.Width), * (int)(cursor_mul * (int)cent_color_three[1]) * (Screen.AllScreens[1].Bounds.Height + Screen.AllScreens[0].Bounds.Height) / capture.Height); * } * //Number of screen =3 and all at same time * if (Screen.AllScreens.Length == 3) * { * * Cursor.Position = new Point((int)(cursor_mul * (int)cent_color_three[0] * (Screen.AllScreens[1].Bounds.Width + Screen.AllScreens[0].Bounds.Width + Screen.AllScreens[2].Bounds.Width) / capture.Width), * (int)(cursor_mul * (int)cent_color_three[1]) * (Screen.AllScreens[1].Bounds.Height + Screen.AllScreens[0].Bounds.Height + Screen.AllScreens[0].Bounds.Height) / capture.Height); * } */ //Check for Clicks if (blobs_color_one.Count == 1) { foreach (var pair in blobs_color_one) { CvBlob b = pair.Value; CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2); cent_color_one[0] = b.Centroid.X; cent_color_one[1] = b.Centroid.Y; } if (blobs_color_three.Count == 0) { if (ccount == 1) { //double click mouse_event(MOUSEEVENTF_ABSOLUTE | MOUSEEVENTF_LEFTDOWN, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0); mouse_event(MOUSEEVENTF_ABSOLUTE | MOUSEEVENTF_LEFTUP, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0); Thread.Sleep(150); mouse_event(MOUSEEVENTF_ABSOLUTE | MOUSEEVENTF_LEFTDOWN, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0); 
mouse_event(MOUSEEVENTF_ABSOLUTE | MOUSEEVENTF_LEFTUP, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0); } else { ccount--; } } else if ((cent_color_one[0] - cent_color_three[0]) * (cent_color_one[0] - cent_color_three[0]) + (cent_color_one[1] - cent_color_three[1]) * (cent_color_one[1] - cent_color_three[1]) <= 5000) { ccount = safevalue; mouseflag = 1; //single click mouse_event(MOUSEEVENTF_LEFTDOWN, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0); mouse_event(MOUSEEVENTF_LEFTUP, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0); mouse_event(MOUSEEVENTF_ABSOLUTE, 0, 0, 0, 0); } } else { ccount = 0; } } if (blobs_color_two.Count == 1) { foreach (var pair in blobs_color_two) { CvBlob b = pair.Value; CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2); cent_color_two[0] = b.Centroid.X; cent_color_two[1] = b.Centroid.Y; } if (blobs_color_three.Count == 1 && ((cent_color_three[0] - cent_color_two[0]) * (cent_color_three[0] - cent_color_two[0]) + (cent_color_three[1] - cent_color_two[1]) * (cent_color_three[1] - cent_color_two[1]) <= 5000)) { //right click mouse_event(MOUSEEVENTF_ABSOLUTE | MOUSEEVENTF_RIGHTDOWN, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0); mouse_event(MOUSEEVENTF_ABSOLUTE | MOUSEEVENTF_RIGHTUP, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0); } else //if(blobs_g.Count == 0) { //MessageBox.Show("d"); //Cursor.Position = new Point(Screen.PrimaryScreen.Bounds.Width - (int)(cursor_mul * green_history_x * Screen.PrimaryScreen.Bounds.Width / capture.Width), (int)(cursor_mul * green_history_y) * Screen.PrimaryScreen.Bounds.Height / capture.Height); //mouse_event(MOUSEEVENTF_VWHEEL, 0, 0, (scroll_y - (int)cent_color_two[1]) * scroll_mul_v, 0); mouse_event(MOUSEEVENTF_HWHEEL, 0, 0, (uint)((scroll_x - (int)cent_color_two[0]) * scroll_mul_h), 0); mouse_event(MOUSEEVENTF_VWHEEL, (uint)Cursor.Position.X, (uint)Cursor.Position.Y, 50, 0); //mouse_event(MOUSEEVENTF_HWHEEL, 0, 0, 
50, 0); scroll_y = (int)cent_color_two[1]; scroll_x = (int)cent_color_two[0]; } } captureImageBox.Image = frame; grayscaleImageBox.Image = color_one; smoothedGrayscaleImageBox.Image = color_two; cannyImageBox.Image = color_three; Color4ImageBox.Image = color_four; }
/// <summary>
/// Per-frame capture callback for security mode: grabs a frame, extracts the
/// foreground via MOG2 background subtraction, updates the motion history, and
/// raises an alarm (beep + network message) when a motion component moves more
/// than <c>pixel_count</c> pixels after <c>ellapsed_time</c> ms have passed.
/// </summary>
/// <param name="sender">Event source (capture/application idle loop).</param>
/// <param name="e">Unused event arguments.</param>
private void ProcessFrame(object sender, EventArgs e)
{
    Mat image = new Mat();
    _capture.Retrieve(image);

    // Lazily create the background subtractor on the first frame.
    if (_forgroundDetector == null)
    {
        _forgroundDetector = new BackgroundSubtractorMOG2();
    }

    _forgroundDetector.Apply(image, _forgroundMask);
    capturedImageBox.Image = image;

    // Update the motion history with the current foreground mask.
    _motionHistory.Update(_forgroundMask);
    foreground.Image = _forgroundMask;

    #region get a copy of the motion mask and enhance its color
    double[] minValues, maxValues;
    Point[] minLoc, maxLoc;
    _motionHistory.Mask.MinMax(out minValues, out maxValues, out minLoc, out maxLoc);
    Mat motionMask = new Mat();
    // Scale the history mask so its brightest value maps to 255 for display.
    using (ScalarArray sa = new ScalarArray(255.0 / maxValues[0]))
        CvInvoke.Multiply(_motionHistory.Mask, sa, motionMask, 1, DepthType.Cv8U);
    #endregion

    // Collect the bounding rectangles of the independent motion components.
    Rectangle[] rects;
    using (VectorOfRect boundingRect = new VectorOfRect())
    {
        _motionHistory.GetMotionComponents(_segMask, boundingRect);
        rects = boundingRect.ToArray();
    }

    // Inspect each motion component; in security mode, alert once enough
    // motion pixels are seen after the configured warm-up period.
    foreach (Rectangle comp in rects)
    {
        time.Start(); // no-op when the stopwatch is already running

        // Find the angle and motion pixel count of the specific area.
        double angle, motionPixelCount;
        _motionHistory.MotionInfo(_forgroundMask, comp, out angle, out motionPixelCount);

        if (Main.security.Text == "SECURITY MODE ON")
        {
            long x = time.ElapsedMilliseconds;
            if (x > ellapsed_time && motionPixelCount > pixel_count)
            {
                Console.Beep(5000, 1000);
                if (Main.connected == true)
                {
                    chat.send(Encoding.ASCII.GetBytes("Someone is in the room"));
                }
                // BUG FIX: time.Stop() was previously placed AFTER the break
                // statement and therefore unreachable, so the stopwatch ran
                // forever. Stop it before leaving the loop.
                time.Stop();
                break;
            }
        }
    }
}
/// <summary>
/// Per-frame capture callback: extracts the foreground with MOG2, updates the
/// motion history, filters motion components by area and motion-pixel density,
/// then (a) runs asynchronous Haar-cascade face detection and saves any face
/// crops plus the full frame to disk, and (b) crops each moving region into
/// <c>moveimage</c> and draws motion indicators onto the displayed frame.
/// </summary>
/// <param name="sender">Event source (capture/application idle loop).</param>
/// <param name="e">Unused event arguments.</param>
private void ProcessFrame(object sender, EventArgs e)
{
    Mat image = new Mat();
    _capture.Retrieve(image);

    // Lazily create the background subtractor on the first frame.
    if (_forgroundDetector == null)
    {
        _forgroundDetector = new BackgroundSubtractorMOG2();
    }

    _forgroundDetector.Apply(image, _forgroundMask);

    // Update the motion history with the current foreground mask.
    _motionHistory.Update(_forgroundMask);

    #region get a copy of the motion mask and enhance its color
    double[] minValues, maxValues;
    Point[] minLoc, maxLoc;
    _motionHistory.Mask.MinMax(out minValues, out maxValues, out minLoc, out maxLoc);
    Mat motionMask = new Mat();
    // Scale the history mask so its brightest value maps to 255 for display.
    using (ScalarArray sa = new ScalarArray(255.0 / maxValues[0]))
        CvInvoke.Multiply(_motionHistory.Mask, sa, motionMask, 1, DepthType.Cv8U);
    #endregion

    // Create the motion image and show the motion pixels in the blue channel.
    // BUG FIX: new Mat(h, w, ...) leaves pixel data uninitialized; zero it
    // before inserting the mask so the other two channels are not garbage
    // (the companion sample implementation does the same).
    Mat motionImage = new Mat(motionMask.Size.Height, motionMask.Size.Width, DepthType.Cv8U, 3);
    motionImage.SetTo(new MCvScalar(0));
    CvInvoke.InsertChannel(motionMask, motionImage, 0);

    // Threshold to define a motion area; reduce to detect smaller motion.
    double minArea = 100;

    // Collect the bounding rectangles of the independent motion components.
    Rectangle[] rects;
    using (VectorOfRect boundingRect = new VectorOfRect())
    {
        _motionHistory.GetMotionComponents(_segMask, boundingRect);
        rects = boundingRect.ToArray();
    }

    // Keep only components that are big enough AND contain enough motion
    // pixels relative to their area (density threshold 'size').
    List<Rectangle> Availablerects = new List<Rectangle>();
    foreach (Rectangle comp in rects)
    {
        int area = comp.Width * comp.Height;
        if (area < minArea)
        {
            continue;
        }
        double angle, motionPixelCount;
        _motionHistory.MotionInfo(_forgroundMask, comp, out angle, out motionPixelCount);
        if (motionPixelCount < area * size)
        {
            continue;
        }
        Availablerects.Add(comp);
    }

    // Asynchronous face detection on the current frame; saves each detected
    // face crop and the full frame under G:\motion1.
    List<Rectangle> faces = new List<Rectangle>();
    List<Rectangle> eyes = new List<Rectangle>();
    Task task2 = new Task(() =>
    {
        // BUG FIX: the original wrote "Mat Detectmat = new Mat(); Detectmat = image;"
        // which leaked the freshly allocated native Mat. A plain alias suffices
        // (the detector does not take ownership of the input).
        Mat Detectmat = image;
        DetectFace.Detect(Detectmat, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes, tryUseCuda, out detectionTime);
        if (faces.Count > 0)
        {
            // NOTE(review): label1/faceimage are WinForms controls updated from a
            // background task — this should go through Invoke/BeginInvoke; left
            // as-is to preserve existing behavior, confirm before shipping.
            label1.Text = "detectionTime:" + detectionTime.ToString();
            for (int i = 0; i < faces.Count; i++)
            {
                // Crop the face region out of the frame bitmap.
                Bitmap bt2 = DetectFace.Cutbitmap(Detectmat.Bitmap, faces[i].X, faces[i].Y, faces[i].Width, faces[i].Height);
                // Convert Bitmap -> Image<Bgr,byte> -> Mat (official sample technique;
                // no direct conversion API was available).
                Emgu.CV.Image<Bgr, Byte> currentFrame1 = new Emgu.CV.Image<Bgr, Byte>(bt2);
                Mat invert1 = new Mat();
                CvInvoke.BitwiseAnd(currentFrame1, currentFrame1, invert1);
                faceimage.Image = invert1;
                string filePath = "G:\\motion1\\" + DateTime.Now.ToString("人脸-yyyy年MM月dd日HH点mm分ss秒") + i.ToString() + "-" + faces.Count.ToString() + ".jpg";
                bt2.Save(filePath);
                System.Media.SystemSounds.Beep.Play();
            }
            // Also save the original full frame alongside the face crops.
            Bitmap bt1 = Detectmat.Bitmap;
            string filePath2 = "G:\\motion1\\" + DateTime.Now.ToString("原图-yyyy年MM月dd日HH点mm分ss秒") + ".jpg";
            bt1.Save(filePath2);
        }
    });
    task2.Start();

    // Process each accepted motion component.
    foreach (Rectangle comp in Availablerects)
    {
        // Recompute the motion angle for drawing. (The area/density filters are
        // not repeated here: Availablerects already passed both checks above.)
        int area = comp.Width * comp.Height;
        double angle, motionPixelCount;
        _motionHistory.MotionInfo(_forgroundMask, comp, out angle, out motionPixelCount);

        // Crop the moving region into the preview box on a background task.
        // NOTE(review): image.Bitmap is also read by task2 concurrently — GDI+
        // bitmaps are not thread-safe; confirm this cannot race in practice.
        Task task = new Task(() =>
        {
            Bitmap bt = DetectFace.Cutbitmap(image.Bitmap, comp.X, comp.Y, comp.Width, comp.Height);
            Emgu.CV.Image<Bgr, Byte> currentFrame = new Emgu.CV.Image<Bgr, Byte>(bt);
            Mat invert = new Mat();
            CvInvoke.BitwiseAnd(currentFrame, currentFrame, invert);
            moveimage.Image = invert;
        });
        task.Start();

        try
        {
            // Draw each individual motion in red.
            DrawMotion(motionImage, comp, angle, new Bgr(Color.Red));
            DrawMotion(capturedImageBox.Image, comp, angle, new Bgr(Color.Red));
        }
        catch (Exception a)
        {
            // BUG FIX: previously an empty catch silently swallowed every error
            // (e.g. capturedImageBox.Image being null on the first frame).
            // Keep the best-effort behavior but at least record the failure.
            System.Diagnostics.Debug.WriteLine(a);
        }
    }

    capturedImageBox.Image = image;
}
/// <summary>
/// Per-frame capture callback: extracts the foreground with MOG2, updates the
/// motion history, draws each sufficiently large motion component in red and
/// the overall motion direction in green, then refreshes the image boxes and
/// the status text.
/// </summary>
/// <param name="sender">Event source (capture/application idle loop).</param>
/// <param name="e">Unused event arguments.</param>
private void ProcessFrame(object sender, EventArgs e)
{
    Mat image = new Mat();
    _capture.Retrieve(image);

    // Lazily create the background subtractor on the first frame.
    if (_forgroundDetector == null)
    {
        _forgroundDetector = new BackgroundSubtractorMOG2();
    }

    _forgroundDetector.Apply(image, _forgroundMask);

    // Update the motion history with the current foreground mask.
    _motionHistory.Update(_forgroundMask);

    #region get a copy of the motion mask and enhance its color
    double[] minValues, maxValues;
    Point[] minLoc, maxLoc;
    _motionHistory.Mask.MinMax(out minValues, out maxValues, out minLoc, out maxLoc);
    Mat motionMask = new Mat();
    // Scale the history mask so its brightest value maps to 255 for display.
    using (ScalarArray sa = new ScalarArray(255.0 / maxValues[0]))
        CvInvoke.Multiply(_motionHistory.Mask, sa, motionMask, 1, DepthType.Cv8U);
    #endregion

    // Create the motion image and show the motion pixels in the blue channel.
    // BUG FIX: new Mat(h, w, ...) leaves pixel data uninitialized, so the green
    // and red channels contained garbage; zero the canvas before inserting the
    // mask (matching the reference implementation of this sample).
    Mat motionImage = new Mat(motionMask.Size.Height, motionMask.Size.Width, DepthType.Cv8U, 3);
    motionImage.SetTo(new MCvScalar(0));
    CvInvoke.InsertChannel(motionMask, motionImage, 0);

    // Threshold to define a motion area; reduce to detect smaller motion.
    double minArea = 100;

    // Collect the bounding rectangles of the independent motion components.
    Rectangle[] rects;
    using (VectorOfRect boundingRect = new VectorOfRect())
    {
        _motionHistory.GetMotionComponents(_segMask, boundingRect);
        rects = boundingRect.ToArray();
    }

    // Iterate through each of the motion components.
    foreach (Rectangle comp in rects)
    {
        int area = comp.Width * comp.Height;
        // Reject the components that have small area.
        if (area < minArea)
        {
            continue;
        }

        // Find the angle and motion pixel count of the specific area.
        double angle, motionPixelCount;
        _motionHistory.MotionInfo(_forgroundMask, comp, out angle, out motionPixelCount);

        // Reject areas that contain too little motion (under 5% of the area).
        if (motionPixelCount < area * 0.05)
        {
            continue;
        }

        // Draw each individual motion in red.
        DrawMotion(motionImage, comp, angle, new Bgr(Color.Red));
    }

    // Find and draw the overall motion angle in green.
    double overallAngle, overallMotionPixelCount;
    _motionHistory.MotionInfo(_forgroundMask, new Rectangle(Point.Empty, motionMask.Size), out overallAngle, out overallMotionPixelCount);
    DrawMotion(motionImage, new Rectangle(Point.Empty, motionMask.Size), overallAngle, new Bgr(Color.Green));

    // Bail out if the form is going away — updating controls would throw.
    if (this.Disposing || this.IsDisposed)
    {
        return;
    }

    capturedImageBox.Image = image;
    forgroundImageBox.Image = _forgroundMask;

    // Display the amount of motion found on the current image.
    UpdateText(String.Format("Total Motions found: {0}; Motion Pixel count: {1}", rects.Length, overallMotionPixelCount));

    // Display the image of the motion.
    motionImageBox.Image = motionImage;
}
/// <summary>
/// Runs one frame through the motion-detection pipeline: background
/// subtraction, motion-history update, per-component motion drawing (red) and
/// overall motion direction (green).
/// </summary>
/// <param name="original">The captured frame to analyze.</param>
/// <returns>
/// Three images, in order: the foreground mask, the rendered motion image, and
/// the segmentation mask produced by GetMotionComponents.
/// </returns>
public List<IImage> ProcessFrame(IImage original)
{
    // Tunable thresholds: minimum component area, and minimum fraction of the
    // component's area that must be motion pixels.
    double areaFloor = AdjustableParameters["MinMotionArea"].CurrentValue;
    double motionFloor = AdjustableParameters["MinMotionDistance"].CurrentValue;

    Mat fgMask = new Mat();
    Mat scaledMask = new Mat();
    Mat segmentation = new Mat();

    // Feed the frame to the subtractor and the motion history.
    _backgroundSubtractor.Apply(original, fgMask);
    _motionHistory.Update(fgMask);

    // Stretch the history mask so its brightest value maps to 255.
    double[] lows, highs;
    Point[] lowsAt, highsAt;
    _motionHistory.Mask.MinMax(out lows, out highs, out lowsAt, out highsAt);
    using (ScalarArray gain = new ScalarArray(255.0 / highs[0]))
        CvInvoke.Multiply(_motionHistory.Mask, gain, scaledMask, 1, Emgu.CV.CvEnum.DepthType.Cv8U);

    // Build a black 3-channel canvas and place the mask in the blue channel.
    Mat visual = new Mat(scaledMask.Size.Height, scaledMask.Size.Width, Emgu.CV.CvEnum.DepthType.Cv8U, 3);
    visual.SetTo(new MCvScalar(0));
    CvInvoke.InsertChannel(scaledMask, visual, 0);

    // Fetch the bounding rectangles of the independent motion components.
    Rectangle[] components;
    using (VectorOfRect bounds = new VectorOfRect())
    {
        _motionHistory.GetMotionComponents(segmentation, bounds);
        components = bounds.ToArray();
    }

    foreach (Rectangle region in components)
    {
        int regionArea = region.Width * region.Height;

        // Skip components below the area threshold.
        if (regionArea < areaFloor)
            continue;

        // Angle and motion-pixel count for this component.
        double direction, movedPixels;
        _motionHistory.MotionInfo(fgMask, region, out direction, out movedPixels);

        // Only draw components whose motion density clears the threshold.
        if (movedPixels >= regionArea * motionFloor)
            DrawMotion(visual, region, direction, new Bgr(Color.Red));
    }

    // Overall motion direction across the whole frame, drawn in green.
    double globalAngle, globalPixels;
    _motionHistory.MotionInfo(fgMask, new Rectangle(Point.Empty, scaledMask.Size), out globalAngle, out globalPixels);
    DrawMotion(visual, new Rectangle(Point.Empty, scaledMask.Size), globalAngle, new Bgr(Color.Green));

    // Same output order as before: foreground, motion render, segmentation.
    return new List<IImage> { fgMask, visual, segmentation };
}
/// <summary>
/// Core motion-detection step: (re)builds the MOG2 subtractor when the
/// configured subtractor settings change, updates the motion history, collects
/// motion components that pass the area and motion-density filters, optionally
/// captures display images, sorts for the biggest motion, and reports the
/// overall motion statistics.
/// </summary>
/// <param name="input">Captured frame plus detection settings.</param>
/// <returns>The populated <see cref="MotionDetectorOutput"/> for this frame.</returns>
protected override MotionDetectorOutput DoProcess(MotionDetectorInput input)
{
    var output = new MotionDetectorOutput();
    var requestedConfig = input.Settings.SubtractorConfig;

    // Recreate the subtractor when it does not exist yet or when the
    // requested configuration differs from the one currently in use.
    if (_foregroundDetector == null || !_currentSubtractorConfig.Equals(requestedConfig))
    {
        _foregroundDetector?.Dispose();
        _foregroundDetector = new BackgroundSubtractorMOG2(
            requestedConfig.History,
            requestedConfig.Threshold,
            requestedConfig.ShadowDetection);
        _currentSubtractorConfig = requestedConfig;
    }

    _foregroundDetector.Apply(input.Captured, _forgroundMask);
    _motionHistory.Update(_forgroundMask);

    #region get a copy of the motion mask and enhance its color
    double[] mins, maxes;
    Point[] minPts, maxPts;
    _motionHistory.Mask.MinMax(out mins, out maxes, out minPts, out maxPts);
    var motionMask = new Mat();
    // Scale so the brightest history value maps to 255.
    using (var gain = new ScalarArray(255.0 / maxes[0]))
    {
        CvInvoke.Multiply(_motionHistory.Mask, gain, motionMask, 1, DepthType.Cv8U);
    }
    #endregion

    // Only materialize display images when the caller asked for them.
    if (input.SetCapturedImage)
    {
        output.ForegroundImage = _forgroundMask.ToImage<Bgr, byte>();
        output.MotionImage = new Image<Bgr, byte>(motionMask.Size);
        CvInvoke.InsertChannel(motionMask, output.MotionImage, 0);
    }

    // Bounding rectangles of the independent motion components.
    Rectangle[] components;
    using (var bounds = new VectorOfRect())
    {
        _motionHistory.GetMotionComponents(_segMask, bounds);
        components = bounds.ToArray();
    }

    foreach (Rectangle region in components)
    {
        int area = region.Area();

        // Drop components outside the configured area window.
        if (area < input.Settings.MinimumArea || area > input.Settings.MaximumArea)
            continue;

        // Angle and motion-pixel count for this specific region.
        double angle, rawPixelCount;
        _motionHistory.MotionInfo(_forgroundMask, region, out angle, out rawPixelCount);
        int pixelsInMotion = (int)rawPixelCount;

        // Drop regions whose motion density is below the configured fraction.
        if (pixelsInMotion < area * input.Settings.MinimumPercentMotionInArea)
            continue;

        output.MotionSections.Add(new MotionSection
        {
            Area = area,
            Region = region,
            Angle = angle,
            PixelsInMotionCount = pixelsInMotion
        });
    }

    if (output.IsDetected)
    {
        // Sort descending by the requested metric so the biggest motion is first.
        switch (input.Settings.BiggestMotionType)
        {
            case BiggestMotionType.Unspecified:
                break;

            case BiggestMotionType.Area:
                output.MotionSections.Sort((left, right) => right.Area.CompareTo(left.Area));
                break;

            case BiggestMotionType.Pixels:
                output.MotionSections.Sort((left, right) => right.PixelsInMotionCount.CompareTo(left.PixelsInMotionCount));
                break;
        }

        output.BiggestMotion = output.MotionSections.FirstOrDefault();
    }

    // Whole-frame motion summary.
    double overallAngle, overallPixels;
    _motionHistory.MotionInfo(_forgroundMask, new Rectangle(Point.Empty, motionMask.Size), out overallAngle, out overallPixels);
    output.OverallAngle = overallAngle;
    output.OverallMotionPixelCount = Convert.ToInt32(overallPixels);

    return output;
}
/// <summary>
/// Finds the largest skin-colored blob (area 30k–150k px), outlines it on the
/// current frame, crops it to a square bitmap, and produces the resized copies
/// (<c>croped</c>/<c>croped1</c>/<c>croped2</c>) used for gesture recognition;
/// the recognized gesture number is shown in <c>label1</c>.
/// </summary>
/// <param name="skin">Binary skin mask for the current frame.</param>
private void ExtractBlobAndCrop(Image<Gray, byte> skin)
{
    using (MemStorage storage = new MemStorage())
    {
        // NOTE(review): smoothedFrame is computed but never read afterwards —
        // blob detection below runs on the raw 'skin' image. Kept to preserve
        // the existing pipeline; confirm before removing.
        Image<Gray, Byte> smoothedFrame = new Image<Gray, byte>(skin.Size);
        CvInvoke.GaussianBlur(skin, smoothedFrame, new Size(3, 3), 1); // filter out noise

        imageBoxFrameGrabber.Image = skin;

        // NOTE(review): the foreground mask is computed but unused (its former
        // consumer is disabled); left in place to keep the detector's internal
        // background model updating as before.
        Mat forgroundMask = new Mat();
        Mat ss = skin.Mat;
        fgDetector.Apply(ss, forgroundMask);

        // Detect blobs directly on the skin mask and keep only plausible
        // hand-sized ones.
        CvBlobs blobs = new CvBlobs();
        blobDetector.Detect(skin, blobs);
        blobs.FilterByArea(30000, 150000);

        // Pick the largest surviving blob.
        CvBlob b = null;
        int area = 0;
        foreach (var pair in blobs)
        {
            CvBlob btemp = pair.Value;
            if (area < btemp.Area)
            {
                b = btemp;
                area = btemp.Area;
            }
        }

        // Crop the largest blob, if any was found.
        if (area != 0)
        {
            CvInvoke.Rectangle(currentFrame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);

            // Square canvas sized to the blob's larger dimension so the crop
            // is never distorted by the later fixed-size resizes.
            int side = Math.Max(b.BoundingBox.Width, b.BoundingBox.Height);

            // BUG FIX: the Graphics object was never disposed (a GDI handle
            // leaked every frame) and skin_bit leaked whenever no blob was
            // found; 'using' makes disposal unconditional and exception-safe.
            using (Bitmap skin_bit = skin.ToBitmap())
            using (Bitmap crop_image = new Bitmap(side, side))
            {
                using (Graphics g = Graphics.FromImage(crop_image))
                {
                    // Shift the source so the blob's top-left lands at (0,0).
                    g.DrawImage(skin_bit, -b.BoundingBox.X, -b.BoundingBox.Y);
                }

                croped = new Image<Gray, Byte>(crop_image).Resize(350, 350, Inter.Cubic);
                croped1 = new Image<Gray, Byte>(crop_image).Resize(100, 100, Inter.Cubic);
                croped2 = new Image<Gray, Byte>(crop_image).Resize(50, 50, Inter.Cubic);

                int gesture_number = fow_prop.image(croped2);
                label1.Text = "" + gesture_number;
                imageBoxSkin.Image = croped;
            }
        }
    }
}