// NOTE(review): a large block of commented-out legacy samples (TestCodeBookBGModel /
// TestBlobTracking) was removed here; they used the deprecated Emgu BGCodeBookModel /
// BlobTrackerAuto APIs and no longer compile against current Emgu CV.

/// <summary>
/// Demo: background subtraction (MOG2) + CvBlob detection + CvTracks tracking on a live
/// capture. Displays the original frame, the foreground mask and the tracked-blob
/// rendering stacked in a single viewer image.
/// </summary>
public void TestCvBlob()
{
    //MCvFont font = new MCvFont(Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, 0.5);
    using (CvTracks tracks = new CvTracks())
    using (ImageViewer viewer = new ImageViewer())
    using (Capture capture = new Capture())
    using (Mat fgMask = new Mat())
    {
        //BGStatModel<Bgr> bgModel = new BGStatModel<Bgr>(capture.QueryFrame(), Emgu.CV.CvEnum.BG_STAT_TYPE.GAUSSIAN_BG_MODEL);
        BackgroundSubtractorMOG2 bgModel = new BackgroundSubtractorMOG2(0, 0, true);
        //BackgroundSubstractorMOG bgModel = new BackgroundSubstractorMOG(0, 0, 0, 0);
        capture.ImageGrabbed += delegate(object sender, EventArgs e)
        {
            Mat frame = new Mat();
            capture.Retrieve(frame);
            // Update the background model and obtain this frame's foreground mask.
            bgModel.Apply(frame, fgMask);
            using (CvBlobDetector detector = new CvBlobDetector())
            using (CvBlobs blobs = new CvBlobs())
            {
                detector.Detect(fgMask.ToImage<Gray, Byte>(), blobs);
                blobs.FilterByArea(100, int.MaxValue); // drop noise blobs below 100 px
                tracks.Update(blobs, 20.0, 10, 0);
                Image<Bgr, Byte> result = new Image<Bgr, byte>(frame.Size);
                using (Image<Gray, Byte> blobMask = detector.DrawBlobsMask(blobs))
                {
                    // Copy only the pixels that fall inside detected blobs.
                    frame.CopyTo(result, blobMask);
                }
                //CvInvoke.cvCopy(frame, result, blobMask);
                foreach (KeyValuePair<uint, CvTrack> pair in tracks)
                {
                    if (pair.Value.Inactive == 0) //only draw the active tracks.
                    {
                        CvBlob b = blobs[pair.Value.BlobLabel];
                        Bgr color = detector.MeanColor(b, frame.ToImage<Bgr, Byte>());
                        result.Draw(pair.Key.ToString(), pair.Value.BoundingBox.Location, CvEnum.FontFace.HersheySimplex, 0.5, color);
                        result.Draw(pair.Value.BoundingBox, color, 2);
                        Point[] contour = b.GetContour();
                        result.Draw(contour, new Bgr(0, 0, 255), 1);
                    }
                }
                // Layout: original frame on top, (mask | tracked result) below.
                viewer.Image = frame.ToImage<Bgr, Byte>().ConcateVertical(fgMask.ToImage<Bgr, Byte>().ConcateHorizontal(result));
            }
        };
        capture.Start();
        viewer.ShowDialog();
    }
}
/// <summary>
/// Per-frame handler: grabs a frame, suppresses noise with a small Gaussian blur,
/// extracts the foreground mask, detects blobs of at least 100 px and outlines each
/// one in white before publishing the frame and the mask to the image boxes.
/// </summary>
void ProcessFrame(object sender, EventArgs e)
{
    Mat currentFrame = _cameraCapture.QueryFrame();

    // 3x3 Gaussian blur (sigma = 1) to filter out sensor noise.
    Image<Bgr, Byte> blurred = new Image<Bgr, byte>(currentFrame.Size);
    CvInvoke.GaussianBlur(currentFrame, blurred, new Size(3, 3), 1);

    // Foreground mask from the background/foreground detector.
    Mat foreground = new Mat();
    _fgDetector.Apply(blurred, foreground);

    // Detect blobs and discard anything smaller than 100 px.
    CvBlobs detected = new CvBlobs();
    _blobDetector.Detect(foreground.ToImage<Gray, byte>(), detected);
    detected.FilterByArea(100, int.MaxValue);

    // Outline every surviving blob in white.
    foreach (CvBlob blob in detected.Values)
    {
        CvInvoke.Rectangle(currentFrame, blob.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
    }

    imageBox1.Image = currentFrame;
    imageBox2.Image = foreground;
}
/// <summary>
/// Builds a debug visualization: masks template regions out of a copy of the inspection
/// result, labels the remaining blobs, renders them in color over the template image,
/// and overlays the score debug information.
/// </summary>
/// <param name="検査結果">Binary inspection-result image (a clone is modified internally).</param>
/// <param name="テンプレート">Grayscale template image used for masking and as the background.</param>
/// <param name="正解座標">Ground-truth coordinates, one (x, y) pair per row.</param>
/// <param name="color_debug">Receives the composed BGR debug image.</param>
public void 評価結果画像作成_debug(Mat 検査結果, Mat テンプレート, int[,] 正解座標, ref Mat color_debug)
{
    Mat res_color = new Mat(new Size(検査結果.Width, 検査結果.Height), MatType.CV_8UC3, Scalar.All(0));
    var temp_color = res_color.Clone();
    var result_clone = 検査結果.Clone();
    paint_black(ref result_clone, テンプレート); // mask out the template area before labeling
    CvBlobs blobs = new CvBlobs(result_clone);
    blobs.FilterByArea(9, 250); // keep only defect-sized blobs
    blobs.RenderBlobs(result_clone, res_color);
    Cv2.CvtColor(テンプレート, temp_color, ColorConversionCodes.GRAY2BGR);
    Cv2.Add(temp_color, res_color, color_debug); // template background + colored blobs
    点数計算_debug(blobs, 正解座標, ref color_debug);
    // Fix: the original assigned null to these locals, which releases nothing and leaked
    // the unmanaged Mat memory — dispose them explicitly (as the sibling overload does).
    res_color.Dispose();
    temp_color.Dispose();
    result_clone.Dispose();
}
/// <summary>
/// Frame handler with tracking: blurs the captured frame, extracts the foreground mask,
/// detects blobs of at least 100 px, feeds them to the tracker, and draws every track's
/// bounding box and id on the frame before publishing both images.
/// </summary>
void ProcessFrame(object sender, EventArgs e)
{
    Mat frame = _cameraCapture.QueryFrame();

    // 3x3 Gaussian blur (sigma = 1) to filter out noise.
    Mat denoised = new Mat();
    CvInvoke.GaussianBlur(frame, denoised, new Size(3, 3), 1);

    // Foreground extraction.
    Mat mask = new Mat();
    _fgDetector.Apply(denoised, mask);

    // Blob detection with a minimum-area filter.
    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(mask.ToImage<Gray, byte>(), blobs);
    blobs.FilterByArea(100, int.MaxValue);

    // Track-association distance scales with the frame width.
    // NOTE(review): (Width + Width) / 2 is just the width — (Width + Height) / 2 may have
    // been intended; preserved as-is.
    float scale = (frame.Width + frame.Width) / 2.0f;
    _tracker.Update(blobs, 0.01 * scale, 5, 5);

    foreach (var entry in _tracker)
    {
        CvTrack track = entry.Value;
        Point center = new Point((int)Math.Round(track.Centroid.X), (int)Math.Round(track.Centroid.Y));
        CvInvoke.Rectangle(frame, track.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
        CvInvoke.PutText(frame, track.Id.ToString(), center, FontFace.HersheyPlain, 1.0, new MCvScalar(255.0, 255.0, 255.0));
    }

    imageBox1.Image = frame;
    imageBox2.Image = mask;
}
/// <summary>
/// Frame handler: Gaussian-blurs the captured frame, extracts the foreground mask,
/// detects blobs of at least 100 px, updates the tracker with them, and draws every
/// track's bounding box and id onto the frame before publishing both images.
/// </summary>
void ProcessFrame(object sender, EventArgs e)
{
    Mat frame = _cameraCapture.QueryFrame();
    Mat smoothedFrame = new Mat();
    CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(3, 3), 1); //filter out noises
    #region use the BG/FG detector to find the forground mask
    Mat forgroundMask = new Mat();
    _fgDetector.Apply(smoothedFrame, forgroundMask);
    #endregion
    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(forgroundMask.ToImage<Gray, byte>(), blobs);
    blobs.FilterByArea(100, int.MaxValue); // ignore blobs under 100 px
    // NOTE(review): (Width + Width) / 2 is just the width — possibly (Width + Height) / 2
    // was intended; confirm before changing.
    float scale = (frame.Width + frame.Width)/2.0f;
    _tracker.Update(blobs, 0.01 * scale, 5, 5);
    foreach (var pair in _tracker)
    {
        CvTrack b = pair.Value;
        CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
        CvInvoke.PutText(frame, b.Id.ToString(), new Point((int)Math.Round(b.Centroid.X), (int)Math.Round(b.Centroid.Y)), FontFace.HersheyPlain, 1.0, new MCvScalar(255.0, 255.0, 255.0));
    }
    imageBox1.Image = frame;
    imageBox2.Image = forgroundMask;
}
/// <summary>
/// Blob-labeling demo: thresholds the global source image (Otsu on grayscale), labels
/// blobs, applies a minimum-area filter, renders the blobs on a black canvas, and prints
/// labeling/rendering times plus the final blob count.
/// </summary>
/// <param name="filterArea">Minimum blob area to keep; 0 keeps everything.</param>
public static void Blob_CvBlobs(int filterArea = 0)
{
    Glb.DrawMatAndHist0(Glb.matSrc);

    var matThr = Glb.matSrc.CvtColor(ColorConversionCodes.BGR2GRAY).Threshold(128, 255, ThresholdTypes.Otsu);

    var blobs = new CvBlobs();
    Glb.TimerStart();
    // Fix: the original captured Label()'s return value into an unused local 'cnt'.
    blobs.Label(matThr);
    Console.WriteLine("=> Label Time: {0}ms", Glb.TimerStop());

    blobs.FilterByArea(filterArea, int.MaxValue);

    // Render the filtered blobs on a black canvas the size of the source image.
    var matDsp = new Mat(Glb.matSrc.Rows, Glb.matSrc.Cols, MatType.CV_8UC3);
    matDsp.SetTo(Scalar.Black);
    Glb.TimerStart();
    MyBlobRenderer.RenderBlobs(blobs, matDsp);
    Console.WriteLine("=> Render Time: {0}ms", Glb.TimerStop());

    Console.WriteLine("=> Blob Count: {0}", blobs.Count);

    Glb.DrawMatAndHist1(matThr);
    Glb.DrawMatAndHist2(matDsp);

    matThr.Dispose();
    matDsp.Dispose();
}
/// <summary>
/// Scores an inspection result against ground-truth coordinates: masks template regions,
/// labels defect-sized blobs (area 9..250), matches each blob's enclosing circle against
/// the ground-truth points, and derives a score from the surplus detections.
/// </summary>
/// <param name="検査結果">Binary inspection-result image (a clone is modified internally).</param>
/// <param name="テンプレート">Grayscale template used to mask out expected regions.</param>
/// <param name="正解座標">Ground-truth coordinates, one (x, y) pair per row; (0, _) is a sentinel.</param>
/// <returns>{ score, surplus-detection count, missed count }.</returns>
public int[] 点数計算(Mat 検査結果, Mat テンプレート, int[,] 正解座標)
{
    int score = 0;
    var result_clone = 検査結果.Clone();
    paint_black(ref result_clone, テンプレート);
    CvBlobs blobs = new CvBlobs(result_clone);
    blobs.FilterByArea(9, 250);
    int[,] 正解座標2 = (int[, ])正解座標.Clone();
    // Fix: 正解数 was declared twice ("int 正解数 = 0; int 正解数 = 0;"), which is a
    // compile error (CS0128); keep a single declaration.
    int 正解数 = 0;
    int 許容回数 = 5;
    int 未検出数 = 0;
    foreach (CvBlob item in blobs.Values)
    {
        CvContourPolygon polygon = item.Contour.ConvertToPolygon();
        Point2f circleCenter;
        float circleRadius;
        GetEnclosingCircle(polygon, out circleCenter, out circleRadius);
        for (int j = 0; j < 正解座標2.Length / 2; j++)
        {
            if (正解座標2[j, 0] != 0 &&
                (Math.Pow(circleCenter.X - 正解座標2[j, 0], 2) + Math.Pow(circleCenter.Y - 正解座標2[j, 1], 2) < circleRadius * circleRadius))
            { // ground-truth point lies inside this blob's enclosing circle: count a hit
                正解数++;
                正解座標2[j, 0] = 正解座標2[j, 1] = 0; // consume this ground-truth point
                j = 正解座標2.Length; // one match confirmed; move on to the next blob
            }
        }
    }
    for (int i = 0; i < 正解座標2.Length / 2; i++)
    { // any ground-truth point never matched counts as missed
        if (正解座標2[i, 0] != 0)
        {
            未検出数++;
        }
    }
    // NOTE(review): from here 正解数 is reused to hold blobs.Count - hits, i.e. the number
    // of surplus detections — confirm this is intended before renaming.
    正解数 = blobs.Count - 正解数;
    if (正解数 <= 許容回数)
    {
        score = (int)((float)(正解数) * (10000.0f / (正解座標.Length / 2)));
    }
    else
    {
        // Algebraically this caps the scored count at 許容回数.
        score = (int)((float)(正解数 - (正解数 - 許容回数)) * (10000.0f / (正解座標.Length / 2)));
    }
    blobs = null;
    // Fix: dispose the cloned Mat instead of nulling it (nulling leaked native memory).
    result_clone.Dispose();
    return(new int[] { score, 正解数, 未検出数 });
}
/// <summary>
/// Runs the per-thread blob detector on <paramref name="img"/> and returns only the blobs
/// whose area lies within [<paramref name="min"/>, <paramref name="max"/>].
/// </summary>
public static IEnumerable <CvBlob> DetectAndFilterBlobs(Image <Gray, byte> img, int min, int max)
{
    CvBlobs detected = new CvBlobs();
    PerThreadUtils.GetBlobDetector().Detect(img, detected);
    detected.FilterByArea(min, max);
    return detected.Values;
}
/// <summary>
/// Binarizes <paramref name="imgGray"/> in place so that only pixels inside
/// <paramref name="rcGame"/> with intensity 100..120 survive (as white), detects blobs of
/// at least 100 px, and returns the bounding box with the largest area — or
/// Rectangle.Empty when nothing qualifies.
/// </summary>
private Rectangle GetBlueGameBoxRegion(Rectangle rcGame, Image <Gray, Byte> imgGray)
{
    byte[,,] pixels = imgGray.Data;
    int width = imgGray.Width;
    int height = imgGray.Height;

    // In-place threshold: keep only in-region pixels whose intensity is in [100, 120].
    for (int row = 0; row < height; row++)
    {
        for (int col = 0; col < width; col++)
        {
            if (!rcGame.Contains(col, row))
            {
                pixels[row, col, 0] = 0;
                continue;
            }
            byte v = pixels[row, col, 0];
            pixels[row, col, 0] = (v >= 100 && v <= 120) ? (byte)255 : (byte)0;
        }
    }

    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(imgGray, blobs);
    blobs.FilterByArea(100, int.MaxValue);
    if (blobs.Count < 1)
    {
        return Rectangle.Empty;
    }

    // Pick the blob whose bounding box covers the largest area.
    Rectangle best = Rectangle.Empty;
    int bestArea = 0;
    foreach (CvBlob blob in blobs.Values)
    {
        Rectangle box = blob.BoundingBox;
        if (box.Width * box.Height > bestArea)
        {
            best = box;
            bestArea = best.Width * best.Height;
        }
    }
    return best;
}
// Detects blobs (area 300..99999) in the gray image, collects their centroids into the
// shared 'time' list, and returns the centroid nearest to the robot's individual position.
// NOTE(review): the 'individual' parameter is never used — nearest() reads
// infoVtoERobo.PosicaoIndividual instead; confirm which one is intended.
private Point centroideDeCorTime(Image <Gray, Byte> imgGray, Point individual)
{
    blobDetector.Detect(imgGray, detectedBlobs);
    detectedBlobs.FilterByArea(300, 99999); // drop blobs outside the expected marker size
    blobList = detectedBlobs.Values.ToList();
    time.Clear(); // 'time' accumulates this frame's centroids only
    foreach (var blob in blobList)
    {
        time.Add(new Point((int)blob.Centroid.X, (int)blob.Centroid.Y));
    }
    detectedBlobs.Clear(); // reset the shared blob container for the next call
    return(nearest(time, infoVtoERobo.PosicaoIndividual));
}
/// <summary>
/// Keeps only the largest blob: labels <paramref name="imgSrc"/>, finds the greatest blob,
/// and renders just that blob into <paramref name="imgDst"/> (which is zeroed first).
/// Does nothing further when no blob is found.
/// </summary>
/// <param name="imgSrc">Binary source image to label.</param>
/// <param name="imgDst">Destination image; receives only the largest blob.</param>
private void FilterByMaximumBlob(IplImage imgSrc, IplImage imgDst)
{
    // Fix: CvBlobs holds native resources — wrap it in 'using' (the original leaked it;
    // the sibling FilterByMaximalBlob already disposes). Also corrected the XML docs,
    // which referred to a nonexistent 'imgRender' parameter.
    using (CvBlobs blobs = new CvBlobs())
    {
        imgDst.Zero();
        blobs.Label(imgSrc);
        CvBlob max = blobs.GreaterBlob();
        if (max == null)
        {
            return;
        }
        // Keep only blobs whose area equals the maximum, i.e. the largest blob itself.
        blobs.FilterByArea(max.Area, max.Area);
        blobs.FilterLabels(imgDst);
    }
}
/// <summary>
/// Renders only the largest labeled blob of <paramref name="imgSrc"/> into
/// <paramref name="imgDst"/>, keeping the label data in an explicit label-depth image.
/// Does nothing further when no blob is found.
/// </summary>
/// <param name="imgSrc">Binary source image to label.</param>
/// <param name="imgDst">Destination image; receives only the largest blob.</param>
private static void FilterByMaximalBlob(IplImage imgSrc, IplImage imgDst)
{
    using (CvBlobs blobs = new CvBlobs())
    using (IplImage imgLabelData = new IplImage(imgSrc.Size, CvBlobLib.DepthLabel, 1))
    {
        imgDst.Zero();
        blobs.Label(imgSrc, imgLabelData);
        // Resolve the greatest blob through the blobs indexer.
        CvBlob max = blobs[blobs.GreaterBlob()];
        if (max == null)
        {
            return;
        }
        // Keep only blobs whose area equals the maximum, then project them into imgDst.
        blobs.FilterByArea(max.Area, max.Area);
        blobs.FilterLabels(imgLabelData, imgDst);
    }
}
/// <summary>
/// Returns a new single-channel image containing only the largest blob of
/// <paramref name="target"/>, or the original image when no blob is found.
/// </summary>
public static IplImage test(IplImage target)
{
    // Fix: removed the unused 'lableImg' allocation (it was never passed to Label and
    // leaked a native image), disposed the CvBlobs instance, and freed retImg on the
    // no-blob path where it is not returned.
    IplImage retImg = new IplImage(target.Size, BitDepth.U8, 1);
    using (CvBlobs blobs = new CvBlobs())
    {
        blobs.Label(target);
        CvBlob max = blobs.GreaterBlob();
        if (max == null)
        {
            retImg.Dispose();
            return(target);
        }
        // Keep only blobs whose area equals the maximum, i.e. the largest blob itself.
        blobs.FilterByArea(max.Area, max.Area);
        blobs.FilterLabels(retImg);
    }
    return(retImg);
}
/// <summary>
/// Thresholds <paramref name="imgGray"/> in place so near-black pixels (intensity &lt;= 5)
/// become white and everything else black, detects blobs of at least 100 px, and returns
/// the bounding box of the first one found — or Rectangle.Empty when there is none.
/// </summary>
private Rectangle GetBlackGameBoxRegion(Image <Gray, Byte> imgGray)
{
    int width = imgGray.Width;
    int height = imgGray.Height;
    byte[,,] pixels = imgGray.Data;

    // Inverse threshold in place: intensity <= 5 -> 255, everything else -> 0.
    for (int row = 0; row < height; row++)
    {
        for (int col = 0; col < width; col++)
        {
            pixels[row, col, 0] = (pixels[row, col, 0] > 5) ? (byte)0 : (byte)255;
        }
    }

    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(imgGray, blobs);
    blobs.FilterByArea(100, int.MaxValue);
    if (blobs.Count < 1)
    {
        return Rectangle.Empty;
    }

    // Return the bounding box of the first detected blob (enumeration order).
    foreach (CvBlob blob in blobs.Values)
    {
        return blob.BoundingBox;
    }
    return Rectangle.Empty;
}
/// <summary>
/// Carbon-paper inspection demo: crops a ROI from the global source image, thresholds it,
/// labels blobs, filters them by a minimum area and renders the surviving blobs in color.
/// </summary>
/// <param name="x1">Left edge of the ROI (inclusive).</param>
/// <param name="y1">Top edge of the ROI (inclusive).</param>
/// <param name="x2">Right edge of the ROI (inclusive).</param>
/// <param name="y2">Bottom edge of the ROI (inclusive).</param>
/// <param name="thrType">Threshold mode applied to the grayscale ROI.</param>
/// <param name="thr">Threshold value.</param>
/// <param name="filterArea">Minimum blob area to keep.</param>
public static void CarbonPaper(int x1 = 100, int y1 = 300, int x2 = 1100, int y2 = 1600, ThresholdTypes thrType = ThresholdTypes.Binary, int thr = 128, int filterArea = 30)
{
    // 1. convert to grayscale
    var matGray = Glb.matSrc.CvtColor(ColorConversionCodes.BGR2GRAY);

    // 2. roi crop (inclusive bounds, hence the +1)
    Rect roi = new Rect(x1, y1, x2 - x1 + 1, y2 - y1 + 1);
    var matGrayDrawRoi = Glb.matSrc.Clone();
    matGrayDrawRoi.Rectangle(roi, Scalar.Yellow); // visualize the ROI on a copy of the source
    Glb.DrawMat0(matGrayDrawRoi);
    var matRoi = new Mat(matGray, roi); // view into the grayscale image (no pixel copy)
    Glb.DrawHist0(matRoi);

    // 3. threshold
    var matThr = matRoi.Threshold(thr, 255, thrType);
    Glb.DrawMatAndHist1(matThr);

    // 4. blob labeling with a minimum-area filter
    CvBlobs blobs = new CvBlobs();
    blobs.Label(matThr);
    blobs.FilterByArea(filterArea, int.MaxValue);

    // 5. display the blobs in color on a black canvas
    var matDsp = new Mat(matRoi.Rows, matRoi.Cols, MatType.CV_8UC3);
    matDsp.SetTo(Scalar.Black);
    blobs.RenderBlobs(matDsp, matDsp, RenderBlobsModes.Color);
    Glb.DrawMatAndHist2(matDsp);

    Console.WriteLine("blobs.cnt = {0}", blobs.Count);

    matGray.Dispose();
    matGrayDrawRoi.Dispose();
    matRoi.Dispose();
    matThr.Dispose();
    matDsp.Dispose();
}
/// <summary>
/// Debug visualization of an evaluation: labels blobs in the inspection result, filters
/// them by the globally configured area range, renders them in color over the template,
/// and overlays the score debug details.
/// </summary>
/// <param name="検査結果">Binary inspection-result image to label.</param>
/// <param name="テンプレート">Grayscale template used as the background layer.</param>
/// <param name="正解座標">Ground-truth coordinates, one (x, y) pair per row.</param>
/// <param name="color_debug">Receives the composed BGR debug image.</param>
public void 評価結果画像作成_debug(Mat 検査結果, Mat テンプレート, int[,] 正解座標, ref Mat color_debug)
{
    Mat res_color = new Mat(new Size(検査結果.Width, 検査結果.Height), MatType.CV_8UC3, Scalar.All(0));
    var temp_color = res_color.Clone();
    CvBlobs blobs = new CvBlobs(検査結果);
    int score = 0; // receives the computed score from 点数計算_debug (not used further here)
    blobs.FilterByArea(Main.FilterByArea[0], Main.FilterByArea[1]); // area range from global config
    blobs.RenderBlobs(検査結果, res_color);
    Cv2.CvtColor(テンプレート, temp_color, ColorConversionCodes.GRAY2BGR);
    Cv2.Add(temp_color, res_color, color_debug); // template background + colored blobs
    点数計算_debug(blobs, 正解座標, ref color_debug, ref score);
    res_color.Dispose();
    temp_color.Dispose();
    blobs = null;
}
// Processes the current frame: refreshes the background image and the binary
// background-free image, converts the latter to grayscale and detects blobs >= 100 px
// into the shared 'mblobs' field. When 'mapeamento' (mapping mode) is on, a resized copy
// of the background image is shown in the calibration window.
protected override void processarImagem(bool mapeamento)
{
    preencherImagemPlanoDeFundo();
    preencherImagemBinariaSemPlanoDeFundo();
    Mat imagemCinza = ConverterParaCinzas(mImagemBinariaSemPlanoDeFundo);
    if (mapeamento)
    {
        Mat mCopiaMenorPlanoFundo = new Mat();
        // NOTE(review): Resize receives both an explicit destination size and 0.7 scale
        // factors — when a non-empty size is given it takes precedence, so the 0.7 factors
        // would be ignored; confirm which behavior is intended.
        CvInvoke.Resize(mCopiaImagemPlanoDeFundo, mCopiaMenorPlanoFundo, mCopiaImagemPlanoDeFundo.Size, 0.7, 0.7);
        mJanelaCalibracao.PlanoDeFundo.Image = mCopiaMenorPlanoFundo;
    }
    mblobs = new CvBlobs();
    mBlobDetector.Detect(imagemCinza.ToImage <Gray, byte>(), mblobs);
    mblobs.FilterByArea(100, int.MaxValue); // keep only blobs of at least 100 px
}
/// <summary>
/// Finds blobs with the specified min and max area.
/// </summary>
/// <param name="src">Source image; must not be null.</param>
/// <param name="minArea">The minimum allowable area.</param>
/// <param name="maxArea">The maximum allowable area.</param>
/// <returns>The centroid (X, Y) of every blob that passed the area filter.</returns>
public IEnumerable <PointF> FindBlobs(Bitmap src, uint minArea, uint maxArea)
{
    if (src == null)
    {
        throw new ArgumentNullException(nameof(src), @"Source image cannot be null");
    }

    using (var sourceImage = new Image <Gray, byte>(src))
    using (var blobDetector = new CvBlobDetector( ))
    using (var blobs = new CvBlobs( ))
    {
        Image <Gray, byte> binarized = null;
        try
        {
            // Binarize and invert the image so the blob detector can locate the dots.
            binarized = sourceImage.ThresholdBinaryInv(new Gray(90), new Gray(255));

            // Find all blobs, then keep only those within the requested area range
            // (the alignment dots average roughly 3500 px).
            blobDetector.Detect(binarized, blobs);
            blobs.FilterByArea(( int )minArea, ( int )maxArea);

            // NOTE(review): the returned enumerable is lazy and will be evaluated after
            // 'blobs' is disposed — consider materializing with ToList() if that misbehaves.
            return blobs.Values.Select(b => new PointF(b.Centroid.X, b.Centroid.Y));
        }
        finally
        {
            binarized?.Dispose( );
        }
    }
}
/// <summary>
/// Filter blobs by area.
/// Those blobs whose areas are not in range will be erased from the input list of blobs. (cvFilterByArea)
/// </summary>
/// <param name="blobs">List of blobs.</param>
/// <param name="minArea">Minimum area.</param>
/// <param name="maxArea">Maximum area.</param>
public static void FilterByArea(CvBlobs blobs, int minArea, int maxArea)
{
    if (blobs is null)
    {
        throw new ArgumentNullException(nameof(blobs));
    }

    blobs.FilterByArea(minArea, maxArea);
}
/// <summary>
/// Per-frame handler for the two-hand color-marker "click" gesture: segments four HSV
/// marker colors (red = right thumb, blue = left index, green = right index,
/// yellow = left thumb), detects each color's blobs, and when the red/blue pair and the
/// green/yellow pair are each close together treats it as a click — plays a sound and
/// saves a screenshot plus a crop of the region between the markers.
/// Fix: the original had four copy-pasted detection blocks, four copy-pasted centroid
/// loops and two copy-pasted pair searches; these are factored into private helpers, and
/// dead initial mask allocations (immediately overwritten by InRange) were removed.
/// </summary>
private void ProcessFrame(object sender, EventArgs arg)
{
    Mat frame = new Mat();
    capture.Retrieve(frame, 0);
    Mat frame_crop = frame;
    Image <Hsv, Byte> currenthsvFrame = (frame.ToImage <Bgr, Byte>()).Convert <Hsv, Byte>();

    // HSV segmentation ranges for the four markers.
    Hsv hsv_min_color_one = new Hsv(0, 135, 110);    // red
    Hsv hsv_max_color_one = new Hsv(8, 255, 255);
    Hsv hsv_min_color_two = new Hsv(112, 53, 10);    // blue
    Hsv hsv_max_color_two = new Hsv(119, 255, 255);
    Hsv hsv_min_color_three = new Hsv(83, 109, 105); // green
    Hsv hsv_max_color_three = new Hsv(109, 255, 255);
    Hsv hsv_min_color_four = new Hsv(18, 155, 155);  // yellow
    Hsv hsv_max_color_four = new Hsv(35, 255, 255);

    Image <Gray, Byte> color_one = currenthsvFrame.InRange(hsv_min_color_one, hsv_max_color_one);
    Image <Gray, Byte> color_two = currenthsvFrame.InRange(hsv_min_color_two, hsv_max_color_two);
    Image <Gray, Byte> color_three = currenthsvFrame.InRange(hsv_min_color_three, hsv_max_color_three);
    Image <Gray, Byte> color_four = currenthsvFrame.InRange(hsv_min_color_four, hsv_max_color_four);

    // Blob detection per marker color.
    CvBlobs blobs_color_one = DetectMarkerBlobs(color_one, currenthsvFrame.Size);
    CvBlobs blobs_color_two = DetectMarkerBlobs(color_two, currenthsvFrame.Size);
    CvBlobs blobs_color_three = DetectMarkerBlobs(color_three, currenthsvFrame.Size);
    CvBlobs blobs_color_four = DetectMarkerBlobs(color_four, currenthsvFrame.Size);

    // Outline every marker blob on the frame. The centroid arrays are kept for parity
    // with the original code, though the click logic reads the blobs directly.
    float[] cent_color_one = OutlineBlobsAndGetCentroid(blobs_color_one, frame);
    float[] cent_color_two = OutlineBlobsAndGetCentroid(blobs_color_two, frame);
    float[] cent_color_three = OutlineBlobsAndGetCentroid(blobs_color_three, frame);
    float[] cent_color_four = OutlineBlobsAndGetCentroid(blobs_color_four, frame);

    // A "click" needs a close red/blue pair AND a close green/yellow pair.
    int click_flag = 0;
    int[] x_cor = new int[4];
    int[] y_cor = new int[4];
    if (blobs_color_one.Count != 0 && blobs_color_two.Count != 0 &&
        blobs_color_three.Count != 0 && blobs_color_four.Count != 0)
    {
        if (TryFindClosePair(blobs_color_one, blobs_color_two, 5000, x_cor, y_cor, 0) &&
            TryFindClosePair(blobs_color_three, blobs_color_four, 10000, x_cor, y_cor, 2))
        {
            click_flag = 1;
        }
    }

    if (click_flag == 1)
    {
        //MessageBox.Show("clicked");
        SoundPlayer simpleSound = new SoundPlayer(@"click_sound.wav");
        simpleSound.Play();
        // Sort the marker coordinates and crop the region between the two middle values.
        Array.Sort(x_cor);
        Array.Sort(y_cor);
        Bitmap ori_image = frame_crop.ToImage <Bgr, Byte>().ToBitmap();
        Bitmap crop_image = new Bitmap(x_cor[2] - x_cor[1], y_cor[2] - y_cor[1]);
        Graphics g = Graphics.FromImage(crop_image);
        g.DrawImage(ori_image, -x_cor[1], -y_cor[1]);
        frame.Save(@"C:\Users\Shubhankar\Pictures\Camera Roll\" + string.Format("SAP_{0:ddMMyyyy_hh_mm_ss}_original.jpg", DateTime.Now));
        crop_image.Save(@"C:\Users\Shubhankar\Pictures\Camera Roll\" + string.Format("SAP_{0:ddMMyyyy_hh_mm_ss}.jpg", DateTime.Now));
        Thread.Sleep(500); // debounce: avoid saving several shots for one gesture
    }

    captureImageBox.Image = frame;
    grayscaleImageBox.Image = color_one;
    smoothedGrayscaleImageBox.Image = color_two;
    cannyImageBox.Image = color_three;
    Color4ImageBox.Image = color_four;
}

// Blur one color mask, background-subtract it, and return its area-filtered blobs.
private CvBlobs DetectMarkerBlobs(Image <Gray, Byte> colorMask, Size frameSize)
{
    Image <Bgr, Byte> smoothed = new Image <Bgr, byte>(frameSize);
    CvInvoke.GaussianBlur(colorMask, smoothed, new Size(3, 3), 1); //filter out noises
    Mat foreground = new Mat();
    fgDetector.Apply(smoothed, foreground);
    CvBlobs blobs = new CvBlobs();
    blobDetector.Detect(foreground.ToImage <Gray, byte>(), blobs);
    blobs.FilterByArea(minarea, maxarea);
    return blobs;
}

// Draws a white box around every blob on the frame and returns the centroid of the
// last blob enumerated (matching the original loop's "last one wins" behavior).
private static float[] OutlineBlobsAndGetCentroid(CvBlobs blobs, Mat frame)
{
    float[] centroid = new float[2];
    foreach (var pair in blobs)
    {
        CvBlob b = pair.Value;
        CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
        centroid[0] = b.Centroid.X;
        centroid[1] = b.Centroid.Y;
    }
    return centroid;
}

// Looks for one blob from each set whose centroids lie within sqrt(maxDistSq) pixels of
// each other; on success stores the two centroids at index offset / offset + 1 of the
// coordinate arrays and returns true.
private static bool TryFindClosePair(CvBlobs first, CvBlobs second, float maxDistSq, int[] xCor, int[] yCor, int offset)
{
    foreach (var pa in first)
    {
        CvBlob a = pa.Value;
        foreach (var pb in second)
        {
            CvBlob b = pb.Value;
            float dx = a.Centroid.X - b.Centroid.X;
            float dy = a.Centroid.Y - b.Centroid.Y;
            if (dx * dx + dy * dy <= maxDistSq)
            {
                xCor[offset] = ((int)a.Centroid.X);
                xCor[offset + 1] = ((int)b.Centroid.X);
                yCor[offset] = ((int)a.Centroid.Y);
                yCor[offset + 1] = ((int)b.Centroid.Y);
                return true;
            }
        }
    }
    return false;
}
// Refreshes the background image and the binary background-free image, converts the
// latter to grayscale, and detects blobs >= 100 px into the shared 'mblobs' field.
// In mapping mode ('mapeamento'), also publishes a resized copy of the background image
// to the calibration window.
protected override void processarImagem(bool mapeamento)
{
    preencherImagemPlanoDeFundo();
    preencherImagemBinariaSemPlanoDeFundo();
    Mat imagemCinza = ConverterParaCinzas(mImagemBinariaSemPlanoDeFundo);
    if (mapeamento)
    {
        Mat mCopiaMenorPlanoFundo = new Mat();
        // NOTE(review): both an explicit destination size and 0.7 scale factors are passed
        // to Resize; a non-empty size normally takes precedence, making the factors
        // ineffective — confirm the intended behavior.
        CvInvoke.Resize(mCopiaImagemPlanoDeFundo, mCopiaMenorPlanoFundo, mCopiaImagemPlanoDeFundo.Size, 0.7, 0.7);
        mJanelaCalibracao.PlanoDeFundo.Image = mCopiaMenorPlanoFundo;
    }
    mblobs = new CvBlobs();
    mBlobDetector.Detect(imagemCinza.ToImage<Gray, byte>(), mblobs);
    mblobs.FilterByArea(100, int.MaxValue); // keep only blobs of at least 100 px
}
/// <summary>
/// Detects bright blobs in an IR frame: converts the 16-bit IR image to 8-bit, thresholds
/// at 225, labels blobs of area 30..2000, draws a red outline on the overlay canvas for
/// each blob, and stores (in list_arr_index) the flat pixel indices belonging to each blob.
/// </summary>
private void CountBlobs(WriteableBitmap writeableBitmap)
{
    Mat imgIR = writeableBitmap.ToMat();// CV_16UC1
    imgIR.ConvertTo(imgIR, MatType.CV_8UC1, 1.0 / 256.0); // scale 16-bit range down to 8-bit
    Mat imgIRbin = new Mat(imgIR.Rows, imgIR.Cols, MatType.CV_8UC1);
    Cv2.Threshold(imgIR, imgIRbin, 225, 255, ThresholdTypes.Binary); // keep only very bright pixels
    CvBlobs blobs = new CvBlobs(imgIRbin);
    blobs.FilterByArea(30, 2000); // reject specks and oversized regions

    // Reset the overlay canvas and the per-blob index lists from the previous frame.
    canvas_blob.Children.Clear();
    list_arr_index.Clear();

    if (blobs.Count > 0)
    {
        // Draw a red outline rectangle on the canvas for every blob.
        foreach (KeyValuePair <int, CvBlob> item in blobs)
        {
            int labelValue = item.Key;
            CvBlob blob = item.Value;
            Rectangle blob_rect = new Rectangle
            {
                Width = blob.Rect.Width, Height = blob.Rect.Height, Stroke = Brushes.Red, StrokeThickness = 2
            };
            canvas_blob.Children.Add(blob_rect);
            Canvas.SetLeft(blob_rect, blob.Rect.Left);
            Canvas.SetTop(blob_rect, blob.Rect.Top);
        }

        // Flatten the label image into a 1-D vector, row-major.
        LabelData labelBlobs = blobs.Labels;
        int[] label_blobs_vector = new int[labelBlobs.Rows * labelBlobs.Cols];
        int ii = 0;
        for (int i_row = 0; i_row < labelBlobs.Rows; i_row++)
        {
            for (int i_col = 0; i_col < labelBlobs.Cols; i_col++)
            {
                label_blobs_vector[ii] = labelBlobs[i_row, i_col];
                ii += 1;
            }
        }

        // For each blob, collect the flat indices of its pixels into list_arr_index.
        foreach (KeyValuePair <int, CvBlob> item in blobs)
        {
            int count_blobs = blobs.Count(); // NOTE(review): unused local, recomputed every pass
            int labelvalue = item.Key;
            // Each blob owns exactly Area pixels, so the index array can be pre-sized.
            int area_blob = item.Value.Area;
            int[] arr_idx_label = new int[area_blob];
            ii = 0;
            for (int i_lab = 0; i_lab < label_blobs_vector.Length; i_lab++)
            {
                if (label_blobs_vector[i_lab] == labelvalue)
                {
                    arr_idx_label[ii] = i_lab;
                    ii += 1;
                }
            }
            list_arr_index.Add(arr_idx_label);
        }
        Console.WriteLine("hoge"); // breakpoint anchor
    }
}
/// <summary>
/// MP4 playback frame handler for vehicle counting: background-subtracts the frame,
/// cleans the mask morphologically, detects and tracks blobs, counts each track the first
/// time its centroid enters the counting-line band (appending a JSON log entry), draws the
/// running count, and optionally snapshots + e-mails at a threshold and records video.
/// </summary>
private void ProcessFrameMP4(object sender, EventArgs e)
{
    // Counting-line endpoints, rebuilt each frame from the configured coordinates.
    px = new Point(px1, px2);
    py = new Point(py1, py2);
    if (cap != null)
    {
        cap.Retrieve(frame, 0);
        currentframe = frame.ToImage <Bgr, byte>();

        // Foreground mask from the background subtractor.
        Mat mask = new Mat();
        sub.Apply(currentframe, mask);

        // Morphological cleanup kernels: 3x3 open removes specks, 11x11 close fills holes,
        // a cross-shaped element is used for the final dilate.
        Mat kernelOp = new Mat();
        Mat kernelCl = new Mat();
        Mat kernelEl = new Mat(); // NOTE(review): never used
        Mat Dilate = new Mat();   // NOTE(review): never used
        kernelOp = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(3, 3), new Point(-1, -1));
        kernelCl = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(11, 11), new Point(-1, -1));
        var element = CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(3, 3), new Point(-1, -1));
        CvInvoke.GaussianBlur(mask, mask, new Size(13, 13), 1.5);
        CvInvoke.MorphologyEx(mask, mask, MorphOp.Open, kernelOp, new Point(-1, -1), 1, BorderType.Default, new MCvScalar());
        CvInvoke.MorphologyEx(mask, mask, MorphOp.Close, kernelCl, new Point(-1, -1), 1, BorderType.Default, new MCvScalar());
        CvInvoke.Dilate(mask, mask, element, new Point(-1, -1), 1, BorderType.Reflect, default(MCvScalar));
        CvInvoke.Threshold(mask, mask, 127, 255, ThresholdType.Binary);

        // Detect blobs sized like vehicles and update the tracker.
        detect.Detect(mask.ToImage <Gray, byte>(), blobs);
        blobs.FilterByArea(500, 20000);
        tracks.Update(blobs, 20.0, 1, 10);

        // Copy only the blob pixels of the frame into 'result'.
        Image <Bgr, byte> result = new Image <Bgr, byte>(currentframe.Size);
        using (Image <Gray, Byte> blobMask = detect.DrawBlobsMask(blobs))
        {
            frame.CopyTo(result, blobMask);
        }

        // Counting line, red by default (redrawn green while a vehicle is on it).
        CvInvoke.Line(currentframe, px, py, new MCvScalar(0, 0, 255), 2);
        foreach (KeyValuePair <uint, CvTrack> pair in tracks)
        {
            if (pair.Value.Inactive == 0) //only draw the active tracks.
            {
                int cx = Convert.ToInt32(pair.Value.Centroid.X);
                int cy = Convert.ToInt32(pair.Value.Centroid.Y);
                CvBlob b = blobs[pair.Value.BlobLabel];
                Bgr color = detect.MeanColor(b, frame.ToImage <Bgr, Byte>());
                result.Draw(pair.Key.ToString(), pair.Value.BoundingBox.Location, FontFace.HersheySimplex, 0.5, color);
                currentframe.Draw(pair.Value.BoundingBox, new Bgr(0, 0, 255), 1);
                Point[] contour = b.GetContour(); // NOTE(review): computed but never drawn
                //result.Draw(contour, new Bgr(0, 0, 255), 1);
                Point center = new Point(cx, cy);
                CvInvoke.Circle(currentframe, center, 1, new MCvScalar(255, 0, 0), 2);
                // Centroid inside the band around the counting line?
                if (center.Y <= px.Y + 10 && center.Y > py.Y - 10 && center.X <= py.X && center.X > px.X)
                {
                    if (pair.Key.ToString() != "")
                    {
                        // Count each track id at most once; 'carid' is bounded at 20 entries.
                        if (!carid.Contains(pair.Key.ToString()))
                        {
                            carid.Add(pair.Key.ToString());
                            if (carid.Count == 20)
                            {
                                carid.Clear();
                            }
                            carcount++;
                            // NOTE(review): the first five counts past 'countBrd' are
                            // deliberately not logged — confirm this is intended.
                            if (carcount != countBrd + 1 && carcount != countBrd + 2 && carcount != countBrd + 3 && carcount != countBrd + 4 && carcount != countBrd + 5)
                            {
                                //Json Logger
                                Logs log = new Logs() { Date = DateTime.Now.ToString(), Id = carcount };
                                string strResultJson = JsonConvert.SerializeObject(log);
                                File.AppendAllText(cfg.LogSavePath + @"\log.json", strResultJson + Environment.NewLine);
                            }
                        }
                    }
                    CvInvoke.Line(currentframe, px, py, new MCvScalar(0, 255, 0), 2);
                }
            }
        }
        CvInvoke.PutText(currentframe, "Count :" + carcount.ToString(), new Point(10, 25), FontFace.HersheySimplex, 1, new MCvScalar(255, 0, 255), 2, LineType.AntiAlias);

        //Frame Rate — crude pacing: sleep one frame interval of the source video.
        double framerate = cap.GetCaptureProperty(CapProp.Fps);
        Thread.Sleep((int)(1000.0 / framerate));

        // First time the count reaches the threshold: save a snapshot and e-mail it.
        if (firstCount == false && carcount == countBrd)
        {
            Image_Name = cfg.PhotoSavePath + @"\" + "Car" + DateTime.Now.ToString("dd-mm-yyyy-hh-mm-ss") + ".jpg";
            currentframe.Save(Image_Name);
            sendMail = new Thread(SendMail);
            sendMail.Start();
            firstCount = true;
        }

        // Video recording: timestamp frames relative to the first recorded frame.
        if (isRecording)
        {
            if (firstFrameTime != null)
            {
                writer.WriteVideoFrame(currentframe.Bitmap, DateTime.Now - firstFrameTime.Value);
            }
            else
            {
                writer.WriteVideoFrame(currentframe.Bitmap);
                firstFrameTime = DateTime.Now;
            }
        }
        pictureBox1.Image = currentframe.Bitmap;
    }
}
/// <summary>
/// Frame-differencing motion detector: blurs the frame, diffs it against the
/// previously processed frame, thresholds/dilates the delta, detects blobs
/// (area >= 1600) and feeds them to the shared tracker.
/// </summary>
/// <param name="fileName">Path of the image file to load as the current frame.</param>
/// <returns>
/// The colour frame annotated with tracked bounding boxes and track ids when the
/// tracker holds at least one track; otherwise null. The very first call only
/// primes the reference image (<c>_original</c>) and returns null.
/// </returns>
static Image<Bgr, Byte> _doDetect(string fileName)
{
    Debug.WriteLine($"Processing: {fileName}");

    var frameOrig = new Image<Bgr, Byte>(fileName);
    using (var frame = frameOrig.Convert<Gray, Byte>())
    using (Mat smoothedFrame = new Mat())
    {
        // Heavy blur so sensor noise does not register as frame-to-frame motion.
        CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(19, 19), 7);
        var smoothedImage = smoothedFrame.ToImage<Gray, Byte>();

        if (_original == null)
        {
            // First frame: nothing to diff against yet, just prime the reference.
            _original = smoothedImage;
            frameOrig.Dispose();
            return null;
        }

        using (var frameDelta = smoothedImage.AbsDiff(_original))
        using (var rawThresh = frameDelta.ThresholdBinary(new Gray(25), new Gray(255)))
        using (var thresh = rawThresh.Dilate(2))
        {
            // The previous reference is no longer needed once the diff is taken;
            // release it instead of leaking one image per processed frame.
            _original.Dispose();
            _original = smoothedImage;

            using (CvBlobs blobs = new CvBlobs())
            {
                _blobDetector.Detect(thresh, blobs);
                blobs.FilterByArea(1600, int.MaxValue);

                // NOTE(review): this averages Width with itself (== frame.Width);
                // (frame.Width + frame.Height) / 2 was probably intended — confirm
                // before changing, since it alters the tracker's distance scale.
                float scale = (frame.Width + frame.Width) / 2.0f;
                _tracker.Update(blobs, scale, 5, 5);
            }

            // Annotate every live track with its bounding box and id.
            foreach (var pair in _tracker)
            {
                CvTrack b = pair.Value;
                CvInvoke.Rectangle(frameOrig, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
                CvInvoke.PutText(frameOrig, b.Id.ToString(),
                    new Point((int)Math.Round(b.Centroid.X), (int)Math.Round(b.Centroid.Y)),
                    FontFace.HersheyPlain, 1.0, new MCvScalar(255.0, 255.0, 255.0));
            }

            if (_tracker.Count > 0)
            {
                return frameOrig;
            }

            // Nothing tracked: the annotated frame is not returned, so free it.
            frameOrig.Dispose();
            return null;
        }
    }
}
// Worker-thread processing (capture).
/// <summary>
/// BackgroundWorker loop: grabs frames from the videoInput device, subtracts a
/// fixed dark frame, pushes the result into the bounded FIFO, locates and tracks
/// the brightest blob across frames, overlays status text, and reports progress
/// to the UI until the worker is cancelled.
/// </summary>
/// <param name="sender">The BackgroundWorker that invoked this handler.</param>
/// <param name="e">DoWork event arguments (unused).</param>
private void worker_DoWork(object sender, DoWorkEventArgs e)
{
    BackgroundWorker bw = (BackgroundWorker)sender;
    Stopwatch sw = new Stopwatch();
    string str;
    id = 0;

    // UDP for sending PID data
    // local port number to bind
    // FSI_PID_DATA pid_data = new FSI_PID_DATA();
    int localPort = mmFsiUdpPortMT3PV;
    // NOTE(review): udpc2 is never opened — the setup below is commented out.
    System.Net.Sockets.UdpClient udpc2 = null;;
    /* try
     * {
     * udpc2 = new System.Net.Sockets.UdpClient(localPort);
     *
     * }
     * catch (Exception ex)
     * {
     * // display via anonymous delegate
     * this.Invoke(new dlgSetString(ShowRText), new object[] { richTextBox1, ex.ToString() });
     * }
     */

    // videoInput object
    const int DeviceID = 0; // 0; // 3 (pro), 4(piccolo) 7(DMK)
    const int CaptureFps = 30; // 30
    int interval = (int)(1000 / CaptureFps / 10); // poll ~10x faster than the frame period
    const int CaptureWidth = 640;
    const int CaptureHeight = 480;
    // number of frames to keep for post-recording
    int mmFsiPostRec = 60;
    int save_counter = mmFsiPostRec;

    using (VideoInput vi = new VideoInput())
    {
        vi.SetIdealFramerate(DeviceID, CaptureFps);
        vi.SetupDevice(DeviceID, CaptureWidth, CaptureHeight);
        int width = vi.GetWidth(DeviceID);
        int height = vi.GetHeight(DeviceID);

        using (IplImage img = new IplImage(width, height, BitDepth.U8, 3))
        // fixed dark frame used for subtraction below
        using (IplImage img_dark8 = Cv.LoadImage(@"C:\piccolo\MT3V_dark.bmp", LoadMode.GrayScale))
        //using (IplImage img_dark = new IplImage(width, height, BitDepth.U8, 3))
        using (IplImage img_mono = new IplImage(width, height, BitDepth.U8, 1))
        using (IplImage img2 = new IplImage(width, height, BitDepth.U8, 1))
        // using (Bitmap bitmap = new Bitmap(width, height, PixelFormat.Format24bppRgb))
        using (CvFont font = new CvFont(FontFace.HersheyComplex, 0.45, 0.45))
        //using (CvWindow window0 = new CvWindow("FIFO0", WindowMode.AutoSize))
        {
            //this.Size = new Size(width + 12, height + 148);
            double min_val, max_val;  // only used by the commented-out MinMaxLoc path
            CvPoint min_loc, max_loc; // only used by the commented-out MinMaxLoc path
            int size = 15;
            int size2x = size / 2;
            int size2y = size / 2;
            int crop = 20;
            double sigma = 3;
            long elapsed0 = 0, elapsed1 = 0;
            double framerate0 = 0, framerate1 = 0;
            double alfa_fr = 0.99; // EMA smoothing factor for the frame-rate estimate
            sw.Start();

            while (bw.CancellationPending == false)
            {
                if (vi.IsFrameNew(DeviceID))
                {
                    DateTime dn = DateTime.Now; // capture timestamp
                    vi.GetPixels(DeviceID, img.ImageData, false, true);

                    // draw the timestamp on the frame
                    str = String.Format("Wide ID:{0:D2} ", id) + dn.ToString("yyyyMMdd_HHmmss_fff");// +String.Format(" ({0,000:F2},{1,000:F2}) ({2,000:0},{3,000:0})({4,0:F1})", gx, gy, max_loc.X, max_loc.Y, max_val);
                    img.PutText(str, new CvPoint(10, 475), font, new CvColor(0, 100, 40));

                    Cv.CvtColor(img, img_mono, ColorConversion.BgrToGray);
                    Cv.Sub(img_mono, img_dark8, imgdata.img); // dark-frame subtraction
                    imgdata.id = ++id;
                    imgdata.t = dn;
                    imgdata.ImgSaveFlag = !(ImgSaveFlag != 0); // int -> bool conversion

                    // Bounded FIFO: drop the oldest entry before inserting the new frame.
                    if (fifo.Count == MaxFrame - 1) { fifo.EraseLast(); }
                    fifo.InsertFirst(imgdata);

                    #region 位置検出1//MinMaxLoc
                    /*// position detection
                     * Cv.Smooth(imgdata.img, img2, SmoothType.Gaussian, size, 0, sigma, 0);
                     * CvRect rect;
                     * if (PvMode == MyDETECT)
                     * {
                     * rect = new CvRect( (int)(gx+0.5) - size, (int)(gy+0.5) - size, size*2, size*2);
                     * Cv.SetImageROI(img2, rect);
                     * Cv.MinMaxLoc(img2, out min_val, out max_val, out min_loc, out max_loc, null);
                     * Cv.ResetImageROI(img2);
                     * max_loc.X += (int)(gx + 0.5) - size; // +1 because the reference point is (1,1)
                     * max_loc.Y += (int)(gy + 0.5) - size;
                     * }
                     * else
                     * {
                     * rect = new CvRect(crop, crop, width - (crop + crop), height - (crop + crop));
                     * Cv.SetImageROI(img2, rect);
                     * Cv.MinMaxLoc(img2, out min_val, out max_val, out min_loc, out max_loc, null);
                     * Cv.ResetImageROI(img2);
                     * max_loc.X += crop; // +1 because the reference point is (1,1)
                     * max_loc.Y += crop;
                     * }
                     * window0.ShowImage(img2);
                     *
                     * double m00, m10, m01;
                     * size2x = size2y = size / 2;
                     * if (max_loc.X - size2x < 0) size2x = max_loc.X;
                     * if (max_loc.Y - size2y < 0) size2y = max_loc.Y;
                     * if (max_loc.X + size2x >= width ) size2x = width -max_loc.X -1;
                     * if (max_loc.Y + size2y >= height) size2y = height -max_loc.Y -1;
                     * rect = new CvRect(max_loc.X - size2x, max_loc.Y - size2y, size, size);
                     * CvMoments moments;
                     * Cv.SetImageROI(img2, rect);
                     * Cv.Moments(img2, out moments, false);
                     * Cv.ResetImageROI(img2);
                     * m00 = Cv.GetSpatialMoment(moments, 0, 0);
                     * m10 = Cv.GetSpatialMoment(moments, 1, 0);
                     * m01 = Cv.GetSpatialMoment(moments, 0, 1);
                     * gx = max_loc.X - size2x + m10 / m00;
                     * gy = max_loc.Y - size2y + m01 / m00;
                     */
                    #endregion

                    #region 位置検出2 //Blob
                    // Binarise and label connected components; start from the largest blob.
                    Cv.Threshold(imgdata.img, img2, threshold_blob, 255, ThresholdType.Binary); //2ms
                    blobs.Label(img2, imgLabel); //1.4ms
                    max_label = blobs.GreaterBlob();
                    elapsed1 = sw.ElapsedTicks; //1.3ms
                    if (blobs.Count > 1 && gx >= 0)
                    {
                        uint min_area = (uint)(threshold_min_area * blobs[max_label].Area);
                        blobs.FilterByArea(min_area, uint.MaxValue); //0.001ms
                        // pick the best blob (large area AND small distance from the previous position)
                        double x = blobs[max_label].Centroid.X;
                        double y = blobs[max_label].Centroid.Y;
                        uint area = blobs[max_label].Area;
                        //CvRect rect;
                        distance_min = ((x - gx) * (x - gx) + (y - gy) * (y - gy)); //Math.Sqrt()
                        foreach (var item in blobs)
                        {
                            //Console.WriteLine("{0} | Centroid:{1} Area:{2}", item.Key, item.Value.Centroid, item.Value.Area);
                            x = item.Value.Centroid.X;
                            y = item.Value.Centroid.Y;
                            //rect = item.Value.Rect;
                            distance = ((x - gx) * (x - gx) + (y - gy) * (y - gy)); // Mahalanobis distance in the future
                            if (distance < distance_min)
                            {
                                // area ratio vs. the previously tracked blob
                                d_val = (item.Value.Area) / max_area;
                                if (distance <= 25) // short distance (5 px)
                                {
                                    if (d_val >= 0.4) //&& d_val <= 1.2)
                                    {
                                        max_label = item.Key;
                                        distance_min = distance;
                                    }
                                }
                                else
                                {
                                    // farther away: require a closer area match
                                    if (d_val >= 0.8 && d_val <= 1.5)
                                    {
                                        max_label = item.Key;
                                        distance_min = distance;
                                    }
                                }
                            }
                            //w.WriteLine("{0} {1} {2} {3} {4}", dis, dv, i, item.Key, item.Value.Area);
                        }
                        //gx = x; gy = y; max_val = area;
                    }
                    if (max_label > 0)
                    {
                        // Target found: update the shared centroid/area tracking state.
                        maxBlob = blobs[max_label];
                        max_centroid = maxBlob.Centroid;
                        gx = max_centroid.X;
                        gy = max_centroid.Y;
                        max_area = maxBlob.Area;
                        if (this.States == SAVE)
                        {
                            Pid_Data_Send();
                            timerSavePostTime.Stop();
                            // restart the save window while a target remains visible
                            timerSaveMainTime.Stop();
                            timerSaveMainTime.Start();
                        }
                    }
                    else
                    {
                        // No blob this frame: reset the tracked position.
                        gx = gy = 0;
                        max_area = 0;
                    }
                    #endregion

                    // on-screen display
                    str = String.Format("ID:{0:D2} ", id) + dn.ToString("yyyyMMdd_HHmmss_fff") + String.Format(" ({0,000:F2},{1,000:F2}) ({2,000:0},{3,000:0})({4,0:F1})", gx, gy, xoa, yoa, max_area);
                    if (imgdata.ImgSaveFlag) { str += " True"; }
                    img.PutText(str, new CvPoint(10, 20), font, new CvColor(0, 255, 100));
                    img.Circle(new CvPoint((int)gx, (int)gy), 10, new CvColor(255, 255, 100));
                    bw.ReportProgress(0, img);

                    // processing speed: exponentially-smoothed frame-rate estimate
                    elapsed0 = sw.ElapsedTicks - elapsed1; // ticks for one frame
                    elapsed1 = sw.ElapsedTicks;
                    framerate0 = alfa_fr * framerate1 + (1 - alfa_fr) * (Stopwatch.Frequency / (double)elapsed0);
                    framerate1 = framerate0;
                    str = String.Format("fr time = {0}({1}){2:F1}", sw.Elapsed, id, framerate0); //," ", sw.ElapsedMilliseconds);
                    // show the current timing in the label via anonymous delegate
                    this.Invoke(new dlgSetString(ShowText), new object[] { textBox1, str });
                    //img.ToBitmap(bitmap);
                    //pictureBox1.Refresh();
                }
                Application.DoEvents();
                Thread.Sleep(interval);
            }

            // Cancelled: restore UI state and release the capture device.
            this.States = STOP;
            this.Invoke(new dlgSetColor(SetColor), new object[] { ObsStart, this.States });
            this.Invoke(new dlgSetColor(SetColor), new object[] { ObsEndButton, this.States });
            vi.StopDevice(DeviceID);
            //udpc2.Close();
        }
    }
}
/// <summary>
/// Plays the video at FILE_NAME and highlights moving regions: every third frame
/// is diffed against the previously stored frame, thresholded and eroded, and the
/// resulting blobs (area 300..10000) are outlined in red in the main window.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
static void Main2(string[] args)
{
    CvCapture videoCapture = null;
    try
    {
        videoCapture = CvCapture.FromFile(FILE_NAME);
    }
    catch (Exception e)
    {
        Console.WriteLine("Unable to open file {0}", FILE_NAME);
        Console.WriteLine(e.ToString());
        Console.ReadKey();
    }
    // Bail out instead of dereferencing a null capture when the file failed to open
    // (the original fell through the catch and crashed on GetCaptureProperty).
    if (videoCapture == null)
    {
        return;
    }

    Cv.NamedWindow(MAIN_WINDOW_NAME, WindowMode.AutoSize);

    double rate = videoCapture.GetCaptureProperty(CvConst.CV_CAP_PROP_FPS);
    // Some containers report no FPS; fall back to 30 to avoid dividing by zero.
    int delay = (int)(1000 / (rate > 0 ? rate : 30));

    IplImage previousOriginalFrame = null;
    int counter = 0;
    while (true)
    {
        // QueryFrame returns a buffer owned by the capture — do not dispose it.
        var currentOriginalFrame = videoCapture.QueryFrame();
        if (currentOriginalFrame == null)
        {
            previousOriginalFrame?.Dispose();
            return;
        }

        if (previousOriginalFrame == null)
        {
            // First frame: just store it as the diff reference.
            previousOriginalFrame = Cv.CreateImage(currentOriginalFrame.Size, currentOriginalFrame.Depth, currentOriginalFrame.NChannels);
            currentOriginalFrame.Copy(previousOriginalFrame);
            continue;
        }

        counter++;
        if (counter % 3 != 0) // only analyse every third frame
        {
            continue;
        }

        //algo
        // using-blocks release the per-iteration images the original leaked.
        using (IplImage currentGrayImage = Cv.CreateImage(currentOriginalFrame.Size, currentOriginalFrame.Depth, 1))
        using (IplImage previousGrayImage = Cv.CreateImage(previousOriginalFrame.Size, previousOriginalFrame.Depth, 1))
        using (IplImage differenceBetweenFrames = Cv.CreateImage(currentOriginalFrame.Size, currentOriginalFrame.Depth, 1))
        using (IplImage currentFrameWithRedRects = Cv.CreateImage(currentOriginalFrame.Size, currentOriginalFrame.Depth, currentOriginalFrame.NChannels))
        {
            // Grayscale diff of current vs. previous frame, binarised and denoised.
            Cv.CvtColor(currentOriginalFrame, currentGrayImage, ColorConversion.RgbToGray);
            Cv.CvtColor(previousOriginalFrame, previousGrayImage, ColorConversion.RgbToGray);
            Cv.AbsDiff(previousGrayImage, currentGrayImage, differenceBetweenFrames);
            Cv.Threshold(differenceBetweenFrames, differenceBetweenFrames, 10, 255, ThresholdType.Binary);
            Cv.Erode(differenceBetweenFrames, differenceBetweenFrames);

            //finding blobs
            CvBlobs blobs = new CvBlobs(differenceBetweenFrames);
            blobs.FilterByArea(300, 10000);
            //blobs.Label(differenceBetweenFrames);

            currentOriginalFrame.Copy(currentFrameWithRedRects);
            foreach (var cvBlob in blobs)
            {
                Cv.Rectangle(currentFrameWithRedRects, cvBlob.Value.Rect, CvColor.Red, 4);
            }

            Console.WriteLine(blobs.Count);
            Cv.ShowImage(MAIN_WINDOW_NAME, currentFrameWithRedRects);
            Cv.ShowImage("Result", differenceBetweenFrames);
            Cv.WaitKey(delay * 4);

            // The current frame becomes the reference for the next comparison.
            currentOriginalFrame.Copy(previousOriginalFrame);
        }
    }
}
/// <summary>
/// Per-frame handler for marker-based mouse control: segments four HSV colour
/// markers, blob-detects each mask, moves the cursor with the green marker, and
/// synthesises clicks/scrolls via Win32 mouse_event from the red and blue markers.
/// </summary>
/// <param name="sender">Capture raising the event (unused).</param>
/// <param name="arg">Event arguments (unused).</param>
private void ProcessFrame(object sender, EventArgs arg)
{
    Mat frame = new Mat();
    capture.Retrieve(frame, 0);
    Image<Hsv, Byte> currenthsvFrame = (frame.ToImage<Bgr, Byte>()).Convert<Hsv, Byte>();
    // NOTE(review): these four allocations are immediately replaced by the InRange
    // results below, so the initial images are unused — confirm and remove later.
    Image<Gray, Byte> color_one = new Image<Gray, Byte>(frame.Width, frame.Height);
    Image<Gray, Byte> color_two = new Image<Gray, Byte>(frame.Width, frame.Height);
    Image<Gray, Byte> color_three = new Image<Gray, Byte>(frame.Width, frame.Height);
    Image<Gray, Byte> color_four = new Image<Gray, Byte>(frame.Width, frame.Height);

    /*
     * Color one is Red
     * Color two is Blue
     * Color three is Green
     * Color Four is Yellow
     * Green is in Right Index Finger
     * Blue is in Left Index Finger
     * Red in Right Thumb
     * Yelloe in Left Thumb
     */

    /* Hsv hsv_min_color_one = new Hsv(0, 135, 110);
     * Hsv hsv_max_color_one = new Hsv(6, 255, 255);
     * Hsv hsv_min_color_two = new Hsv(112, 53, 10);
     * Hsv hsv_max_color_two = new Hsv(119, 255, 255);
     * Hsv hsv_min_color_three = new Hsv(68, 59, 80);
     * Hsv hsv_max_color_three = new Hsv(85, 255, 255);
     * Hsv hsv_min_color_four = new Hsv(20, 165, 165);
     * Hsv hsv_max_color_four = new Hsv(36, 255, 255);*/

    // Hand-tuned HSV windows for each marker colour.
    Hsv hsv_min_color_one = new Hsv(0, 135, 50);
    //Hsv hsv_max_color_one = new Hsv(6, 255, 255);
    Hsv hsv_max_color_one = new Hsv(8, 255, 255);
    Hsv hsv_min_color_two = new Hsv(112, 53, 10);
    Hsv hsv_max_color_two = new Hsv(119, 255, 255);
    /*
     * Hsv hsv_min_color_three = new Hsv(68, 59, 80);
     * Hsv hsv_max_color_three = new Hsv(85, 255, 255);
     * Hsv hsv_min_color_four = new Hsv(20, 165, 165);
     * Hsv hsv_max_color_four = new Hsv(36, 255, 255);
     */
    Hsv hsv_min_color_three = new Hsv(65, 70, 0);
    Hsv hsv_max_color_three = new Hsv(109, 255, 255);
    Hsv hsv_min_color_four = new Hsv(18, 155, 155);
    Hsv hsv_max_color_four = new Hsv(35, 255, 255);

    // Binary mask per marker colour.
    color_one = currenthsvFrame.InRange(hsv_min_color_one, hsv_max_color_one);
    color_two = currenthsvFrame.InRange(hsv_min_color_two, hsv_max_color_two);
    color_three = currenthsvFrame.InRange(hsv_min_color_three, hsv_max_color_three);
    color_four = currenthsvFrame.InRange(hsv_min_color_four, hsv_max_color_four);

    //Blob detection
    #region Blob Detection
    //Color one detection
    Image<Bgr, Byte> smoothedFrame_cone = new Image<Bgr, byte>(currenthsvFrame.Size);
    CvInvoke.GaussianBlur(color_one, smoothedFrame_cone, new Size(3, 3), 1); //filter out noises
    Mat forgroundMask_cone = new Mat();
    fgDetector.Apply(smoothedFrame_cone, forgroundMask_cone);
    CvBlobs blobs_color_one = new CvBlobs();
    blobDetector.Detect(forgroundMask_cone.ToImage<Gray, byte>(), blobs_color_one);
    blobs_color_one.FilterByArea(minarea, maxarea);

    //Color two Blob Detection
    Image<Bgr, Byte> smoothedFrame_ctwo = new Image<Bgr, byte>(currenthsvFrame.Size);
    CvInvoke.GaussianBlur(color_two, smoothedFrame_ctwo, new Size(3, 3), 1); //filter out noises
    Mat forgroundMask_ctwo = new Mat();
    fgDetector.Apply(smoothedFrame_ctwo, forgroundMask_ctwo);
    CvBlobs blobs_color_two = new CvBlobs();
    blobDetector.Detect(forgroundMask_ctwo.ToImage<Gray, byte>(), blobs_color_two);
    blobs_color_two.FilterByArea(minarea, maxarea);

    //Color three blob detection
    Image<Bgr, Byte> smoothedFrame_cthree = new Image<Bgr, byte>(currenthsvFrame.Size);
    CvInvoke.GaussianBlur(color_three, smoothedFrame_cthree, new Size(3, 3), 1); //filter out noises
    Mat forgroundMask_cthree = new Mat();
    fgDetector.Apply(smoothedFrame_cthree, forgroundMask_cthree);
    CvBlobs blobs_color_three = new CvBlobs();
    blobDetector.Detect(forgroundMask_cthree.ToImage<Gray, byte>(), blobs_color_three);
    blobs_color_three.FilterByArea(minarea, maxarea);

    //Color four detection
    Image<Bgr, Byte> smoothedFrame_cfour = new Image<Bgr, byte>(currenthsvFrame.Size);
    CvInvoke.GaussianBlur(color_four, smoothedFrame_cfour, new Size(3, 3), 1); //filter out noises
    Mat forgroundMask_cfour = new Mat();
    fgDetector.Apply(smoothedFrame_cfour, forgroundMask_cfour);
    CvBlobs blobs_color_four = new CvBlobs();
    blobDetector.Detect(forgroundMask_cfour.ToImage<Gray, byte>(), blobs_color_four);
    blobs_color_four.FilterByArea(minarea, maxarea);
    #endregion

    //Makers Interpretition
    // Centroid (x, y) of each marker; green defaults to its last known position.
    float[] cent_color_one = new float[2];
    float[] cent_color_two = new float[2];
    float[] cent_color_three = new float[2];
    float[] cent_color_four = new float[2];
    cent_color_one[0] = 0; cent_color_one[1] = 0;
    cent_color_two[0] = 0; cent_color_two[1] = 0;
    cent_color_three[0] = green_history_x;
    cent_color_three[1] = green_history_y;
    cent_color_four[0] = 0; cent_color_four[1] = 0;

    //Corsor control with Green Marker
    if (blobs_color_three.Count == 1 || mouseflag != 0)
    {
        foreach (var pair in blobs_color_three)
        {
            CvBlob b = pair.Value;
            CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
            // Rolling history of recent cursor positions (fixed-length queue).
            cursor_history_x.Enqueue((int)b.Centroid.X);
            cursor_history_y.Enqueue((int)b.Centroid.Y);
            cursor_history_x.Dequeue();
            cursor_history_y.Dequeue();
            cent_color_three[0] = (int)b.Centroid.X;
            cent_color_three[1] = (int)b.Centroid.Y;
            /*int temp_sum = 0;
             * int[] temp = cursor_history_x.ToArray();
             * for (int i = 0; i < queue_cursor_length; i++)
             * temp_sum += temp[i];
             * cent_color_three[0] = temp_sum / queue_cursor_length;
             *
             * temp_sum = 0;
             * temp = cursor_history_y.ToArray();
             * for (int i = 0; i < queue_cursor_length; i++)
             * temp_sum += temp[i];
             * cent_color_three[1] = temp_sum / queue_cursor_length;
             *
             * green_history_x = (int)cent_color_three[0];
             * green_history_y = (int)cent_color_three[1];*/
        }

        //Cursor Movement Controlled
        //Primary Screem
        // if (Screen.AllScreens.Length == 1)
        {
            //Cursor.Position = new Point(Screen.PrimaryScreen.Bounds.Width - (int)(cursor_mul * (int)cent_color_three[0] * Screen.PrimaryScreen.Bounds.Width / capture.Width), (int)(cursor_mul * (int)cent_color_three[1]) * Screen.PrimaryScreen.Bounds.Height / capture.Height);
            // Map the camera-space centroid to screen coordinates.
            Cursor.Position = new Point((int)((cursor_mul_x * (int)cent_color_three[0]) * (Screen.PrimaryScreen.Bounds.Width) / capture.Width) + cursor_add_x, (((int)cursor_mul_y * (int)cent_color_three[1]) * Screen.PrimaryScreen.Bounds.Height / capture.Height) + cursor_add_y);
            //mouse_event(MOUSEEVENTF_MOVE, ( (-(int)cent_color_three[0] + green_history_x)), ( (-(int)cent_color_three[1] + green_history_y)),0,0);
            //mouse_event(MOUSEEVENTF_ABSOLUTE, 0, 0, 0, 0);
        }
        //Secondary Screen
        //Cursor.Position = new Point((int)(cursor_mul * (int)cent_color_three[0] * Screen.AllScreens[1].Bounds.Width / capture.Width), (int)(cursor_mul * (int)cent_color_three[1]) * Screen.AllScreens[1].Bounds.Height / capture.Height);

        //Number of Screen = 2 and both a same time
        /* if (Screen.AllScreens.Length == 2)
         * {
         *
         * Cursor.Position = new Point((int)(cursor_mul * (int)cent_color_three[0] * (Screen.AllScreens[1].Bounds.Width + Screen.AllScreens[0].Bounds.Width) / capture.Width),
         * (int)(cursor_mul * (int)cent_color_three[1]) * (Screen.AllScreens[1].Bounds.Height + Screen.AllScreens[0].Bounds.Height) / capture.Height);
         * }
         * //Number of screen =3 and all at same time
         * if (Screen.AllScreens.Length == 3)
         * {
         *
         * Cursor.Position = new Point((int)(cursor_mul * (int)cent_color_three[0] * (Screen.AllScreens[1].Bounds.Width + Screen.AllScreens[0].Bounds.Width + Screen.AllScreens[2].Bounds.Width) / capture.Width),
         * (int)(cursor_mul * (int)cent_color_three[1]) * (Screen.AllScreens[1].Bounds.Height + Screen.AllScreens[0].Bounds.Height + Screen.AllScreens[0].Bounds.Height) / capture.Height);
         * }
         */

        //Check for Clicks
        if (blobs_color_one.Count == 1)
        {
            foreach (var pair in blobs_color_one)
            {
                CvBlob b = pair.Value;
                CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
                cent_color_one[0] = b.Centroid.X;
                cent_color_one[1] = b.Centroid.Y;
            }
            if (blobs_color_three.Count == 0)
            {
                // Red visible but green hidden: count down to a double click.
                if (ccount == 1)
                {
                    //double click
                    // NOTE(review): MOUSEEVENTF_ABSOLUTE expects 0..65535 normalized
                    // coordinates, but camera-pixel centroids are passed — confirm.
                    mouse_event(MOUSEEVENTF_ABSOLUTE | MOUSEEVENTF_LEFTDOWN, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0);
                    mouse_event(MOUSEEVENTF_ABSOLUTE | MOUSEEVENTF_LEFTUP, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0);
                    Thread.Sleep(150);
                    mouse_event(MOUSEEVENTF_ABSOLUTE | MOUSEEVENTF_LEFTDOWN, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0);
                    mouse_event(MOUSEEVENTF_ABSOLUTE | MOUSEEVENTF_LEFTUP, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0);
                }
                else
                {
                    ccount--;
                }
            }
            else if ((cent_color_one[0] - cent_color_three[0]) * (cent_color_one[0] - cent_color_three[0]) + (cent_color_one[1] - cent_color_three[1]) * (cent_color_one[1] - cent_color_three[1]) <= 5000)
            {
                // Red and green markers close together (squared distance <= 5000):
                // arm the double-click countdown and fire a single click.
                ccount = safevalue;
                mouseflag = 1;
                //single click
                mouse_event(MOUSEEVENTF_LEFTDOWN, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0);
                mouse_event(MOUSEEVENTF_LEFTUP, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0);
                mouse_event(MOUSEEVENTF_ABSOLUTE, 0, 0, 0, 0);
            }
        }
        else
        {
            ccount = 0;
        }
    }

    if (blobs_color_two.Count == 1)
    {
        foreach (var pair in blobs_color_two)
        {
            CvBlob b = pair.Value;
            CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
            cent_color_two[0] = b.Centroid.X;
            cent_color_two[1] = b.Centroid.Y;
        }
        if (blobs_color_three.Count == 1 && ((cent_color_three[0] - cent_color_two[0]) * (cent_color_three[0] - cent_color_two[0]) + (cent_color_three[1] - cent_color_two[1]) * (cent_color_three[1] - cent_color_two[1]) <= 5000))
        {
            // Blue near green: right click.
            //right click
            mouse_event(MOUSEEVENTF_ABSOLUTE | MOUSEEVENTF_RIGHTDOWN, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0);
            mouse_event(MOUSEEVENTF_ABSOLUTE | MOUSEEVENTF_RIGHTUP, (uint)cent_color_three[0], (uint)cent_color_three[1], 0, 0);
        }
        else //if(blobs_g.Count == 0)
        {
            // Blue alone: scroll proportionally to its movement since last frame.
            //MessageBox.Show("d");
            //Cursor.Position = new Point(Screen.PrimaryScreen.Bounds.Width - (int)(cursor_mul * green_history_x * Screen.PrimaryScreen.Bounds.Width / capture.Width), (int)(cursor_mul * green_history_y) * Screen.PrimaryScreen.Bounds.Height / capture.Height);
            //mouse_event(MOUSEEVENTF_VWHEEL, 0, 0, (scroll_y - (int)cent_color_two[1]) * scroll_mul_v, 0);
            mouse_event(MOUSEEVENTF_HWHEEL, 0, 0, (uint)((scroll_x - (int)cent_color_two[0]) * scroll_mul_h), 0);
            mouse_event(MOUSEEVENTF_VWHEEL, (uint)Cursor.Position.X, (uint)Cursor.Position.Y, 50, 0);
            //mouse_event(MOUSEEVENTF_HWHEEL, 0, 0, 50, 0);
            scroll_y = (int)cent_color_two[1];
            scroll_x = (int)cent_color_two[0];
        }
    }

    // Show the annotated frame and the four marker masks.
    captureImageBox.Image = frame;
    grayscaleImageBox.Image = color_one;
    smoothedGrayscaleImageBox.Image = color_two;
    cannyImageBox.Image = color_three;
    Color4ImageBox.Image = color_four;
}
/// <summary>
/// Per-frame capture handler: retrieves a frame, optionally enhances it
/// (histogram equalisation / gamma / blur), detects faces (and optionally
/// bodies), updates the histogram control, and — when motion-anomaly detection
/// is enabled — draws contours, diffs frames, counts blobs, and raises
/// freeze/motion incidents.
/// </summary>
/// <param name="sender">The capture raising the event (unused).</param>
/// <param name="e">Event arguments (unused).</param>
// NOTE(review): the catch/finally for the try below is not visible in this
// chunk — the method appears truncated here; confirm the handler is complete
// in the full file.
private void capture_ImageGrabbed(object sender, EventArgs e)
{
    try
    {
        Mat iMat = new Mat();
        capture.Retrieve(iMat);
        img = iMat.ToImage<Bgr, byte>();
        FrameOld = img.Clone();
        Bitmap imgB = FrameOld.Bitmap;
        Gray_Frame = img.Convert<Gray, Byte>().Clone();

        // When enabled Equalize Hist and GammaCorrect adjust contrast and brithness
        if (EqualizeHist)
        {
            Gray_Frame._EqualizeHist(); // Equalize Histogram
        }
        if (GammaCorrect)
        {
            Gray_Frame._GammaCorrect(valCorrectGamma); // Correct Gamma
        }
        if (GaussianBlure)
        {
            CvInvoke.GaussianBlur(Gray_Frame, Gray_Frame, new Size(3, 3), 1);
        }
        grayImg.Image = Gray_Frame;

        // Haar-cascade face detection on the (possibly enhanced) gray frame.
        faces = _face.DetectMultiScale(Gray_Frame, 1.3, 5, new System.Drawing.Size(24, 24), System.Drawing.Size.Empty);
        /**/
        lblDetectedFaces.BeginInvoke((Action)(() =>
        { // This is used to update the label of Faces detected count
            int nbrFaces = faces.Length;
            lblDetectedFaces.Text = nbrFaces + "";
        }));
        foreach (Rectangle face in faces)
        {
            img.Draw(face, new Bgr(System.Drawing.Color.Blue), 2);
        }

        // Detecting bodies
        if (bodyDetection)
        {
            _bodies = _body.DetectMultiScale(Gray_Frame, 1.3, 5, new System.Drawing.Size(24, 24), System.Drawing.Size.Empty);
            foreach (Rectangle body in _bodies)
            {
                img.Draw(body, new Bgr(System.Drawing.Color.Red), 2);
            }
            lblBodies.BeginInvoke((Action)(() =>
            {// This is used to update the label of bodies detected count
                lblBodies.Text = _bodies.Length + "";
            }));
        }
        // captureImageBox.Image = img;

        // ##### Histogram
        Histo.Clear();
        Histo.Calculate(new Image<Gray, Byte>[] { Gray_Frame }, true, null);
        Mat hMat = new Mat();
        Histo.CopyTo(hMat);
        myHistogram.BeginInvoke((Action)(() =>
        {
            myHistogram.ClearHistogram();
            myHistogram.AddHistogram("Gray Histogram ", Color.Red, hMat, 256, new float[] { 0, 256 });
            myHistogram.Refresh();
        }));
        //########################

        //######## Fram diffrencess :detectMotionAnoramlities
        // NOTE(review): moveTowardsFilter is created but never used below — confirm.
        MoveTowards moveTowardsFilter = new MoveTowards();
        if (detectMotionAnoramlities)
        {
            #region Contour
            VectorOfVectorOfPoint polygon = new VectorOfVectorOfPoint(); // to draw the perimeter
            Image<Gray, byte> gray = img.Convert<Gray, byte>(); // convert source to gray
            Image<Gray, byte> thresh = gray.PyrDown().PyrUp(); // attempt to make edges more distinct?
            Image<Gray, byte> cannyImg = thresh.Canny(new Gray(10).Intensity, new Gray(50).Intensity);
            Mat tmpMat = new Mat();
            Image<Gray, byte> imOut = new Image<Gray, byte>(img.Width, img.Height, new Gray(0));
            CvInvoke.FindContours(cannyImg, polygon, tmpMat, RetrType.External, ChainApproxMethod.ChainApproxTc89Kcos);
            CvInvoke.DrawContours(imOut, polygon, -1, new MCvScalar(255, 0, 0), 2);
            pBoxContours.Image = imOut.Bitmap;
            #endregion

            // Background subtraction preview.
            Mat forgroundMask = new Mat();
            fgDetector.Apply(img, forgroundMask);
            pBoxBackground.Image = forgroundMask.Bitmap;

            // Percentage of the frame that changed since the previous one.
            f = motionDetector.ProcessFrame(imgB);
            lblDetectedMotions.BeginInvoke((Action)(() =>
            { // This is used to update the label of motion amount
                lblDetectedMotions.Text = f + "";
            }));
            if (f > 0.2f)
            {
                txtAlerts.BeginInvoke((Action)(() =>
                { // This is used to update the label of diff %
                    txtAlerts.Text += "+ Quick Motion detected or more than 2% of fram changed" + DateTime.Now + Environment.NewLine;
                }));
                taINCIDENTS.Insert("NA", "+ Quick Motion detected or more than 2% of fram changed", "INCIDENT TYPE 1", DateTime.Now.ToString());
            }
            /*
             *
             */

            // Blob Counter
            // NOTE(review): 'blobs' is filtered and its Count tested below, but no
            // Detect/Label call ever populates it — Count is always 0. Confirm intent.
            CvBlobs blobs = new CvBlobs();
            blobs.FilterByArea(100, int.MaxValue);
            BlobCounter blobcounter = new BlobCounter();
            blobcounter.MinHeight = 100;
            // NOTE(review): MaxWidth = 20 excludes blobs wider than 20 px, yet the
            // loop below requires rec.Width > 50 — these filters contradict; confirm.
            blobcounter.MaxWidth = 20;
            blobcounter.ObjectsOrder = ObjectsOrder.Size;
            blobcounter.ProcessImage(forgroundMask.Bitmap);
            Rectangle[] rect = blobcounter.GetObjectsRectangles();
            Graphics g = Graphics.FromImage(imgB);
            int BlobCPT = 0;
            if (rect.Length > 0)
            {
                Rectangle objec = rect[0];
                // NOTE(review): 'graphic' is created but never used or disposed.
                Graphics graphic = Graphics.FromImage(imgB);
                using (Pen pen = new Pen(Color.Red, 2))
                    foreach (Rectangle rec in rect)
                    {
                        if ((rec.Width > 50) && (rec.Height > 50))
                        {
                            g.DrawRectangle(pen, rec);
                            BlobCPT++;
                        }
                    }
                { }
                g.Dispose();
            }

            // Freeze detection: no motion, no blobs, no faces => possible freeze.
            if (f == 0.00f && blobs.Count == 0 && faces.Length == 0)
            {
                if (!wasInFreez && freezTriggered)
                {
                    freezTime = 0;
                    wasInFreez = true; // We suppose 0.5% and less as freez
                    txtAlerts.BeginInvoke((Action)(() =>
                    { // This is used to update the alerts text box
                        txtAlerts.Text += "+Freez detecte : - Time:" + DateTime.Now.ToString() + Environment.NewLine;
                    }));
                    taEVENT.Insert("+Freez detecte : - Time:", "FREEZ", DateTime.Now.ToString());
                }
                if (!wasInFreez && !freezTriggered && !TimerStarted)
                {
                    TimerStarted = true;
                    freezTimer.Start();
                }
            }
            else if (BlobCPT > 0 && f != 0.00f)
            {
                if (wasInFreez) // We need to stop the timer and send the notification message
                {
                    txtAlerts.BeginInvoke((Action)(() =>
                    { // This is used to update the alerts text box
                        txtAlerts.Text += "+Freez Stoped after : " + freezTime + " Second - Time:" + DateTime.Now.ToString() + Environment.NewLine;
                    }));
                    taEVENT.Insert("+Freez Stoped after : " + freezTime + " Second", "FREEZ STOP", DateTime.Now.ToString());
                    freezTimer.Stop();
                    TimerStarted = false;
                    wasInFreez = false;
                    freezTriggered = false;
                }
            }
            pBox.Image = imgB;
        }
        //#######################
        Application.DoEvents(); // Allow app to run other threads
    }
private void ProcessFrame(object sender, EventArgs arg)
{
    // Per-frame handler: isolates red/green/blue marker blobs in the camera
    // frame and moves the mouse cursor to follow the green marker.
    // NOTE(review): cent_r and cent_b are declared but only cent_g is used by
    // the live code — the click/scroll logic that used them is commented out.
    Mat frame = new Mat();
    capture.Retrieve(frame, 0);
    Mat grayFrame = new Mat();
    CvInvoke.CvtColor(frame, grayFrame, ColorConversion.Bgr2Gray);
    //Mat smallGrayFrame = new Mat();
    //CvInvoke.PyrDown(grayFrame, smallGrayFrame);
    //Mat smoothedGrayFrame = new Mat();
    //CvInvoke.PyrUp(smallGrayFrame, smoothedGrayFrame);

    //Image<Gray, Byte> smallGrayFrame = grayFrame.PyrDown();

    //Image<Gray, Byte> smoothedGrayFrame = smallGrayFrame.PyrUp();

    //Mat cannyFrame = new Mat();
    //CvInvoke.Canny(smoothedGrayFrame, cannyFrame, 100, 60);

    //Image<Gray, Byte> cannyFrame = smoothedGrayFrame.Canny(100, 60);

    // Channel emphasis: subtracting the gray image from a single color channel
    // keeps only pixels where that channel dominates; the per-color threshold
    // (redThres / blueThres / greenThres) then yields a binary marker mask.
    Image <Bgra, Byte> _frame = frame.ToImage <Bgra, Byte>();
    Image <Gray, Byte> _grayFrame = grayFrame.ToImage <Gray, Byte>();
    Image <Gray, Byte>[] rgb_frame = _frame.Split();              //components of rgb image
    Image <Gray, Byte> red_com = rgb_frame[2] - _grayFrame;
    var red_bi = red_com.Convert <Gray, byte>().ThresholdBinary(new Gray(redThres), new Gray(255));
    Image <Gray, Byte> blue_com = rgb_frame[0] - _grayFrame;
    var blue_bi = blue_com.Convert <Gray, byte>().ThresholdBinary(new Gray(blueThres), new Gray(255));
    Image <Gray, Byte> green_com = rgb_frame[1] - _grayFrame;
    var green_bi = green_com.Convert <Gray, byte>().ThresholdBinary(new Gray(greenThres), new Gray(255));
    //System.Windows.Forms.MessageBox.Show("");

    ///////////////////////////////////////////////////////////////////////////////////
    //Blob detection

    //Red Blob detection
    Image <Bgr, Byte> smoothedFrame_r = new Image <Bgr, byte>(red_com.Size);
    CvInvoke.GaussianBlur(red_bi, smoothedFrame_r, new Size(3, 3), 1); //filter out noises
    Mat forgroundMask_r = new Mat();
    fgDetector.Apply(smoothedFrame_r, forgroundMask_r);
    CvBlobs blobs_r = new CvBlobs();
    blobDetector.Detect(forgroundMask_r.ToImage <Gray, byte>(), blobs_r);
    blobs_r.FilterByArea(minarea, maxarea);

    //blue Blob Detection
    Image <Bgr, Byte> smoothedFrame_b = new Image <Bgr, byte>(red_com.Size);
    CvInvoke.GaussianBlur(blue_bi, smoothedFrame_b, new Size(3, 3), 1); //filter out noises
    Mat forgroundMask_b = new Mat();
    fgDetector.Apply(smoothedFrame_b, forgroundMask_b);
    CvBlobs blobs_b = new CvBlobs();
    blobDetector.Detect(forgroundMask_b.ToImage <Gray, byte>(), blobs_b);
    blobs_b.FilterByArea(minarea, maxarea);

    //Green blob detection
    Image <Bgr, Byte> smoothedFrame_g = new Image <Bgr, byte>(red_com.Size);
    CvInvoke.GaussianBlur(green_bi, smoothedFrame_g, new Size(3, 3), 1); //filter out noises
    Mat forgroundMask_g = new Mat();
    fgDetector.Apply(smoothedFrame_g, forgroundMask_g);
    CvBlobs blobs_g = new CvBlobs();
    blobDetector.Detect(forgroundMask_g.ToImage <Gray, byte>(), blobs_g);
    blobs_g.FilterByArea(minarea, maxarea);

    //Mouse Interpretition
    // Centroid of the most recently seen blob of each color, as [x, y].
    float[] cent_r = new float[2];
    float[] cent_g = new float[2];
    float[] cent_b = new float[2];

    //Corsor control with Green Marker
    // The loop overwrites cent_g each iteration, so the LAST green blob wins.
    foreach (var pair in blobs_g)
    {
        CvBlob b = pair.Value;
        CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
        cent_g[0] = b.Centroid.X;
        cent_g[1] = b.Centroid.Y;
    }

    if (blobs_g.Count == 1 || mouseflag != 0)
    {
        //Cursor Movement Controlled
        //Primary Screem
        //if (Screen.AllScreens.Length == 1)
        {
            // Map the marker centroid from camera coordinates to screen
            // coordinates; X is mirrored against the screen width.
            Cursor.Position = new Point(Screen.PrimaryScreen.Bounds.Width - (int)(cursor_mul * (int)cent_g[0] * Screen.PrimaryScreen.Bounds.Width / capture.Width),
                (int)(cursor_mul * (int)cent_g[1]) * Screen.PrimaryScreen.Bounds.Height / capture.Height);
        }
        //Secondary Screen
        //Cursor.Position = new Point((int)(cursor_mul * (int)cent_g[0] * Screen.AllScreens[1].Bounds.Width / capture.Width), (int)(cursor_mul * (int)cent_g[1]) * Screen.AllScreens[1].Bounds.Height / capture.Height);

        //Number of Screen = 2 and both a same time
        /* if (Screen.AllScreens.Length == 2)
         * {
         *
         *  Cursor.Position = new Point((int)(cursor_mul * (int)cent_g[0] * (Screen.AllScreens[1].Bounds.Width + Screen.AllScreens[0].Bounds.Width) / capture.Width),
         *      (int)(cursor_mul * (int)cent_g[1]) * (Screen.AllScreens[1].Bounds.Height + Screen.AllScreens[0].Bounds.Height) / capture.Height);
         * }
         * //Number of screen =3 and all at same time
         * if (Screen.AllScreens.Length == 3)
         * {
         *
         *  Cursor.Position = new Point((int)(cursor_mul * (int)cent_g[0] * (Screen.AllScreens[1].Bounds.Width + Screen.AllScreens[0].Bounds.Width + Screen.AllScreens[2].Bounds.Width) / capture.Width),
         *      (int)(cursor_mul * (int)cent_g[1]) * (Screen.AllScreens[1].Bounds.Height + Screen.AllScreens[0].Bounds.Height + Screen.AllScreens[0].Bounds.Height) / capture.Height);
         * }
         */

        /*
         * //Check for Clicks
         * if (blobs_r.Count == 1)
         * {
         *  if(blobs_g.Count == 0)
         *  {
         *      if(ccount == 1)
         *      {
         *          //double click
         *          mouse_event(MOUSEEVENTF_LEFTDOWN, (int)cent_g[0], (int)cent_g[1], 0, 0);
         *          mouse_event(MOUSEEVENTF_LEFTUP, (int)cent_g[0], (int)cent_g[1], 0, 0);
         *          Thread.Sleep(150);
         *          mouse_event(MOUSEEVENTF_LEFTDOWN, (int)cent_g[0], (int)cent_g[1], 0, 0);
         *          mouse_event(MOUSEEVENTF_LEFTUP, (int)cent_g[0], (int)cent_g[1], 0, 0);
         *      }
         *      else
         *      {
         *          ccount--;
         *      }
         *  }
         *
         *  else if ((cent_g[0] - cent_r[0] >= 10 || cent_r[0] - cent_g[0] <= 10) && (cent_g[1] - cent_r[1] >= 10 || cent_r[1] - cent_g[1] <= 10))
         *  {
         *      ccount = safevalue;
         *      mouseflag = 1;
         *      //single click
         *      mouse_event(MOUSEEVENTF_LEFTDOWN, (int)cent_g[0], (int)cent_g[1], 0, 0);
         *      mouse_event(MOUSEEVENTF_LEFTUP, (int)cent_g[0], (int)cent_g[1], 0, 0);
         *  }
         * }
         * else
         * {
         *  ccount = 0;
         *
         * }
         *
         * }
         *
         * if (blobs_b.Count == 1)
         * {
         *  foreach (var pair in blobs_b)
         *  {
         *      CvBlob b = pair.Value;
         *      CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
         *      cent_b[0] = b.Centroid.X;
         *      cent_b[1] = b.Centroid.Y;
         *  }
         *
         *  if (blobs_g.Count == 1 && (cent_g[0] - cent_b[0] >= 10 || cent_b[0] - cent_g[0] <= 10) && (cent_g[1] - cent_b[1] >= 10 || cent_b[1] - cent_g[1] <= 10))
         *  {
         *      //right click
         *      mouse_event(MOUSEEVENTF_RIGHTDOWN, (int)cent_g[0], (int)cent_g[1], 0, 0);
         *      mouse_event(MOUSEEVENTF_RIGHTUP, (int)cent_g[0], (int)cent_g[1], 0, 0);
         *  }
         *
         *  else if(blobs_g.Count == 0)
         *  {
         *      mouse_event(MOUSEEVENTF_VWHEEL, 0, 0, (scroll_y - (int)cent_b[1]) * scroll_mul_v, 0);
         *      mouse_event(MOUSEEVENTF_HWHEEL, 0, 0, (scroll_x - (int)cent_b[0]) * scroll_mul_h, 0);
         *      scroll_y = (int)cent_b[1];
         *      scroll_x = (int)cent_b[0];
         *
         *  }
         */
    }

    // Show the annotated frame plus the three binary marker masks.
    captureImageBox.Image = frame;
    grayscaleImageBox.Image = red_bi;
    smoothedGrayscaleImageBox.Image = green_bi;
    cannyImageBox.Image = blue_bi;
}
public CameraTrackingFindSubjectsReturnModel FindSubjects()
{
    // Detect motion regions in the latest foreground mask, feed them to the
    // tracker, and return the largest qualifying tracked region (if any),
    // drawing every candidate onto the last frame as a side effect.
    var bestBox = new Rectangle();
    double bestW = 0;
    double bestH = 0;
    double cx = 0;
    double cy = 0;
    bool found = false;

    // get detection 'blobs' or regions, dropping anything under 100 px
    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(this.lastMask.ToImage <Gray, byte>(), blobs);
    blobs.FilterByArea(100, int.MaxValue);

    // NOTE(review): width is summed with itself — (Width + Height) may have
    // been intended; preserved as-is to keep behavior unchanged.
    float scale = (this.lastFrame.Width + this.lastFrame.Width) / 2.0f;
    _tracker.Update(blobs, 0.01 * scale, 5, 5);

    FrameWidth = this.lastFrame.Width;
    FrameHeight = this.lastFrame.Height;

    foreach (var entry in _tracker)
    {
        CvTrack track = entry.Value;
        Rectangle box = track.BoundingBox;

        // Skip regions outside the configured size window: too small in either
        // dimension, or too large in both.
        bool tooSmall = box.Width < (this.lastFrame.Width / this.smallestDetectionWidthSizeDivisor)
                        || box.Height < (this.lastFrame.Height / this.smallestDetectionHeightSizeDivisor);
        bool tooLarge = box.Width > (this.lastFrame.Width / this.largestDetectionWidthSizeDivisor)
                        && box.Height > (this.lastFrame.Height / this.largestDetectionHeightSizeDivisor);
        if (tooSmall || tooLarge)
        {
            continue;
        }

        var labelPos = new Point((int)Math.Round(track.Centroid.X), (int)Math.Round(track.Centroid.Y));
        var white = new MCvScalar(255.0, 255.0, 255.0);

        if (box.Width > bestW)
        {
            // New widest region so far: remember it and draw a heavy border.
            bestBox = box;
            bestW = box.Width;
            bestH = box.Height;
            cx = track.Centroid.X;
            cy = track.Centroid.Y;
            CvInvoke.Rectangle(this.lastFrame, box, white, 20);
            CvInvoke.PutText(this.lastFrame, track.Id.ToString(), labelPos, FontFace.HersheyPlain, 1.0, white);
            found = true;
        }
        else
        {
            // Smaller region: draw with a thin border only.
            CvInvoke.Rectangle(this.lastFrame, box, white, 1);
            CvInvoke.PutText(this.lastFrame, track.Id.ToString(), labelPos, FontFace.HersheyPlain, 1.0, white);
        }
    }

    return new CameraTrackingFindSubjectsReturnModel()
    {
        CenterX = cx,
        CenterY = cy,
        BoundingBox = bestBox,
        FoundSubject = found
    };
}
private void ExtractBlobAndCrop(Image <Gray, byte> skin)
{
    // Find the largest skin-colored blob, outline it on the current frame,
    // crop it into a square bitmap, and classify the crop as a gesture.
    using (MemStorage storage = new MemStorage())
    {
        Image <Gray, Byte> smoothedFrame = new Image <Gray, byte>(skin.Size);
        CvInvoke.GaussianBlur(skin, smoothedFrame, new Size(3, 3), 1); //filter out noises

        imageBoxFrameGrabber.Image = skin;

        Mat forgroundMask = new Mat();
        // FIX: previously a throwaway `new Mat()` was allocated and immediately
        // replaced by skin.Mat; use the frame's Mat directly.
        Mat ss = skin.Mat;
        //grabber.Retrieve(ss);
        fgDetector.Apply(ss, forgroundMask);
        //imageBox1.Image = forgroundMask;

        // Blob detection runs on the skin mask itself; keep hand-sized areas.
        CvBlobs blobs = new CvBlobs();
        //blobDetector.Detect(forgroundMask.ToImage<Gray, byte>(), blobs);
        blobDetector.Detect(skin, blobs);
        blobs.FilterByArea(30000, 150000);

        // Select the blob with the largest area (b stays null if none qualify).
        CvBlob b = null;
        int area = 0;
        foreach (var pair in blobs)
        {
            CvBlob btemp = pair.Value;
            if (area < btemp.Area)
            {
                b = pair.Value;
                area = btemp.Area;
            }
        }

        //Crop LArgest Blob
        Bitmap skin_bit = skin.ToBitmap();
        try
        {
            //MessageBox.Show("" + area);
            if (area != 0)
            {
                CvInvoke.Rectangle(currentFrame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
                //Rectangle rec = new Rectangle(b.BoundingBox.X, b.BoundingBox.Y, b.BoundingBox.Width, b.BoundingBox.Height);

                // Square crop whose side is max(width, height) of the blob box.
                int side = (b.BoundingBox.Width > b.BoundingBox.Height ? b.BoundingBox.Width : b.BoundingBox.Height);
                using (Bitmap crop_image = new Bitmap(side, side))
                {
                    //Bitmap crop_image = skin_bit.Clone(rec, skin_bit.PixelFormat);
                    // FIX: the Graphics object was never disposed (GDI handle leak).
                    using (Graphics g = Graphics.FromImage(crop_image))
                    {
                        // Shift the source so the blob's top-left lands at (0, 0).
                        g.DrawImage(skin_bit, -b.BoundingBox.X, -b.BoundingBox.Y);
                        //g.DrawImage(skin_bit, -50, -50);
                    }

                    croped = new Image <Gray, Byte>(crop_image).Resize(350, 350, Inter.Cubic);
                    croped1 = new Image <Gray, Byte>(crop_image).Resize(100, 100, Inter.Cubic);
                    croped2 = new Image <Gray, Byte>(crop_image).Resize(50, 50, Inter.Cubic);

                    // Classify the 50x50 crop and show the result.
                    int gesture_number = fow_prop.image(croped2);
                    label1.Text = "" + gesture_number;
                    imageBoxSkin.Image = croped;
                }
            }
        }
        finally
        {
            // FIX: skin_bit was leaked whenever no blob passed the area filter.
            skin_bit.Dispose();
        }
    }
}
private void timer_process_Tick(object sender, EventArgs e)
{
    // One processing pass: screenshot the primary screen, locate the game
    // window via two blob-detection passes, slice the 8x8 board into cells,
    // classify each cell, then hand the board state to the movement logic.
    timer_process.Enabled = false; // re-enabled at every exit path below

    // Capture the whole primary screen and round-trip it through "4.png".
    // NOTE(review): bitmap, graphics and mat are never disposed, and the disk
    // round-trip looks avoidable — confirm whether "4.png" is needed elsewhere.
    Bitmap bitmap = new Bitmap(Screen.PrimaryScreen.Bounds.Width, Screen.PrimaryScreen.Bounds.Height);
    Graphics graphics = Graphics.FromImage(bitmap as Image);
    graphics.CopyFromScreen(0, 0, 0, 0, bitmap.Size);
    bitmap.Save("4.png");
    // NOTE(review): Millisecond wraps every second, so Ticks2 - Ticks below can
    // go negative; Stopwatch would be the reliable way to time this.
    long Ticks = DateTime.Now.Millisecond;
    Mat mat = new Mat("4.png", Emgu.CV.CvEnum.LoadImageType.Color);
    Image <Bgr, Byte> imgBgr = mat.ToImage <Bgr, Byte>();
    Image <Gray, Byte> imgGray = mat.ToImage <Gray, Byte>();
    int nWid = imgGray.Width;
    int nHei = imgGray.Height;

    // Binarize: near-black pixels (value <= 5) become 255, all others 0.
    byte[,,] pData = imgGray.Data;
    for (int y = 0; y < nHei; y++)
    {
        for (int x = 0; x < nWid; x++)
        {
            byte c = pData[y, x, 0];
            if (c > 5)
            {
                pData[y, x, 0] = 0;
            }
            else
            {
                pData[y, x, 0] = 255;
            }
        }
    }

    // First blob pass: find the dark region taken to be the game window.
    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(imgGray, blobs);
    blobs.FilterByArea(100, int.MaxValue);
    //_tracker.Process(smoothedFrame, forgroundMask);
    if (blobs.Count < 1)
    {
        timer_process.Enabled = true;
        return;
    }
    //-------------------------------
    Rectangle rc = Rectangle.Empty;
    foreach (var pair in blobs)
    {
        CvBlob b = pair.Value;
        rc = b.BoundingBox; // only the first blob is used
        //CvInvoke.Rectangle(imgBgr, b.BoundingBox, new MCvScalar(255.0, 0, 0), 2);
        break;
    }

    // -------Detect Blue Region ---- /
    // Second pass: inside that rectangle, keep pixels whose gray value falls
    // in [100, 120] (the "blue region" per the original comment) and re-detect.
    imgGray = imgBgr.Convert <Gray, Byte>();
    pData = imgGray.Data;
    for (int y = 0; y < nHei; y++)
    {
        for (int x = 0; x < nWid; x++)
        {
            if (!rc.Contains(x, y))
            {
                pData[y, x, 0] = 0; // outside the window: always background
                continue;
            }
            byte c = pData[y, x, 0];
            if (c >= 100 && c <= 120)
            {
                pData[y, x, 0] = 255;
            }
            else
            {
                pData[y, x, 0] = 0;
            }
        }
    }
    blobs.Clear();
    _blobDetector.Detect(imgGray, blobs);
    blobs.FilterByArea(100, int.MaxValue);
    //_tracker.Process(smoothedFrame, forgroundMask);
    if (blobs.Count < 1)
    {
        timer_process.Enabled = true;
        return;
    }
    //-------------------------------
    // Keep the largest blue blob's bounding box as the reference ROI.
    rc = Rectangle.Empty;
    int nSizeMax = 0;
    foreach (var pair in blobs)
    {
        CvBlob b = pair.Value;
        if (b.BoundingBox.Width * b.BoundingBox.Height > nSizeMax)
        {
            rc = b.BoundingBox;
            nSizeMax = rc.Width * rc.Height;
        }
        //break;
    }
    CvInvoke.Rectangle(imgBgr, rc, new MCvScalar(255, 255, 0), 2);
    Global.g_rcROI = rc;

    // Fixed layout offsets of the main board relative to the detected ROI,
    // rescaled by the ROI's actual size.
    Global.DEF_MAIN_BOARD_X = 238;
    Global.DEF_MAIN_BOARD_Y = 42;
    Global.DEF_MAIN_BOARD_W = 570;
    Global.DEF_MAIN_BOARD_H = 570;
    int nGameBoardX = Global.DEF_MAIN_BOARD_X + rc.X;
    int nGameBoardY = Global.DEF_MAIN_BOARD_Y + rc.Y;
    Global.GetRatioCalcedValues(rc.Width, rc.Height, ref nGameBoardX, ref nGameBoardY);
    Global.GetRatioCalcedValues(rc.Width, rc.Height, ref Global.DEF_MAIN_BOARD_W, ref Global.DEF_MAIN_BOARD_H);
    CvInvoke.Rectangle(imgBgr, new Rectangle(nGameBoardX, nGameBoardY, Global.DEF_MAIN_BOARD_W, Global.DEF_MAIN_BOARD_H), new MCvScalar(255, 255, 0), 2);

    // Marks panel: same offset-plus-rescale treatment.
    Global.DEF_MARKS_X = 15;
    Global.DEF_MARKS_Y = 204;
    Global.DEF_MARKS_W = 189;
    Global.DEF_MARKS_H = 69;
    int nMarksX = Global.DEF_MARKS_X + rc.X;
    int nMarksY = Global.DEF_MARKS_Y + rc.Y;
    Global.GetRatioCalcedValues(rc.Width, rc.Height, ref nMarksX, ref nMarksY);
    Global.GetRatioCalcedValues(rc.Width, rc.Height, ref Global.DEF_MARKS_W, ref Global.DEF_MARKS_H);
    CvInvoke.Rectangle(imgBgr, new Rectangle(nMarksX, nMarksY, Global.DEF_MARKS_W, Global.DEF_MARKS_H), new MCvScalar(255, 255, 0), 2);

    // Slice the board into an 8x8 grid of cell images.
    int nStepX = Global.DEF_MAIN_BOARD_W / 8;
    int nStepY = Global.DEF_MAIN_BOARD_H / 8;
    var rois = new List <Rectangle>();                  // List of rois
    var imageparts = new List <Image <Bgr, byte> >();   // List of extracted image parts
    for (int i = 0; i < 8; i++)
    {
        for (int j = 0; j < 8; j++)
        {
            Rectangle roi = new Rectangle(nGameBoardX + j * nStepX, nGameBoardY + i * nStepY, Global.DEF_ITEM_W, Global.DEF_ITEM_H);
            rois.Add(roi);
            imgBgr.ROI = roi;
            imageparts.Add(imgBgr.Copy());
        }
    }
    imgBgr.ROI = Rectangle.Empty;

    // Classify every cell; a 0 result means "unrecognized" and blocks the
    // movement decision this tick.
    m_LstCharacter.Clear();
    bool bCanProcess = true;
    int k = 0, nRow = 0, nCol = 0;
    foreach (Image <Bgr, Byte> img in imageparts)
    {
        int nCharac = (int)ImageMatcher.DetermineCharacter(img);
        m_LstCharacter.Add(nCharac);
        MovementDecision.g_AllocCharacters[nRow, nCol] = nCharac;
        nCol++;
        if (nCol >= 8)
        {
            nRow++;
            nCol = 0;
        }
        //if (nCharac != 0)
        CvInvoke.Rectangle(imgBgr, rois[k], new MCvScalar(255, 255, 0), 2);
        //CvInvoke.Rectangle(imgBgr, rois[k], cols[nCharac - 1], 2);
        if (nCharac == 0)
        {
            bCanProcess = false;
        }
        k++;
    }

    // Dump the 8x8 classification grid into the list box for debugging.
    string szLine = "";
    lstBox.Items.Clear();
    for (int i = 0; i < 8; i++)
    {
        szLine = "";
        for (int j = 0; j < 8; j++)
        {
            szLine += "" + MovementDecision.g_AllocCharacters[i, j] + " ";
        }
        lstBox.Items.Add(szLine);
    }
    //imgBgr.Save("processed.png");
    picScr.Image = imgBgr.Bitmap;
    if (!bCanProcess)
    {
        timer_process.Enabled = true;
        return;
    }

    MovementDecision.Process();

    long Ticks2 = DateTime.Now.Millisecond;
    lbProcessTime.Text = "" + (Ticks2 - Ticks);
    timer_process.Enabled = true;
}
/// <summary>
/// Labels the connected components of the source image and keeps only the
/// region with the largest area, rendering it into the destination image.
/// (Original comment: ラベリングにより最大の面積の領域を残す.)
/// </summary>
/// <param name="imgSrc">Binary source image to label.</param>
/// <param name="imgDst">Destination image; cleared first, then receives only the largest blob.</param>
private void FilterByMaximalBlob(IplImage imgSrc, IplImage imgDst)
{
    imgDst.Zero();

    var blobs = new CvBlobs();
    blobs.Label(imgSrc);

    // No labeled regions at all: leave the destination blank.
    CvBlob largest = blobs.GreaterBlob();
    if (largest == null)
    {
        return;
    }

    // Restrict the blob set to exactly the maximal area, then render it.
    blobs.FilterByArea(largest.Area, largest.Area);
    blobs.FilterLabels(imgDst);
}
void ProcessFrame(object sender, EventArgs e)
{
    // Per-frame handler: background-subtract and blob-track the camera frame,
    // Haar-detect faces, call the remote Face API to identify each face, and
    // draw the identification results onto the frame with GDI+.
    Mat frame = _cameraCapture.QueryFrame();
    Mat smoothedFrame = new Mat();
    CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(3, 3), 1); //filter out noises
    //frame._SmoothGaussian(3);

    #region use the BG/FG detector to find the forground mask
    Mat forgroundMask = new Mat();
    _fgDetector.Apply(smoothedFrame, forgroundMask);
    #endregion

    CvBlobs blobs = new CvBlobs();
    _blobDetector.Detect(forgroundMask.ToImage <Gray, byte>(), blobs);
    blobs.FilterByArea(100, int.MaxValue);

    // NOTE(review): width is added to itself here — (Width + Height) may have
    // been intended; kept as-is.
    float scale = (frame.Width + frame.Width) / 2.0f;
    _tracker.Update(blobs, 0.01 * scale, 5, 5);

    long detectionTime;
    List <Rectangle> faces = new List <Rectangle>();
    List <Rectangle> eyes = new List <Rectangle>();
    IImage image = (IImage)frame; // this cast is the key step (original: 这一步是重点)
    faceImage = frame.Bitmap;
    DetectFace.Detect(image , "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes, out detectionTime);

    #region multi-person identification — has a high error rate (works well only when the image library is high-definition)
    //Graphics g1 = Graphics.FromImage(frame.Bitmap);
    //List<FaceIdentifyModel> tempList = new List<FaceIdentifyModel>();
    //foreach (Rectangle face in faces)
    //{
    //    Image rectImage1 = ImageHelper.CaptureImage(frame.Bitmap, face);
    //    FaceIdentifyModel MoreIdentifyInfo = FaceAPI.FaceIdentify(rectImage1, tb_Group.Text.Trim(), 1, 1); // face identification works best on a single person
    //    MoreIdentifyInfo.rect = face;
    //    tempList.Add(MoreIdentifyInfo);
    //}
    //Color color_of_pen1 = Color.Gray;
    //color_of_pen1 = Color.Yellow;
    //Pen pen1 = new Pen(color_of_pen1, 2.0f);
    //Font font1 = new Font("微软雅黑", 16, GraphicsUnit.Pixel);
    //SolidBrush drawBrush1 = new SolidBrush(Color.Yellow);
    //tb_Identify.Text = tempList.ToJson();
    //foreach (var t in tempList)
    //{
    //    g1.DrawRectangle(pen1, t.rect);
    //    if (t.result != null)
    //    {
    //        g1.DrawString(t.result[0].user_info.Replace(",", "\r\n"), font1, drawBrush1, new Point(t.rect.X + 20, t.rect.Y - 20));
    //    }
    //}
    #endregion

    #region single-person identification
    // Single-person face identification; quality degrades with several people.
    foreach (Rectangle face in faces)
    {
        #region draw manually so we can render our own (Chinese-capable) text box
        // NOTE(review): Graphics, Pen, Font and SolidBrush created per face are
        // never disposed — GDI handle leak under sustained use.
        Graphics g = Graphics.FromImage(frame.Bitmap);
        ImageModel tempImage = new ImageModel();
        tempImage.Rect = face;
        tempImage.Image = frame.Bitmap;
        // the Face API query is slow
        //string faceInfo = FaceAPI.FaceDetect(ImageHelper.CaptureImage(frame.Bitmap, face)); // face detection
        Image rectImage = ImageHelper.CaptureImage(frame.Bitmap, face);
        FaceIdentifyModel IdentifyInfo = FaceAPI.FaceIdentify(rectImage, tb_Group.Text.Trim(), 1, 1); // face identification works best on a single person
        // tb_Result.Text = faceInfo;
        tb_Identify.Text = IdentifyInfo.ToJson().ToString();
        // draw with the GDI+ canvas
        Color color_of_pen = Color.Gray;
        color_of_pen = Color.Yellow;
        Pen pen = new Pen(color_of_pen, 2.0f);
        Rectangle rect = face;
        g.DrawRectangle(pen, rect);
        Font font = new Font("微软雅黑", 16, GraphicsUnit.Pixel);
        SolidBrush drawBrush = new SolidBrush(Color.Yellow);
        if (IdentifyInfo != null)
        {
            if (IdentifyInfo.result != null)
            {
                for (int i = 0; i < IdentifyInfo.result.Count; i++)
                {
                    string faceInfo = "";
                    faceInfo = IdentifyInfo.result[i].user_info.Replace(",", "\r\n");
                    // show the user info next to the face rectangle
                    g.DrawString(faceInfo, font, drawBrush, new Point(face.X + 20, face.Y - 20));
                }
            }
        }
        //CvInvoke.Rectangle(frame, face, new MCvScalar(255.0, 255.0, 255.0), 2);
        //CvInvoke.PutText(frame, faceInfo, new Point(face.X + 20, face.Y - 20), FontFace.HersheyPlain, 1.0, new MCvScalar(255.0, 255.0, 255.0));

        // save the original screenshot
        //System.Drawing.Image ResourceImage = frame.Bitmap;
        //ResourceImage.Save(saveDir + saveFileName);

        // worker queue: persist the face-recognition snapshot off-thread
        QueueHelper.WriteImage(tempImage);

        //t1 = new Thread(new ThreadStart(() =>
        //{
        //    faceInfo = FaceAPI.FaceDetect(ImageHelper.CaptureImage(frame.Bitmap, face));
        //    this.Invoke(new Action(() =>
        //    {
        //        g.DrawString(faceInfo, font, drawBrush, new Point(face.X + 20, face.Y - 20));
        //    }));
        //}));
        //t1.IsBackground = true;
        //t1.Start();
        #endregion
    }
    #endregion

    #region legacy drawing through OpenCV (PutText cannot render Chinese characters)
    //foreach (var pair in _tracker)
    //{
    //    CvTrack b = pair.Value;
    //    #region draw the text box directly via OpenCV on the video frame
    //    CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
    //    CvInvoke.PutText(frame, "man,show", new Point((int)Math.Round(b.Centroid.X), (int)Math.Round(b.Centroid.Y)), FontFace.HersheyPlain, 1.0, new MCvScalar(255.0, 255.0, 255.0));
    //    if (b.BoundingBox.Width < 100 || b.BoundingBox.Height < 50)
    //    {
    //        continue;
    //    }
    //    #endregion
    //}
    #endregion

    imageBox1.Image = frame;
    imageBox2.Image = forgroundMask;
}
private void ProcessFrame(object sender, EventArgs arg)
{
    // Per-frame pipeline: blur, background-subtract, blob-detect, then route
    // the results to whichever stage window (mEtapa) is currently active.
    Mat frame = _capture.QueryFrame();

    // A null frame means the source is exhausted: finish the mapping stage if
    // needed, release the capture, and stop.
    if (frame == null)
    {
        if (mEtapa == 1)
        {
            preencherParametrosMapeamento();
        }
        _capture.Dispose();
        return;
    }

    mContadorDeFrames++;
    if (mEtapa == 0)
    {
        verificarEatualizarParametrosCalibracao();
    }

    _capture.Retrieve(frame, 0);

    // Gaussian blur (kernel and sigma driven by AlphaMediaMovel) to cut noise.
    var blurred = new Image<Bgr, byte>(frame.Size);
    CvInvoke.GaussianBlur(frame, blurred, new Size(parametros.AlphaMediaMovel, parametros.AlphaMediaMovel), parametros.AlphaMediaMovel);

    // Foreground mask from the background-subtraction detector.
    var foreground = new Mat();
    mDetector.Apply(blurred, foreground);
    //CvInvoke.AbsDiff(blurred, foreground.ToImage<Bgr, byte>(), vPlanoFundo);

    // Blob detection on the binary mask, dropping regions under 100 px.
    mblobs = new CvBlobs();
    mBlobDetector.Detect(foreground.ToImage<Gray, byte>(), mblobs);
    mblobs.FilterByArea(100, int.MaxValue);

    // Feed the stage-specific preview windows.
    switch (mEtapa)
    {
        case 0:
            mJanelaCalibracao.Imagem.Image = frame;
            var maskPreview = new Mat();
            CvInvoke.Resize(foreground, maskPreview, new Size(0, 0), 0.7, 0.7, Inter.Area);
            mJanelaCalibracao.PlanoDeFundo.Image = blurred;
            mJanelaCalibracao.Objetos.Image = maskPreview;
            break;
        case 1:
            mJanelaAreaRestrita.Imagem.Image = frame;
            break;
        case 2:
            mJanelaMonitoramento.ImagemMonitorada.Image = frame;
            break;
    }

    mImagemColorida = frame;

    // Stage-specific drawing/processing.
    switch (mEtapa)
    {
        case 0:
            desenharParametroTamanhoPessoa();
            desenharRetanguloPessoa();
            break;
        case 1:
            desenharEMapear();
            break;
        case 2:
            atualizarParametros(parametros);
            desenharEprocessar();
            break;
    }
}
private void ProcessFrame(object sender, EventArgs arg)
{
    // Per-frame pipeline: blur, background-subtract, blob-detect, then route
    // the results to whichever stage (mEtapa) is currently active. Stage names
    // suggest 0 = calibration, 1 = restricted area, 2 = monitoring — TODO confirm.
    Mat frame = _capture.QueryFrame();
    // A null frame means the source is exhausted: finish the mapping stage if
    // needed, release the capture, and stop.
    if (frame == null)
    {
        if (mEtapa == 1)
        {
            preencherParametrosMapeamento();
        }
        _capture.Dispose();
        return;
    }
    mContadorDeFrames++;
    if (mEtapa == 0)
    {
        verificarEatualizarParametrosCalibracao();
    }
    _capture.Retrieve(frame, 0);
    Image <Bgr, Byte> smoothedFrame = new Image <Bgr, byte>(frame.Size);
    // Gaussian blur whose kernel size and sigma both come from AlphaMediaMovel.
    CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(parametros.AlphaMediaMovel, parametros.AlphaMediaMovel), parametros.AlphaMediaMovel); //filter out noises

    // use the BG/FG detector to find the forground mask
    Mat forgroundMask = new Mat();
    mDetector.Apply(smoothedFrame, forgroundMask);
    //CvInvoke.AbsDiff(smoothedFrame, forgroundMask.ToImage<Bgr, byte>(), vPlanoFundo);

    // Blob detection on the binary mask, dropping regions under 100 px.
    mblobs = new CvBlobs();
    mBlobDetector.Detect(forgroundMask.ToImage <Gray, byte>(), mblobs);
    mblobs.FilterByArea(100, int.MaxValue);

    // Stage-specific preview windows.
    if (mEtapa == 0)
    {
        mJanelaCalibracao.Imagem.Image = frame;
        // Show a 70%-scaled copy of the binary mask in the calibration window.
        Mat vCopiaMenorBinaria = new Mat();
        CvInvoke.Resize(forgroundMask, vCopiaMenorBinaria, new Size(0, 0), 0.7, 0.7, Inter.Area);
        mJanelaCalibracao.PlanoDeFundo.Image = smoothedFrame;
        mJanelaCalibracao.Objetos.Image = vCopiaMenorBinaria;
    }
    if (mEtapa == 1)
    {
        mJanelaAreaRestrita.Imagem.Image = frame;
    }
    if (mEtapa == 2)
    {
        mJanelaMonitoramento.ImagemMonitorada.Image = frame;
    }
    mImagemColorida = frame;
    // Stage-specific drawing/processing.
    if (mEtapa == 0)
    {
        desenharParametroTamanhoPessoa();
        desenharRetanguloPessoa();
    }
    if (mEtapa == 1)
    {
        desenharEMapear();
    }
    if (mEtapa == 2)
    {
        atualizarParametros(parametros);
        desenharEprocessar();
    }
}
private void CaptureMotion()
{
    // Background worker loop: grabs camera frames forever and either tracks a
    // face (mode == 1) or a blue-colored blob (any other mode), moving the
    // LifeBox control on the form to follow the detected rectangle.
    try
    {
        // Scale factors from capture-box coordinates to form coordinates.
        float wFactor = (float)this.Width / (float)CaptureBox.Width;
        float hFactor = (float)this.Height / (float)CaptureBox.Height;
        CvArr array = null;
        CvCapture cap = CvCapture.FromCamera(CaptureDevice.Any);
        // Reveal the mode radio buttons once the camera is up.
        this.Invoke(new Action(() =>
        {
            lblLoading.Visible = false;
            radioButton1.Visible = true;
            radioButton2.Visible = true;
        }));
        while (true)
        {
            IplImage img = cap.QueryFrame();
            if (img == null)
            {
                continue;
            }
            img.Flip(array, FlipMode.Y); // flip around the Y axis

            if (mode == 1)
            {
                // --- Face tracking branch ---
                // NOTE(review): the Haar cascade is reloaded from disk on EVERY
                // frame and Cv.CreateMemStorage() is never released — both
                // should be hoisted out of the loop / managed explicitly.
                string filepath = "haarcascade_frontalface_alt2.xml";
                CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile(filepath);
                CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(img, cascade, Cv.CreateMemStorage(), 3.0, 1, HaarDetectionType.Zero, new CvSize(70, 70), new CvSize(500, 500));
                foreach (CvAvgComp face in faces)
                {
                    //IplImage ClonedImage = img.Clone();
                    //Cv.SetImageROI(ClonedImage, face.Rect);
                    //IplImage ThisFace = Cv.CreateImage(face.Rect.Size, ClonedImage.Depth, ClonedImage.NChannels);
                    //Cv.Copy(ClonedImage, ThisFace, null);
                    //Cv.ResetImageROI(ClonedImage);
                    //Bitmap FaceImage = BitmapConverter.ToBitmap(ThisFace);
                    //FaceImage.SetResolution(240, 180);
                    //CaptureBox.Image = FaceImage;

                    img.DrawRect(face.Rect, CvColor.Red, 3);
                    Bitmap FaceImage = BitmapConverter.ToBitmap(img);
                    FaceImage.SetResolution(240, 180);
                    CaptureBox.Image = FaceImage;

                    // Move LifeBox toward the face, clamped to the form bounds.
                    this.Invoke(new Action(() =>
                    {
                        LifeBox.Left = (int)(face.Rect.Left * wFactor - (float)(LifeBox.Width / 2.0) - (float)(this.Width / 2.0));
                        LifeBox.Top = (int)(face.Rect.Top * hFactor - (float)(LifeBox.Height / 2.0) - (float)(this.Height / 2.0));
                        if (LifeBox.Left > (this.Width - LifeBox.Width - 12))
                        {
                            LifeBox.Left = (this.Width - LifeBox.Width - 24);
                        }
                        if (LifeBox.Top > (this.Height - LifeBox.Height - 48))
                        {
                            LifeBox.Top = (this.Height - LifeBox.Height - 48);
                        }
                        if (LifeBox.Left < 12)
                        {
                            LifeBox.Left = 12;
                        }
                        if (LifeBox.Top < 12)
                        {
                            LifeBox.Top = 12;
                        }
                        // NOTE(review): sleeping inside Invoke blocks the UI thread.
                        Thread.Sleep(30);
                    }));
                    break; // only the first detected face is used
                }
            }
            else
            {
                // --- Blue blob tracking branch (HSV range filter) ---
                int AllBlobs = 0;
                CvBlobs blobs = null;
                IplImage imgHSVsrc = Cv.CreateImage(Cv.GetSize(img), BitDepth.U8, 3);
                IplImage imgHSVdst = Cv.CreateImage(Cv.GetSize(img), BitDepth.U8, 1);
                Cv.CvtColor(img, imgHSVsrc, ColorConversion.BgrToHsv);
                // Keep hue 86-115 with moderate saturation/value (blue-ish range).
                Cv.InRangeS(imgHSVsrc, new CvScalar(86, 80, 30), new CvScalar(115, 250, 250), imgHSVdst);
                Cv.ReleaseImage(imgHSVsrc);
                blobs = new CvBlobs(imgHSVdst);
                blobs.FilterByArea(7000, 40000);
                AllBlobs = blobs.Count;
                foreach (KeyValuePair <int, CvBlob> blob in blobs)
                {
                    CvBlob CurrentBlob = blob.Value;
                    CvRect BlobRect = CurrentBlob.Rect;
                    CvPoint Point1, Point2;
                    Point1.X = BlobRect.X;
                    Point1.Y = BlobRect.Y;
                    Point2.X = BlobRect.X + BlobRect.Width;
                    Point2.Y = BlobRect.Y + BlobRect.Height;
                    img.DrawRect(Point1, Point2, CvColor.LightGreen, 3, LineType.AntiAlias);
                    // Same LifeBox follow/clamp logic as the face branch.
                    this.Invoke(new Action(() =>
                    {
                        LifeBox.Left = (int)(BlobRect.Left * wFactor - (float)(LifeBox.Width / 2.0) - (float)(this.Width / 2.0));
                        LifeBox.Top = (int)(BlobRect.Top * hFactor - (float)(LifeBox.Height / 2.0) - (float)(this.Height / 2.0));
                        if (LifeBox.Left > (this.Width - LifeBox.Width - 12))
                        {
                            LifeBox.Left = (this.Width - LifeBox.Width - 24);
                        }
                        if (LifeBox.Top > (this.Height - LifeBox.Height - 48))
                        {
                            LifeBox.Top = (this.Height - LifeBox.Height - 48);
                        }
                        if (LifeBox.Left < 12)
                        {
                            LifeBox.Left = 12;
                        }
                        if (LifeBox.Top < 12)
                        {
                            LifeBox.Top = 12;
                        }
                        Thread.Sleep(30);
                    }));
                    break; // only the first qualifying blob is used
                }
                // Show the annotated frame and the HSV mask side by side.
                Bitmap Item = BitmapConverter.ToBitmap(img);
                Item.SetResolution(240, 180);
                CaptureBox.Image = Item;
                Bitmap HSVItem = BitmapConverter.ToBitmap(imgHSVdst);
                HSVItem.SetResolution(240, 180);
                HSVCaptureBox.Image = HSVItem;
                Cv.ReleaseImage(imgHSVdst);
            }
        }
    }
    catch (Exception e)
    {
        Console.WriteLine("ERROR: " + e.Message + "DETAILS: " + e.StackTrace);
    }
}
private void ProcessFrame(object sender, EventArgs e)
{
    // Locate the plate region in the camera frame, detect the calibration
    // pattern dots and the ball inside it, tag each dot with its quadrant
    // relative to the pattern center, and render the annotated result.
    //String str = String.Format("withBall.jpg");
    //originalImg = CvInvoke.Imread(str)
    originalImg = capture.QueryFrame();
    Image <Bgr, Byte> outputImg = originalImg.ToImage <Bgr, Byte>();
    int imgWidth = originalImg.Width;
    int imgHeight = originalImg.Height;

    //Convert RBG to Gray
    UMat grayImg = new UMat();
    CvInvoke.CvtColor(originalImg, grayImg, ColorConversion.Bgr2Gray);

    //use image pyr to remove noise
    UMat pyrDown = new UMat();
    CvInvoke.PyrDown(grayImg, pyrDown);
    CvInvoke.PyrUp(pyrDown, grayImg);

    //Find Potiential Plate Region: bright pixels (> 200) go to 0, rest to 255.
    UMat binaryImg = new UMat();
    CvInvoke.Threshold(grayImg, binaryImg, 200, 255, ThresholdType.BinaryInv);
    Image <Gray, Byte> binaryImgG = binaryImg.ToImage <Gray, Byte>();

    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    int[,] hierachy = CvInvoke.FindContourTree(binaryImgG, contours, ChainApproxMethod.ChainApproxNone);

    // Pick the LARGEST contour with more than 3 children (the plate frame
    // encloses the pattern dots).
    double maxArea = 0;
    int maxAreaContourIndex = 0;
    for (int idx = 0; idx < contours.Size; idx++)
    {
        //bool isChild = isChildContour(hierachy, idx);
        int numberOfChildren = GetNumberOfChildren(hierachy, idx);
        using (VectorOfPoint contour = contours[idx])
        {
            if (numberOfChildren > 3)
            {
                double area = CvInvoke.ContourArea(contour);
                if (area > maxArea)
                {
                    // FIX: maxArea was never updated, so every qualifying
                    // contour compared against 0 and the LAST one won instead
                    // of the largest.
                    maxArea = area;
                    maxAreaContourIndex = idx;
                }
            }
        }
    }

    // Fill the chosen contour into a mask and clean it with a heavy
    // erode/dilate (morphological opening).
    Image <Gray, Byte> mask1 = new Image <Gray, Byte>(imgWidth, imgHeight);
    CvInvoke.DrawContours(mask1, contours, maxAreaContourIndex, new MCvScalar(255), -1);
    int openingFactor1 = 100;
    Image <Gray, Byte> plateMask = new Image <Gray, Byte>(imgWidth, imgHeight);
    plateMask = mask1.Erode(openingFactor1);
    plateMask = plateMask.Dilate(openingFactor1);

    // Outline the plate blob(s) on the output image.
    CvBlobs blobs = new CvBlobs();
    CvBlobDetector blob_detector = new CvBlobDetector();
    //blobs.FilterByArea(10000, 1000000);
    blob_detector.Detect(plateMask, blobs);
    foreach (CvBlob blob in blobs.Values)
    {
        Rectangle r = blob.BoundingBox;
        outputImg.Draw(r, new Bgr(0, 255, 255), 4);
    }

    // Dark features inside the plate = plateMask AND NOT(binary image).
    Image <Gray, Byte> invBinaryImgG = binaryImg.ToImage <Gray, Byte>();
    CvInvoke.BitwiseNot(invBinaryImgG, invBinaryImgG);
    Image <Gray, Byte> mask3 = plateMask.Clone();
    CvInvoke.BitwiseAnd(plateMask, invBinaryImgG, mask3);
    blob_detector.Detect(mask3, blobs);

    // Expected feature sizes (pixels) with tolerance windows.
    int patternSize = 20;
    int ballSize = 60;
    int tolerance = 10;
    int patternHigh = patternSize + tolerance;
    int patternLow = patternSize - tolerance;
    int ballHigh = ballSize + tolerance * 2;
    int ballLow = ballSize - tolerance * 2;
    blobs.FilterByArea(patternLow * patternLow, ballHigh * ballHigh);

    // Classify each blob as pattern dot or ball by its bounding-box size.
    List <PointF> patternPoints = new List <PointF>();
    PointF ballPoint = new PointF();
    int numberOfPatternPointFound = 0;
    foreach (CvBlob blob in blobs.Values)
    {
        Rectangle r = blob.BoundingBox;
        if ((r.Height > patternLow) && (r.Height < patternHigh) && (r.Width > patternLow) && (r.Width < patternHigh))
        {
            outputImg.Draw(new CircleF(blob.Centroid, 2), new Bgr(0, 0, 255), 2);
            patternPoints.Add(blob.Centroid);
            numberOfPatternPointFound++;
        }
        if ((r.Height > ballLow) && (r.Height < ballHigh) && (r.Width > ballLow) && (r.Width < ballHigh))
        {
            outputImg.Draw(new CircleF(blob.Centroid, 5), new Bgr(0, 0, 255), 5);
            ballPoint = blob.Centroid;
        }
    }
    label14.Text = String.Format("{0}", numberOfPatternPointFound);

    // Average the pattern points to get the pattern center.
    List <PointF> sortedPatternPoints = new List <PointF>();
    // 1 for TopLeft - 2 for Top Right - 3 for Bottom Right - 4 for Bottom Left
    List <int> pointType = new List <int>();
    PointF centerPoint = new PointF();
    foreach (PointF patternPoint in patternPoints)
    {
        centerPoint.X += patternPoint.X;
        centerPoint.Y += patternPoint.Y;
    }
    // Guard: avoid 0/0 (NaN center) when no pattern dots were found.
    if (numberOfPatternPointFound > 0)
    {
        centerPoint.X /= numberOfPatternPointFound;
        centerPoint.Y /= numberOfPatternPointFound;
    }
    x_position.Text = ballPoint.X.ToString();
    y_position.Text = ballPoint.Y.ToString();

    // Tag each pattern point with its quadrant relative to the center; points
    // lying exactly on a center axis are intentionally skipped.
    foreach (PointF patternPoint in patternPoints)
    {
        if ((patternPoint.X < centerPoint.X) && (patternPoint.Y < centerPoint.Y))
        {
            sortedPatternPoints.Add(patternPoint);
            pointType.Add(1);
        }
        else if ((patternPoint.X > centerPoint.X) && (patternPoint.Y < centerPoint.Y))
        {
            sortedPatternPoints.Add(patternPoint);
            pointType.Add(2);
        }
        else if ((patternPoint.X > centerPoint.X) && (patternPoint.Y > centerPoint.Y))
        {
            sortedPatternPoints.Add(patternPoint);
            pointType.Add(3);
        }
        else if ((patternPoint.X < centerPoint.X) && (patternPoint.Y > centerPoint.Y))
        {
            sortedPatternPoints.Add(patternPoint);
            pointType.Add(4);
        }
    }

    // Label every classified point with its quadrant number.
    int id = 0;
    foreach (PointF patternPoint in sortedPatternPoints)
    {
        CvInvoke.PutText(outputImg, String.Format("{0}", pointType[id++]), new System.Drawing.Point((int)patternPoint.X, (int)patternPoint.Y), FontFace.HersheyComplex, 1.0, new Bgr(0, 255, 0).MCvScalar);
    }
    imageBox1.Image = outputImg;
}