/// <summary>
/// Runs one CamShift iteration on <paramref name="frame"/> using a
/// precomputed hue histogram, updating and returning the track window.
/// </summary>
/// <param name="frame">Current BGR frame.</param>
/// <param name="trackerbox">Search window from the previous frame; updated in place by CamShift.</param>
/// <param name="HIST">Hue histogram of the tracked object (used for back-projection).</param>
/// <returns>The updated track window (also stored in the <c>tbox</c> field).</returns>
public Rectangle camshift(Image <Bgr, byte> frame, Rectangle trackerbox, Mat HIST)
{
    // CamShift operates on a back-projection of the hue channel, so convert
    // to HSV first. Intermediates are disposed to avoid leaking native memory.
    using (Mat HSV = new Mat())
    using (Mat dst = new Mat())
    {
        CvInvoke.CvtColor(frame, HSV, ColorConversion.Bgr2Hsv);

        int[] Chn = { 0 };          // back-project over channel 0 (hue)
        float[] Range = { 0, 180 }; // OpenCV hue range is [0, 180)

        using (var vhue = new VectorOfMat(HSV))
        {
            CvInvoke.CalcBackProject(vhue, Chn, HIST, dst, Range, 1);
        }

        MCvTermCriteria termCrit = new MCvTermCriteria(10, 1);
        try
        {
            // CamShift refines trackerbox in place; the RotatedRect result
            // (orientation) is not used by callers of this method.
            CvInvoke.CamShift(dst, ref trackerbox, termCrit);
        }
        catch (Exception e)
        {
            // NOTE(review): a modal MessageBox from a frame-processing path
            // blocks the pipeline; kept to preserve the original behavior.
            MessageBox.Show(e.ToString());
        }
    }

    tbox = trackerbox;
    return(trackerbox);
}
/// <summary>
/// Processes one tracking frame: optionally (re)initializes the track from
/// the configured object-of-interest region, then back-projects the stored
/// histogram and advances the CamShift search window.
/// </summary>
/// <param name="input">Captured frame plus tracking configuration.</param>
/// <returns>Output holding the new object location and the back-projection.</returns>
protected override CamshiftOutput DoProcess(TrackingInput input)
{
    var output = new CamshiftOutput();
    Image <Gray, byte> frame = input.Captured.ToImage <Gray, byte>();

    if (input.Config.StartNewTrack)
    {
        _trackStarted = false;
        _backProjection = frame.Copy();

        // Cut out just the object-of-interest patch, then restore the full view.
        frame.ROI = input.Config.ObjectOfInterest;
        var roiPatch = frame.Copy();
        frame.ROI = Rectangle.Empty; //clear the roi

        StartNewTrack(input.Config.ObjectOfInterest, frame, roiPatch, output);
    }

    if (_trackStarted)
    {
        // Back-project the tracked histogram over the whole current frame.
        using (var frameVector = new VectorOfMat(frame.Mat))
        {
            CvInvoke.CalcBackProject(frameVector, _channels, _histogram, _backProjection, _ranges, 1);
        }

        // CamShift updates _rectangleSearchWindow in place for the next frame.
        output.ObjectOfInterest = CvInvoke.CamShift(_backProjection, ref _rectangleSearchWindow, TermCriteria);
    }

    output.BackProjection = _backProjection;
    return(output);
}
/// <summary>
/// Advances the CamShift track by one frame and returns the updated
/// axis-aligned tracking window.
/// </summary>
/// <param name="image">Current BGR frame.</param>
/// <returns>The tracking window refined by CamShift.</returns>
public Rectangle Tracking(Image <Bgr, Byte> image)
{
    UpdateHue(image);

    // Rebuild the back-projection for this frame, releasing the previous one.
    backproject?.Dispose();
    backproject = hist.BackProject(new Image <Gray, Byte>[] { hue });

    // Apply mask
    backproject._And(mask);

    // An empty/degenerate window means CamShift lost the bounding box last
    // frame; restart the search from a default window at the origin.
    bool lostWindow = trackingWindow.IsEmpty
                      || trackingWindow.Width == 0
                      || trackingWindow.Height == 0;
    if (lostWindow)
    {
        trackingWindow = new Rectangle(0, 0, 100, 100);
    }

    // CamShift refines trackingWindow in place; its RotatedRect result is unused.
    CvInvoke.CamShift(backproject, ref trackingWindow, TermCriteria);
    return(trackingWindow);
}
/// <summary>
/// Processes one captured frame: draws the user's in-progress selection box,
/// refreshes the hue/mask images, initializes the track window when
/// requested, and runs one CamShift iteration once tracking is active.
/// </summary>
/// <param name="srcimg">Raw captured frame (drawn on when selecting).</param>
public void ProcessFrame(Mat srcimg)
{
    Image <Bgr, Byte> image = imageProcessing.MatToImage(srcimg);

    // Overlay the selection rectangle while the user is dragging one out.
    if (_selectObject && _selRect.Width > 0 && _selRect.Height > 0)
    {
        CvInvoke.Rectangle(srcimg, _selRect, new Bgr(Color.Red).MCvScalar, 2, LineType.AntiAlias);
    }

    UpdateHue(image);

    // A negative counter signals that the track window must be (re)initialized.
    if (_initTracking < 0)
    {
        InitTrackWindow(image);
    }

    if (_initTracking <= 0)
    {
        return;
    }

    //Mat prediction = m_KF.Predict();
    // Back-project the stored hue histogram, then mask out unwanted pixels.
    _imgBackproject = _imgHist.BackProject(new Image <Gray, Byte>[] { _imgHue });
    _imgBackproject._And(_imgMask);

    // Restart from a default window if CamShift lost the target last frame.
    if (_trackWindow.IsEmpty || _trackWindow.Width == 0 || _trackWindow.Height == 0)
    {
        _trackWindow = new Rectangle(0, 0, 100, 100);
    }

    _trackBox = CvInvoke.CamShift(_imgBackproject, ref _trackWindow, new MCvTermCriteria(10, 1));
    //PredictPos();
}
/// <summary>
/// Grabs the next frame from the capture device, back-projects the supplied
/// ROI hue histogram, runs one CamShift iteration to update the tracker
/// window, and displays the thresholded back-projection with the track
/// ellipse drawn on top.
/// </summary>
/// <param name="hist_roi">Hue histogram of the object selected for tracking.</param>
public void tracking(Mat hist_roi)
{
    using (var nextframe = cap.QueryFrame().ToImage <Bgr, Byte>())
    {
        if (nextframe == null)
        {
            return;
        }

        int[] Chn = { 0 };          // back-project over channel 0 (hue)
        // One (min, max) pair per channel: the original passed
        // { 0, 180, 0, 255 } for a single channel, which is malformed.
        float[] Range = { 0, 180 };

        using (Mat hsv = new Mat())
        using (Mat mask = new Mat())
        using (Mat hue = new Mat())
        using (Mat dst = new Mat())
        {
            // Convert to HSV and build a saturation/value mask that drops
            // pixels too dark or too unsaturated to carry reliable hue.
            CvInvoke.CvtColor(nextframe, hsv, ColorConversion.Bgr2Hsv);
            CvInvoke.InRange(hsv,
                new ScalarArray(new MCvScalar(0, 60, 32)),
                new ScalarArray(new MCvScalar(180, 256, 255)),
                mask);

            // Extract the hue plane into its own single-channel Mat.
            hue.Create(hsv.Rows, hsv.Cols, hsv.Depth, 0);
            int[] chn = { 0, 0 }; // copy source channel 0 -> destination channel 0
            using (var vhue = new VectorOfMat(hue))
            using (var vhsv = new VectorOfMat(hsv))
            {
                CvInvoke.MixChannels(vhsv, vhue, chn);

                // Back-project the ROI histogram and suppress weak responses.
                CvInvoke.CalcBackProject(vhue, Chn, hist_roi, dst, Range, 1);
            }
            CvInvoke.Threshold(dst, dst, 50, 255, 0);

            // NOTE(review): CamShift intentionally updates the 'trackerbox'
            // field so the window persists across frames; a local window
            // built from rectx/recty was declared but never used, and a
            // disabled Haar face-count gate has been removed as dead code.
            MCvTermCriteria termCrit = new MCvTermCriteria(10, 0.1);
            CvInvoke.CamShift(dst, ref trackerbox, termCrit);

            // Render the thresholded back-projection with the track ellipse.
            RectangleF ret = trackerbox;
            CvInvoke.CvtColor(dst, nextframe, ColorConversion.Gray2Bgr);
            MCvScalar color = new MCvScalar(0, 0, 255);
            CvInvoke.Ellipse(nextframe, ret, color, 3, LineType.AntiAlias);
        }

        imageBox1.Image = nextframe;
    }
}
// Capture callback: grabs a frame from the webcam and applies the currently
// selected real-time processing mode ("Gray", "Mirror", "Rotating", "Otsu",
// "HistogramEqualization", "imageBlending", "RemovingBackgrounds",
// "CamShift", ...), writing the processed frame to _resultPictureBox and the
// raw frame to _sourcePictureBox.
private void ProcessFrame(object sender, EventArgs e)
{
    if (_capture != null && _capture.Ptr != IntPtr.Zero && _capture.IsOpened)
    {
        // Grab the webcam frame; bail out if nothing was captured.
        _capture.Retrieve(_captureFrame);
        if (_captureFrame == null || _captureFrame.IsEmpty)
        {
            return;
        }
        _resultFrame = _captureFrame.Clone();
        _sourceFrame = _captureFrame.Clone();
        _sourceImage = imageProcessing.MatToImage(_captureFrame);
        //CvInvoke.Resize(_captureFrame, _captureFrame, new Size(_sourcePictureBox.Width, _sourcePictureBox.Height), 0, 0, Emgu.CV.CvEnum.Inter.Linear);//, Emgu.CV.CvEnum.Inter.Linear
        // Dispatch on the selected mode and show the result in the PictureBox.
        if (imageProcess_for_realTime_way == "Gray")
        {
            _resultImage = imageProcessing.ConvertToGray(_sourceImage);
            _resultPictureBox.Image = _resultImage.Bitmap;
        }
        else if (imageProcess_for_realTime_way == "Mirror")
        {
            _resultImage = imageProcessing.ConvertToMirror(_sourceImage);
            _resultPictureBox.Image = _resultImage.Bitmap;
        }
        else if (imageProcess_for_realTime_way == "Rotating")
        {
            // Read the trackbar value, marshalling to the UI thread if needed.
            int _Threshold_trackBar_value = 0;
            if (_Threshold_trackBar.InvokeRequired)
            {
                _Threshold_trackBar_value = (int)_Threshold_trackBar.Invoke(new obj_delegate(() => { return(_Threshold_trackBar.Value); }));
            }
            else
            {
                _Threshold_trackBar_value = _Threshold_trackBar.Value;
            }
            _resultImage = imageProcessing.Rotating(_sourceImage, _Threshold_trackBar_value / 100.0);
            _resultPictureBox.Image = _resultImage.Bitmap;
        }
        else if (imageProcess_for_realTime_way == "Otsu")
        {
            _resultImage = imageProcessing.ConvertToOtsu(_sourceImage);
            _resultPictureBox.Image = _resultImage.Bitmap;
        }
        else if (imageProcess_for_realTime_way == "HistogramEqualization")
        {
            _resultImage = imageProcessing.HistogramEqualization(_sourceImage);
            _resultPictureBox.Image = _resultImage.Bitmap;
        }
        else if (imageProcess_for_realTime_way == "imageBlending")
        {
            // Blend ratio comes from the trackbar (value / 100.0).
            int _Threshold_trackBar_value = 0;
            if (_Threshold_trackBar.InvokeRequired)
            {
                _Threshold_trackBar_value = (int)_Threshold_trackBar.Invoke(new obj_delegate(() => { return(_Threshold_trackBar.Value); }));
            }
            else
            {
                _Threshold_trackBar_value = _Threshold_trackBar.Value;
            }
            _resultImage = imageProcessing.imageBlending(_sourceImage, _sourceImage2, _Threshold_trackBar_value / 100.0);
            _resultPictureBox.Image = _resultImage.Bitmap;
        }
        else if (imageProcess_for_realTime_way == "RemovingBackgrounds" && background != null && !background.IsEmpty)
        {
            // NOTE(review): both branches compute (int)(trackbarValue / 100.0),
            // which is 0 for any trackbar value below 100 — confirm intended.
            int _Threshold_trackBar_value = 30;
            if (_Threshold_trackBar.InvokeRequired)
            {
                _Threshold_trackBar_value = (int)_Threshold_trackBar.Invoke(new obj_delegate(() => { return((int)((double)_Threshold_trackBar.Value / 100.0)); }));
            }
            else
            {
                _Threshold_trackBar_value = (int)((double)_Threshold_trackBar.Value / 100.0);
            }
            /* implementation: chroma-key style background replacement */
            if (mousedown)
            {
                _resultImage = _sourceImage;
                Image <Bgr, byte> backgroundImage = background.ToImage <Bgr, byte>();
                // Replace every pixel whose per-channel BGR distance to the
                // picked key color is within the threshold with the
                // corresponding background pixel.
                for (int y = 0; y < _sourceImage.Height; y++)
                {
                    for (int x = 0; x < _sourceImage.Width; x++)
                    {
                        if (Math.Abs((int)_sourceImage.Data[y, x, 0] - colorForRemovingBackground.B) < _Threshold_trackBar_value && Math.Abs((int)_sourceImage.Data[y, x, 1] - colorForRemovingBackground.G) < _Threshold_trackBar_value && Math.Abs((int)_sourceImage.Data[y, x, 2] - colorForRemovingBackground.R) < _Threshold_trackBar_value)
                        {
                            _resultImage.Data[y, x, 0] = backgroundImage.Data[y, x, 0];
                            _resultImage.Data[y, x, 1] = backgroundImage.Data[y, x, 1];
                            _resultImage.Data[y, x, 2] = backgroundImage.Data[y, x, 2];
                        }
                        else
                        {
                            continue;
                        }
                    }
                }
                _resultPictureBox.Image = _resultImage.Bitmap;
            }
            CvInvoke.PutText(_sourceFrame, "now keyColor is R: " + colorForRemovingBackground.R + " G: " + colorForRemovingBackground.G + " B: " + colorForRemovingBackground.B, new Point(10, 20), Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.4, new MCvScalar(0, 0, 255));
        }
        else if (imageProcess_for_realTime_way == "CamShift")
        {
            if (!IsSelection && !IsTracking)
            {
                // User is still dragging: echo the selection rectangle.
                // NOTE(review): the Graphics objects created below are never
                // disposed — wrap in using if this is revisited.
                selection = new Rectangle(mouseDownPosition, new Size(mouseUpPosition.X - mouseDownPosition.X, mouseUpPosition.Y - mouseDownPosition.Y));
                Graphics sG = _sourcePictureBox.CreateGraphics();
                sG.DrawRectangle(new Pen(Color.Red, 10), selection);
            }
            else if (IsSelection && selection.Height != 0 && selection.Width != 0)
            {
                // Initialize after selection: normalize the drag rectangle
                // (handles drags in any direction) and switch to tracking.
                rectangle = new Rectangle(new Point(Math.Min(selection.Left, selection.Right), Math.Min(selection.Top, selection.Bottom)), new Size(Math.Abs(selection.Size.Width), Math.Abs(selection.Size.Height)));
                IsSelection = false;
                IsTracking = true;
            }
            else if (IsTracking)
            {
                // Run one CamShift iteration.
                Image <Hsv, Byte> sourceHSV = new Image <Hsv, Byte>(_sourceImage.Width, _sourceImage.Height);
                CvInvoke.CvtColor(_sourceImage, sourceHSV, Emgu.CV.CvEnum.ColorConversion.Bgr2Hsv); // convert to HSV
                // Compute hue.
                // NOTE(review): _EqualizeHist() runs on the PREVIOUS frame's
                // hue image and its result is immediately discarded by the
                // next assignment — likely a statement-order bug; confirm.
                hue._EqualizeHist();
                hue = sourceHSV.Split()[0];
                hue.ROI = rectangle;
                // Compute mask (the ThresholdBinary result is overwritten by
                // InRange below; it mainly provides a correctly sized image).
                mask = sourceHSV.Split()[1].ThresholdBinary(new Gray(60), new Gray(255));
                // NOTE(review): Math.Min(10, 255) == 10 and Math.Max(10, 255)
                // == 255 are constants — confirm the intended V bounds.
                CvInvoke.InRange(sourceHSV, new ScalarArray(new MCvScalar(0, 30, Math.Min(10, 255), 0)), new ScalarArray(new MCvScalar(180, 256, Math.Max(10, 255), 0)), mask);
                mask.ROI = rectangle;
                // Compute the histogram over the selected ROI only.
                hist.Calculate(new Image <Gray, Byte>[] { hue }, false, mask);
                CvInvoke.Normalize(hist, hist);
                // Clear the ROIs so back-projection covers the whole frame.
                hue.ROI = Rectangle.Empty;
                mask.ROI = Rectangle.Empty;
                // Compute the back-projection and apply the mask.
                backProjection = hist.BackProject <Byte>(new Image <Gray, Byte>[] { hue });
                backProjection._And(mask);
                // Draw the CamShift result; CamShift updates 'rectangle' in
                // place for the next frame.
                Graphics rG = _resultPictureBox.CreateGraphics();
                rG.DrawRectangle(new Pen(Color.Green, 10), CvInvoke.CamShift(backProjection, ref rectangle, new MCvTermCriteria(10, 1)).MinAreaRect());
            }
            //_resultPictureBox.Image = backProjection.Bitmap; // debug: show the back-projection
            _resultPictureBox.Image = _resultFrame.Bitmap;
        }
        else if (imageProcess_for_realTime_way == "Game")
        {
            _resultPictureBox.Image = _resultFrame.Bitmap;
        }
        else if (imageProcess_for_realTime_way == "findPedestrian")
        {
            _resultPictureBox.Image = _resultFrame.Bitmap;
        }
        else
        {
            _resultPictureBox.Image = _resultFrame.Bitmap;
        }
        _sourcePictureBox.Image = _sourceFrame.Bitmap;
    }
    // Release drawing resources -> avoid System.AccessViolationException.
    // NOTE(review): forcing GC.Collect() every frame is a heavy workaround;
    // disposing the Graphics/Image objects above would address the cause.
    GC.Collect();
}
/// <summary>
/// Runs one tracking step: back-projects the hue histogram, extracts contour
/// candidates from the back-projection, and either advances the CamShift
/// window or attempts to re-acquire the target after it has been lost.
/// </summary>
/// <param name="image">Current BGR frame.</param>
/// <returns>The latest CamShift track box (unchanged while re-acquiring).</returns>
public RotatedRect Tracking(Image <Bgr, Byte> image)
{
    GetFrameHue(image);
    // User changed bins num ,recalculate Hist
    if (Main._advancedHsv)
    {
        if (bins != Main.HsvSetting.Getbins)
        {
            bins = Main.HsvSetting.Getbins;
            hist.Dispose();
            hist = new DenseHistogram(bins, new RangeF(0, 180));
            CalcHist(image);
        }
    }
    backprojection = hist.BackProject(new Image <Gray, Byte>[] { hue });
    // Add mask
    backprojection._And(mask);
    // FindContours
    // Runs on a copy (backcopy) so the back-projection itself is preserved.
    //CvInvoke.Canny(backprojection, backcopy, 3, 6);
    backprojection.CopyTo(backcopy);
    CvInvoke.FindContours(backcopy, vvp, null, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxNone);
    // Keep candidate contours whose area is within [0.25x, 10x] of the
    // current window area, then score them by density and window overlap.
    int trackArea = trackingWindow.Height * trackingWindow.Width;
    FindTargetByArea(vvp, trackArea * 0.25, trackArea * 10, ref vvpApprox);
    vvpApproxDensity = GetVVPDensity(vvpApprox, out vvpApproxRect);
    targetVVPIndex = FindTargetByOverlap(vvpApprox, trackingWindow);
    //FindTargetByCenter(vvpApprox, new PointF(trackingWindow.X + trackingWindow.Width / 2, trackingWindow.Y + trackingWindow.Height / 2));
    // If lost trackbox (degenerate window, flagged lost, or no overlap match)
    if (trackingWindow.IsEmpty || trackingWindow.Width <= 10 || trackingWindow.Height <= 10 || _lost || targetVVPIndex == -1)
    {
        if (!timer.IsRunning)
        {
            timer.Start();
        }
        // Declare the target lost after 1s without a usable window.
        if (timer.ElapsedMilliseconds > 1000)
        {
            _lost = true;
        }
        if (timer.ElapsedMilliseconds > 3000)
        {
            //targetVVPIndex = Array.IndexOf(vvpApproxDensity, vvpApproxDensity.Max());
        }
        // Re-acquire: adopt any candidate whose density is close to the
        // running target density. NOTE(review): the loop does not break, so
        // the LAST qualifying candidate wins — confirm that is intended.
        for (int i = 0; i < vvpApproxDensity.Length; i++)
        {
            if (vvpApproxDensity[i] >= targetDensity * 0.8)
            {
                trackingWindow = vvpApproxRect[i];
                _lost = false;
                timer.Reset();
            }
        }
    }
    else
    {
        trackbox = CvInvoke.CamShift(backprojection, ref trackingWindow, new MCvTermCriteria(10, 1));
        // Running average: fold the matched contour's density into the target
        // density used as the re-acquisition threshold above.
        targetDensity += vvpApproxDensity[targetVVPIndex];
        targetDensity /= 2;
        if (timer.IsRunning)
        {
            timer.Reset();
        }
    }
    return(trackbox);
}