public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    using (var matCaptured = new Mat())
    {
        CameraCapture.Retrieve(matCaptured);
    }
}
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    using (var frame = new Mat())
    {
        CameraCapture.Retrieve(frame);
        var inputImage = frame.ToImage<Bgr, byte>();

        if (radTrackingApi.Checked)
        {
            inputImage = DoTrackingApi(frame, inputImage);
        }
        else if (radCamshift.Checked)
        {
            var output = DoCamShift(frame, inputImage);
            imageBoxProcessed.Image = output.BackProjection;
        }

        if (!_imageBoxSelector.SeedingRectangle.IsEmpty)
        {
            inputImage.Draw(_imageBoxSelector.SeedingRectangle, new Bgr(Color.Chartreuse));
        }

        imageBoxTracking.Image = inputImage;
    }
}
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    if (_transmitTask == null || _transmitTask.IsCompleted)
    {
        using (var matCaptured = new Mat())
        {
            CameraCapture.Retrieve(matCaptured);

            var bgrImage = matCaptured.ToImage<Bgr, byte>();
            WriteText(bgrImage, 30, DateTime.Now.ToString("HH:mm:ss tt"));
            imageBoxCaptured.Image = bgrImage;

            IImageTransmitter transmitter = null;

            if (radBsonImage.Checked)
            {
                transmitter = _imageTransmitter;
            }

            if (radBsonJpeg.Checked)
            {
                transmitter = _jpegTransmitter;
            }

            if (transmitter != null)
            {
                _transmitTask = transmitter.Transmit(bgrImage);
            }
        }
    }
}
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    var frame = new Mat();
    CameraCapture.Retrieve(frame);

    var grayFrame = new Mat();
    CvInvoke.CvtColor(frame, grayFrame, ColorConversion.Bgr2Gray);

    var smallGrayFrame = new Mat();
    CvInvoke.PyrDown(grayFrame, smallGrayFrame);

    var smoothedGrayFrame = new Mat();
    CvInvoke.PyrUp(smallGrayFrame, smoothedGrayFrame);

    var cannyFrame = new Mat();
    CvInvoke.Canny(smoothedGrayFrame, cannyFrame, 100, 60);

    imageBoxCaptured.Image = frame;
    imageBoxGray.Image = grayFrame;
    imageBoxSmoothedGray.Image = smoothedGrayFrame;
    imageBoxCanny.Image = cannyFrame;

    NotifyStatus(string.Empty);
}
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    using (var matCaptured = new Mat())
    {
        var retrieveElapsed = Stopwatch.StartNew();
        CameraCapture.Retrieve(matCaptured);
        retrieveElapsed.Stop();

        if (_readyRectangle.IsEmpty)
        {
            _detectorInput.ErodeDilateIterations = (int)spinDilateIterations.Value;
            _detectorInput.Settings.Roi = GetRegionOfInterestFromControls();
            _detectorInput.Captured = matCaptured;

            var output = _colorDetector.Process(_detectorInput);

            DrawReticle(output.CapturedImage, output.CentralPoint.ToPoint(), Color.Aqua);

            if (output.IsDetected)
            {
                var radius = 50;
                var circle = new CircleF(output.CentralPoint, radius);
                var color = new Bgr(Color.Yellow);
                output.CapturedImage.Draw(circle, color, 3);

                var ballTextLocation = output.CentralPoint.ToPoint();
                ballTextLocation.X += radius;
                // output.CapturedImage.Draw("ball", ballTextLocation, FontFace.HersheyPlain, 3, color);
            }

            if (checkBoxRoi.Checked)
            {
                output.CapturedImage.Draw(_detectorInput.Settings.Roi, Color.Green.ToBgr(), 3);
            }

            if (!_imageBoxSelector.SeedingRectangle.IsEmpty)
            {
                output.CapturedImage.Draw(_imageBoxSelector.SeedingRectangle, new Bgr(Color.Chartreuse));
            }

            imageBoxCaptured.Image = output.CapturedImage;
            imageBoxFiltered.Image = output.ThresholdImage;

            NotifyStatus(
                "Retrieved frame in {0}, {1}"
                , retrieveElapsed.Elapsed.ToHumanReadable(HumanReadableTimeSpanOptions.Abbreviated)
                , output);
        }
        else
        {
            DoAutoThresholding(matCaptured);
        }

        ResizeImageControls();
    }
}
private Image<Bgr, byte> PullImage()
{
    Image<Bgr, byte> output;
    using (var matCaptured = new Mat())
    {
        CameraCapture.Retrieve(matCaptured);
        output = matCaptured.ToImage<Bgr, byte>();
        WriteText(output, 30, DateTime.Now.ToString(TimeFormat));
        imageBoxCaptured.Image = output;
    }
    return output;
}
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    using (var matCaptured = new Mat())
    {
        CameraCapture.Retrieve(matCaptured);

        var input = new CascadeDetectorInput { Captured = matCaptured };
        var result = _detector.Process(input);

        if (result.IsDetected)
        {
            Log.Info(m => m("{0}", result));
        }
    }
}
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    if (_transmitTask == null || _transmitTask.IsCompleted)
    {
        using (var matCaptured = new Mat())
        {
            CameraCapture.Retrieve(matCaptured);

            var bgrImage = matCaptured.ToImage<Bgr, byte>();
            WriteText(bgrImage, 30, DateTime.Now.ToString("HH:mm:ss tt"));
            imageBoxCaptured.Image = bgrImage;

            _transmitTask = _bsonPoster.Transmit(bgrImage);
        }
    }
}
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    using (var frame = new Mat())
    {
        CameraCapture.Retrieve(frame);

        var input = new MotionDetectorInput();
        var inputImage = frame.ToImage<Bgr, byte>();
        input.Captured = frame;
        input.Settings = _currentSettings;

        var output = _motionDetector.Process(input);

        var bgrRed = new Bgr(Color.Red);
        var bgrBlue = new Bgr(Color.Blue);

        foreach (var motionRegion in output.MotionSections)
        {
            var text = string.Format("A={0}, M={1}", motionRegion.Area, motionRegion.PixelsInMotionCount);
            inputImage.Draw(motionRegion.Region, bgrRed);

            if (chkRectangleStats.Checked)
            {
                inputImage.Draw(text, motionRegion.Region.Location, Emgu.CV.CvEnum.FontFace.HersheyComplexSmall, .8, bgrRed);
            }

            DrawMotion(output.MotionImage, motionRegion.Region, motionRegion.Angle, bgrRed);
        }

        DrawMotion(output.MotionImage, new Rectangle(Point.Empty, output.MotionImage.Size), output.OverallAngle, new Bgr(Color.Green));

        if (output.BiggestMotion != null)
        {
            var motion = output.BiggestMotion;
            inputImage.Draw(motion.Region, bgrBlue);
        }

        imageBoxCaptured.Image = inputImage;
        imageBoxMasked.Image = output.ForegroundImage;
        imageBoxMotion.Image = output.MotionImage;

        NotifyStatus(
            "Motion detection took {0}. {1} motions, {2} overall pixel count"
            , output.Elapsed.ToHumanReadable()
            , output.MotionSections.Count
            , output.OverallMotionPixelCount);
    }
}
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    using (var matCaptured = new Mat())
    {
        CameraCapture.Retrieve(matCaptured);

        var input = new CascadeDetectorInput { Captured = matCaptured };
        var result = _detector.Process(input);

        var image = matCaptured.ToImage<Bgr, byte>();

        foreach (Rectangle item in result.Objects)
        {
            image.Draw(item, new Bgr(Color.Blue), 2);
        }

        imageBoxCaptured.Image = image;
    }
}
private void InternalImageGrabbedHandler(object sender, EventArgs e)
{
    FpsTracker.NotifyImageGrabbed(sender, e);

    using (var matCaptured = new Mat())
    {
        CameraCapture.Retrieve(matCaptured);

        var input = new CameraProcessInput();
        input.Captured = matCaptured;
        input.SetCapturedImage = false;

        var output = _controller.Process(input);

        Screen.BeginRepaint();
        Screen.WriteLine("Processing time: {0:N0}ms", output.Elapsed.TotalMilliseconds);
        Screen.WriteLine("Servo Settle Time: {0:N0}ms (Key 1=up, 2=down)", _controller.ServoSettleTime.TotalMilliseconds);
        Screen.WriteLine("Pan Tilt Before: {0}", output.PanTiltPrior);
        Screen.WriteLine("Pan Tilt After : {0}", output.PanTiltNow);
        Screen.WriteLine("Target: {0}", output.Target);
    }
}
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    using (var matCaptured = new Mat())
    {
        CameraCapture.Retrieve(matCaptured);

        var detector = new ColourDetector();
        var input = new ColourDetectorInput
        {
            Captured = matCaptured
            , Settings = Settings
            , SetCapturedImage = false
        };

        var result = detector.Process(input);

        if (result.IsDetected)
        {
            Log.Info(result);
        }
    }
}
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    using (var frame = new Mat())
    {
        CameraCapture.Retrieve(frame);

        var input = new FaceDetectorInput();
        input.Captured = frame;
        input.DetectEyes = chkDetectEyes.Checked;

        var result = _faceDetector.Process(input);

        var imageBgr = result.CapturedImage;
        IImage imageOut = imageBgr;

        if (chkRectangles.Checked)
        {
            foreach (var face in result.Faces)
            {
                imageBgr.Draw(face.Region, new Bgr(Color.Red), 2);

                var eyeCount = 0;
                foreach (Rectangle eye in face.Eyes)
                {
                    eyeCount++;
                    imageBgr.Draw(eye, new Bgr(Color.Blue), 2);
                    imageBgr.Draw(eyeCount.ToString(), eye.Location, FontFace.HersheyComplexSmall, 2, new Bgr(Color.Blue));
                }
            }
        }

        if (chkSunnies.Checked && result.Faces.Count > 0)
        {
            imageOut = WearSunnies(imageBgr, result.Faces[0].Eyes);
        }

        imageBox.Image = imageOut;

        NotifyStatus("Face detection took {0}", result.Elapsed.ToHumanReadable());
    }
}
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    using (var matCaptured = new Mat())
    {
        CameraCapture.Retrieve(matCaptured);

        var input = new ColourDetectorInput
        {
            Captured = matCaptured
            , Settings = Settings
            , SetCapturedImage = false
        };

        var result = _detector.Process(input);

        if (result.IsDetected)
        {
            if (!_objectCurrentlyDetected)
            {
                _debounceWatch.Start();
                _objectCurrentlyDetected = true;
            }

            SweeperToRed();
            Log.Info(m => m("Red detected! {0}", result));
        }
        else
        {
            var isInDebouncePeriod = _debounceWatch.IsRunning && _debounceWatch.ElapsedMilliseconds < 800;

            if (_objectCurrentlyDetected && !isInDebouncePeriod)
            {
                _debounceWatch.Reset();
                Log.Info(m => m("Red gone"));
                SweeperToGreen();
                _objectCurrentlyDetected = false;
            }
        }
    }
}
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    using (var inputFrame = new Mat())
    {
        if (!CameraCapture.Retrieve(inputFrame))
        {
            return;
        }

        using (var smoothedGrayFrame = new Mat())
        using (var smallGrayFrame = new Mat())
        using (var cannyFrame = new Mat())
        using (var grayFrame = new Mat())
        {
            if (inputFrame.NumberOfChannels > 1)
            {
                CvInvoke.CvtColor(inputFrame, grayFrame, ColorConversion.Bgr2Gray);
                imageBoxCaptured.Image = inputFrame.ToImage<Bgra, byte>();
            }
            else
            {
                imageBoxCaptured.Image = inputFrame.ToImage<Gray, byte>();
                inputFrame.CopyTo(grayFrame);
            }

            CvInvoke.PyrDown(grayFrame, smallGrayFrame);
            CvInvoke.PyrUp(smallGrayFrame, smoothedGrayFrame);
            CvInvoke.Canny(smoothedGrayFrame, cannyFrame, 100, 60);

            imageBoxGray.Image = grayFrame.ToImage<Gray, byte>();
            imageBoxSmoothedGray.Image = smoothedGrayFrame.ToImage<Gray, byte>();
            imageBoxCanny.Image = cannyFrame.ToImage<Gray, byte>();
        }
    }

    NotifyStatus(string.Empty);
}
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    using (var matCaptured = new Mat())
    {
        CameraCapture.Retrieve(matCaptured);
        var grayImage = matCaptured.ToImage<Gray, byte>();

        #region circle detection
        var watch = Stopwatch.StartNew();
        double cannyThreshold = 180.0;
        double circleAccumulatorThreshold = 120;
        CircleF[] circles = CvInvoke.HoughCircles(
            grayImage
            , HoughType.Gradient
            , 2.0
            , 40.0
            , cannyThreshold
            , circleAccumulatorThreshold
            , 5);
        watch.Stop();
        NotifyStatus("{0} Hough circles in {1}; ", circles.Length, watch.Elapsed.ToHumanReadable());
        #endregion

        #region draw circles
        var circleImage = matCaptured.ToImage<Bgr, byte>();
        foreach (CircleF circle in circles)
        {
            circleImage.Draw(circle, new Bgr(Color.Green), 10);
        }
        #endregion

        imageBoxCaptured.Image = circleImage;
    }
}
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    using (var matCaptured = new Mat())
    {
        var retrieveElapsed = Stopwatch.StartNew();
        CameraCapture.Retrieve(matCaptured);
        retrieveElapsed.Stop();

        ResizeImageControls(matCaptured);

        _detectorInput.Settings.Roi = GetRegionOfInterestFromControls();
        _detectorInput.Captured = matCaptured;

        var output = _colorDetector.Process(_detectorInput);

        if (output.IsDetected)
        {
            var radius = 50;
            var circle = new CircleF(output.CentralPoint, radius);
            var color = new Bgr(Color.Yellow);
            output.CapturedImage.Draw(circle, color, 3);

            var ballTextLocation = output.CentralPoint.ToPoint();
            ballTextLocation.X += radius;
            // output.CapturedImage.Draw("ball", ballTextLocation, FontFace.HersheyPlain, 3, color);
        }

        if (checkBoxRoi.Checked)
        {
            output.CapturedImage.Draw(_detectorInput.Settings.Roi, Color.Green.ToBgr());
        }

        //#region circle detection
        //var watch = Stopwatch.StartNew();
        //double cannyThreshold = 180.0;
        //double circleAccumulatorThreshold = 120;
        //CircleF[] circles = CvInvoke.HoughCircles(
        //    thresholdImage
        //    , HoughType.Gradient
        //    , 2.0
        //    , 20.0
        //    , cannyThreshold
        //    , circleAccumulatorThreshold
        //    , 5);
        //watch.Stop();
        //NotifyStatus("Hough circles - {0} ms; ", watch.ElapsedMilliseconds);
        //#endregion

        //#region draw circles
        //var circleImage = matCaptured.ToImage<Bgr, byte>();
        //foreach (CircleF circle in circles)
        //{
        //    circleImage.Draw(circle, new Bgr(Color.Brown), 2);
        //}
        //#endregion

        imageBoxCaptured.Image = output.CapturedImage;
        imageBoxFiltered.Image = output.ThresholdImage;

        NotifyStatus(
            "Retrieved frame in {0}, {1}"
            , retrieveElapsed.Elapsed.ToHumanReadable(HumanReadableTimeSpanOptions.Abbreviated)
            , output);
    }
}
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    if (_calibrationInProgress)
    {
        return;
    }

    using (var matCaptured = new Mat())
    {
        CameraCapture.Retrieve(matCaptured);

        var statusAccumulation = new StringBuilder();
        var bgrImage = matCaptured.ToImage<Bgr, byte>();

        DrawReticle(bgrImage, _centre, Color.Red);

        if (UserReticle != null)
        {
            DrawReticle(bgrImage, UserReticle.Value, Color.Green);
        }

        var input = new CameraProcessInput();
        input.SetCapturedImage = true;
        input.Captured = matCaptured;

        CameraPanTiltProcessOutput output = null;

        if (chkBoxColourTracking.Checked)
        {
            var result = _colourTrackingController.Process(input);
            output = result;

            if (result.IsDetected)
            {
                DrawReticle(bgrImage, result.Target, Color.Yellow);
            }

            imageBoxFiltered.Image = result.ThresholdImage;
            statusAccumulation.AppendFormat("{0} moment area", result.MomentArea);
            // WriteText(bgrImage, _captureConfig.Resolution.Height - 10, "Colour Tracking");
        }

        if (chkBoxFaceTracker.Checked)
        {
            // WriteText(bgrImage, _captureConfig.Resolution.Height - 50, "Face Tracking");
            var result = _faceTrackingController.Process(input);
            output = result;

            if (result.IsDetected)
            {
                foreach (var face in result.Faces)
                {
                    bgrImage.Draw(face.Region, new Bgr(Color.Yellow), 2);
                }

                DrawReticle(bgrImage, result.Target, Color.Yellow);
            }

            statusAccumulation.AppendFormat("{0} faces detected", result.Faces.Count);
        }

        if (chkBoxMotionTracking.Checked)
        {
            // WriteText(bgrImage, _captureConfig.Resolution.Height - 75, "Motion Tracking");
            var result = _motionTrackingController.Process(input);
            output = result;

            if (result.IsDetected)
            {
                foreach (var motionSection in result.MotionSections)
                {
                    bgrImage.Draw(motionSection.Region, new Bgr(Color.Green));
                }

                if (result.TargetedMotion != null)
                {
                    bgrImage.Draw(result.TargetedMotion.Region, new Bgr(Color.Red), 2);
                }
            }

            statusAccumulation.AppendFormat("{0} motions", result.MotionSections.Count);
            imageBoxFiltered.Image = result.ForegroundImage;
        }

        if (chkMultimode.Checked)
        {
            var multimodeOutput = _multimodePanTiltController.Process(input);
            output = multimodeOutput;

            if (output.Target != Point.Empty)
            {
                DrawReticle(bgrImage, output.Target, Color.Yellow);
            }
        }

        if (output != null)
        {
            if (output.IsServoInMotion)
            {
                statusAccumulation.AppendFormat(", Waiting for servo");
            }
            else
            {
                statusAccumulation.AppendFormat(", tracking took {0}", output.Elapsed.ToHumanReadable());
            }

            NotifyStatus(statusAccumulation.ToString());
        }

        imageBoxCaptured.Image = bgrImage;
    }
}
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    using (var frame = new Mat())
    {
        CameraCapture.Retrieve(frame);

        var input = new FaceDetectorInput();
        input.Captured = frame;
        input.DetectEyes = chkDetectEyes.Checked;
        input.ClassifierParams = _classiferParams;

        var result = _faceDetector.Process(input);
        var imageBgr = result.CapturedImage;

        if (chkRectangles.Checked)
        {
            foreach (var face in result.Faces)
            {
                var rectangleColor = new Bgr(Color.Red);
                imageBgr.Draw(face.Region, rectangleColor, 2);

                if (chkShowRectDimensions.Checked)
                {
                    imageBgr.Draw(
                        string.Format("{0}x{1}", face.Region.Width, face.Region.Height)
                        , face.Region.Location
                        , FontFace.HersheyComplexSmall
                        , 2
                        , rectangleColor);
                }

                var eyeCount = 0;
                foreach (Rectangle eye in face.Eyes)
                {
                    eyeCount++;
                    imageBgr.Draw(eye, new Bgr(Color.Blue), 2);
                    imageBgr.Draw(eyeCount.ToString(), eye.Location, FontFace.HersheyComplexSmall, 2, new Bgr(Color.Blue));
                }
            }
        }

        var inputBgra = imageBgr.Mat.ToImage<Bgra, byte>();
        Image<Bgra, byte> output = inputBgra;

        result.Faces.ForEach(f =>
        {
            if (chkSunnies.Checked)
            {
                output = WearSunnies2(output, f);
            }

            if (chkHat.Checked)
            {
                output = WearHat(output, f);
            }
        });

        imageBox.Image = output;

        NotifyStatus("Face detection took {0}", result.Elapsed.ToHumanReadable());
    }
}