/// <summary>
/// Runs face detection on the captured frame and steers the pan/tilt rig
/// towards the first detected face (or recentres when none is found).
/// </summary>
/// <param name="baseInput">The frame plus capture options for this tick.</param>
/// <returns>Pan/tilt reaction output enriched with the detected faces.</returns>
protected override FaceTrackingPanTiltOutput DoProcess(CameraProcessInput baseInput)
{
    var input = new FaceDetectorInput();
    input.Captured = baseInput.Captured;
    // BUGFIX: the flag was never copied from baseInput, so the capture branch
    // below could never run. Sibling controllers (e.g. the motion tracker)
    // propagate this flag the same way.
    input.SetCapturedImage = baseInput.SetCapturedImage;
    input.DetectEyes = false; // eye detection skipped for speed

    var result = _faceDetector.Process(input);

    // Aim at the first face found, otherwise recentre.
    var targetPoint = CentrePoint;
    if (result.Faces.Count > 0)
    {
        Face faceTarget = result.Faces[0];
        targetPoint = faceTarget.Region.Center();
    }

    var outerResult = ReactToTarget(targetPoint);
    outerResult.Faces.AddRange(result.Faces);

    if (input.SetCapturedImage)
    {
        outerResult.CapturedImage = input.Captured.ToImage<Bgr, byte>();
    }
    return outerResult;
}
/// <summary>
/// Feeds the frame to the CAMShift tracker and steers towards the tracked
/// object, recentring when the object of interest is lost.
/// </summary>
protected override CameraPanTiltProcessOutput DoProcess(CameraProcessInput input)
{
    var trackingInput = new TrackingInput
    {
        Config = TrackConfig,
        Captured = input.Captured,
        SetCapturedImage = false
    };
    var camshiftOutput = _detector.Process(trackingInput);

    // Only the first frame after (re)selection should start a new track;
    // turn it off after initialising.
    TrackConfig.StartNewTrack = false;

    var targetPoint = camshiftOutput.HasObjectOfInterest
        ? camshiftOutput.ObjectOfInterest.Center.ToPoint()
        : CentrePoint;
    return ReactToTarget(targetPoint);
}
/// <summary>
/// Pushes the latest frame through the colour tracking controller using the
/// current detector settings, and exposes the threshold image as the
/// displayable capture.
/// </summary>
private ColourTrackingPanTiltOutput ProcessColour(CameraProcessInput input)
{
    _colourDetectorInput.Captured = input.Captured;
    _colourTrackingController.Settings = _colourDetectorInput.Settings;

    var output = _colourTrackingController.Process(_colourDetectorInput);

    // Show the binary threshold mask rather than the raw frame.
    output.CapturedImage = GetBgr(output.ThresholdImage);
    return output;
}
/// <summary>
/// Returns true when the trained colour is detected covering at least 90% of
/// the frame. Always false before colour training has run.
/// </summary>
private bool IsColourFullFrame(CameraProcessInput input)
{
    // Without trained thresholds the detector output is meaningless.
    if (!_isColourTrained)
    {
        return false;
    }

    var colourOutput = ProcessColour(input);

    // "Full frame" = the colour moment covers >= 90% of the image pixels.
    const int fullFrameMinimumPercent = 90;
    var framePixelCount = colourOutput.CapturedImage.Width * colourOutput.CapturedImage.Height;
    var minimumColourPixelCount = framePixelCount * fullFrameMinimumPercent / 100;

    return colourOutput.IsDetected && colourOutput.MomentArea > minimumColourPixelCount;
}
// Decides which processing mode the rig should be in while autonomous:
// periodically samples the frame for faces and for a full-frame colour match,
// and otherwise drives an idle "smooth pursuit" sweep between targets.
public ProcessingMode AcceptInput(CameraProcessInput input)
{
    // Sample for faces at most once per _sampleFaceEvery.
    if (_timeSinceLastFaceSample.Elapsed > _sampleFaceEvery)
    {
        if (IsFaceFound(input))
        {
            // NOTE(review): returning before Restart() means the face timer is
            // not reset on a hit — presumably fine because the mode switches,
            // but confirm this ordering is intentional.
            return(ProcessingMode.FaceDetection);
        }
        _timeSinceLastFaceSample.Restart();
    }
    // Sample for a full-frame colour match at most once per _sampleColourEvery.
    if (_timeSinceLastColourSample.Elapsed > _sampleColourEvery)
    {
        if (IsColourFullFrame(input))
        {
            return(ProcessingMode.ColourObjectTrack);
        }
        _timeSinceLastColourSample.Restart();
    }
    // Idle and due: begin a new smooth pursuit from the current servo setting.
    if (_internalState == AutonomousState.Waiting && _timeSinceLastSmoothPursuit.Elapsed > _nextSmoothPursuit)
    {
        _timeTarget.Start(_panTiltController.CurrentSetting);
        _screen.WriteLine($"Starting smooth pursuit {_timeTarget.Original} to {_timeTarget.Target} over {_timeTarget.TimeSpan.ToHumanReadable()}");
        _internalState = AutonomousState.SmoothPursuit;
    }
    // Step the pursuit: move one interpolated position per tick.
    if (_internalState == AutonomousState.SmoothPursuit)
    {
        var nextPosition = _timeTarget.GetNextPosition();
        _panTiltController.MoveAbsolute(nextPosition);
        // Log position occasionally to avoid spamming the screen.
        if (_timeTarget.Ticks % 25 == 0)
        {
            _screen.WriteLine($"{nextPosition}");
        }
        if (_timeTarget.TimeTargetReached)
        {
            DecideNextSmoothPursuit();
            _screen.WriteLine("Smooth pursuit target reached");
            _internalState = AutonomousState.Waiting;
        }
    }
    return(ProcessingMode.Autonomous);
}
// Decides which processing mode the rig should be in while autonomous:
// periodically samples the frame for faces and for a full-frame colour match,
// and otherwise drives an idle "smooth pursuit" sweep between targets.
public ProcessingMode AcceptInput(CameraProcessInput input)
{
    // Sample for faces at most once per _sampleFaceEvery.
    if (_timeSinceLastFaceSample.Elapsed > _sampleFaceEvery)
    {
        if (IsFaceFound(input))
        {
            // NOTE(review): returning before Restart() means the face timer is
            // not reset on a hit — presumably fine because the mode switches,
            // but confirm this ordering is intentional.
            return ProcessingMode.FaceDetection;
        }
        _timeSinceLastFaceSample.Restart();
    }
    // Sample for a full-frame colour match at most once per _sampleColourEvery.
    if (_timeSinceLastColourSample.Elapsed > _sampleColourEvery)
    {
        if (IsColourFullFrame(input))
        {
            return ProcessingMode.ColourObjectTrack;
        }
        _timeSinceLastColourSample.Restart();
    }
    // Idle and due: begin a new smooth pursuit from the current servo setting.
    if (_internalState == AutonomousState.Waiting && _timeSinceLastSmoothPursuit.Elapsed > _nextSmoothPursuit)
    {
        _timeTarget.Start(_panTiltController.CurrentSetting);
        _screen.WriteLine($"Starting smooth pursuit {_timeTarget.Original} to {_timeTarget.Target} over {_timeTarget.TimeSpan.ToHumanReadable()}");
        _internalState = AutonomousState.SmoothPursuit;
    }
    // Step the pursuit: move one interpolated position per tick.
    if (_internalState == AutonomousState.SmoothPursuit)
    {
        var nextPosition = _timeTarget.GetNextPosition();
        _panTiltController.MoveAbsolute(nextPosition);
        // Log position occasionally to avoid spamming the screen.
        if (_timeTarget.Ticks % 25 == 0)
        {
            _screen.WriteLine($"{nextPosition}");
        }
        if (_timeTarget.TimeTargetReached)
        {
            DecideNextSmoothPursuit();
            _screen.WriteLine("Smooth pursuit target reached");
            _internalState = AutonomousState.Waiting;
        }
    }
    return ProcessingMode.Autonomous;
}
/// <summary>
/// Detects whether the frame is almost entirely dark ("all black") by
/// temporarily swapping in a low-value HSV threshold and checking whether the
/// resulting moment covers at least 70% of the image.
/// </summary>
private bool IsColourFullFrame(CameraProcessInput input)
{
    var isFullFrameColour = false;

    // Detect all black: low V band across the full hue range, restored on dispose.
    using (new TemporaryThresholdSettings(_colourDetectorInput, ThresholdSettings.Get(0, 0, 0, 180, 255, 40)))
    {
        _colourDetectorInput.Captured = input.Captured;
        var colourOutput = _colourDetector.Process(_colourDetectorInput);

        // "Full frame" = the moment covers >= 70% of the image pixels.
        const int fullFrameMinimumPercent = 70;
        var framePixelCount = colourOutput.CapturedImage.Width * colourOutput.CapturedImage.Height;
        var minimumColourPixelCount = framePixelCount * fullFrameMinimumPercent / 100;
        isFullFrameColour = colourOutput.MomentArea > minimumColourPixelCount;
    }

    return isFullFrameColour;
}
/// <summary>
/// Runs motion detection on the frame and steers towards the biggest motion
/// region (or recentres when nothing moved). Logs settle diagnostics while
/// the servo is in motion.
/// </summary>
/// <param name="input">The frame plus capture options for this tick.</param>
/// <returns>Pan/tilt reaction output enriched with the detected motion sections.</returns>
protected override MotionTrackingPanTiltOutput DoProcess(CameraProcessInput input)
{
    var detectorInput = new MotionDetectorInput();
    detectorInput.Settings = Settings;
    detectorInput.SetCapturedImage = input.SetCapturedImage;
    detectorInput.Captured = input.Captured;

    var motionOutput = _motionDetector.Process(detectorInput);

    var targetPoint = CentrePoint;
    MotionSection biggestMotion = null;
    if (motionOutput.IsDetected)
    {
        _screen.BeginRepaint();
        biggestMotion = motionOutput.BiggestMotion;
        targetPoint = biggestMotion.Region.Center();
    }

    var output = ReactToTarget(targetPoint);

    if (IsServoInMotion)
    {
        // BUGFIX: biggestMotion is null when no motion was detected; the old
        // code dereferenced it unconditionally here and threw a
        // NullReferenceException while recentring.
        if (biggestMotion != null)
        {
            _screen.WriteLine("Reacting to target {0}, size {1}", targetPoint, biggestMotion.Region.Area());
        }
        // Record how long it took for the scene to settle to zero motion.
        if (_timeToZeroMotion != null && !motionOutput.IsDetected)
        {
            _timeToZeroMotion.Stop();
            _log.InfoFormat("Time to zero motion was {0:F}ms", _timeToZeroMotion.ElapsedMilliseconds);
            _timeToZeroMotion = null;
        }
    }

    output.MotionSections.AddRange(motionOutput.MotionSections);
    if (biggestMotion != null)
    {
        output.TargetedMotion = motionOutput.BiggestMotion;
    }
    output.ForegroundImage = motionOutput.ForegroundImage;
    return output;
}
/// <summary>
/// Template entry point: counts the tick, short-circuits while the servo is
/// settling, otherwise delegates to <see cref="DoProcess"/> and stamps the
/// result with a captured image (if requested) and the elapsed time.
/// </summary>
public TOutput Process(CameraProcessInput input)
{
    Ticks++;

    // Skip processing entirely while the servo is still moving.
    if (IsServoInMotion)
    {
        var skipped = new TOutput();
        skipped.IsServoInMotion = true;
        return skipped;
    }

    var timer = Stopwatch.StartNew();
    var result = DoProcess(input);

    // Fall back to converting the raw capture when the subclass supplied none.
    if (result.CapturedImage == null && input.SetCapturedImage)
    {
        result.CapturedImage = input.Captured.ToImage<Bgr, byte>();
    }

    result.Elapsed = timer.Elapsed;
    return result;
}
/// <summary>
/// Frame-grabbed callback: retrieves the frame, runs the controller on it
/// (without keeping a display copy), and repaints the console status pane.
/// </summary>
private void InternalImageGrabbedHandler(object sender, EventArgs e)
{
    FpsTracker.NotifyImageGrabbed(sender, e);

    using (var matCaptured = new Mat())
    {
        CameraCapture.Retrieve(matCaptured);

        var input = new CameraProcessInput
        {
            Captured = matCaptured,
            SetCapturedImage = false // headless: no display copy needed
        };
        var output = _controller.Process(input);

        Screen.BeginRepaint();
        Screen.WriteLine("Processing time: {0:N0}ms", output.Elapsed.TotalMilliseconds);
        Screen.WriteLine("Servo Settle Time: {0:N0}ms (Key 1=up, 2=down)", _controller.ServoSettleTime.TotalMilliseconds);
        Screen.WriteLine("Pan Tilt Before: {0}", output.PanTiltPrior);
        Screen.WriteLine("Pan Tilt After : {0}", output.PanTiltNow);
        Screen.WriteLine("Target: {0}", output.Target);
    }
}
/// <summary>
/// Runs the colour detector over the frame with the current settings and
/// steers towards the blob's centroid, recentring when nothing is detected.
/// </summary>
protected override ColourTrackingPanTiltOutput DoProcess(CameraProcessInput input)
{
    var detectorInput = new ColourDetectorInput
    {
        Captured = input.Captured,
        SetCapturedImage = input.SetCapturedImage,
        Settings = Settings
    };
    var detectorOutput = _colourDetector.Process(detectorInput);

    var targetPoint = detectorOutput.IsDetected
        ? detectorOutput.CentralPoint.ToPoint()
        : CentrePoint;

    var output = ReactToTarget(targetPoint);
    // Fold the detector's fields into the pan/tilt output.
    output.Absorb(detectorOutput);
    return output;
}
/// <summary>
/// Frame-grabbed callback: retrieves the frame, runs the controller on it
/// (without keeping a display copy), and repaints the console status pane.
/// </summary>
private void InternalImageGrabbedHandler(object sender, EventArgs e)
{
    FpsTracker.NotifyImageGrabbed(sender, e);

    using (var matCaptured = new Mat())
    {
        CameraCapture.Retrieve(matCaptured);

        var input = new CameraProcessInput
        {
            Captured = matCaptured,
            SetCapturedImage = false // headless: no display copy needed
        };
        var output = _controller.Process(input);

        Screen.BeginRepaint();
        Screen.WriteLine("Processing time: {0:N0}ms", output.Elapsed.TotalMilliseconds);
        Screen.WriteLine("Servo Wait Time: {0:N0}ms", _controller.ServoSettleTime.TotalMilliseconds);
        Screen.WriteLine("Pan Tilt Before: {0}", output.PanTiltPrior);
        Screen.WriteLine("Pan Tilt After : {0}", output.PanTiltNow);
        Screen.WriteLine("Target: {0}", output.Target);
    }
}
// Per-frame UI handler: runs whichever tracking controllers are enabled via
// the checkboxes, overlays their results on the frame, and publishes a
// one-line status summary. The last controller to run wins 'output'.
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    // Calibration owns the camera; ignore frames until it finishes.
    if (_calibrationInProgress)
    {
        return;
    }
    using (var matCaptured = new Mat())
    {
        CameraCapture.Retrieve(matCaptured);
        var statusAccumulation = new StringBuilder();
        var bgrImage = matCaptured.ToImage<Bgr, byte>();
        // Fixed red reticle at frame centre; green one where the user clicked.
        DrawReticle(bgrImage, _centre, Color.Red);
        if (UserReticle != null)
        {
            DrawReticle(bgrImage, UserReticle.Value, Color.Green);
        }
        var input = new CameraProcessInput();
        input.SetCapturedImage = true;
        input.Captured = matCaptured;
        CameraPanTiltProcessOutput output = null;
        if (chkBoxColourTracking.Checked)
        {
            var result = _colourTrackingController.Process(input);
            output = result;
            if (result.IsDetected)
            {
                // Yellow reticle marks the colour target.
                DrawReticle(bgrImage, result.Target, Color.Yellow);
            }
            imageBoxFiltered.Image = result.ThresholdImage;
            statusAccumulation.AppendFormat("{0} moment area", result.MomentArea);
            // WriteText(bgrImage, _captureConfig.Resolution.Height - 10, "Colour Tracking");
        }
        if (chkBoxFaceTracker.Checked)
        {
            // WriteText(bgrImage, _captureConfig.Resolution.Height - 50, "Face Tracking");
            var result = _faceTrackingController.Process(input);
            output = result;
            if (result.IsDetected)
            {
                // Outline every detected face and mark the tracked target.
                foreach (var face in result.Faces)
                {
                    bgrImage.Draw(face.Region, new Bgr(Color.Yellow), 2);
                }
                DrawReticle(bgrImage, result.Target, Color.Yellow);
            }
            statusAccumulation.AppendFormat("{0} faces detected", result.Faces.Count);
        }
        if (chkBoxMotionTracking.Checked)
        {
            // WriteText(bgrImage, _captureConfig.Resolution.Height - 75, "Motion Tracking");
            var result = _motionTrackingController.Process(input);
            output = result;
            if (result.IsDetected)
            {
                // Green boxes for all motion sections; red for the targeted one.
                foreach (var motionSection in result.MotionSections)
                {
                    bgrImage.Draw(motionSection.Region, new Bgr(Color.Green));
                }
                if (result.TargetedMotion != null)
                {
                    bgrImage.Draw(result.TargetedMotion.Region, new Bgr(Color.Red), 2);
                }
            }
            statusAccumulation.AppendFormat("{0} motions", result.MotionSections.Count);
            imageBoxFiltered.Image = result.ForegroundImage;
        }
        if (chkMultimode.Checked)
        {
            var multimodeOutput = _multimodePanTiltController.Process(input);
            output = multimodeOutput;
            if (output.Target != Point.Empty)
            {
                DrawReticle(bgrImage, output.Target, Color.Yellow);
            }
        }
        if (output != null)
        {
            if (output.IsServoInMotion)
            {
                statusAccumulation.AppendFormat(", Waiting for servo");
            }
            else
            {
                statusAccumulation.AppendFormat(", tracking took {0}", output.Elapsed.ToHumanReadable());
            }
            NotifyStatus(statusAccumulation.ToString());
        }
        imageBoxCaptured.Image = bgrImage;
    }
}
// State-machine dispatcher: runs the controller matching the current State,
// lets the per-mode manager vote on the next state, then performs the
// transition (with per-state setup) when the state changes.
protected override CameraPanTiltProcessOutput DoProcess(CameraProcessInput input)
{
    var output = new CameraPanTiltProcessOutput();
    ProcessingMode nextState = State;
    switch (State)
    {
        case ProcessingMode.ColourObjectTrack:
            var colourOutput = ProcessColour(input);
            output = colourOutput;
            if (Ticks % 60 == 0) // provide some feedback on moment size but don't spam
            {
                _screen.WriteLine(colourOutput.ToString());
            }
            nextState = _colourTrackManager.AcceptOutput(colourOutput);
            break;
        case ProcessingMode.FaceDetection:
            var faceTrackOutput = _faceTrackingController.Process(input);
            nextState = _faceTrackManager.AcceptOutput(faceTrackOutput);
            output = faceTrackOutput;
            break;
        case ProcessingMode.CamshiftTrack:
            var camshiftOutput = _camshiftTrackingController.Process(input);
            // Losing the camshift target drops the rig back to autonomous mode.
            if (camshiftOutput.Target == Point.Empty)
            {
                SetMode(ProcessingMode.Autonomous);
            }
            output = camshiftOutput;
            break;
        case ProcessingMode.ColourObjectSelect:
            // One-shot threshold training from the current region of interest,
            // then hand over to colour tracking with the trained settings.
            _screen.WriteLine($"Threshold training for {_thresholdSelector.RequiredMomentAreaInRoiPercent}% ROI coverage");
            var thresholdSettings = _thresholdSelector.Select(input.Captured, _regionOfInterest);
            _screen.WriteLine($"Threshold tuning complete: {thresholdSettings}");
            _colourDetectorInput.SetCapturedImage = true;
            _colourDetectorInput.Settings.Accept(thresholdSettings);
            _isColourTrained = true;
            nextState = ProcessingMode.ColourObjectTrack;
            break;
        case ProcessingMode.Autonomous:
            nextState = _autonomousManager.AcceptInput(input);
            // NOTE(review): empty branch — presumably a placeholder for a
            // transition hook into colour tracking; confirm before removing.
            if (nextState == ProcessingMode.ColourObjectTrack)
            {
            }
            break;
        case ProcessingMode.CamshiftSelect:
            throw new NotImplementedException();
    }
    // Guarantee downstream consumers always get a captured image.
    if (output.CapturedImage == null)
    {
        output.CapturedImage = input.Captured.ToImage<Bgr, byte>();
    }
    ProcessOutputPipeline(output);
    if (nextState != State)
    {
        _screen.WriteLine($"Changing to {nextState}");
        State = nextState;
        switch (nextState)
        {
            case ProcessingMode.Autonomous:
                _autonomousManager.Reset(); // Reset the timers
                break;
            case ProcessingMode.ColourObjectTrack:
                _screen.WriteLine($"Color detector settings: {_colourDetectorInput.Settings}");
                break;
        }
    }
    return(output);
}
/// <summary>
/// Implemented by concrete tracking controllers: processes a single captured
/// frame and returns the controller-specific pan/tilt tracking output.
/// </summary>
/// <param name="input">The captured frame plus capture options for this tick.</param>
/// <returns>The controller-specific output for this frame.</returns>
protected abstract TOutput DoProcess(CameraProcessInput input);
// Per-frame UI handler: runs whichever tracking controllers are enabled via
// the checkboxes, overlays their results on the frame, and publishes a
// one-line status summary. The last controller to run wins 'output'.
public override void ImageGrabbedHandler(object sender, EventArgs e)
{
    // Calibration owns the camera; ignore frames until it finishes.
    if (_calibrationInProgress)
    {
        return;
    }
    using (var matCaptured = new Mat())
    {
        CameraCapture.Retrieve(matCaptured);
        var statusAccumulation = new StringBuilder();
        var bgrImage = matCaptured.ToImage<Bgr, byte>();
        // Fixed red reticle at frame centre; green one where the user clicked.
        DrawReticle(bgrImage, _centre, Color.Red);
        if (UserReticle != null)
        {
            DrawReticle(bgrImage, UserReticle.Value, Color.Green);
        }
        var input = new CameraProcessInput();
        input.SetCapturedImage = true;
        input.Captured = matCaptured;
        CameraPanTiltProcessOutput output = null;
        if (chkBoxColourTracking.Checked)
        {
            var result = _colourTrackingController.Process(input);
            output = result;
            if (result.IsDetected)
            {
                // Yellow reticle marks the colour target.
                DrawReticle(bgrImage, result.Target, Color.Yellow);
            }
            imageBoxFiltered.Image = result.ThresholdImage;
            statusAccumulation.AppendFormat("{0} moment area", result.MomentArea);
            // WriteText(bgrImage, _captureConfig.Resolution.Height - 10, "Colour Tracking");
        }
        if (chkBoxFaceTracker.Checked)
        {
            // WriteText(bgrImage, _captureConfig.Resolution.Height - 50, "Face Tracking");
            var result = _faceTrackingController.Process(input);
            output = result;
            if (result.IsDetected)
            {
                // Outline every detected face and mark the tracked target.
                foreach (var face in result.Faces)
                {
                    bgrImage.Draw(face.Region, new Bgr(Color.Yellow), 2);
                }
                DrawReticle(bgrImage, result.Target, Color.Yellow);
            }
            statusAccumulation.AppendFormat("{0} faces detected", result.Faces.Count);
        }
        if (chkBoxMotionTracking.Checked)
        {
            // WriteText(bgrImage, _captureConfig.Resolution.Height - 75, "Motion Tracking");
            var result = _motionTrackingController.Process(input);
            output = result;
            if (result.IsDetected)
            {
                // Green boxes for all motion sections; red for the targeted one.
                foreach (var motionSection in result.MotionSections)
                {
                    bgrImage.Draw(motionSection.Region, new Bgr(Color.Green));
                }
                if (result.TargetedMotion != null)
                {
                    bgrImage.Draw(result.TargetedMotion.Region, new Bgr(Color.Red), 2);
                }
            }
            statusAccumulation.AppendFormat("{0} motions", result.MotionSections.Count);
            imageBoxFiltered.Image = result.ForegroundImage;
        }
        if (chkMultimode.Checked)
        {
            var multimodeOutput = _multimodePanTiltController.Process(input);
            output = multimodeOutput;
            if (output.Target != Point.Empty)
            {
                DrawReticle(bgrImage, output.Target, Color.Yellow);
            }
        }
        if (output != null)
        {
            if (output.IsServoInMotion)
            {
                statusAccumulation.AppendFormat(", Waiting for servo");
            }
            else
            {
                statusAccumulation.AppendFormat(", tracking took {0}", output.Elapsed.ToHumanReadable());
            }
            NotifyStatus(statusAccumulation.ToString());
        }
        imageBoxCaptured.Image = bgrImage;
    }
}
// State-machine dispatcher (extended variant): runs the controller for the
// current State, lets per-mode managers vote on the next state, and on a
// transition plays an audio cue and/or repositions the rig before switching.
protected override CameraPanTiltProcessOutput DoProcess(CameraProcessInput input)
{
    var output = new CameraPanTiltProcessOutput();
    ProcessingMode nextState = State;
    switch (State)
    {
        case ProcessingMode.Static:
            // Static mode produces no real tracking output; feed the manager a stub.
            var staticHackOutput = new StaticTrackingPanTiltOutput();
            nextState = _staticManager.AcceptOutput(staticHackOutput);
            break;
        case ProcessingMode.ColourTrackFromFileSettings:
            // Load persisted thresholds, then move to colour tracking next tick.
            _colourDetectorInput.Settings = _colourSettingsRepository.Read();
            _screen.WriteLine($"Read colour settings {_colourDetectorInput.Settings}");
            nextState = ProcessingMode.ColourObjectTrack;
            break;
        case ProcessingMode.ColourObjectTrack:
            var colourOutput = ProcessColour(input);
            output = colourOutput;
            if (Ticks % (90 * 3) == 0) // provide some feedback on moment size but don't spam
            {
                _screen.WriteLine("colTrack:" + colourOutput);
            }
            nextState = _colourTrackManager.AcceptOutput(colourOutput);
            break;
        case ProcessingMode.FaceDetection:
            var faceTrackOutput = _faceTrackingController.Process(input);
            nextState = _faceTrackManager.AcceptOutput(faceTrackOutput);
            output = faceTrackOutput;
            break;
        //case ProcessingMode.CamshiftTrack:
        //    var camshiftOutput = _camshiftTrackingController.Process(input);
        //    if (camshiftOutput.Target == Point.Empty)
        //    {
        //        SetMode(ProcessingMode.Autonomous);
        //    }
        //    output = camshiftOutput;
        //    break;
        case ProcessingMode.ColourObjectSelect:
            // One-shot threshold training from the current region of interest,
            // then hand over to colour tracking with the trained settings.
            _screen.WriteLine($"Threshold training for {_thresholdSelector.RequiredMomentAreaInRoiPercent}% ROI coverage");
            var thresholdSettings = _thresholdSelector.Select(input.Captured, _regionOfInterest);
            _screen.WriteLine($"Threshold tuning complete: {thresholdSettings}");
            _colourDetectorInput.SetCapturedImage = true;
            _colourDetectorInput.Settings.MomentArea = new RangeF(50, 10000);
            _colourDetectorInput.Settings.Accept(thresholdSettings);
            //_isColourTrained = true;
            nextState = ProcessingMode.ColourObjectTrack;
            break;
        case ProcessingMode.Autonomous:
            nextState = _autonomousManager.AcceptInput(input);
            // NOTE(review): empty branch — presumably a placeholder for a
            // transition hook into colour tracking; confirm before removing.
            if (nextState == ProcessingMode.ColourObjectTrack)
            {
            }
            break;
        case ProcessingMode.CamshiftSelect:
            throw new NotImplementedException();
    }
    // Guarantee downstream consumers always get a captured image.
    if (output.CapturedImage == null)
    {
        output.CapturedImage = input.Captured.ToImage<Bgr, byte>();
    }
    ProcessOutputPipeline(output);
    // An externally-set forced state overrides the managers' vote for one cycle.
    if (_forcedNextState != ProcessingMode.Unknown)
    {
        nextState = _forcedNextState;
        _forcedNextState = ProcessingMode.Unknown;
    }
    if (nextState != State)
    {
        _screen.WriteLine($"Changing {State} to {nextState}");
        switch (nextState)
        {
            case ProcessingMode.Static:
                _staticManager.Reset();
                break;
            case ProcessingMode.Autonomous:
                if (State == ProcessingMode.FaceDetection) // coming out of face detection
                {
                    SoundService.PlayAsync("cant-see-you.wav");
                }
                MoveAbsolute(50, 60);
                _autonomousManager.Reset(); // Reset the timers
                break;
            case ProcessingMode.ColourObjectTrack:
                _colourTrackManager.Reset();
                _screen.WriteLine($"Color detector settings: {_colourDetectorInput.Settings}");
                SoundService.PlayAsync("color-tracking.wav");
                break;
            case ProcessingMode.FaceDetection:
                _faceTrackManager.Reset();
                SoundService.PlayAsync("face-tracking.wav");
                _screen.WriteLine(ClassifierParams.ToString());
                break;
        }
        State = nextState;
    }
    return(output);
}