/// <summary>
/// Runs one tick of the pan/tilt processing state machine: dispatches the captured
/// frame to the controller for the current <c>State</c>, then applies any state
/// transition requested by the tracking managers.
/// </summary>
/// <param name="input">Captured camera frame and associated capture data.</param>
/// <returns>The output produced by whichever controller handled this frame.</returns>
/// <exception cref="NotImplementedException">Thrown when in <c>CamshiftSelect</c> mode.</exception>
protected override CameraPanTiltProcessOutput DoProcess(CameraProcessInput input)
{
    var output = new CameraPanTiltProcessOutput();
    ProcessingMode nextState = State;

    switch (State)
    {
        case ProcessingMode.ColourObjectTrack:
            var colourOutput = ProcessColour(input);
            output = colourOutput;
            if (Ticks % 60 == 0) // provide some feedback on moment size but don't spam
            {
                _screen.WriteLine(colourOutput.ToString());
            }
            nextState = _colourTrackManager.AcceptOutput(colourOutput);
            break;

        case ProcessingMode.FaceDetection:
            var faceTrackOutput = _faceTrackingController.Process(input);
            nextState = _faceTrackManager.AcceptOutput(faceTrackOutput);
            output = faceTrackOutput;
            break;

        case ProcessingMode.CamshiftTrack:
            var camshiftOutput = _camshiftTrackingController.Process(input);
            if (camshiftOutput.Target == Point.Empty) // no target found — fall back to autonomous
            {
                SetMode(ProcessingMode.Autonomous);
            }
            output = camshiftOutput;
            break;

        case ProcessingMode.ColourObjectSelect:
            // Train colour thresholds from the current region of interest, then switch to tracking.
            _screen.WriteLine($"Threshold training for {_thresholdSelector.RequiredMomentAreaInRoiPercent}% ROI coverage");
            var thresholdSettings = _thresholdSelector.Select(input.Captured, _regionOfInterest);
            _screen.WriteLine($"Threshold tuning complete: {thresholdSettings}");
            _colourDetectorInput.SetCapturedImage = true;
            _colourDetectorInput.Settings.Accept(thresholdSettings);
            _isColourTrained = true;
            nextState = ProcessingMode.ColourObjectTrack;
            break;

        case ProcessingMode.Autonomous:
            nextState = _autonomousManager.AcceptInput(input);
            break;

        case ProcessingMode.CamshiftSelect:
            throw new NotImplementedException();
    }

    // Ensure every output carries an image for the downstream pipeline.
    if (output.CapturedImage == null)
    {
        output.CapturedImage = input.Captured.ToImage<Bgr, byte>();
    }

    ProcessOutputPipeline(output);

    if (nextState != State)
    {
        _screen.WriteLine($"Changing to {nextState}");
        State = nextState;
        switch (nextState)
        {
            case ProcessingMode.Autonomous:
                _autonomousManager.Reset(); // Reset the timers
                break;
            case ProcessingMode.ColourObjectTrack:
                _screen.WriteLine($"Color detector settings: {_colourDetectorInput.Settings}");
                break;
        }
    }

    return output;
}
/// <summary>
/// Runs one tick of the pan/tilt processing state machine: dispatches the captured
/// frame to the controller for the current <c>State</c>, applies any externally
/// forced transition, then performs per-mode transition actions (resets, sounds,
/// repositioning) before committing the new state.
/// </summary>
/// <param name="input">Captured camera frame and associated capture data.</param>
/// <returns>The output produced by whichever controller handled this frame.</returns>
/// <exception cref="NotImplementedException">Thrown when in <c>CamshiftSelect</c> mode.</exception>
protected override CameraPanTiltProcessOutput DoProcess(CameraProcessInput input)
{
    var output = new CameraPanTiltProcessOutput();
    ProcessingMode nextState = State;

    switch (State)
    {
        case ProcessingMode.Static:
            var staticHackOutput = new StaticTrackingPanTiltOutput();
            nextState = _staticManager.AcceptOutput(staticHackOutput);
            break;

        case ProcessingMode.ColourTrackFromFileSettings:
            // Load previously saved colour settings, then hand over to colour tracking.
            _colourDetectorInput.Settings = _colourSettingsRepository.Read();
            _screen.WriteLine($"Read colour settings {_colourDetectorInput.Settings}");
            nextState = ProcessingMode.ColourObjectTrack;
            break;

        case ProcessingMode.ColourObjectTrack:
            var colourOutput = ProcessColour(input);
            output = colourOutput;
            if (Ticks % (90 * 3) == 0) // provide some feedback on moment size but don't spam
            {
                _screen.WriteLine("colTrack:" + colourOutput);
            }
            nextState = _colourTrackManager.AcceptOutput(colourOutput);
            break;

        case ProcessingMode.FaceDetection:
            var faceTrackOutput = _faceTrackingController.Process(input);
            nextState = _faceTrackManager.AcceptOutput(faceTrackOutput);
            output = faceTrackOutput;
            break;

        case ProcessingMode.ColourObjectSelect:
            // Train colour thresholds from the current region of interest, then switch to tracking.
            _screen.WriteLine($"Threshold training for {_thresholdSelector.RequiredMomentAreaInRoiPercent}% ROI coverage");
            var thresholdSettings = _thresholdSelector.Select(input.Captured, _regionOfInterest);
            _screen.WriteLine($"Threshold tuning complete: {thresholdSettings}");
            _colourDetectorInput.SetCapturedImage = true;
            _colourDetectorInput.Settings.MomentArea = new RangeF(50, 10000);
            _colourDetectorInput.Settings.Accept(thresholdSettings);
            nextState = ProcessingMode.ColourObjectTrack;
            break;

        case ProcessingMode.Autonomous:
            nextState = _autonomousManager.AcceptInput(input);
            break;

        case ProcessingMode.CamshiftSelect:
            throw new NotImplementedException();
    }

    // Ensure every output carries an image for the downstream pipeline.
    if (output.CapturedImage == null)
    {
        output.CapturedImage = input.Captured.ToImage<Bgr, byte>();
    }

    ProcessOutputPipeline(output);

    // An externally forced transition overrides whatever the managers requested this tick.
    if (_forcedNextState != ProcessingMode.Unknown)
    {
        nextState = _forcedNextState;
        _forcedNextState = ProcessingMode.Unknown;
    }

    if (nextState != State)
    {
        _screen.WriteLine($"Changing {State} to {nextState}");
        switch (nextState)
        {
            case ProcessingMode.Static:
                _staticManager.Reset();
                break;
            case ProcessingMode.Autonomous:
                if (State == ProcessingMode.FaceDetection) // coming out of face detection
                {
                    SoundService.PlayAsync("cant-see-you.wav");
                }
                MoveAbsolute(50, 60);
                _autonomousManager.Reset(); // Reset the timers
                break;
            case ProcessingMode.ColourObjectTrack:
                _colourTrackManager.Reset();
                _screen.WriteLine($"Color detector settings: {_colourDetectorInput.Settings}");
                SoundService.PlayAsync("color-tracking.wav");
                break;
            case ProcessingMode.FaceDetection:
                _faceTrackManager.Reset();
                SoundService.PlayAsync("face-tracking.wav");
                _screen.WriteLine(ClassifierParams.ToString());
                break;
        }
        // Commit AFTER the switch: the Autonomous case above reads the old State.
        State = nextState;
    }

    return output;
}