// Blocks until every servo named in servoPositionList has finished moving,
// polling the hardware at pollPeriod_ms intervals. Gives up (with a Warning
// log) if the servos are disabled or the move exceeds timeToDestination plus
// the configured timeout adjustment.
private void WaitForMoveComplete(List<ServoPosition> servoPositionList, double timeToDestination)
{
    // Resolve each requested servo index to its Servo object up front so the
    // polling loop below only walks known-good entries.
    List<Servo> monitoredServos = new List<Servo>();
    foreach (ServoPosition requested in servoPositionList)
    {
        Servo match = servoList.Find(s => s.index == requested.index);
        if (match == null)
        {
            ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Error, "WaitForMoveComplete failed, servo " + requested.index.ToString() + " not found.");
            return;
        }
        monitoredServos.Add(match);
    }

    // Convert the allowed wait time into a maximum number of poll iterations.
    double pollTimeout = timeToDestination + pollTimeoutAdjustment;
    int pollTimeoutCount = (int)(pollTimeout * 1000 / (double)pollPeriod_ms);

    for (int poll = 0; ; poll++)
    {
        if (!IsEnabled())
        {
            ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Warning, "WaitForMoveComplete interrupted, servos are disabled.");
            return;
        }

        if (poll >= pollTimeoutCount)
        {
            ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Warning, "WaitForMoveComplete timeout, servos failed to reach destination in " + pollTimeout.ToString() + " seconds.");
            return;
        }

        // Refresh polled positions/targets (this also updates isMoving flags).
        UpdateServoValues();

        // Success once no monitored servo still reports motion.
        if (!monitoredServos.Exists(s => s.isMoving))
        {
            ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Debug, "WaitForMoveComplete succeeded, all servos reached destinations.");
            return;
        }

        Thread.Sleep(pollPeriod_ms);
    }
}
// Connect to the Kinect sensor and begin processing events.
// Selects the first connected Kinect, enables its skeleton stream, starts the
// sensor, then loads a speech grammar from the SpeechGrammar resource and
// begins continuous speech recognition on the Kinect's audio stream.
// Logs an Error and returns early if no sensor can be started; logs an Error
// (but still falls through to the final Info message) if no speech recognizer
// is installed.
public void InitializeKinect()
{
    this.sensor = null;

    // Look through all sensors and start the first connected one.
    // This requires that a Kinect is connected at the time of app startup.
    // To make your app robust against plug/unplug,
    // it is recommended to use KinectSensorChooser provided in Microsoft.Kinect.Toolkit (See components in Toolkit Browser).
    foreach (var potentialSensor in KinectSensor.KinectSensors)
    {
        if (potentialSensor.Status == KinectStatus.Connected)
        {
            this.sensor = potentialSensor;
            break; // Connect to first Kinect.
        }
    }

    if (null == this.sensor)
    {
        ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Error, "InitializeKinect() failed, not connected to Kinect sensor.");
        return;
    }
    else
    {
        // Turn on the skeleton stream to receive skeleton frames
        this.sensor.SkeletonStream.Enable();

        // Subscribe the skeleton-frame handler.
        // NOTE(review): this subscribes to a SkeletonFrameReady event on this
        // class, not to this.sensor.SkeletonFrameReady as the Kinect SDK
        // samples do — verify the sensor's event is actually forwarded here,
        // otherwise SensorSkeletonFrameReady may never fire.
        SkeletonFrameReady += SensorSkeletonFrameReady;

        // Start the sensor!
        try
        {
            this.sensor.Start();
        }
        catch (IOException)
        {
            ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Error, "InitializeKinect() failed, unable to Start Kinect sensor.");
            this.sensor = null;
            return;
        }
    }

    RecognizerInfo ri = GetKinectRecognizer();

    if (null != ri)
    {
        this.speechEngine = new SpeechRecognitionEngine(ri.Id);

        /****************************************************************
        *
        * Use this code to create grammar programmatically rather than from
        * a grammar file.
        *
        * var directions = new Choices();
        * directions.Add(new SemanticResultValue("forward", "FORWARD"));
        * directions.Add(new SemanticResultValue("forwards", "FORWARD"));
        * directions.Add(new SemanticResultValue("straight", "FORWARD"));
        * directions.Add(new SemanticResultValue("backward", "BACKWARD"));
        * directions.Add(new SemanticResultValue("backwards", "BACKWARD"));
        * directions.Add(new SemanticResultValue("back", "BACKWARD"));
        * directions.Add(new SemanticResultValue("turn left", "LEFT"));
        * directions.Add(new SemanticResultValue("turn right", "RIGHT"));
        *
        * var gb = new GrammarBuilder { Culture = ri.Culture };
        * gb.Append(directions);
        *
        * var g = new Grammar(gb);
        *
        ****************************************************************/

        // Create a grammar from grammar definition XML file.
        using (var memoryStream = new MemoryStream(Encoding.ASCII.GetBytes(Properties.Resources.SpeechGrammar)))
        {
            var g = new Grammar(memoryStream);
            speechEngine.LoadGrammar(g);
        }

        // Route recognition results to our handlers, and track the speech
        // synthesizer's start/stop so recognition can ignore self-generated audio.
        speechEngine.SpeechRecognized += SpeechRecognized;
        speechEngine.SpeechRecognitionRejected += SpeechRejected;
        sequenceProcessor.SpeakStarted += SpeakStarted;
        sequenceProcessor.SpeakCompleted += SpeakCompleted;

        // For long recognition sessions (a few hours or more), it may be beneficial to turn off adaptation of the acoustic model.
        // This will prevent recognition accuracy from degrading over time.
        ////speechEngine.UpdateRecognizerSetting("AdaptationOn", 0);

        // Feed the recognizer 16 kHz, 16-bit, mono PCM from the Kinect mic array,
        // and recognize continuously (Multiple) rather than stopping after one phrase.
        speechEngine.SetInputToAudioStream(
            sensor.AudioSource.Start(), new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
        speechEngine.RecognizeAsync(RecognizeMode.Multiple);
    }
    else
    {
        ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Error, "InitializeKinect() failed, no speech recognizer.");
    }

    // NOTE(review): this "succeeded" message is logged even when the speech
    // recognizer was not found above — confirm that is intended.
    ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Info, "InitializeKinect() succeeded, Kinect sensor is ready.");
}
// Reads the servo status from the servo controller hardware, and then updates the
// servoList with these new polled values. Also tracks whether each servo is currently
// moving by comparing their current and target positions.
//
// Logs an Error and returns without polling if the hardware is not connected or
// no servos are defined. Any exception raised while talking to the hardware is
// caught and logged rather than propagated.
public void UpdateServoValues()
{
    if (!IsConnected())
    {
        ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Error, "UpdateServoValues() failed, not connected to servo hardware.");
        return;
    }

    if (servoList.Count == 0)
    {
        ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Error, "UpdateServoValues() failed, no servos have been defined.");
        return;
    }

    try
    {
        // Get the servo parameters from the hardware.
        ServoStatus[] servoStatusArray;
        long servoCount;
        lock (uscLock)
        {
            if (uscDevice == null)
            {
                // Throw a specific exception type rather than System.Exception
                // (CA2201); it is caught and logged by the handler below.
                throw new InvalidOperationException("uscDevice is null");
            }
            uscDevice.getVariables(out servoStatusArray);
            servoCount = uscDevice.servoCount;
        }

        // Update the servoList with these parameters.
        foreach (Servo servo in servoList)
        {
            if (servo.index < 0 || servo.index >= servoCount)
            {
                // Skip this servo but keep updating the rest.
                ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Error, "UpdateServoValues() failed, servo index out of range. Servo index = " + servo.index.ToString());
            }
            else
            {
                servo.polledPosition = servoStatusArray[servo.index].position;
                servo.polledTarget = servoStatusArray[servo.index].target;
                servo.polledSpeed = servoStatusArray[servo.index].speed;
                servo.polledAcceleration = servoStatusArray[servo.index].acceleration;
                ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Debug, "Servo " + servo.index.ToString() +
                    ": Target = " + servo.polledTarget.ToString() +
                    ", Position = " + servo.polledPosition.ToString() +
                    ", Speed = " + servo.polledSpeed.ToString() +
                    ", Acceleration = " + servo.polledAcceleration.ToString());

                // Derive the isMoving flag from position-vs-target transitions.
                if (servo.isMoving == false && servo.polledTarget != servo.polledPosition)
                {
                    // Servo has started moving.
                    ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Debug, "Servo " + servo.index + " has started moving from " + servo.polledPosition.ToString() + " to " + servo.polledTarget.ToString());
                    servo.isMoving = true;
                }
                else if (servo.isMoving == true && servo.polledTarget == servo.polledPosition)
                {
                    // Servo has stopped moving.
                    ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Debug, "Servo " + servo.index + " has stopped moving at " + servo.polledPosition.ToString());
                    servo.isMoving = false;
                }

                if (servo.isMoving)
                {
                    ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Debug, "Servo " + servo.index + " is at position " + servo.polledPosition.ToString());
                }
            }
        }
    }
    catch (System.Exception ex)
    {
        ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Error, "Caught exception in UpdateServoValues(): " + ex.Message);
    }
}
/// <summary>
/// Handler for recognized speech events. Runs the sequence whose name matches
/// the recognized semantic value, provided recognition is enabled, the
/// synthesizer is not currently speaking, and confidence is high enough.
/// </summary>
/// <param name="sender">object sending the event.</param>
/// <param name="e">event arguments.</param>
private void SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
{
    if (!speechRecognitionEnabled)
    {
        return;
    }

    // Ignore anything heard while the synthesizer is speaking — the
    // recognizer may be picking up the synthesizer's own output.
    lock (speechLock)
    {
        if (synthesizerIsSpeaking)
        {
            ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Warning, "SpeechRecognized: Ignored word '" + e.Result.Text + "', since synthesizer is speaking.");
            return;
        }
    }

    ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Info, "SpeechRecognized: Detected word '" + e.Result.Text + "' with confidence " + e.Result.Confidence.ToString());

    // Speech utterance confidence below which we treat speech as if it hadn't been heard
    const double ConfidenceThreshold = 0.5;

    if (e.Result.Confidence >= ConfidenceThreshold)
    {
        // Only these semantic values are runnable; each maps to the sequence
        // of the same name, so the recognized value doubles as the sequence name.
        string semanticValue = e.Result.Semantics.Value.ToString();
        string sequenceName;
        switch (semanticValue)
        {
            case "Hello":
            case "PlayHarp":
            case "PLAYMUSIC":
            case "Count":
            case "NAME":
            case "DEFAULT":
                sequenceName = semanticValue;
                break;
            default:
                sequenceName = "";
                break;
        }

        if (sequenceName != "")
        {
            sequenceProcessor.RunSequence(sequenceName);
        }
    }
}