/// <summary>
/// Execute initialization tasks: pick the first connected Kinect sensor, start it,
/// create the gesture-segment objects, enable the skeleton stream, launch the
/// Celestia CelX script (best-effort), and wire up speech recognition.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void WindowLoaded(object sender, RoutedEventArgs e)
{
    // Look through all sensors and start the first connected one.
    // This requires that a Kinect is connected at the time of app startup.
    // To make your app robust against plug/unplug, it is recommended to use
    // KinectSensorChooser provided in Microsoft.Kinect.Toolkit.
    foreach (var potentialSensor in KinectSensor.KinectSensors)
    {
        if (potentialSensor.Status == KinectStatus.Connected)
        {
            this.sensor = potentialSensor;
            break;
        }
    }

    if (null != this.sensor)
    {
        try
        {
            // Start the sensor!
            this.sensor.Start();

            // Gesture segment instances consumed by the recognizer elsewhere
            // in this class.
            leftHandUp = new LeftHandUp();
            rightHandUp = new RightHandUp();
            swipeLeftSeg2 = new SwipeLeftSegment2();
            swipeRightSeg2 = new SwipeRightSegment2();
            leftHandForward = new LeftHandForward();
            rightHandForward = new RightHandForward();
            swipeDown = new SwipeDown();

            sensor.SkeletonStream.Enable();

            // FIX: Process.Start throws Win32Exception (not IOException) when
            // the target file is missing. The path is machine-specific, so a
            // failed launch previously crashed the app — and an IOException
            // from it would have wrongly discarded the already-running sensor.
            // The script launch is now best-effort and isolated.
            try
            {
                myProcess = Process.Start(@"C:\Users\Nocturnal\Dropbox\FrontiersII\CelX Script.celx");
            }
            catch (System.ComponentModel.Win32Exception)
            {
                // NOTE(review): hard-coded user path — consider moving to a
                // configuration setting.
                myProcess = null;
            }
        }
        catch (IOException)
        {
            // Some other application is streaming from the same Kinect sensor.
            this.sensor = null;
        }
    }

    if (null == this.sensor)
    {
        this.statusBarText.Text = Properties.Resources.NoKinectReady;
        return;
    }

    RecognizerInfo ri = GetKinectRecognizer();
    if (null != ri)
    {
        this.speechEngine = new SpeechRecognitionEngine(ri.Id);

        // Create a grammar from the grammar definition XML file embedded in
        // resources.
        using (var memoryStream = new MemoryStream(Encoding.ASCII.GetBytes(Properties.Resources.SpeechGrammar)))
        {
            var g = new Grammar(memoryStream);
            speechEngine.LoadGrammar(g);
        }

        speechEngine.SpeechRecognized += SpeechRecognized;
        speechEngine.SpeechRecognitionRejected += SpeechRejected;

        // 16 kHz, 16-bit mono PCM — the format the Kinect audio source emits.
        speechEngine.SetInputToAudioStream(
            sensor.AudioSource.Start(),
            new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
        speechEngine.RecognizeAsync(RecognizeMode.Multiple);
    }
    else
    {
        this.statusBarText.Text = Properties.Resources.NoSpeechRecognizer;
    }

    sensor.AllFramesReady += new EventHandler<AllFramesReadyEventArgs>(sensor_AllFramesReady);
}
/// <summary>
/// Registers every gesture this window recognizes with the gesture controller:
/// right swipe, left swipe, swipe down, and right-hand-up.
/// </summary>
private void DefineGestures()
{
    // Swipe right: hand up, first sweep segment, hand up again, second sweep
    // segment. The same RightHandUp instance is deliberately reused for both
    // "hand up" steps.
    var rightHandRaised = new RightHandUp();
    IRelativeGestureSegment[] rightSwipe =
    {
        rightHandRaised,
        new SwipeRightSegment1(),
        rightHandRaised,
        new SwipeRightSegment2(),
    };
    this.gestures.AddGesture(GestureType.RightSwipe, rightSwipe);

    // Swipe left: mirror image of the right swipe, again reusing one
    // LeftHandUp instance for both "hand up" steps.
    var leftHandRaised = new LeftHandUp();
    IRelativeGestureSegment[] leftSwipe =
    {
        leftHandRaised,
        new SwipeLeftSegment1(),
        leftHandRaised,
        new SwipeLeftSegment2(),
    };
    this.gestures.AddGesture(GestureType.LeftSwipe, leftSwipe);

    // Swipe down is a single-segment gesture.
    IRelativeGestureSegment[] downSwipe = { new SwipeDown() };
    this.gestures.AddGesture(GestureType.SwipeDown, downSwipe);

    // Right hand raised on its own is also a single-segment gesture.
    IRelativeGestureSegment[] rightHandOnly = { new RightHandUp() };
    this.gestures.AddGesture(GestureType.RightUp, rightHandOnly);
}
/// <summary>
/// Execute initialization tasks: find and start a connected Kinect sensor,
/// build the gesture segments and skeleton view model, then set up speech
/// recognition against the Kinect microphone array.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void WindowLoaded(object sender, RoutedEventArgs e)
{
    // Take the first sensor that reports Connected. This assumes a Kinect is
    // plugged in at startup; for robustness against plug/unplug at runtime,
    // Microsoft.Kinect.Toolkit's KinectSensorChooser is the recommended choice.
    foreach (var candidate in KinectSensor.KinectSensors)
    {
        if (candidate.Status == KinectStatus.Connected)
        {
            this.sensor = candidate;
            break;
        }
    }

    if (null != this.sensor)
    {
        try
        {
            // Start the sensor!
            this.sensor.Start();

            // Gesture segment instances consumed by the recognizer.
            leftHandUp = new LeftHandUp();
            rightHandUp = new RightHandUp();
            swipeLeftSeg1 = new SwipeLeftSegment1();
            swipeLeftSeg2 = new SwipeLeftSegment2();
            swipeRightSeg1 = new SwipeRightSegment1();
            swipeRightSeg2 = new SwipeRightSegment2();
            leftHandForward = new LeftHandForward();
            rightHandForward = new RightHandForward();
            swipeDown = new SwipeDown();

            viewModel = new SkeletonViewModel();
            viewModel.GestureRecognized += new EventHandler<GestureEventArgs>(this.GestureRecognized);

            sensor.SkeletonStream.Enable();
        }
        catch (IOException)
        {
            // Another application is already streaming from this Kinect sensor.
            this.sensor = null;
        }
    }

    if (null == this.sensor)
    {
        this.statusBarText.Text = Properties.Resources.NoKinectReady;
        return;
    }

    RecognizerInfo ri = GetKinectRecognizer();
    if (null != ri)
    {
        // UI spans highlighted when the matching word is recognized.
        recognitionSpans = new List<Span>
        {
            mercurySpan, venusSpan, earthSpan, marsSpan, jupiterSpan,
            saturnSpan, uranusSpan, neptuneSpan, plutoSpan, sunSpan
        };

        this.speechEngine = new SpeechRecognitionEngine(ri.Id);

        // A grammar could also be built programmatically (Choices +
        // SemanticResultValue + GrammarBuilder); here it is loaded from the
        // grammar definition XML file embedded in resources.
        using (var grammarStream = new MemoryStream(Encoding.ASCII.GetBytes(Properties.Resources.SpeechGrammar)))
        {
            var g = new Grammar(grammarStream);
            speechEngine.LoadGrammar(g);
        }

        speechEngine.SpeechRecognized += SpeechRecognized;
        speechEngine.SpeechRecognitionRejected += SpeechRejected;

        // 16 kHz, 16-bit mono PCM — the Kinect audio source format.
        speechEngine.SetInputToAudioStream(
            sensor.AudioSource.Start(),
            new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
        speechEngine.RecognizeAsync(RecognizeMode.Multiple);
    }
    else
    {
        this.statusBarText.Text = Properties.Resources.NoSpeechRecognizer;
    }

    sensor.AllFramesReady += new EventHandler<AllFramesReadyEventArgs>(sensor_AllFramesReady);
}
/// <summary>
/// Execute initialization tasks: log startup progress to the diagnostic
/// writer, find and start a connected Kinect sensor, enable the skeleton
/// stream, launch the Celestia CelX script (best-effort), and wire up
/// speech recognition.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void WindowLoaded(object sender, RoutedEventArgs e)
{
    writer.WriteLine("--------------------------------");
    writer.WriteLine(DateTime.Now.ToString());

    // Look through all sensors and start the first connected one.
    // This requires that a Kinect is connected at the time of app startup.
    // To make your app robust against plug/unplug, it is recommended to use
    // KinectSensorChooser provided in Microsoft.Kinect.Toolkit.
    writer.WriteLine("Finding potential sensors...");
    foreach (var potentialSensor in KinectSensor.KinectSensors)
    {
        writer.WriteLine("\tSensor Found: " + potentialSensor.ToString());
        writer.WriteLine("\tSensor Status: " + potentialSensor.Status);
        if ((potentialSensor.Status).Equals(KinectStatus.Connected))
        {
            this.sensor = potentialSensor;
            break;
        }
    }

    if (null != this.sensor)
    {
        try
        {
            writer.WriteLine("Starting sensor...");
            // Start the sensor!
            this.sensor.Start();
            if (this.sensor.IsRunning)
            {
                writer.WriteLine("Sensor running successfully");
            }
            else
            {
                writer.WriteLine("WARNING: Sensor not running correctly!");
            }

            writer.WriteLine("Loading Hand Actions...");
            leftHandUp = new LeftHandUp();
            rightHandUp = new RightHandUp();
            swipeLeftSeg2 = new SwipeLeftSegment2();
            swipeRightSeg2 = new SwipeRightSegment2();
            leftHandForward = new LeftHandForward();
            rightHandForward = new RightHandForward();
            writer.WriteLine("Hand action load complete");
            swipeDown = new SwipeDown();

            sensor.SkeletonStream.Enable();
            if (sensor.SkeletonStream.IsEnabled)
            {
                writer.WriteLine("Sensor Skeleton Stream enabled");
            }
            else
            {
                writer.WriteLine("WARNING: Sensor Skeleton Stream not enabled correctly");
            }

            writer.WriteLine("Starting Celestia script at " + DateTime.Now.ToString() + "...");
            // FIX: Process.Start throws Win32Exception (not IOException) when
            // the target file is missing. The path below has already been
            // edited per-machine three times (see history), so a missing file
            // is the common failure: it previously crashed the app, and an
            // IOException from it would have wrongly nulled the running
            // sensor. The launch is now isolated and best-effort.
            // NOTE(review): move this path into a configuration setting.
            //myProcess = Process.Start(@"C:\Users\Nocturnal\Dropbox\FrontiersII\CelX Script.celx");
            //myProcess = Process.Start(@"C:\Users\Noonan11\Dropbox\FrontiersII\CelX Script.celx");
            try
            {
                myProcess = Process.Start(@"C:\Users\Jake Noel-Storr\Dropbox\FrontiersII\CelX Script.celx");
                writer.WriteLine("Celestia Process started");
            }
            catch (System.ComponentModel.Win32Exception exception)
            {
                writer.WriteLine("ERROR: Celestia script failed to start: " + exception.ToString());
                myProcess = null;
            }
        }
        catch (IOException exception)
        {
            writer.WriteLine("ERROR: " + exception.ToString());
            // Some other application is streaming from the same Kinect sensor.
            this.sensor = null;
        }
        finally
        {
            writer.Flush();
        }
    }
    else // Sensor is null
    {
        writer.WriteLine("ERROR line 200 MainWindow.xaml.cs::: Sensor is null");
        this.statusBarText.Text = Properties.Resources.NoKinectReady;
        writer.Flush();
        return;
    }

    RecognizerInfo ri = GetKinectRecognizer();
    if (null != ri)
    {
        this.speechEngine = new SpeechRecognitionEngine(ri.Id);

        // Create a grammar from the grammar definition XML file embedded in
        // resources.
        using (var memoryStream = new MemoryStream(Encoding.ASCII.GetBytes(Properties.Resources.SpeechGrammar)))
        {
            writer.WriteLine("\nCreating speech grammar...");
            writer.Flush();
            var g = new Grammar(memoryStream);
            speechEngine.LoadGrammar(g); // SYSTEM STALLS ON THIS LINE -ROSS
            writer.WriteLine("Speech grammar created");
            writer.Flush();
        }

        speechEngine.SpeechRecognized += SpeechRecognized;
        speechEngine.SpeechRecognitionRejected += SpeechRejected;

        // 16 kHz, 16-bit mono PCM — the format the Kinect audio source emits.
        speechEngine.SetInputToAudioStream(
            sensor.AudioSource.Start(),
            new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
        speechEngine.RecognizeAsync(RecognizeMode.Multiple);
        writer.WriteLine("Speech recognizer created");
        writer.Flush();
    }
    else
    {
        writer.WriteLine("ERROR: Speech recognizer not enabled");
        this.statusBarText.Text = Properties.Resources.NoSpeechRecognizer;
    }

    writer.Flush();
    sensor.AllFramesReady += new EventHandler<AllFramesReadyEventArgs>(sensor_AllFramesReady);
}