/// <summary>
/// Stops tracking: drops the hand tracker and disposes the gesture recognition engine.
/// </summary>
void StopTracking()
{
    handTracker = null;

    // Detach the engine first, then dispose the detached reference.
    GestureRecognitionEngine engine = recogEngine;
    recogEngine = null;
    if (engine != null)
    {
        engine.Dispose();
    }
}
/// <summary>
/// Ensures a usable gesture engine: resets the existing one, or lazily creates
/// a new engine from the currently selected model.
/// </summary>
void ResetGestureEngine()
{
    // Guard clause: an existing engine only needs a reset.
    if (recogEngine != null)
    {
        recogEngine.Reset();
        return;
    }

    recogEngine = new GestureRecognitionEngine(modelSelector.SelectedModel);
}
/// <summary>
/// Starts skeleton tracking, clap-gesture recognition, and (optionally) speech
/// recognition on the Kinect sensor, then updates the tracking buttons.
/// </summary>
/// <remarks>
/// BUG FIX: the original subscribed <c>sensor_SkeletonFrameReady</c>,
/// <c>recognitionEngine_GestureRecognized</c>, and the speech handlers on every
/// call without ever unsubscribing (and the skeleton handler is also attached in
/// <c>Window_Loaded</c>), so each Stop/Start cycle made every event fire an
/// additional time. Each handler is now removed before being added, which makes
/// the subscription idempotent.
/// </remarks>
private void StartTracking()
{
    if (this.sensor != null)
    {
        this.sensor.SkeletonStream.Enable();
        // Unsubscribe first so repeated StartTracking calls (or the subscription
        // already made in Window_Loaded) never double-register the handler.
        this.sensor.SkeletonFrameReady -= sensor_SkeletonFrameReady;
        this.sensor.SkeletonFrameReady += sensor_SkeletonFrameReady;

        //if (gesture_recognize == true)
        recognitionEngine = new GestureRecognitionEngine();
        recognitionEngine.GestureType = GestureType.HandsClapping;
        // Fresh engine, so a plain += is safe here — no prior subscription exists.
        recognitionEngine.GestureRecognized += new EventHandler<GestureEventArgs>(recognitionEngine_GestureRecognized);

        kinectSoundPlayer = new SoundPlayer("Clap.wav");

        if (speech_recognize == true)
        {
            // Same idempotent-subscription pattern for the shared speech engine.
            speechEngine.SpeechRecognized -= SpeechRecognized;
            speechEngine.SpeechRecognized += SpeechRecognized;
            speechEngine.SpeechRecognitionRejected -= SpeechRejected;
            speechEngine.SpeechRecognitionRejected += SpeechRejected;

            // 16 kHz, 16-bit, mono PCM — the format the Kinect audio source emits.
            speechEngine.SetInputToAudioStream(
                sensor.AudioSource.Start(),
                new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
            speechEngine.RecognizeAsync(RecognizeMode.Multiple);
        }
    }

    // Toggle UI state: tracking is now running.
    buttonStartTracking.IsEnabled = false;
    buttonPlayRecording.IsEnabled = true;
    buttonStopTracking.IsEnabled = true;
}
/// <summary>
/// Window startup: hides overlay UI, acquires the first Kinect sensor, wires up
/// the color/skeleton streams, gesture recognition, audio-source events and
/// speech recognition, cleans the photo cache, and prepares the countdown timer.
/// </summary>
/// <remarks>
/// BUG FIXES versus the original:
/// 1. <c>Application.Current.Shutdown()</c> does not halt the current handler —
///    the original fell through and dereferenced a missing/null sensor. Both
///    failure branches now <c>return</c> immediately.
/// 2. When <c>findKinectRecognizerInfo()</c> returned null, <c>recognizer</c>
///    stayed null yet <c>buildCommands()</c>/<c>SetInputToAudioStream</c>/
///    <c>RecognizeAsync</c> ran anyway, throwing NullReferenceException. The
///    whole speech pipeline is now guarded by the recognizer-info check
///    (buildCommands presumably loads grammars into the recognizer — it cannot
///    run without one).
/// 3. <c>DirectoryInfo("photos").GetFiles()</c> threw DirectoryNotFoundException
///    on a first run with no photos folder; guarded with <c>di.Exists</c>.
/// </remarks>
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    PopulatePoseLibrary();
    LoadImages();

    // Hide all overlay elements until tracking produces something to show.
    blackImg.Visibility = System.Windows.Visibility.Hidden;
    upRightArm.Visibility = System.Windows.Visibility.Hidden;
    upLeftArm.Visibility = System.Windows.Visibility.Hidden;
    lowRightArm.Visibility = System.Windows.Visibility.Hidden;
    lowLeftArm.Visibility = System.Windows.Visibility.Hidden;
    beamAngleTxt.Visibility = System.Windows.Visibility.Hidden;
    soundSourceAngleTxt.Visibility = System.Windows.Visibility.Hidden;
    recognizedColorTxt.Visibility = System.Windows.Visibility.Hidden;
    TBCountDown.Visibility = System.Windows.Visibility.Hidden;
    hud.Visibility = System.Windows.Visibility.Hidden;
    hudRed.Visibility = System.Windows.Visibility.Hidden;

    // Acquire the first available sensor; bail out of the handler on failure
    // because Shutdown() does not stop the current method from running.
    if (KinectSensor.KinectSensors.Count == 0)
    {
        MessageBox.Show("No Kinects detected", "Depth Sensor Basics");
        Application.Current.Shutdown();
        return;
    }
    sensor = KinectSensor.KinectSensors[0];
    if (sensor == null)
    {
        MessageBox.Show("Kinect is not ready to use", "Depth Sensor Basics");
        Application.Current.Shutdown();
        return;
    }

    // -------------------------------------------------------
    // Color stream: allocate the pixel buffer and a matching bitmap.
    sensor.ColorStream.Enable();
    colorData = new byte[sensor.ColorStream.FramePixelDataLength];
    colorImageBitmap = new WriteableBitmap(
        sensor.ColorStream.FrameWidth,
        sensor.ColorStream.FrameHeight,
        96, 96, PixelFormats.Bgr32, null);
    colorImg.Source = colorImageBitmap;
    sensor.ColorFrameReady += new EventHandler<ColorImageFrameReadyEventArgs>(sensor_ColorFrameReady);

    // Skeleton stream.
    sensor.SkeletonStream.Enable();
    sensor.SkeletonFrameReady += new EventHandler<SkeletonFrameReadyEventArgs>(sensor_SkeletonFrameReady);
    skeletons = new Skeleton[sensor.SkeletonStream.FrameSkeletonArrayLength];

    // -------------------------------------------------------
    // Skeleton drawing surface, clipped to the 640x480 render area.
    drawingGroup = new DrawingGroup();
    drawingImg = new DrawingImage(drawingGroup);
    skeletonImg.Source = drawingImg;
    drawingGroup.ClipGeometry = new RectangleGeometry(new Rect(0.0, 0.0, 640, 480));

    // Start the Kinect.
    sensor.Start();

    // Gesture setup --------------------------------------------------------
    recognitionEngine = new GestureRecognitionEngine();
    recognitionEngine.AddGesture(new SwipeToLeftGesture());
    recognitionEngine.AddGesture(new SwipeToRightGesture());
    recognitionEngine.AddGesture(new ClapGesture());
    recognitionEngine.GestureRecognized += new EventHandler<GestureEventArgs>(recognitionEngine_GestureRecognized);

    // Audio source ---------------------------------------------------------
    sensor.AudioSource.SoundSourceAngleChanged += new EventHandler<SoundSourceAngleChangedEventArgs>(AudioSource_SoundSourceAngleChanged);
    sensor.AudioSource.BeamAngleChanged += new EventHandler<BeamAngleChangedEventArgs>(AudioSource_BeamAngleChanged);

    // Speech recognition runs only when the Kinect speech recognizer pack is
    // installed; otherwise the whole pipeline is skipped instead of crashing.
    kinectRecognizerInfo = findKinectRecognizerInfo();
    if (kinectRecognizerInfo != null)
    {
        recognizer = new SpeechRecognitionEngine(kinectRecognizerInfo);
        buildCommands();

        // Adaptive beam steering gives the best results in practice.
        sensor.AudioSource.BeamAngleMode = BeamAngleMode.Adaptive;
        System.IO.Stream audioStream = sensor.AudioSource.Start();
        recognizer.SetInputToAudioStream(audioStream,
            new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
        recognizer.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(recognizer_SpeechRecognized);
        // Recognize words repeatedly and asynchronously.
        recognizer.RecognizeAsync(RecognizeMode.Multiple);
    }

    // Clean up previously stored photos; tolerate a missing folder on first run.
    System.IO.DirectoryInfo di = new DirectoryInfo("photos");
    if (di.Exists)
    {
        foreach (FileInfo file in di.GetFiles())
        {
            file.Delete();
        }
        foreach (DirectoryInfo dir in di.GetDirectories())
        {
            dir.Delete(true);
        }
    }

    // One-second countdown timer; started elsewhere when needed.
    Timer = new DispatcherTimer();
    Timer.Interval = new TimeSpan(0, 0, 1);
    Timer.Tick += Timer_Tick;
    //Timer.Start();
}