/// <summary>
/// Initializes a new instance of the GestureDetector class along with the gesture frame source and reader
/// </summary>
/// <param name="kinectSensor">Active sensor to initialize the VisualGestureBuilderFrameSource object with</param>
/// <param name="gestureResultView">GestureResultView object to store gesture results of a single body to</param>
/// <exception cref="ArgumentNullException">Thrown when either argument is null</exception>
public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView)
{
    // Guard clauses: both collaborators are required.
    if (kinectSensor == null)
    {
        throw new ArgumentNullException("kinectSensor");
    }

    if (gestureResultView == null)
    {
        throw new ArgumentNullException("gestureResultView");
    }

    this.GestureResultView = gestureResultView;

    // Build the VGB frame source. The tracking id starts at 0; the real body
    // tracking id is assigned once a valid body frame arrives from the sensor.
    this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
    this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost;

    // Open the frame reader and leave it paused until it is needed.
    this.vgbFrameReader = this.vgbFrameSource.OpenReader();
    if (this.vgbFrameReader != null)
    {
        this.vgbFrameReader.IsPaused = true;
    }

    // Register every gesture found in the gesture database with the source.
    using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(this.gestureDatabase))
    {
        this.vgbFrameSource.AddGestures(database.AvailableGestures);
    }
}
/// <summary>
/// Initializes a new instance of the MainWindow class: wires up the Kinect
/// sensor and body viewer, the gesture detector, the dashboard figures read
/// from the database, and Kinect-based speech recognition.
/// </summary>
public MainWindow()
{
    // initialize the MainWindow (XAML-generated UI)
    this.InitializeComponent();

    // The constructor used to do all of this inline; it is now decomposed
    // into one helper per concern. The call order matches the original
    // statement order, so initialization dependencies are unchanged.
    this.InitializeKinect();
    this.InitializeGestureDetection();
    this.LoadDashboardData();
    this.InitializeSpeechRecognition();
}

/// <summary>
/// Opens the default Kinect sensor, subscribes to its availability and body
/// frame events, and creates the body viewer for the UI.
/// </summary>
private void InitializeKinect()
{
    // only one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // set the status text
    this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText : Properties.Resources.NoSensorStatusText;

    // open the reader for the body frames and set the frame-arrived notifier
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // initialize the BodyViewer object for displaying tracked bodies in the UI
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);
}

/// <summary>
/// Creates the gesture detector with an empty initial result view and binds
/// the data-context objects used by the UI.
/// </summary>
private void InitializeGestureDetection()
{
    // initialize the GestureDetector object
    this.gestureResultView = new GestureResultView(false, false, false, 0.0f, 0.0f, -1.0f);
    this.gestureDetector = new GestureDetector(this.kinectSensor, this.gestureResultView);
    this.gestureResultGrid.DataContext = this.gestureResultView;

    // set our data context objects for display in UI
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;
}

/// <summary>
/// Reads today's mobility, hydration and alarm figures from the database
/// (via stored procedures) and pushes them into the dashboard text fields.
/// </summary>
private void LoadDashboardData()
{
    using (MasterEntities db = new MasterEntities())
    {
        // NOTE(review): each loop overwrites the same Text property, so only
        // the LAST row returned by a stored procedure is displayed — confirm
        // the procedures return a single row.

        // Retrieve today's mobility minutes from stored procedure
        foreach (var x in db.TodayMob())
        {
            this.today_mobility.Text = Convert.ToString(x.Minute.Value) + " minutes";
        }

        // Retrieve today's hydration liters from stored procedure
        foreach (var x in db.TodayHydratation())
        {
            this.today_hydratation.Text = Convert.ToString(x.Liters.Value) + " Liters";
        }

        // Retrieve the total number of alarms from stored procedure
        foreach (var x in db.GetTotalAlarms())
        {
            this.totalAlarms.Text = Convert.ToString(x.Value);
        }
    }
}

/// <summary>
/// Wires the Kinect audio stream into a speech recognition engine loaded
/// with the command grammar. Shuts the application down if no Kinect-capable
/// speech recognizer is installed.
/// </summary>
private void InitializeSpeechRecognition()
{
    // grab the audio stream from the first audio beam
    IReadOnlyList<AudioBeam> audioBeamList = this.kinectSensor.AudioSource.AudioBeams;
    System.IO.Stream audioStream = audioBeamList[0].OpenInputStream();

    // create the convert stream so the speech engine can consume Kinect audio
    this.kinectAudioStream = new KinectAudioStream(audioStream);

    RecognizerInfo ri = TryGetKinectRecognizer();
    if (null == ri)
    {
        // no usable recognizer: the app cannot function without speech input
        Application.Current.Shutdown();
        return;
    }

    this.speechEngine = new SpeechRecognitionEngine(ri.Id);

    // spoken phrase -> semantic tag pairs the app reacts to
    Choices commands = new Choices();
    commands.Add(new SemanticResultValue("help", "HELP"));
    commands.Add(new SemanticResultValue("please help", "HELP"));
    commands.Add(new SemanticResultValue("please", "PLEASE"));
    commands.Add(new SemanticResultValue("ambulance", "AMBULANCE"));
    commands.Add(new SemanticResultValue("police", "POLICE"));

    var gb = new GrammarBuilder { Culture = ri.Culture };
    gb.Append(commands);

    var g = new Grammar(gb);
    this.speechEngine.LoadGrammar(g);
    this.speechEngine.SpeechRecognized += this.SpeechRecognized;

    // let the convert stream know speech is active, then feed the engine
    // 16 kHz, 16-bit, mono PCM audio and start continuous recognition
    this.kinectAudioStream.SpeechActive = true;
    this.speechEngine.SetInputToAudioStream(
        this.kinectAudioStream, new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
    this.speechEngine.RecognizeAsync(RecognizeMode.Multiple);
}