public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView)
{
    if (kinectSensor == null)
    {
        throw new ArgumentNullException("kinectSensor");
    }

    if (gestureResultView == null)
    {
        throw new ArgumentNullException("gestureResultView");
    }

    this.GestureResultView = gestureResultView;

    this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
    this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost;

    this.vgbFrameReader = this.vgbFrameSource.OpenReader();
    if (this.vgbFrameReader != null)
    {
        this.vgbFrameReader.IsPaused = true;
        this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived;
    }

    using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(gestureDatabase))
    {
        foreach (Gesture gesture in database.AvailableGestures)
        {
            this.vgbFrameSource.AddGesture(gesture);
        }
    }
}
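The constructor above registers Source_TrackingIdLost and leaves the reader paused, but the rest of the GestureDetector class is not reproduced here. The following is a minimal sketch, modelled on Microsoft's DiscreteGestureBasics sample, of the members the MainWindow code relies on: a TrackingId property forwarding to vgbFrameSource.TrackingId, an IsPaused property forwarding to vgbFrameReader.IsPaused, and the TrackingIdLost handler. The GestureResultView.UpdateGestureResult signature is an assumption taken from that sample, not from this code.

// Sketch (assumed, based on the Kinect DiscreteGestureBasics sample):
// pass-through properties used to retarget and pause this detector.
public ulong TrackingId
{
    get { return this.vgbFrameSource.TrackingId; }
    set
    {
        if (this.vgbFrameSource.TrackingId != value)
        {
            this.vgbFrameSource.TrackingId = value;
        }
    }
}

public bool IsPaused
{
    get { return this.vgbFrameReader.IsPaused; }
    set
    {
        if (this.vgbFrameReader.IsPaused != value)
        {
            this.vgbFrameReader.IsPaused = value;
        }
    }
}

private void Source_TrackingIdLost(object sender, TrackingIdLostEventArgs e)
{
    // the tracked body is gone; reset the result view
    // (UpdateGestureResult signature assumed from the sample)
    this.GestureResultView.UpdateGestureResult(false, false, 0.0f);
}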
/// <summary>
/// Initializes a new instance of the MainWindow class
/// </summary>
public MainWindow()
{
    // only one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    // set the IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // open the reader for the body frames
    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

    // set the BodyFrameArrived event notifier
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // initialize the BodyViewer object for displaying tracked bodies in the UI
    this.kinectBodyView = new KinectBodyView(this.kinectSensor);

    // initialize the gesture detection objects for our gestures
    this.gestureDetectorList = new List<GestureDetector>();

    // initialize the MainWindow
    this.InitializeComponent();

    // set our data context objects for display in the UI
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;

    // create a single gesture detector and a content control to display its results in the UI
    int col0Row = 0;
    GestureResultView result = new GestureResultView(0, false, false, 0.8f);
    GestureDetector detector = new GestureDetector(this.kinectSensor, result);
    this.gestureDetectorList.Add(detector);

    // place the gesture result in the first column of the content grid
    ContentControl contentControl = new ContentControl();
    contentControl.Content = this.gestureDetectorList[0].GestureResultView;
    Grid.SetColumn(contentControl, 0);
    Grid.SetRow(contentControl, col0Row);
    ++col0Row;
    this.contentGrid.Children.Add(contentControl);
}
public MainWindow()
{
    this.kinectSensor = KinectSensor.GetDefault();
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;
    this.kinectSensor.Open();

    this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();
    this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    this.kinectBodyView = new KinectBodyView(this.kinectSensor);
    this.gestureDetectorList = new List<GestureDetector>();

    this.InitializeComponent();
    this.DataContext = this;
    this.kinectBodyViewbox.DataContext = this.kinectBodyView;

    int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
    for (int i = 0; i < maxBodies; ++i)
    {
        GestureResultView result = new GestureResultView(i, false, false, 0.0f);
        GestureDetector detector = new GestureDetector(this.kinectSensor, result);
        this.gestureDetectorList.Add(detector);

        ContentControl contentControl = new ContentControl();
        contentControl.Content = this.gestureDetectorList[i].GestureResultView;
        this.contentGrid.Children.Add(contentControl);
    }
}
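Both versions of the MainWindow constructor subscribe to Reader_BodyFrameArrived, which is not reproduced above. Below is a minimal sketch of that handler, following the DiscreteGestureBasics sample; it assumes a private Body[] bodies buffer, a KinectBodyView.UpdateBodyFrame method, and the TrackingId/IsPaused pass-through properties on GestureDetector sketched earlier. None of these members appear in the original listing.

private Body[] bodies = null;   // assumed field: reused buffer for the sensor's body data

private void Reader_BodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    bool dataReceived = false;

    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            if (this.bodies == null)
            {
                // allocate a buffer for the maximum number of bodies the sensor can track (6)
                this.bodies = new Body[bodyFrame.BodyCount];
            }

            // refresh the body data; the Body objects in the array are reused between frames
            bodyFrame.GetAndRefreshBodyData(this.bodies);
            dataReceived = true;
        }
    }

    if (dataReceived)
    {
        // visualize the new body data (UpdateBodyFrame assumed from the sample's KinectBodyView)
        this.kinectBodyView.UpdateBodyFrame(this.bodies);

        // bodies may have been lost or acquired, so retarget the gesture detectors
        int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
        for (int i = 0; i < maxBodies; ++i)
        {
            ulong trackingId = this.bodies[i].TrackingId;

            if (trackingId != this.gestureDetectorList[i].TrackingId)
            {
                this.gestureDetectorList[i].TrackingId = trackingId;

                // pause the detector when its body is not tracked (TrackingId == 0)
                // so it does not produce invalid gesture frames
                this.gestureDetectorList[i].IsPaused = trackingId == 0;
            }
        }
    }
}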
public bool comeSpeech, comeGesture, followSpeech, followGesture, syncSpeech, syncGesture;
// bool[] comeSpeech = new bool[3];

public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView)
{
    if (kinectSensor == null)
    {
        throw new ArgumentNullException("kinectSensor");
    }

    if (gestureResultView == null)
    {
        throw new ArgumentNullException("gestureResultView");
    }

    this.GestureResultView = gestureResultView;

    // create the VGB source; the associated body tracking ID will be set when a valid body frame arrives from the sensor
    this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
    this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost;

    // open the reader for the VGB frames
    this.vgbFrameReader = this.vgbFrameSource.OpenReader();
    if (this.vgbFrameReader != null)
    {
        this.vgbFrameReader.IsPaused = true;
        this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived;
    }

    // we could load all available gestures in a database with a call to vgbFrameSource.AddGestures(database.AvailableGestures),
    // but this program only needs one discrete gesture from each database, so each is loaded by name

    // load the 'Hamza' gesture from the first gesture database
    using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(gestureDatabase))
    {
        foreach (Gesture gesture in database.AvailableGestures)
        {
            if (gesture.Name.Equals("Hamza"))
            {
                this.vgbFrameSource.AddGesture(gesture);
            }
        }
    }

    // load the 'comeGesture' gesture from the second database
    using (VisualGestureBuilderDatabase database1 = new VisualGestureBuilderDatabase(gestureDatabase1))
    {
        foreach (Gesture gesture in database1.AvailableGestures)
        {
            if (gesture.Name.Equals("comeGesture"))
            {
                this.vgbFrameSource.AddGesture(gesture);
            }
        }
    }

    // load the 'followGest2' gesture from the third database
    using (VisualGestureBuilderDatabase database2 = new VisualGestureBuilderDatabase(gestureDatabase2))
    {
        foreach (Gesture gesture in database2.AvailableGestures)
        {
            if (gesture.Name.Equals("followGest2"))
            {
                this.vgbFrameSource.AddGesture(gesture);
            }
        }
    }

    // load the 'syncGest' gesture from the fourth database
    using (VisualGestureBuilderDatabase database3 = new VisualGestureBuilderDatabase(gestureDatabase3))
    {
        foreach (Gesture gesture in database3.AvailableGestures)
        {
            if (gesture.Name.Equals("syncGest"))
            {
                this.vgbFrameSource.AddGesture(gesture);
            }
        }
    }
}
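This constructor registers Reader_GestureFrameArrived, but the handler itself is not shown in the listing. The sketch below, again modelled on the DiscreteGestureBasics sample, shows one plausible way the discrete results for the three command gestures could be mapped onto the comeGesture, followGesture, and syncGesture flags declared above; the flag mapping and the GestureResultView.UpdateGestureResult call are assumptions, not taken from the original code.

private void Reader_GestureFrameArrived(object sender, VisualGestureBuilderFrameArrivedEventArgs e)
{
    using (VisualGestureBuilderFrame frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        // discrete results for every gesture loaded into the frame source
        IReadOnlyDictionary<Gesture, DiscreteGestureResult> discreteResults = frame.DiscreteGestureResults;
        if (discreteResults == null)
        {
            return;
        }

        foreach (Gesture gesture in this.vgbFrameSource.Gestures)
        {
            if (gesture.GestureType != GestureType.Discrete)
            {
                continue;
            }

            DiscreteGestureResult result = null;
            discreteResults.TryGetValue(gesture, out result);
            if (result == null)
            {
                continue;
            }

            // map each named gesture onto its flag (assumed usage of the fields declared above)
            if (gesture.Name.Equals("comeGesture"))
            {
                this.comeGesture = result.Detected;
            }
            else if (gesture.Name.Equals("followGest2"))
            {
                this.followGesture = result.Detected;
            }
            else if (gesture.Name.Equals("syncGest"))
            {
                this.syncGesture = result.Detected;
            }

            // surface the latest detection state and confidence in the UI (UpdateGestureResult signature assumed)
            this.GestureResultView.UpdateGestureResult(true, result.Detected, result.Confidence);
        }
    }
}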