/// <summary>
/// Opens the default Kinect sensor, wires up the body and multi-source frame
/// readers so the user's skeleton is rendered back as visual feedback, and
/// creates one GestureDetector per trackable body with all gesture event
/// handlers attached. Any initialization failure is reported to the user via
/// a message box rather than crashing the application.
/// </summary>
public void CallKinect()
{
    try
    {
        kinectSensor = KinectSensor.GetDefault();

        // Open the sensor before any readers are created.
        kinectSensor.Open();

        // Body frames drive both skeleton rendering and gesture detection.
        bodyFrameReader = kinectSensor.BodyFrameSource.OpenReader();
        bodyFrameReader.FrameArrived += Reader_BodyFrameArrived;

        // Multi-source reader supplies color/depth/infrared/body frames for display.
        MultiSourceFrameReader _reader = kinectSensor.OpenMultiSourceFrameReader(
            FrameSourceTypes.Color |
            FrameSourceTypes.Depth |
            FrameSourceTypes.Infrared |
            FrameSourceTypes.Body);
        _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

        // Viewer that draws tracked bodies in the UI.
        kinectBodyView = new KinectBodyView(kinectSensor);

        // One gesture detector per trackable body (the sensor reports 6).
        gestureDetectorList = new List<GestureDetector>();
        int maxBodies = kinectSensor.BodyFrameSource.BodyCount;
        for (int i = 0; i < maxBodies; ++i)
        {
            GestureResultView result = new GestureResultView(i, false, false, 0.0f, "");
            GestureDetector detector = new GestureDetector(kinectSensor, result);

            detector.detectedWave += WaveDetected;
            detector.detectedStance += StanceDetected;
            detector.detectedQuit += QuitDetected;
            detector.detectedRepeat += RepeatDetected;
            detector.detectedHelp += HelpDetected;
            // BUG FIX: detectedElbowStrike was previously subscribed twice,
            // causing ElbowStrikeDetected to run twice for every elbow strike.
            detector.detectedElbowStrike += ElbowStrikeDetected;
            detector.detectedJab += JabDetected;
            detector.detectedUpperCut += UpperCutDetected;
            detector.detectedSwipe += SwipeDetected;

            gestureDetectorList.Add(detector);
        }
    }
    catch (Exception e)
    {
        // Best-effort: surface sensor/initialization failures to the user.
        System.Windows.MessageBox.Show(e.Message);
    }
}
/// <summary>
/// Initializes a new instance of the GestureDetector class: validates its
/// arguments, creates the Visual Gesture Builder frame source and reader for
/// the given sensor, and registers every gesture found in the gesture
/// database with the frame source.
/// </summary>
/// <param name="kinectSensor">Active sensor to initialize the VisualGestureBuilderFrameSource object with</param>
/// <param name="gestureResultView">GestureResultView object to store gesture results of a single body to</param>
public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView)
{
    if (kinectSensor == null)
    {
        throw new ArgumentNullException("kinectSensor");
    }

    if (gestureResultView == null)
    {
        throw new ArgumentNullException("gestureResultView");
    }

    this.GestureResultView = gestureResultView;

    // Tracking ID starts at 0; it is assigned once a valid body frame
    // arrives from the sensor.
    this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
    this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost;

    // Open the VGB frame reader; it stays paused until a body is tracked.
    this.vgbFrameReader = this.vgbFrameSource.OpenReader();
    if (this.vgbFrameReader != null)
    {
        this.vgbFrameReader.IsPaused = true;
        this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived;
    }

    // Load every available gesture from the database and register it with
    // the frame source, rather than picking out a single gesture by name.
    using (var database = new VisualGestureBuilderDatabase(this.gestureDatabase))
    {
        foreach (var gesture in database.AvailableGestures)
        {
            this.vgbFrameSource.AddGesture(gesture);
        }
    }
}