//what to do once a gesture's confidence is modified
/// <summary>
/// Handles PropertyChanged from a GestureResultView: stores the updated
/// confidence under the owning body's slot in the Features table, then
/// refreshes via update().
/// </summary>
/// <param name="sender">The GestureResultView that raised the change.</param>
/// <param name="e">PropertyChanged data (property name is not inspected).</param>
private void GestureResult_PropertyChanged(object sender, PropertyChangedEventArgs e)
{
    GestureResultView result = sender as GestureResultView;

    // Guard the cast: the original dereferenced 'result' unconditionally and
    // would throw NullReferenceException if sender were not a GestureResultView.
    if (result == null)
    {
        return;
    }

    Features[result.BodyIndex][result.GestureName] = result.Confidence;
    update();
}
//lab 13
/// <summary>
/// Handles PropertyChanged from a GestureResultView: mirrors the confidence
/// into the gesture visual's opacity and, once confidence exceeds 0.8,
/// publishes the gesture name/confidence to the UI and takes a screenshot.
/// </summary>
/// <param name="sender">The GestureResultView that raised the change.</param>
/// <param name="e">PropertyChanged data (property name is not inspected).</param>
void GestureResult_PropertyChanged(object sender, PropertyChangedEventArgs e)
{
    GestureResultView result = sender as GestureResultView;

    // Guard the cast: the original dereferenced 'result' unconditionally and
    // would throw NullReferenceException for an unexpected sender.
    if (result == null)
    {
        return;
    }

    this.GestureVisual.Opacity = result.Confidence;

    // 0.8 is the detection threshold: only publish high-confidence results.
    if (result.Confidence > 0.8)
    {
        try
        {
            FinalResultTextBlock.Text = result.GestureName;
            CaptureStateTextBlock.Text = Convert.ToString(result.Confidence);
        }
        catch (Exception ex)
        {
            // If there's an exception, show it instead of the Final Result.
            // OperationCanceledException is deliberately ignored (best-effort
            // UI update); everything else is surfaced to the user.
            if (ex.GetType() != typeof(OperationCanceledException))
            {
                FinalResultTextBlock.Text = string.Format("{0}: {1}", ex.GetType().ToString(), ex.Message);
            }
        }
        Screenshot();
    }
}
/// <summary>
/// Initializes a new instance of the GestureDetector class along with the
/// gesture frame source and reader, loading the "how are you" and
/// hands-above-head gestures from the gesture database by name.
/// </summary>
/// <param name="kinectSensor">Active sensor to initialize the VisualGestureBuilderFrameSource object with</param>
/// <param name="gestureResultView">GestureResultView object to store gesture results of a single body to</param>
public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView)
{
    if (kinectSensor == null)
    {
        throw new ArgumentNullException("kinectSensor");
    }

    if (gestureResultView == null)
    {
        throw new ArgumentNullException("gestureResultView");
    }

    this.GestureResultView = gestureResultView;

    // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor.
    this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
    this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost;

    // open the reader for the vgb frames; paused until a tracked body is assigned
    this.vgbFrameReader = this.vgbFrameSource.OpenReader();
    if (this.vgbFrameReader != null)
    {
        this.vgbFrameReader.IsPaused = true;
        this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived;
    }

    // Load the two gestures we track by name. We could load everything with
    // vgbFrameSource.AddGestures(database.AvailableGestures), but we only want
    // these two. The original iterated AvailableGestures twice (once per name);
    // a single pass with both checks adds exactly the same gestures.
    using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(this.gestureDatabase))
    {
        foreach (Gesture gesture in database.AvailableGestures)
        {
            if (gesture.Name.Equals(this.HowAreYouGestureName) || gesture.Name.Equals(this.handsAboveHeadGestureName))
            {
                this.vgbFrameSource.AddGesture(gesture);
            }
        }
    }
}
/// <summary>
/// Initializes a new instance of the GestureDetector class along with the
/// gesture frame source and reader, loading the hands-above-head gesture
/// from the gesture database by name.
/// </summary>
/// <param name="kinectSensor">Active sensor to initialize the VisualGestureBuilderFrameSource object with</param>
/// <param name="gestureResultView">GestureResultView object to store gesture results of a single body to</param>
public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView)
{
    if (kinectSensor == null)
    {
        throw new ArgumentNullException("kinectSensor");
    }

    if (gestureResultView == null)
    {
        throw new ArgumentNullException("gestureResultView");
    }

    this.GestureResultView = gestureResultView;

    // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor.
    this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
    this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost;

    // open the reader for the vgb frames; paused until a tracked body is assigned
    this.vgbFrameReader = this.vgbFrameSource.OpenReader();
    if (this.vgbFrameReader != null)
    {
        this.vgbFrameReader.IsPaused = true;
        this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived;
    }

    // Load only the hands-above-head gesture by name. We could load all
    // available gestures with vgbFrameSource.AddGestures(database.AvailableGestures),
    // but this program tracks one discrete gesture. (Dead commented-out code
    // for the old 'Seated' gesture has been removed.)
    using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(this.gestureDatabase))
    {
        foreach (Gesture gesture in database.AvailableGestures)
        {
            if (gesture.Name.Equals(this.handsAboveHeadGestureName))
            {
                this.vgbFrameSource.AddGesture(gesture);
            }
        }
    }
}
//lab 13
/// <summary>
/// Handles PropertyChanged from a GestureResultView for rep counting: shows
/// the current confidence and count, and increments the count whenever the
/// confidence peaks at 1 with more than half a second since the last peak.
/// </summary>
/// <param name="sender">The GestureResultView that raised the change.</param>
/// <param name="e">PropertyChanged data (property name is not inspected).</param>
void GestureResult_PropertyChanged(object sender, PropertyChangedEventArgs e)
{
    GestureResultView result = sender as GestureResultView;

    // Guard the cast: the original dereferenced 'result' unconditionally and
    // would throw NullReferenceException for an unexpected sender.
    if (result == null)
    {
        return;
    }

    this.GestureVisual.Text = "Confidence level: " + (Math.Round(result.Confidence, 2)).ToString() + " Count: " + this.count;

    // NOTE(review): exact floating-point equality — a rep only counts when the
    // detector reports full confidence (exactly 1). Confirm Confidence is
    // clamped to [0, 1] and actually reaches 1.0, or use a >= threshold.
    if (result.Confidence == 1)
    {
        // NOTE(review): DateTime.Now is used for elapsed-time math; UtcNow
        // would be immune to DST/clock changes — confirm the fields are not
        // displayed as local time elsewhere before switching.
        this.endTime = DateTime.Now;

        // Debounce: require more than 0.5 s between counted peaks so one
        // sustained peak is not counted repeatedly.
        if ((this.endTime - this.startTime).TotalSeconds > 0.5)
        {
            this.count++;
        }

        this.startTime = DateTime.Now;
    }
}
/// <summary>
/// Initializes the page: prepares the per-body feature table, opens the Kinect
/// sensor and its multi-source frame reader, and creates one gesture detector
/// per trackable body. Wiring order (reader before Open, InitializeComponent
/// after Open) is preserved as-is — it is order-sensitive.
/// </summary>
public MainPage()
{
    CreateFolder();

    // Pre-populate each of the 6 body slots with every known feature at 0.
    for (int i = 0; i < 6; i++)
    {
        foreach (var feature in featuresList)
        {
            Features[i].Add(feature, 0.0f);
        }
    }

    // one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();
    this.coordinateMapper = this.kinectSensor.CoordinateMapper;
    SetupCurrentDisplay(DEFAULT_DISPLAYFRAMETYPE);
    //SetupCurrentDisplay(DisplayFrameType.Depth);

    // Subscribe to all frame streams we consume in one multi-source reader.
    this.multiSourceFrameReader = this.kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Infrared | FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex | FrameSourceTypes.Body);
    this.multiSourceFrameReader.MultiSourceFrameArrived += this.Reader_MultiSourceFrameArrived;

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // use the window object as the view model in this simple example
    this.DataContext = this;

    // open the sensor
    this.kinectSensor.Open();

    this.InitializeComponent();

    // Initialize the gesture detection objects for our gestures
    this.gestureDetectorList = new List <GestureDetector>();

    // Create a gesture detector for each body (6 bodies => 6 detectors)
    int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
    for (int i = 0; i < maxBodies; ++i)
    {
        // NOTE(review): "test" appears to be a placeholder gesture name passed
        // to every result view — confirm against GestureResultView's usage.
        GestureResultView result = new GestureResultView("test", i, false, false, 0.0f);
        GestureDetector detector = new GestureDetector(this.kinectSensor, result);
        result.PropertyChanged += GestureResult_PropertyChanged;
        this.gestureDetectorList.Add(detector);
    }
}
//lab 13
/// <summary>
/// (Re)builds the per-body gesture detection objects for the given exercise:
/// one GestureResultView / GestureDetector pair per body the sensor can track.
/// </summary>
/// <param name="exercise">Exercise key forwarded to each GestureDetector.</param>
void Gesture_Loaded(string exercise)
{
    // NOTE(review): any previous detector list is replaced without explicit
    // cleanup/unsubscription — confirm detectors are disposed elsewhere if
    // this can run more than once.
    this.gestureDetectorList = new List<GestureDetector>();

    // One detector per trackable body (6 bodies => 6 detectors).
    int bodyCount = this.kinectSensor.BodyFrameSource.BodyCount;
    for (int bodyIndex = 0; bodyIndex < bodyCount; bodyIndex++)
    {
        var view = new GestureResultView(bodyIndex, false, false, 0.0f);
        var detector = new GestureDetector(this.kinectSensor, view, exercise);
        view.PropertyChanged += GestureResult_PropertyChanged;
        this.gestureDetectorList.Add(detector);
    }
}
/// <summary>
/// Initializes the page: opens the Kinect sensor and its multi-source frame
/// reader, then creates one gesture detector per trackable body. The wiring
/// order (reader before Open, InitializeComponent after Open) is preserved
/// as-is — it is order-sensitive.
/// </summary>
public MainPage()
{
    // one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();
    this.coordinateMapper = this.kinectSensor.CoordinateMapper;

    // Subscribe to all frame streams we consume in one multi-source reader.
    this.multiSourceFrameReader = this.kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Infrared | FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex | FrameSourceTypes.Body);
    this.multiSourceFrameReader.MultiSourceFrameArrived += this.Reader_MultiSourceFrameArrived;

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // use the window object as the view model in this simple example
    this.DataContext = this;

    // open the sensor
    this.kinectSensor.Open();

    this.InitializeComponent();

    this.Loaded += MainPage_Loaded;

    //lab 13
    // Initialize the gesture detection objects for our gestures
    this.gestureDetectorList = new List <GestureDetector>();

    //lab 13
    // Create a gesture detector for each body (6 bodies => 6 detectors)
    int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
    for (int i = 0; i < maxBodies; ++i)
    {
        // NOTE(review): this GestureResultView overload takes a trailing null —
        // confirm what the fifth parameter represents in its declaration.
        GestureResultView result = new GestureResultView(i, false, false, 0.0f, null);
        GestureDetector detector = new GestureDetector(this.kinectSensor, result);
        result.PropertyChanged += GestureResult_PropertyChanged;
        this.gestureDetectorList.Add(detector);
    }
}
/// <summary>
/// Initializes the page: opens the Kinect sensor, its multi-source frame
/// reader, and a FaceManager requesting every face feature, then creates one
/// gesture detector per trackable body. The wiring order is preserved as-is —
/// it is order-sensitive.
/// </summary>
public MainPage()
{
    // one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();
    this.coordinateMapper = this.kinectSensor.CoordinateMapper;

    // Subscribe to all frame streams we consume in one multi-source reader.
    this.multiSourceFrameReader = this.kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Infrared | FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex | FrameSourceTypes.Body);
    this.multiSourceFrameReader.MultiSourceFrameArrived += this.Reader_MultiSourceFrameArrived;

    // specify the required face frame results
    // init with all the features so they are accessible later.
    this.faceFrameFeatures = FaceFrameFeatures.BoundingBoxInColorSpace | FaceFrameFeatures.PointsInColorSpace | FaceFrameFeatures.BoundingBoxInInfraredSpace | FaceFrameFeatures.PointsInInfraredSpace | FaceFrameFeatures.RotationOrientation | FaceFrameFeatures.FaceEngagement | FaceFrameFeatures.Glasses | FaceFrameFeatures.Happy | FaceFrameFeatures.LeftEyeClosed | FaceFrameFeatures.RightEyeClosed | FaceFrameFeatures.LookingAway | FaceFrameFeatures.MouthMoved | FaceFrameFeatures.MouthOpen;
    this.faceManager = new FaceManager(this.kinectSensor, this.faceFrameFeatures);

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // use the window object as the view model in this simple example
    this.DataContext = this;

    // open the sensor
    this.kinectSensor.Open();

    this.InitializeComponent();

    this.Loaded += MainPage_Loaded;

    //lab 13
    // Initialize the gesture detection objects for our gestures
    this.gestureDetectorList = new List<GestureDetector>();

    //lab 13
    // Create a gesture detector for each body (6 bodies => 6 detectors)
    int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
    for (int i = 0; i < maxBodies; ++i)
    {
        GestureResultView result = new GestureResultView(i, false, false, 0.0f);
        GestureDetector detector = new GestureDetector(this.kinectSensor, result);
        result.PropertyChanged += GestureResult_PropertyChanged;
        this.gestureDetectorList.Add(detector);
    }
}
/// <summary>
/// Initializes a new instance of the GestureDetector class along with the
/// gesture frame source and reader, selecting the gesture database (.gbd)
/// and gesture name for the requested exercise.
/// </summary>
/// <param name="kinectSensor">Active sensor to initialize the VisualGestureBuilderFrameSource object with</param>
/// <param name="gestureResultView">GestureResultView object to store gesture results of a single body to</param>
/// <param name="exercise">Exercise key ("Squats", "chestpress", "jumpingjack", "kettlebells", "armraises", "dumbbell"); any other value falls back to jumping jacks.</param>
public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView, string exercise)
{
    if (kinectSensor == null)
    {
        throw new ArgumentNullException("kinectSensor");
    }

    if (gestureResultView == null)
    {
        throw new ArgumentNullException("gestureResultView");
    }

    this.GestureResultView = gestureResultView;

    // Map the exercise key to its gesture database file and the gesture name
    // recorded inside that database.
    switch (exercise)
    {
        case "Squats":
            gestureDatabase = @"Database\Squats.gbd";
            handsAboveHeadGestureName = "Squats";
            break;
        case "chestpress":
            gestureDatabase = @"Database\chestpress.gbd";
            handsAboveHeadGestureName = "chestpress";
            break;
        case "jumpingjack":
            gestureDatabase = @"Database\jumpingjacks.gbd";
            handsAboveHeadGestureName = "jumpingjacks";
            break;
        case "kettlebells":
            gestureDatabase = @"Database\kettlebells.gbd";
            handsAboveHeadGestureName = "kettlebells";
            break;
        case "armraises":
            gestureDatabase = @"Database\armraises.gbd";
            handsAboveHeadGestureName = "armraises";
            break;
        case "dumbbell":
            gestureDatabase = @"Database\dumbbell.gbd";
            handsAboveHeadGestureName = "dumbbell";
            break;
        default:
            // Unknown exercise: fall back to jumping jacks (same as "jumpingjack").
            gestureDatabase = @"Database\jumpingjacks.gbd";
            handsAboveHeadGestureName = "jumpingjacks";
            break;
    }

    // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor.
    this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
    this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost;

    // open the reader for the vgb frames; paused until a tracked body is assigned
    this.vgbFrameReader = this.vgbFrameSource.OpenReader();
    if (this.vgbFrameReader != null)
    {
        this.vgbFrameReader.IsPaused = true;
        this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived;
    }

    // Load only the selected gesture by name. We could load everything with
    // vgbFrameSource.AddGestures(database.AvailableGestures), but this program
    // tracks one discrete gesture. (Dead commented-out code — the old 'Seated'
    // loop and a commented re-creation of vgbFrameSource — has been removed.)
    using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(this.gestureDatabase))
    {
        foreach (Gesture gesture in database.AvailableGestures)
        {
            if (gesture.Name.Equals(this.handsAboveHeadGestureName))
            {
                this.vgbFrameSource.AddGesture(gesture);
            }
        }
    }
}
/// <summary>
/// Initializes a new instance of the GestureDetector class along with the gesture frame source and reader.
/// Loads the bowing, raise, and lower gestures from the gesture database by name,
/// then connects a TCP client to a hard-coded link-local endpoint.
/// </summary>
/// <param name="kinectSensor">Active sensor to initialize the VisualGestureBuilderFrameSource object with</param>
/// <param name="gestureResultView">GestureResultView object to store gesture results of a single body to</param>
public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView)
{
    if (kinectSensor == null)
    {
        throw new ArgumentNullException("kinectSensor");
    }

    if (gestureResultView == null)
    {
        throw new ArgumentNullException("gestureResultView");
    }

    this.GestureResultView = gestureResultView;

    // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor.
    this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
    this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost;

    // open the reader for the vgb frames; paused until a tracked body is assigned
    this.vgbFrameReader = this.vgbFrameSource.OpenReader();
    if (this.vgbFrameReader != null)
    {
        this.vgbFrameReader.IsPaused = true;
        this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived;
    }

    // Load the three tracked gestures by name. (We could load all available
    // gestures with vgbFrameSource.AddGestures(database.AvailableGestures),
    // but only these three are wanted.)
    using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(this.gestureDatabase))
    {
        foreach (Gesture gesture
in database.AvailableGestures)
        {
            // NOTE(review): the last check is a plain 'if' while the previous
            // is 'else if'. Behavior is the same as long as the three gesture
            // names are distinct, but confirm the asymmetry is intentional.
            if (gesture.Name.Equals(this.bowingGestureName))
            {
                this.vgbFrameSource.AddGesture(gesture);
            }
            else if (gesture.Name.Equals(this.raiseGestureName))
            {
                this.vgbFrameSource.AddGesture(gesture);
            }
            if (gesture.Name.Equals(this.lowerGestureName))
            {
                this.vgbFrameSource.AddGesture(gesture);
            }
        }
    }

    // setup TCPClient
    // NOTE(review): blocking synchronous Connect inside a constructor to a
    // hard-coded link-local address/port — this throws SocketException and
    // aborts construction if the peer is unreachable; confirm that is intended.
    IPEndPoint ep = new IPEndPoint(IPAddress.Parse("169.254.0.2"), int.Parse("51717"));
    client.Connect(ep);
}