/// <summary>
/// Handles the skeleton part of a Kinect AllFramesReady event: copies the
/// skeleton data out of the frame, selects the skeleton to watch, raises the
/// skeletonUpdated event and forwards the skeleton to every enabled
/// detector / training gesture.
/// </summary>
/// <param name="e">Event arguments of the Kinect AllFramesReady event.</param>
private void skeletonOperations(AllFramesReadyEventArgs e)
{
    SkeletonFrame skeletonFrame = null;
    try
    {
        skeletonFrame = e.OpenSkeletonFrame();
        if (skeletonFrame == null)
        {
            return; // No skeleton data available in this frame.
        }
        Skeleton[] skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
        skeletonFrame.CopySkeletonDataTo(skeletons);
        Skeleton watchedSkeleton = getTrackedSkeleton(skeletons, trackClosest(skeletons));
        if (watchedSkeleton == null)
        {
            return; // Nobody to watch; the finally block still disposes the frame.
        }
        skeletonUpdated(this, new SkeletonEventArgs(watchedSkeleton));
        // Give each tracker the updated frame.
        if (Tools.allJointsTracked(watchedSkeleton))
        {
            if (Gesture.compare)
            {
                Gesture.testCompare(watchedSkeleton);
            }
            if (Posture.compare)
            {
                Posture.testCompare(watchedSkeleton);
            }
        }
        if (Agitation.detect)
        {
            Agitation.testAgitation(watchedSkeleton);
        }
        if (HandsRaised.compare)
        {
            HandsRaised.testCompare(watchedSkeleton);
        }
        if (HandsJoined.detect)
        {
            HandsJoined.startDetection(watchedSkeleton);
        }
        if (EmotionRecognition.detect)
        {
            EmotionRecognition.EmotionRecognizer();
        }
        if (ArmsWide.compare)
        {
            ArmsWide.testCompare(watchedSkeleton);
        }
        if (ArmsCrossed.compare)
        {
            ArmsCrossed.testCompare(watchedSkeleton);
        }
        if (HandsInPocket.compare) // experimental
        {
            HandsInPocket.testCompare(watchedSkeleton);
        }
        /* Experimental movement to test the training. */
        if (DrawingSheetAvatarViewModel.Get().isTraining && TrainingWithAvatarViewModel.canBeInterrupted)
        {
            // Dispatch the frame to the gesture currently being trained. A switch
            // uses ordinal string equality, which is the right semantics for these
            // fixed ASCII tokens (the previous culture-sensitive String.Compare
            // chain was flagged-by-CA1310 and could only ever match one case anyway).
            switch (TrainingWithAvatarViewModel.AvatarGesture)
            {
                case "WavingTraining":
                    TrainingWithAvatarViewModel._gesture.Update(watchedSkeleton);
                    break;
                case "HandTraining":
                    TrainingWithAvatarViewModel._handgesture.Update(watchedSkeleton);
                    break;
                case "PowerTraining":
                    TrainingWithAvatarViewModel._powergesture.Update(watchedSkeleton);
                    break;
                case "WelcomeTraining":
                    TrainingWithAvatarViewModel._welcomegesture.Update(watchedSkeleton);
                    break;
                case "SaluteTraining":
                    TrainingWithAvatarViewModel._salutegesture.Update(watchedSkeleton);
                    break;
                case "HypeTraining":
                    TrainingWithAvatarViewModel._hypegesture.Update(watchedSkeleton);
                    break;
                case "FaceTraining":
                    TrainingWithAvatarViewModel._facegesture.Update(watchedSkeleton);
                    break;
            }
        }
        if (KinectDevice.useAutoElevation)
        {
            ClippedEdgesElevationChange(watchedSkeleton);
        }
    }
    finally
    {
        // Always release the frame, even if an event handler throws.
        if (skeletonFrame != null)
        {
            skeletonFrame.Dispose();
        }
    }
}
/// <summary>
/// Launches the agitation detection for one joint: buffers the joint's current
/// position and, once a full observation window has accumulated, decides whether
/// the joint is "too agitated", raising or clearing the corresponding feedback.
/// </summary>
/// <param name="sk">Skeleton containing the joint.</param>
/// <param name="j">Joint to observe.</param>
/// <remarks>Author: Clement Michard</remarks>
/// Modified by Baptiste Germond: counts the total number of frames to do fewer
/// calculations, and deactivates the agitation of the legs when not tracked.
private static void agitationJoint(Skeleton sk, JointType j)
{
    if (sk.Joints[j].TrackingState == JointTrackingState.Tracked)
    {
        // Ensure that if the legs are not tracked they do not count towards the
        // agitation. (HipLeft/HipRight were considered too but were left out.)
        bool ignoredLegJoint = DrawingSheetAvatarViewModel.Get().LegTracked == false
            && (j == JointType.HipCenter
                || j == JointType.KneeLeft
                || j == JointType.KneeRight);
        if (!ignoredLegJoint)
        {
            // When recording a performance, lazily create the per-joint list of
            // timestamps at which this joint was observed agitated.
            if (rec && !agitNotAgit.ContainsKey(j))
            {
                agitNotAgit[j] = new List<int>();
            }
            // Lazily create the Agitation tracker for this joint.
            if (!agitation.ContainsKey(j))
            {
                agitation[j] = new Agitation(j);
            }
            // Buffer the joint position, expressed in the skeleton's own reference frame.
            agitation[j].Enqueue(Geometry.refKinectToSkeleton(new Point3D(sk.Joints[j].Position), sk));
            // Only decide once the buffer holds exactly the configured observation window.
            if (agitation[j].Count == agitation[j].time * KINECT_RATE)
            {
                // Whether the joint moved too much over the observation window.
                bool agitated = agitation[j].tooAgitated();
                if (rec)
                {
                    // One arbitrary joint (HandLeft) counts frames, so each frame is counted once.
                    if (j == JointType.HandLeft)
                    {
                        nbFrameRecorded++;
                    }
                    // Read the stopwatch once so the membership test and the stored
                    // value cannot disagree if time advances between the two.
                    int tenthsOfSecond = (int)(Tools.getStopWatch() / 100);
                    if (agitated && !agitNotAgit[j].Contains(tenthsOfSecond))
                    {
                        // Remember when the joint was agitated during the recording.
                        agitNotAgit[j].Add(tenthsOfSecond);
                    }
                }
                if (!tooAgitatedJoints.Contains(j) && agitated)
                {
                    // Joint just became agitated: track it and raise the feedback.
                    tooAgitatedJoints.Add(j);
                    agitationEvent(j, new InstantFeedback(tooAgitatedText));
                    feedAg = true;
                }
                else if (tooAgitatedJoints.Contains(j) && !agitated)
                {
                    // Joint calmed down: stop tracking it, and stop the feedback
                    // once no joint is agitated anymore.
                    tooAgitatedJoints.Remove(j);
                    if (tooAgitatedJoints.Count == 0)
                    {
                        feedAg = false;
                    }
                }
                else if (agitated)
                {
                    // Somehow this is useful for the agitation icon to be
                    // displayed correctly during replays.
                    agitationEvent(j, new InstantFeedback(tooAgitatedText));
                }
                // Record the values necessary for the .csv file (once per frame, via HandLeft).
                if (rec && j == JointType.HandLeft)
                {
                    agitationRecord.Add(Tools.getStopWatch() / 1000.0, feedAg ? 1 : 0);
                }
            }
        }
    }
    // If a joint is no longer tracked, we consider it as "not agitated".
    else
    {
        tooAgitatedJoints.Remove(j);
        // If the list is empty we stop the feedback.
        if (tooAgitatedJoints.Count == 0)
        {
            feedAg = false;
        }
    }
}