/// <summary>
/// Releases the media elements when closing the replay window
/// and switches back to recording mode.
/// Written by Baptiste Germond
/// </summary>
private void quit()
{
    isReplaying = false;
    statisticsPath = "";
    if (skeletonScrolling != null)
    {
        skeletonScrolling.Stop();
        skeletonScrolling = null;
    }
    (TrainingSideTool.Get().FindResource("StopReplayButtonAction") as Storyboard).Begin();
    DrawingSheetView.Get().ReplayVideo.Close();
    DrawingSheetView.Get().ReplayVideo.Source = null;
    DrawingSheetView.Get().ReplayAudio.Close();
    DrawingSheetView.Get().ReplayAudio.Source = null;
    ReplayView.Get().SoundCheckbox.IsChecked = false;
    SideToolsViewModel.Get().enableTrackingAndTrainingTab();
    TrainingSideToolViewModel.Get().recordingMode();
    DrawingSheetAvatarViewModel.Get().normalMode();
    // Reactivate the sensors
    KinectDevice.sensor.SkeletonStream.Enable();
    if (faceTrack)
    {
        KinectDevice.faceTracking = true;
    }
    TrackingSideToolViewModel.get().SpeedRate = speedRateActive;
    // Reactivate the audience
    if (TrainingSideToolViewModel.audienceOn)
    {
        TrainingSideToolViewModel.audienceOn = false;
        GeneralSideTool.Get().AudienceControlCheckBox.IsChecked = true;
    }
}
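The teardown in quit() follows the usual WPF MediaElement release pattern: Close() stops playback, and clearing Source lets the decoder release the file handle. A minimal sketch of that pattern in isolation (the helper name is illustrative, not from the source):

// Hedged sketch: releasing a WPF MediaElement so the backing media file is freed.
static void ReleaseMedia(System.Windows.Controls.MediaElement media)
{
    media.Close();        // stops playback and closes the media
    media.Source = null;  // drops the reference so the file handle can be released
}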
public static TrackingSideToolViewModel get()
{
    if (trackingSideToolViewModel == null)
    {
        trackingSideToolViewModel = new TrackingSideToolViewModel();
    }
    return trackingSideToolViewModel;
}
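This accessor is a classic lazy singleton and is not thread-safe: two threads racing through the null check could each construct a view model. If concurrent access were ever a concern, a System.Lazy&lt;T&gt;-based variant would be one option; a sketch, assuming TrackingSideToolViewModel has an accessible parameterless constructor:

// Hedged alternative: Lazy<T> guarantees a single construction even under concurrent calls.
private static readonly Lazy<TrackingSideToolViewModel> lazyInstance =
    new Lazy<TrackingSideToolViewModel>(() => new TrackingSideToolViewModel());

public static TrackingSideToolViewModel get()
{
    return lazyInstance.Value;
}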
/// <summary>
/// Launches the recording once the waiting time is over
/// </summary>
public void launchRecordingAfterWait()
{
    inRecord = true;
    allFalse();
    if (FaceRecognition == true)
    {
        TrackingSideToolViewModel tracking = TrackingSideToolViewModel.get();
        tracking.FaceTracking = true;
        tracking.emo = true;
        //tracking.Mouth = true;
        //tracking.Mouth2 = true;
        //These features are not working for now
        //tracking.pupilR = true;
        tracking.LookR = true;
    }
    if (VoiceRecognition == true)
    {
        TrackingSideToolViewModel tracking = TrackingSideToolViewModel.get();
        //tracking.PeakDetection = true; // creates lag during recording
        tracking.SpeedRate = true;
        tracking.ShowTextOnScreen = true;
        /*tracking.VoiceMonotony = true;
        tracking.BadVoiceReflex = true;*/ // these features are not working
    }
    if (BodyRecognition == true)
    {
        TrackingSideToolViewModel.get().UseFeedback = true;
    }
    SideToolsViewModel.Get().chooseTraining();
    TrainingSideToolViewModel tstvm = TrainingSideToolViewModel.Get();
    tstvm.limitedTimeHours = "0";
    tstvm.limitedTimeMinutes = MinRecord.ToString();
    tstvm.limitedTimeSeconds = SecRecord.ToString();
    tstvm.isTimeLimited = true;
    /* The session recording captures whichever view the user was
     * using when the recording session started. */
    if (MainWindow.drawingSheet.getMode() == SheetMode.AvatarMode)
    {
        tstvm.ToggleAvatarOpenGLRecording = true;
    }
    else if (MainWindow.drawingSheet.getMode() == SheetMode.StreamMode)
    {
        tstvm.ToggleStreamRecording = true;
    }
    tstvm.ToggleAudioRecording = true;
    // Launch the recording by programmatically invoking the Start button
    ButtonAutomationPeer peer = new ButtonAutomationPeer(TrainingSideTool.Get().StartRecordingButton);
    IInvokeProvider invokeProv = peer.GetPattern(PatternInterface.Invoke) as IInvokeProvider;
    invokeProv.Invoke();
}
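The last three lines programmatically "click" the Start button through UI Automation rather than calling a handler directly, so all of the button's normal click plumbing (command bindings, triggers) runs. A sketch of that pattern as a standalone helper (the helper name is illustrative, not from the source):

using System.Windows.Controls;
using System.Windows.Automation.Peers;
using System.Windows.Automation.Provider;

// Hedged sketch: invoking a WPF Button via its automation peer.
static void InvokeButton(Button button)
{
    var peer = new ButtonAutomationPeer(button);
    var invokeProv = peer.GetPattern(PatternInterface.Invoke) as IInvokeProvider;
    if (invokeProv != null) // GetPattern returns null if the pattern is unsupported
    {
        invokeProv.Invoke(); // raises the Click event as if the user pressed the button
    }
}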
/// <summary>
/// Enables the result checkboxes just after a recording, depending on which elements were captured
/// </summary>
public void enableCheckBox()
{
    foreach (KeyValuePair<JointType, bool> key in Agitation.getCatchedJoin())
    {
        if (key.Key == JointType.HipCenter)
        {
            choiceResultView.chkAgitationHips.IsEnabled = true;
        }
        else if (key.Key == JointType.HandLeft)
        {
            choiceResultView.chkAgitationLHand.IsEnabled = true;
        }
        else if (key.Key == JointType.HandRight)
        {
            choiceResultView.chkAgitationRHand.IsEnabled = true;
        }
        else if (key.Key == JointType.KneeLeft)
        {
            choiceResultView.chkAgitationLKnee.IsEnabled = true;
        }
        else if (key.Key == JointType.KneeRight)
        {
            choiceResultView.chkAgitationRKnee.IsEnabled = true;
        }
        else if (key.Key == JointType.ShoulderLeft)
        {
            choiceResultView.chkAgitationLShoulder.IsEnabled = true;
        }
        else if (key.Key == JointType.ShoulderRight)
        {
            choiceResultView.chkAgitationRShoulder.IsEnabled = true;
        }
    }
    choiceResultView.chkArmsMotion.IsEnabled = true;
    choiceResultView.chkArmsCrossed.IsEnabled = true;
    choiceResultView.chkHandsJoined.IsEnabled = true;
    if (TrackingSideToolViewModel.get().FaceTracking)
    {
        choiceResultView.chkFace.IsEnabled = true;
        if (TrackingSideToolViewModel.get().emo)
        {
            choiceResultView.chkEmotion.IsEnabled = true;
        }
        if (lookingDirection.detect)
        {
            choiceResultView.chkLookDirec.IsEnabled = true;
        }
    }
    if (TrackingSideToolViewModel.get().SpeedRate)
    {
        choiceResultView.chkAudio.IsEnabled = true;
        choiceResultView.chkNumberSyllables.IsEnabled = true;
    }
}
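The joint-to-checkbox chain above is essentially data written as control flow; a dictionary lookup would express the same mapping more compactly. A sketch (the dictionary itself is illustrative; the checkbox fields are the ones from the source, and System.Collections.Generic plus System.Windows.Controls are assumed in scope):

// Hedged sketch: the same JointType -> CheckBox mapping as a lookup table.
var jointCheckBoxes = new Dictionary<JointType, CheckBox>
{
    { JointType.HipCenter,     choiceResultView.chkAgitationHips },
    { JointType.HandLeft,      choiceResultView.chkAgitationLHand },
    { JointType.HandRight,     choiceResultView.chkAgitationRHand },
    { JointType.KneeLeft,      choiceResultView.chkAgitationLKnee },
    { JointType.KneeRight,     choiceResultView.chkAgitationRKnee },
    { JointType.ShoulderLeft,  choiceResultView.chkAgitationLShoulder },
    { JointType.ShoulderRight, choiceResultView.chkAgitationRShoulder }
};

foreach (KeyValuePair<JointType, bool> entry in Agitation.getCatchedJoin())
{
    CheckBox box;
    if (jointCheckBoxes.TryGetValue(entry.Key, out box))
    {
        box.IsEnabled = true;
    }
}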
/// <summary>
/// Resets every option used for the recording session
/// </summary>
public void allFalse()
{
    TrackingSideToolViewModel tracking = TrackingSideToolViewModel.get();
    tracking.FaceTracking = false;
    tracking.emo = false;
    //tracking.Mouth = false;
    //tracking.Mouth2 = false;
    //Disabled for the same reasons as in launchRecordingAfterWait()
    //tracking.pupilR = false;
    tracking.LookR = false;
    tracking.PeakDetection = false;
    tracking.SpeedRate = false;
    tracking.ShowTextOnScreen = false;
    /*tracking.VoiceMonotony = false;
    tracking.BadVoiceReflex = false;*/
    tracking.UseFeedback = false;
}
public void setTrackingVM(TrackingSideToolViewModel tstvm)
{
    this.trackingVM = tstvm;
}
/// <summary>
/// Called when the user clicks the OK button of the window
/// </summary>
private void ShowResults()
{
    if ((isLoad && NbRecording > 0) || IsAtLeastOneCheckBoxIsChecked(choiceResultView.stkPanel))
    {
        choiceResultView.Close();
        List<bool> lbool = new List<bool>();
        // IsChecked is a bool?; HasValue && Value converts it to a plain bool (false when null)
        lbool.Add(choiceResultView.chkAgitationHips.IsChecked.HasValue && choiceResultView.chkAgitationHips.IsChecked.Value);
        lbool.Add(choiceResultView.chkAgitationLHand.IsChecked.HasValue && choiceResultView.chkAgitationLHand.IsChecked.Value);
        lbool.Add(choiceResultView.chkAgitationLKnee.IsChecked.HasValue && choiceResultView.chkAgitationLKnee.IsChecked.Value);
        lbool.Add(choiceResultView.chkAgitationLShoulder.IsChecked.HasValue && choiceResultView.chkAgitationLShoulder.IsChecked.Value);
        lbool.Add(choiceResultView.chkAgitationRHand.IsChecked.HasValue && choiceResultView.chkAgitationRHand.IsChecked.Value);
        lbool.Add(choiceResultView.chkAgitationRKnee.IsChecked.HasValue && choiceResultView.chkAgitationRKnee.IsChecked.Value);
        lbool.Add(choiceResultView.chkAgitationRShoulder.IsChecked.HasValue && choiceResultView.chkAgitationRShoulder.IsChecked.Value);
        lbool.Add(choiceResultView.chkHandsJoined.IsChecked.HasValue && choiceResultView.chkHandsJoined.IsChecked.Value);
        lbool.Add(choiceResultView.chkArmsCrossed.IsChecked.HasValue && choiceResultView.chkArmsCrossed.IsChecked.Value);
        lbool.Add(choiceResultView.chkEmotion.IsChecked.HasValue && choiceResultView.chkEmotion.IsChecked.Value);
        lbool.Add(choiceResultView.chkLookDirec.IsChecked.HasValue && choiceResultView.chkLookDirec.IsChecked.Value);
        lbool.Add(choiceResultView.chkNumberSyllables.IsChecked.HasValue && choiceResultView.chkNumberSyllables.IsChecked.Value);
        var results = new ResultsView(lbool);
        if (isLoad) // the window was opened from "Open charts analysis"
        {
            List<string> listpathdate = new List<string>();
            if (lastRecord) // "Last Record" is selected in the comboBox
            {
                listpathdate.Add(listpath.ElementAt(0));
            }
            else
            {
                foreach (string s in listpath)
                {
                    DateTime date = Tools.getDateFromPath(s);
                    // keep only the recordings between the minimum and maximum dates
                    if (date.CompareTo(maxDate) <= 0 && date.CompareTo(minDate) >= 0)
                    {
                        listpathdate.Add(s);
                    }
                }
            }
            ((ResultsViewModel)results.DataContext).loadManyCharts(listpathdate); // load the selected files
        }
        else // the window was opened from "Display my results"
        {
            ((ResultsViewModel)results.DataContext).getAgitationStatistics(Agitation.getAgitationStats());
            // temp is the union of HandsJoined.getHandStatistics() and ArmsCrossed.getArmsStatistics()
            List<IGraph> temp = new List<IGraph>();
            temp.AddRange(HandsJoined.getHandStatistics());
            temp.AddRange(ArmsCrossed.getArmsStatistics());
            ((ResultsViewModel)results.DataContext).getArmsMotion(temp);
            if (TrackingSideToolViewModel.get().FaceTracking)
            {
                List<IGraph> listGraphFace = new List<IGraph>();
                listGraphFace.AddRange(EmotionRecognition.getEmotionsStatistics());
                listGraphFace.AddRange(lookingDirection.getLookingStatistics());
                ((ResultsViewModel)results.DataContext).getFaceStatistics(listGraphFace);
            }
            if (TrackingSideToolViewModel.get().SpeedRate)
            {
                List<IGraph> listGraphVoice = new List<IGraph>();
                listGraphVoice.AddRange(AudioProvider.getVoiceStatistics());
                ((ResultsViewModel)results.DataContext).getVoiceStatistics(listGraphVoice);
            }
        }
        ((ResultsViewModel)results.DataContext).addResultsPartToView();
        results.Show();
    }
}
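A side note on the HasValue && Value pattern used above: CheckBox.IsChecked is a bool?, and Nullable&lt;bool&gt; already provides equivalents that read more directly. Two built-in ways to collapse a bool? to a bool, both yielding false when the value is null:

// Hedged sketch: idiomatic bool? -> bool conversions.
bool hips  = choiceResultView.chkAgitationHips.IsChecked.GetValueOrDefault();
bool hands = choiceResultView.chkHandsJoined.IsChecked == true;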