/// <summary>
/// Builds the camera control: loads the XAML, wires up the capture and
/// recognition collaborators, then starts capturing.
/// </summary>
public CameraControl()
{
    this.InitializeComponent();

    // Capture / recognition collaborators used by the control.
    mediaCapture = new MediaCapture();
    emotionRecognition = new EmotionRecognition();
    faceIdentity = new FaceIdentity();
    emotionColor = new EmotionColor();
    emotionScoresList = new List<EmotionScore>();

    InitializeCapture();
}
/// <summary>
/// Reads the current skeleton frame, picks the tracked skeleton, and feeds it
/// to every enabled detector/trainer. The frame is always disposed, even when
/// a detector throws.
/// </summary>
/// <param name="e">Event args from the Kinect all-frames-ready event.</param>
private void skeletonOperations(AllFramesReadyEventArgs e)
{
    SkeletonFrame skeletonFrame = null;
    try
    {
        skeletonFrame = e.OpenSkeletonFrame();
        if (skeletonFrame == null)
        {
            return; // No skeleton data available this tick.
        }

        Skeleton[] skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
        skeletonFrame.CopySkeletonDataTo(skeletons);

        Skeleton watchedSkeleton = getTrackedSkeleton(skeletons, trackClosest(skeletons));
        if (watchedSkeleton == null)
        {
            return; // Nobody is being tracked.
        }

        skeletonUpdated(this, new SkeletonEventArgs(watchedSkeleton));

        // Give each tracker the updated frame.
        if (Tools.allJointsTracked(watchedSkeleton))
        {
            if (Gesture.compare) { Gesture.testCompare(watchedSkeleton); }
            if (Posture.compare) { Posture.testCompare(watchedSkeleton); }
        }
        if (Agitation.detect) { Agitation.testAgitation(watchedSkeleton); }
        if (HandsRaised.compare) { HandsRaised.testCompare(watchedSkeleton); }
        if (HandsJoined.detect) { HandsJoined.startDetection(watchedSkeleton); }
        if (EmotionRecognition.detect) { EmotionRecognition.EmotionRecognizer(); }
        if (ArmsWide.compare) { ArmsWide.testCompare(watchedSkeleton); }
        if (ArmsCrossed.compare) { ArmsCrossed.testCompare(watchedSkeleton); }
        if (HandsInPocket.compare) // experimental
        {
            HandsInPocket.testCompare(watchedSkeleton);
        }

        /* Experimental movement to test the training. */
        if (DrawingSheetAvatarViewModel.Get().isTraining
            && TrainingWithAvatarViewModel.canBeInterrupted)
        {
            // The gesture names are program identifiers, so match them with an
            // ordinal switch. The original chain of culture-sensitive
            // String.Compare(...) == 0 tests could misbehave under unusual
            // cultures and hid that the cases are mutually exclusive.
            switch (TrainingWithAvatarViewModel.AvatarGesture)
            {
                case "WavingTraining":
                    TrainingWithAvatarViewModel._gesture.Update(watchedSkeleton);
                    break;
                case "HandTraining":
                    TrainingWithAvatarViewModel._handgesture.Update(watchedSkeleton);
                    break;
                case "PowerTraining":
                    TrainingWithAvatarViewModel._powergesture.Update(watchedSkeleton);
                    break;
                case "WelcomeTraining":
                    TrainingWithAvatarViewModel._welcomegesture.Update(watchedSkeleton);
                    break;
                case "SaluteTraining":
                    TrainingWithAvatarViewModel._salutegesture.Update(watchedSkeleton);
                    break;
                case "HypeTraining":
                    TrainingWithAvatarViewModel._hypegesture.Update(watchedSkeleton);
                    break;
                case "FaceTraining":
                    TrainingWithAvatarViewModel._facegesture.Update(watchedSkeleton);
                    break;
            }
        }

        if (KinectDevice.useAutoElevation)
        {
            ClippedEdgesElevationChange(watchedSkeleton);
        }
    }
    finally
    {
        // SkeletonFrame holds native resources; release it unconditionally.
        if (skeletonFrame != null)
        {
            skeletonFrame.Dispose();
        }
    }
}
/// <summary>
/// Called when the user clicks the OK button of the results-choice window:
/// collects which charts were ticked and opens the results view, either
/// loading recorded sessions from disk or showing the live statistics.
/// </summary>
private void ShowResults()
{
    if ((isLoad && NbRecording > 0) || IsAtLeastOneCheckBoxIsChecked(choiceResultView.stkPanel))
    {
        choiceResultView.Close();

        // "IsChecked == true" collapses the nullable bool? into a plain bool
        // (null/indeterminate counts as unchecked) — equivalent to the verbose
        // HasValue && Value pattern, repeated twelve times in the original.
        List<bool> lbool = new List<bool>
        {
            choiceResultView.chkAgitationHips.IsChecked == true,
            choiceResultView.chkAgitationLHand.IsChecked == true,
            choiceResultView.chkAgitationLKnee.IsChecked == true,
            choiceResultView.chkAgitationLShoulder.IsChecked == true,
            choiceResultView.chkAgitationRHand.IsChecked == true,
            choiceResultView.chkAgitationRKnee.IsChecked == true,
            choiceResultView.chkAgitationRShoulder.IsChecked == true,
            choiceResultView.chkHandsJoined.IsChecked == true,
            choiceResultView.chkArmsCrossed.IsChecked == true,
            choiceResultView.chkEmotion.IsChecked == true,
            choiceResultView.chkLookDirec.IsChecked == true,
            choiceResultView.chkNumberSyllables.IsChecked == true,
        };

        var results = new ResultsView(lbool);

        if (isLoad) // the window was opened from "Open charts analysis"
        {
            List<string> listpathdate = new List<string>();
            if (lastRecord) // "Last Record" is selected in the comboBox
            {
                listpathdate.Add(listpath.ElementAt(0));
            }
            else
            {
                // Keep only the recordings whose date lies in [minDate, maxDate].
                foreach (string s in listpath)
                {
                    DateTime date = Tools.getDateFromPath(s);
                    if (date.CompareTo(maxDate) <= 0 && date.CompareTo(minDate) >= 0)
                    {
                        listpathdate.Add(s);
                    }
                }
            }
            ((ResultsViewModel)results.DataContext).loadManyCharts(listpathdate); // load the selected files
        }
        else // the window was opened from "Display my results"
        {
            ((ResultsViewModel)results.DataContext).getAgitationStatistics(Agitation.getAgitationStats());

            // temp is the union of HandsJoined.getHandStatistics() and
            // ArmsCrossed.getArmsStatistics().
            List<IGraph> temp = new List<IGraph>();
            temp.AddRange(HandsJoined.getHandStatistics());
            temp.AddRange(ArmsCrossed.getArmsStatistics());
            ((ResultsViewModel)results.DataContext).getArmsMotion(temp);

            if (TrackingSideToolViewModel.get().FaceTracking)
            {
                List<IGraph> listGraphFace = new List<IGraph>();
                listGraphFace.AddRange(EmotionRecognition.getEmotionsStatistics());
                listGraphFace.AddRange(lookingDirection.getLookingStatistics());
                ((ResultsViewModel)results.DataContext).getFaceStatistics(listGraphFace);
            }
            if (TrackingSideToolViewModel.get().SpeedRate)
            {
                List<IGraph> listGraphVoice = new List<IGraph>();
                listGraphVoice.AddRange(AudioProvider.getVoiceStatistics());
                ((ResultsViewModel)results.DataContext).getVoiceStatistics(listGraphVoice);
            }
        }

        ((ResultsViewModel)results.DataContext).addResultsPartToView();
        results.Show();
    }
}
// Update is called once per frame.
void Update()
{
    // F7 toggles the chat input field on/off.
    if (Input.GetKeyDown(KeyCode.F7))
    {
        inputFiledObject.SetActive(!inputFiledObject.activeSelf);
    }

    // While the input field is open, Enter (main or keypad) sends the message.
    bool enterPressed = Input.GetKeyDown(KeyCode.Return) || Input.GetKeyDown(KeyCode.KeypadEnter);
    if (inputFiledObject.activeSelf && enterPressed)
    {
        // Send the typed message to the chat bot and receive the reply.
        string output = TulingChatMachine.Chat(inputField.text);

        // Run emotion recognition on the reply and compute the mood rate.
        Dictionary<string, float> recogRet = EmotionRecognition.Recognition(output);
        rate = recogRet["optimistic"] - recogRet["pessimistic"];
        flag = true;

        inputField.text = null;                          // clear the input field
        textObject.SetActive(true);                      // show the reply text
        timePassed = 0;                                  // restart the fade timer
        text.color = new UnityEngine.Color(1, 1, 1, 1);  // reset to fully opaque
        text.text = output;                              // display the received message
    }

    // While the reply is visible, advance the timer and fade the text out.
    if (textObject.activeSelf)
    {
        timePassed += Time.deltaTime;
        // Fading only starts after T_BEGIN seconds have elapsed.
        if (timePassed >= T_BEGIN)
        {
            text.color = new Color(1, 1, 1, 1 - (timePassed - T_BEGIN) / (T_TOTAL - T_BEGIN));
        }
    }

    // Time is up: reset the timer and hide the text.
    if (timePassed >= T_TOTAL)
    {
        timePassed = 0;
        textObject.SetActive(false);
    }
}