/// <summary>
/// Shows a dialogue line in the popup: sets both speaker portraits, dims the
/// non-speaking one, and displays the speaker's name and the line text.
/// </summary>
/// <param name="speakerOne">Left-side character.</param>
/// <param name="speakerTwo">Right-side character.</param>
/// <param name="speakerOneExpression">Expression used for speaker one's sprite.</param>
/// <param name="speakerTwoExpression">Expression used for speaker two's sprite.</param>
/// <param name="text">Dialogue line to display.</param>
/// <param name="isCharacterOneSpeaking">True when speaker one is the active speaker.</param>
/// <param name="speakerNameOverride">Optional display name; null to derive it from the speaking character's enum value.</param>
public void Display(Character speakerOne, Character speakerTwo, FacialExpression speakerOneExpression, FacialExpression speakerTwoExpression, string text, bool isCharacterOneSpeaking, string speakerNameOverride)
{
    // Hide any choice buttons left over from a previous dialogue step.
    foreach (var choice in _choices)
    {
        choice.gameObject.SetActive(false);
    }

    if (speakerOne != Character.NARRATOR && speakerTwo != Character.NARRATOR)
    {
        _imageSpeakerOne.sprite = GetSprite(speakerOne, speakerOneExpression);
        _imageSpeakerTwo.sprite = GetSprite(speakerTwo, speakerTwoExpression);
        // Dim the character who isn't speaking.
        // NOTE(review): this tints the sprite black at half alpha (RGB all 0),
        // not merely "reduce the alpha by 0.5" as the old comment said —
        // confirm the silhouette effect is intended.
        _imageSpeakerOne.color = isCharacterOneSpeaking ? Color.white : new Color(0f, 0f, 0f, .5f);
        _imageSpeakerTwo.color = isCharacterOneSpeaking ? new Color(0f, 0f, 0f, .5f) : Color.white;
    }
    else
    {
        // Narrator lines show no character portraits.
        _imageSpeakerOne.sprite = _sprites.Empty;
        _imageSpeakerTwo.sprite = _sprites.Empty;
    }

    // Displayed name: explicit override, or the speaking character's enum
    // value lower-cased with its first character upper-cased.
    string name = speakerNameOverride ?? (isCharacterOneSpeaking ? speakerOne : speakerTwo).ToString().ToLower();
    // Guard: an empty override string would make name[0] throw.
    if (name.Length > 0)
    {
        name = char.ToUpper(name[0]) + name.Substring(1);
    }
    _textName.text = name;
    _textContent.text = text;
    _popup.SetActive(true);
}
/// <summary>
/// Samples the 22 RealSense facial-expression intensity channels of one face
/// into a new <see cref="FacialExpression"/>. Persistence is currently
/// disabled (see the commented-out saveEntity call), so the sampled data is
/// discarded — NOTE(review): confirm whether that is intentional.
/// </summary>
/// <param name="face">Face detected by the RealSense face module for the current frame.</param>
private void SaveFacialExpressionData(PXCMFaceData.Face face)
{
    FacialExpression fe = new FacialExpression();
    PXCMFaceData.ExpressionsData edata = face.QueryExpressions();
    // No expression data this frame: nothing to record.
    if (edata == null)
    {
#if DEBUG
        Console.WriteLine("no expression this frame");
#endif
        return;
    }
#if DEBUG
    else
    {
        Console.WriteLine("catch expression");
    }
#endif
    // Query each of the 22 expression channels (cast walks the SDK's
    // FaceExpression enum in order) and record its intensity.
    for (int i = 0; i < 22; i++)
    {
        PXCMFaceData.ExpressionsData.FaceExpressionResult score;
        edata.QueryExpression((PXCMFaceData.ExpressionsData.FaceExpression)i, out score);
        fe.facialExpressionIndensity[i] = score.intensity;
    }
    //dbhelper.saveEntity(fe);
}
/// <summary>
/// Makes <paramref name="expression"/> the current expression and applies its
/// sprite and local transform (position/rotation/scale) to each facial part
/// of the preview rig.
/// </summary>
/// <param name="expression">Expression asset to preview.</param>
public void PreviewFacialExpression(FacialExpression expression)
{
    Debug.Log(expression.name);
    current = expression;

    // Upper eyelid
    Eye_Upper.sprite = current.Eye_Upper;
    Eye_Upper.transform.localPosition = current.Eye_Upper_Position;
    Eye_Upper.transform.localRotation = current.Eye_Upper_Rotation;
    Eye_Upper.transform.localScale = current.Eye_Upper_Scale;

    // Lower eyelid
    Eye_Under.sprite = current.Eye_Under;
    Eye_Under.transform.localPosition = current.Eye_Under_Position;
    Eye_Under.transform.localRotation = current.Eye_Under_Rotation;
    Eye_Under.transform.localScale = current.Eye_Under_Scale;

    // Upper pupil
    Pupil_Upper.sprite = current.Pupil_Upper;
    Pupil_Upper.transform.localPosition = current.Pupil_Upper_Position;
    Pupil_Upper.transform.localRotation = current.Pupil_Upper_Rotation;
    Pupil_Upper.transform.localScale = current.Pupil_Upper_Scale;

    // Lower pupil
    Pupil_Under.sprite = current.Pupil_Under;
    Pupil_Under.transform.localPosition = current.Pupil_Under_Position;
    Pupil_Under.transform.localRotation = current.Pupil_Under_Rotation;
    Pupil_Under.transform.localScale = current.Pupil_Under_Scale;

    // Mouth
    Mouth.sprite = current.Mouth;
    Mouth.transform.localPosition = current.Mouth_Position;
    Mouth.transform.localRotation = current.Mouth_Rotation;
    Mouth.transform.localScale = current.Mouth_Scale;
}
/// <summary>
/// Raises the <c>FacialExpresssionChanged</c> event with the previous and new
/// expression. No-op when there are no subscribers.
/// </summary>
/// <param name="old">Expression before the change.</param>
/// <param name="new">Expression after the change.</param>
protected virtual void OnFacialExpresssionChanged(FacialExpression old, FacialExpression @new)
{
    // Snapshot the delegate so a concurrent unsubscribe can't null it between
    // the check and the call.
    var subscribers = FacialExpresssionChanged;
    if (subscribers == null)
    {
        return;
    }
    subscribers(this, new FacialExpressionEventArgs(old, @new));
}
/// <summary>
/// Builds a plain dialogue line; normal lines carry no fixed speaker, so
/// <c>Speaker</c> is always <c>Character.NONE</c>.
/// </summary>
/// <param name="isSpeaking">Whether character one is the active speaker.</param>
/// <param name="text">Line of dialogue to display.</param>
/// <param name="exp">Facial expression shown with the line.</param>
/// <param name="nameOverride">Optional display name replacing the character's own.</param>
public NormalDialogue(bool isSpeaking, string text, FacialExpression exp, string nameOverride = null)
{
    Speaker = Character.NONE;
    IsSpeaking = isSpeaking;
    Expression = exp;
    Text = text;
    NameOverride = nameOverride;
}
/// <summary>
/// "Add expression" button handler: clones the selected expression (or makes a
/// blank one when nothing is selected), assigns it the next free key, appends
/// it to the configuration, refreshes the list and selects the new entry.
/// </summary>
private void BtnExprAdd_Click(object sender, EventArgs e)
{
    var selected = GetSelectedFacialExpression();
    var added = selected == null ? new FacialExpression() : selected.Clone();

    // Collect every key already in use.
    var usedKeys = new HashSet<int>();
    foreach (var existing in _config.Expressions)
    {
        usedKeys.Add(existing.Key);
    }

    // Start just after the source key (or at 0) and walk to the first free slot.
    int candidate = selected == null ? 0 : selected.Key + 1;
    while (usedKeys.Contains(candidate))
    {
        candidate++;
    }
    added.Key = candidate;

    _config.Expressions.Add(added);
    UpdateExpressionList();
    lvExpressions.Items[lvExpressions.Items.Count - 1].Selected = true;
}
/// <summary>
/// Constructor; stores the owning main window and creates empty
/// expression / landmark containers for the tracking session.
/// </summary>
/// <param name="form">Main window that owns this tracking module.</param>
public TrackModule(MainForm form)
{
    m_form = form;
    Expression = new FacialExpression();
    Landmarks = new FacialLandmarks();
}
/// <summary>
/// Gets the realtime facial expression of the first detected face in the
/// current frame.
/// </summary>
/// <returns>
/// A <see cref="FacialExpression"/> holding the intensity of each of the 22
/// RealSense expression channels, or null when no face or no expression data
/// is available this frame.
/// </returns>
public FacialExpression GetExpression()
{
    int nFace = faceData.QueryNumberOfDetectedFaces();
    if (nFace == 0)
    {
#if DEBUG
        //Console.WriteLine("No face in current frame");
#endif
        return(null);
    }
    // Only the first detected face is sampled.
    this.face = this.faceData.QueryFaceByIndex(0);
    if (face == null)
    {
        return(null);
    }
    FacialExpression fe = new FacialExpression();
    PXCMFaceData.ExpressionsData edata = face.QueryExpressions();
    if (edata == null)
    {
#if DEBUG
        //Console.WriteLine("no expression this frame");
#endif
        return(null);
    }
#if DEBUG
    else
    {
        //Console.WriteLine("catch expression");
    }
#endif
    // Query each of the 22 expression channels (cast walks the SDK's
    // FaceExpression enum in order) and record its intensity.
    for (int i = 0; i < 22; i++)
    {
        PXCMFaceData.ExpressionsData.FaceExpressionResult score;
        edata.QueryExpression((PXCMFaceData.ExpressionsData.FaceExpression)i, out score);
        fe.facialExpressionIndensity[i] = score.intensity;
    }
    return(fe);
}
/// <summary>
/// Raises the <c>FacialExpresssionChanged</c> event, passing the previous and
/// new expression to subscribers; does nothing when nobody is listening.
/// </summary>
/// <param name="old">Expression before the change.</param>
/// <param name="new">Expression after the change.</param>
protected virtual void OnFacialExpresssionChanged(FacialExpression old, FacialExpression @new)
{
    // Local copy guards against the event being cleared between check and call.
    var listeners = FacialExpresssionChanged;
    if (listeners != null)
    {
        listeners.Invoke(this, new FacialExpressionEventArgs(old, @new));
    }
}
/// <summary>
/// Event payload carrying the facial expression before and after a change.
/// </summary>
/// <param name="oldFacialExpression">Expression that was active before the change.</param>
/// <param name="newFacialExpression">Expression that is active after the change.</param>
public FacialExpressionEventArgs(FacialExpression oldFacialExpression, FacialExpression newFacialExpression)
{
    NewFacialExpression = newFacialExpression;
    OldFacialExpression = oldFacialExpression;
}
/// <summary>
/// Returns the stored blend-shape value list for the given facial pose, used
/// by the blend-shapes mapper editor to warn when a pose already holds values
/// before saving over them. (Name typo "GetBlenShapeValues" kept: callers
/// depend on it.)
/// </summary>
/// <param name="facialPoseToEdit">Pose whose stored values are requested.</param>
/// <returns>The backing list for that pose, or null for an unknown pose.</returns>
public List<float> GetBlenShapeValues(FacialExpression facialPoseToEdit)
{
    switch (facialPoseToEdit)
    {
        case FacialExpression.ANGER:      return m_angerBlendShapeValues;
        case FacialExpression.DISGUST:    return m_disgustBlendShapeValues;
        case FacialExpression.FEAR:       return m_fearBlendShapeValues;
        case FacialExpression.HAPPINESS:  return m_happinessBlendShapeValues;
        case FacialExpression.SADNESS:    return m_sadnessBlendShapeValues;
        case FacialExpression.SURPRISE:   return m_surpriseBlendShapeValues;
        case FacialExpression.BLINK:      return m_blinkBlendShapeValues;
        case FacialExpression.GAZEUP:     return m_gazeUpBlendShapeValues;
        case FacialExpression.GAZEDOWN:   return m_gazeDownBlendShapeValues;
        case FacialExpression.VISEME_sil: return m_viseme_sil_BlendShapeValues;
        case FacialExpression.VISEME_PP:  return m_viseme_PP_BlendShapeValues;
        case FacialExpression.VISEME_FF:  return m_viseme_FF_BlendShapeValues;
        case FacialExpression.VISEME_TH:  return m_viseme_TH_BlendShapeValues;
        case FacialExpression.VISEME_DD:  return m_viseme_DD_BlendShapeValues;
        case FacialExpression.VISEME_kk:  return m_viseme_kk_BlendShapeValues;
        case FacialExpression.VISEME_CH:  return m_viseme_CH_BlendShapeValues;
        case FacialExpression.VISEME_SS:  return m_viseme_SS_BlendShapeValues;
        case FacialExpression.VISEME_nn:  return m_viseme_nn_BlendShapeValues;
        case FacialExpression.VISEME_RR:  return m_viseme_RR_BlendShapeValues;
        case FacialExpression.VISEME_aa:  return m_viseme_aa_BlendShapeValues;
        case FacialExpression.VISEME_E:   return m_viseme_E_BlendShapeValues;
        case FacialExpression.VISEME_I:   return m_viseme_I_BlendShapeValues;
        case FacialExpression.VISEME_O:   return m_viseme_O_BlendShapeValues;
        case FacialExpression.VISEME_U:   return m_viseme_U_BlendShapeValues;
        default:                          return null;
    }
}
/// <summary>
/// Saves the captured skinned-mesh blend-shape values into the backing list
/// of the given facial pose (called from the blend-shapes mapper editor).
/// Unknown poses are silently ignored.
/// </summary>
/// <param name="facialPoseToEdit">Pose whose stored values are replaced.</param>
/// <param name="skinnedMeshBlendShapeValues">Values read from the skinned mesh.</param>
public void SetBlendShapeValues(FacialExpression facialPoseToEdit, List<float> skinnedMeshBlendShapeValues)
{
    switch (facialPoseToEdit)
    {
        case FacialExpression.ANGER:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_angerBlendShapeValues);
            break;
        case FacialExpression.DISGUST:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_disgustBlendShapeValues);
            break;
        case FacialExpression.FEAR:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_fearBlendShapeValues);
            break;
        case FacialExpression.HAPPINESS:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_happinessBlendShapeValues);
            break;
        case FacialExpression.SADNESS:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_sadnessBlendShapeValues);
            break;
        case FacialExpression.SURPRISE:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_surpriseBlendShapeValues);
            break;
        case FacialExpression.BLINK:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_blinkBlendShapeValues);
            break;
        case FacialExpression.GAZEUP:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_gazeUpBlendShapeValues);
            break;
        case FacialExpression.GAZEDOWN:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_gazeDownBlendShapeValues);
            break;
        case FacialExpression.VISEME_sil:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_viseme_sil_BlendShapeValues);
            break;
        case FacialExpression.VISEME_PP:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_viseme_PP_BlendShapeValues);
            break;
        case FacialExpression.VISEME_FF:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_viseme_FF_BlendShapeValues);
            break;
        case FacialExpression.VISEME_TH:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_viseme_TH_BlendShapeValues);
            break;
        case FacialExpression.VISEME_DD:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_viseme_DD_BlendShapeValues);
            break;
        case FacialExpression.VISEME_kk:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_viseme_kk_BlendShapeValues);
            break;
        case FacialExpression.VISEME_CH:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_viseme_CH_BlendShapeValues);
            break;
        case FacialExpression.VISEME_SS:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_viseme_SS_BlendShapeValues);
            break;
        case FacialExpression.VISEME_nn:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_viseme_nn_BlendShapeValues);
            break;
        case FacialExpression.VISEME_RR:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_viseme_RR_BlendShapeValues);
            break;
        case FacialExpression.VISEME_aa:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_viseme_aa_BlendShapeValues);
            break;
        case FacialExpression.VISEME_E:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_viseme_E_BlendShapeValues);
            break;
        case FacialExpression.VISEME_I:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_viseme_I_BlendShapeValues);
            break;
        case FacialExpression.VISEME_O:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_viseme_O_BlendShapeValues);
            break;
        case FacialExpression.VISEME_U:
            AddBlendShapeValues(skinnedMeshBlendShapeValues, m_viseme_U_BlendShapeValues);
            break;
        default:
            break;
    }
}
/// <summary>
/// Persists a facial expression. Not implemented yet — always throws.
/// </summary>
/// <param name="facialExpression">Expression that would be saved.</param>
/// <returns>Never returns; see exception.</returns>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public bool saveFacialExpression(FacialExpression facialExpression)
{
    throw new NotImplementedException();
}
/// <summary>
/// Wires the controller to the skinned mesh it drives and the facial
/// expression data to apply to it.
/// </summary>
/// <param name="mesh">Renderer whose blend shapes are controlled.</param>
/// <param name="exp">Expression data applied to the mesh.</param>
public FacialController(SkinnedMeshRenderer mesh, FacialExpression exp)
{
    _facial = exp;
    _mesh = mesh;
}
/// <summary>
/// Replays a recorded capture file frame by frame and, once per second of
/// stream time, runs the face landmark/expression algorithms and appends one
/// text line per second to the <c>buffer</c> field ("&lt;second&gt; &lt;landmarks&gt;
/// &lt;expression&gt;\n", blanks for missing data, empty lines for skipped seconds).
/// NOTE(review): the <c>continue</c> on a color-less sample skips
/// <c>manager.ReleaseFrame()</c> — confirm this doesn't leak frames. Also,
/// repeated string concatenation on <c>buffer</c> is O(n^2); a StringBuilder
/// would be preferable if recordings are long.
/// </summary>
/// <param name="file">Path of the capture file to process.</param>
private void DoStreaming_SaveData_Do(string file)
{
    manager.captureManager.SetRealtime(false);
    manager.captureManager.SetFileName(file, true);
    //manager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, 640, 480, 30);
    //manager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, 320, 240, 30);
    Console.WriteLine("handling file:\t" + file);
    Console.WriteLine(manager.captureManager.QueryNumberOfFrames().ToString());
    FacialLandmarks fl;
    FacialExpression fe;
    while (!m_stopped)
    {
        if (manager.AcquireFrame(true).IsError())
        {
            break;
        }
        //Console.WriteLine(sps.ToString()+"\t"+ manager.captureManager.QueryFrameIndex().ToString());
        this.sample = manager.QuerySample();
        //if (sample == null) { manager.ReleaseFrame(); continue; }
        /******************************************************
        *
        * 1. Get the current frame.
        * 2. Check whether the frame has depth or color data:
        *    2.1 If both are present, check whether time advanced to the next second:
        *        2.1.1 If it's the next second, process the current frame.
        *        2.1.2 Otherwise, skip this iteration.
        *    2.2 If neither is present, skip this iteration.
        *
        ****************************************************/
        if (sample.color != null)
        {
            this.m_timestamp = sample.color.timeStamp;
        }
        else
        {
            continue;
        }
        //if (sample.depth != null)
        //    this.m_timestamp = (sample.depth.timeStamp);
        //else if (sample.color != null)
        //    this.m_timestamp = sample.color.timeStamp;
        //else
        //    continue;
        Console.WriteLine(m_timestamp.ToString());
        if (sample.depth != null || sample.color != null)
        {
            //if (m_timestamp == m_timestamp_last)
            //{
            //    break;
            //}
            m_timestamp_last = m_timestamp;
        }
        // Only run the detection algorithms when playback has advanced to the
        // next whole second (timestamps are in 100 ns units).
        m_timestamp_sec = m_timestamp / 10000000;
        Console.WriteLine("Curframe time :" + m_timestamp_sec);
        if (m_timestamp_sec_init == -1)
        {
            m_timestamp_sec_init = m_timestamp_sec;
        }
        if (m_timestamp_sec_last == -1)
        {
            m_timestamp_sec_last = m_timestamp_sec - 1;
        }
        long interval = m_timestamp_sec - m_timestamp_sec_last;
        //if(interval==0)
        //{
        //    break;
        //}
        if (interval > 0)
        {
            // Emit empty lines for any whole seconds with no processed frame.
            if (interval > 1)
            {
                for (int i = 1; i < interval; i++)
                {
                    buffer += (m_timestamp_sec_last + i - m_timestamp_sec_init).ToString() + " ";
                    buffer += "\n";
                    Console.WriteLine((m_timestamp_sec_last + i - m_timestamp_sec_init).ToString());
                }
            }
            buffer += (m_timestamp_sec - m_timestamp_sec_init).ToString() + " ";
            // Run the native algorithms and buffer the realtime results.
            faceData.Update();
            fl = this.GetFaceLandmarks();
            fe = this.GetExpression();
            if (fl != null)
            {
                buffer += fl.ToString();
            }
            else
            {
                buffer += FacialLandmarks.generateBlank();
            }
            if (fe != null)
            {
                buffer += fe.ToString();
            }
            else
            {
                buffer += FacialExpression.generateBlank();
            }
            buffer += "\n";
            m_timestamp_sec_last = m_timestamp_sec;
            //Console.WriteLine((m_timestamp_sec- m_timestamp_sec_init).ToString());
        }
        //if(m_timestamp_sec>m_timestamp_sec_last)
        //{
        //    // Run the native algorithms and buffer the realtime results.
        //    faceData.Update();
        //    fl = this.GetFaceLandmarks();
        //    fe = this.GetExpression();
        //    if (fl != null)
        //        buffer += fl.ToString();
        //    if (fe != null)
        //        buffer += fe.ToString();
        //    buffer += "\n";
        //    m_timestamp_sec_last = m_timestamp_sec;
        //    Console.WriteLine(m_timestamp_sec.ToString());
        //}
        // Render the video stream when display is enabled.
        if (m_display)
        {
            this.DoRender();
        }
        manager.ReleaseFrame();
    }
}
/// <summary>
/// Resolves the portrait sprite for a character with a given facial
/// expression. Characters without a sprite set fall back to the empty sprite.
/// </summary>
/// <param name="c">Character whose portrait is requested.</param>
/// <param name="fe">Facial expression variant to select.</param>
/// <returns>The matching sprite, or the empty sprite for unknown characters.</returns>
/// <exception cref="ArgumentException">The expression has no sprite mapping.</exception>
private Sprite GetSprite(Character c, FacialExpression fe)
{
    SO.DialogueSpritesCharacter? charac;
    switch (c)
    {
        case Character.MC:       charac = _sprites.MC;      break;
        case Character.ETAHNIA:  charac = _sprites.Etahnia; break;
        case Character.ANAEL:    charac = _sprites.Anael;   break;
        case Character.SALENAE:  charac = _sprites.Salenae; break;
        case Character.NACHI:    charac = _sprites.Nachi;   break;
        case Character.UNAR:     charac = _sprites.Unar;    break;
        case Character.EXPL_GOD: charac = _sprites.ExplGod; break;
        case Character.ERANEL:   charac = _sprites.Eranel;  break;
        case Character.HURIANE:  charac = _sprites.Huriane; break;
        case Character.YUMENA:   charac = _sprites.Yumena;  break;
        default:                 charac = null;             break;
    }
    if (charac == null)
    {
        return _sprites.Empty;
    }
    switch (fe)
    {
        case FacialExpression.NEUTRAL:
            return charac.Value.Neutral;
        case FacialExpression.SMILE:
            return charac.Value.Smile;
        case FacialExpression.MAD:
            return charac.Value.Mad;
        default:
            throw new ArgumentException("Invalid expression " + fe.ToString());
    }
}
/// <summary>
/// Plays back the file in <c>PlaybackFile</c>, initializes the face module
/// (when <c>m_algoOption</c> is Face), and once per second of stream time runs
/// the landmark/expression algorithms, appending one text line per second to
/// the <c>buffer</c> field; stops after the recorded frame count is exceeded.
/// NOTE(review): <c>faceData.Dispose()</c> after the loop will throw if
/// <c>m_algoOption</c> was not Face and <c>faceData</c> was never created —
/// confirm other algorithm options are impossible here. The <c>continue</c>
/// on an empty sample also skips <c>manager.ReleaseFrame()</c>.
/// </summary>
private void DoStreaming_SaveData()
{
    this.m_stopped = false;
    InitStreamState();
    // Configure playback mode (read frames from file, not realtime).
    manager.captureManager.SetFileName(this.PlaybackFile, false);
    manager.captureManager.SetRealtime(false);
    int nOf = manager.captureManager.QueryNumberOfFrames();
    switch (m_algoOption)
    {
        // Face-tracking algorithm setup.
        case AlgoOption.Face:
            this.faceModule = manager.QueryFace();
            if (faceModule == null)
            {
                MessageBox.Show("QueryFace failed");
                return;
            }
            InitFaceState();
            this.faceData = this.faceModule.CreateOutput();
            if (faceData == null)
            {
                MessageBox.Show("CreateOutput failed");
                return;
            }
            break;
    }
    if (manager.Init() < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
#if DEBUG
        System.Windows.Forms.MessageBox.Show("init failed");
#endif
        return;
    }
    FacialLandmarks fl;
    FacialExpression fe;
    while (!m_stopped)
    {
        if (manager.AcquireFrame(false).IsError())
        {
            break;
        }
        this.sample = manager.QuerySample();
        //if (sample == null) { manager.ReleaseFrame(); continue; }
        // Prefer the depth stream's timestamp; fall back to color; otherwise skip.
        if (sample.depth != null)
        {
            this.m_timestamp = (sample.depth.timeStamp);
        }
        else if (sample.color != null)
        {
            this.m_timestamp = sample.color.timeStamp;
        }
        else
        {
            continue;
        }
        // Only run the detection algorithms when playback has advanced to the
        // next whole second (timestamps are in 100 ns units).
        m_timestamp_sec = m_timestamp / 10000000;
        // Stop once we've covered as many seconds as recorded frames.
        if (m_timestamp_sec_init != -1 && m_timestamp_sec - m_timestamp_sec_init >= nOf)
        {
            break;
        }
        Console.WriteLine("curframe:" + m_timestamp_sec);
        if (m_timestamp_sec_init == -1)
        {
            m_timestamp_sec_init = m_timestamp_sec;
        }
        if (m_timestamp_sec_last == -1)
        {
            m_timestamp_sec_last = m_timestamp_sec - 1;
        }
        long interval = m_timestamp_sec - m_timestamp_sec_last;
        if (interval > 0)
        {
            // Emit empty lines for any whole seconds with no processed frame.
            if (interval > 1)
            {
                for (int i = 1; i < interval; i++)
                {
                    buffer += (m_timestamp_sec_last + i - m_timestamp_sec_init).ToString() + " ";
                    buffer += "\n";
                    Console.WriteLine((m_timestamp_sec_last + i - m_timestamp_sec_init).ToString());
                }
            }
            buffer += (m_timestamp_sec - m_timestamp_sec_init).ToString() + " ";
            // Run the native algorithms and buffer the realtime results.
            faceData.Update();
            fl = this.GetFaceLandmarks();
            fe = this.GetExpression();
            if (fl != null)
            {
                buffer += fl.ToString();
            }
            else
            {
                buffer += FacialLandmarks.generateBlank();
            }
            if (fe != null)
            {
                buffer += fe.ToString();
            }
            else
            {
                buffer += FacialExpression.generateBlank();
            }
            buffer += "\n";
            m_timestamp_sec_last = m_timestamp_sec;
            //Console.WriteLine((m_timestamp_sec- m_timestamp_sec_init).ToString());
        }
        //if(m_timestamp_sec>m_timestamp_sec_last)
        //{
        //    // Run the native algorithms and buffer the realtime results.
        //    faceData.Update();
        //    fl = this.GetFaceLandmarks();
        //    fe = this.GetExpression();
        //    if (fl != null)
        //        buffer += fl.ToString();
        //    if (fe != null)
        //        buffer += fe.ToString();
        //    buffer += "\n";
        //    m_timestamp_sec_last = m_timestamp_sec;
        //    Console.WriteLine(m_timestamp_sec.ToString());
        //}
        // Render the video stream when display is enabled.
        if (m_display)
        {
            this.DoRender();
        }
        manager.ReleaseFrame();
    }
    faceData.Dispose();
    manager.Dispose();
    Console.WriteLine("done!!!");
}
/// <summary>
/// Wires the controller to the sprite renderer it drives, the facial
/// expression data to apply, and the sprite used for the neutral pose.
/// </summary>
/// <param name="mesh">Renderer whose sprite is controlled.</param>
/// <param name="exp">Expression data applied to the renderer.</param>
/// <param name="neutralSprite">Sprite shown when the face is neutral.</param>
public FacialController(SpriteRenderer mesh, FacialExpression exp, Sprite neutralSprite)
{
    _defaultSprite = neutralSprite;
    _facial = exp;
    _mesh = mesh;
}