// Drives audio playback and, when requested, per-viseme facial animation.
// Walks _cachedVisemes in order against a local clock, holding each viseme's
// expression at full strength while the clock is inside its window
// [Time, Time + Duration], then releasing it and advancing to the next.
// NOTE(review): `time` is accumulated from Time.deltaTime rather than read
// from audio.time, so it can drift from the real audio position over a long
// clip — confirm this is acceptable for lip-sync accuracy.
private IEnumerator PlayCoroutine(bool useVisemes) {
    int index = 0;      // index of the viseme currently being tracked
    var audio = audioPlayer;
    float time = 0;     // local playback clock in seconds, advanced once per frame
    audio.loop = false;
    audio.Play();
    if (useVisemes) {
        BuildExpressionCache();
        while (index < _cachedVisemes.Count) {
            // While paused, the coroutine is suspended at this yield, so the
            // local clock below does not advance until playback resumes.
            if (_isPaused) {
                yield return(new WaitWhile(() => _isPaused));
            }
            var currentViseme = _cachedVisemes[index];
            float startTime = currentViseme.Time;
            float endTime = currentViseme.Time + currentViseme.Duration;
            if (time >= startTime) {
                // Inside the viseme window: fetch its expression once and hold
                // it fully on. _currentExpression may have been cleared by
                // Pause(), in which case it is re-fetched here on resume.
                if (_currentExpression == null) {
                    //Debug.Log(currentViseme.Viseme);
                    _currentExpression = _expressionCache[currentViseme.Viseme];
                }
                // Earlier ease-in/ease-out blend implementation, kept for reference:
                //const float PEAK = 0.25f;
                //float p = Mathf.Clamp01((audio.time - startTime) / (endTime - startTime));
                //float blendAmount = p >= PEAK ? 1f- BlendEase((p - PEAK)/ (1f-PEAK)) : BlendEase(p/PEAK);
                //_currentExpression.Amount = blendAmount;
                _currentExpression.TargetAmount = 1;
            }
            yield return(null);
            time += Time.deltaTime;
            // The viseme's window has ended: release its expression and move on.
            // At most one viseme is consumed per frame.
            if (endTime < time) {
                if (_currentExpression != null) {
                    _currentExpression.TargetAmount = 0;
                }
                _currentExpression = null;
                index++;
            }
        }
    }
    // Let any audio remaining past the last viseme finish, then tear down.
    yield return(new WaitWhile(() => audio.isPlaying));
    StopCurrentPlay();
}
/// <summary>
/// Tears down the current playback session: fades out the active viseme
/// expression, drops the coroutine handle, stops the audio source, and
/// clears the cached viseme list.
/// </summary>
private void StopCurrentPlay() {
    var expression = _currentExpression;
    if (expression != null) {
        expression.TargetAmount = 0;
    }
    _currentExpression = null;
    _playCoroutine = null;
    audioPlayer.Stop();
    _cachedVisemes.Clear();
}
/// <summary>
/// Activates the expression controller for <paramref name="emotion"/> at the
/// given strength, fading out the previously active emotion if it differs.
/// </summary>
/// <param name="emotion">Name of the emotion to look up on the face controller.</param>
/// <param name="amount">Target blend strength for the new expression.</param>
public void SetExpression(string emotion, float amount) {
    // NOTE(review): assumes GetExpressionController never returns null for a
    // valid emotion name — confirm, otherwise the final assignment would throw.
    var next = _faceController.GetExpressionController(emotion);
    var previous = _currentEmotion;
    if (previous != null && previous != next) {
        previous.TargetAmount = 0;
    }
    _currentEmotion = next;
    _currentEmotion.TargetAmount = amount;
}
/// <summary>
/// Pauses the lip-sync audio and releases the active viseme expression so the
/// face relaxes while paused; PlayCoroutine re-acquires the expression on
/// resume. Calling Pause while already paused is a no-op.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when no lip-sync data is currently playing.
/// </exception>
public void Pause() {
    if (!IsPlaying) {
        // More specific than the bare Exception thrown previously; still
        // caught by any existing caller that catches Exception.
        throw new InvalidOperationException("No lip sync data currently playing.");
    }
    if (_isPaused) {
        return; // already paused — idempotent
    }
    _isPaused = true;
    audioPlayer.Pause();
    // The original nested a second identical null check inside this one;
    // a single check suffices and the behavior is unchanged.
    if (_currentExpression != null) {
        _currentExpression.TargetAmount = 0;
        _currentExpression = null;
    }
}