/// <summary>
/// Awake this instance. Caches the AudioSource and lazily creates the phoneme context.
/// </summary>
void Awake()
{
    // Grab the audio source that will pump samples into the lip sync engine.
    if (!audioSource)
    {
        audioSource = GetComponent<AudioSource>();
    }

    // NOTE(review): locking on 'this' matches the other methods in this file,
    // which synchronize on the same monitor; kept for consistency.
    lock (this)
    {
        if (context != 0)
        {
            return; // context already exists
        }
        if (OVRLipSync.CreateContext(ref context, provider) != OVRLipSync.Result.Success)
        {
            Debug.Log("OVRPhonemeContext.Start ERROR: Could not create Phoneme context.");
        }
    }
}
/// <summary>
/// Pass an audio sample to the lip sync module for computation
/// </summary>
/// <param name="data">Data.</param>
/// <param name="channels">Channels.</param>
public void ProcessAudioSamples(float[] data, int channels)
{
    // Bail out unless the runtime is initialized and an audio source is attached.
    if ((OVRLipSync.IsInitialized() != OVRLipSync.Result.Success) || audioSource == null)
    {
        return;
    }

    // Apply input gain before analysis.
    for (int n = 0; n < data.Length; ++n)
    {
        data[n] *= gain;
    }

    // Feed the buffer into the phoneme context while holding the shared monitor.
    lock (this)
    {
        if (Context != 0)
        {
            OVRLipSync.ProcessFrame(Context, data, this.Frame);
        }
    }

    // Silence the output unless loopback was requested (avoids mic/speaker feedback).
    // Multiplying by zero (rather than clearing) preserves the original per-sample behavior.
    if (!audioLoopback)
    {
        for (int n = 0; n < data.Length; ++n)
        {
            data[n] *= 0.0f;
        }
    }
}
// * * * * * * * * * * * * *
// MonoBehaviour overrides

/// <summary>
/// Awake this instance. Enforces the scene singleton and initializes the speech library.
/// </summary>
void Awake()
{
    // We can only have one instance of OVRLipSync in a scene.
    if (sInstance != null)
    {
        Debug.LogWarning(System.String.Format("OVRLipSync Awake: Only one instance of OVRPLipSync can exist in the scene."));
        return;
    }
    sInstance = this;

    if (IsInitialized() != Result.Success)
    {
        sInitialized = Initialize();
        if (sInitialized != Result.Success)
        {
            Debug.LogWarning(System.String.Format("OvrLipSync Awake: Failed to init Speech Rec library"));
        }
    }

    // Important: Use the touchpad mechanism for input, call Create on the OVRTouchpad helper class
    OVRTouchpad.Create();
}
/// <summary>
/// Drains newly captured microphone samples from the looping clip and feeds
/// them to the lip sync context in fixed-size chunks.
/// </summary>
void Update()
{
    // Current write position of the (default) microphone inside the looping clip.
    var position = Microphone.GetPosition(null);
    // Nothing to do if the mic reports an error (< 0) or no new data has arrived.
    if (position < 0 || head == position)
    {
        return;
    }
    // Snapshot the whole ring buffer; 'head' tracks how far it has been consumed.
    clip.GetData(microphoneBuffer, 0);
    // Consume one full chunk at a time while more than processBuffer.Length
    // unread samples remain (GetDataLength computes the wrap-around distance).
    while (GetDataLength(microphoneBuffer.Length, head, position) > processBuffer.Length)
    {
        var remain = microphoneBuffer.Length - head;
        if (remain < processBuffer.Length)
        {
            // Chunk wraps past the buffer end: copy the tail, then the head.
            Array.Copy(microphoneBuffer, head, processBuffer, 0, remain);
            Array.Copy(microphoneBuffer, 0, processBuffer, remain, processBuffer.Length - remain);
        }
        else
        {
            Array.Copy(microphoneBuffer, head, processBuffer, 0, processBuffer.Length);
        }
        OVRLipSync.ProcessFrame(Context, processBuffer, Frame);
        // Advance the read cursor, wrapping when it passes the buffer end.
        // NOTE(review): '>' leaves head == Length un-wrapped for one iteration;
        // the copy math above still handles that case (remain == 0) — confirm intended.
        head += processBuffer.Length;
        if (head > microphoneBuffer.Length)
        {
            head -= microphoneBuffer.Length;
        }
    }
}
/// <summary>
/// Resets the context.
/// </summary>
/// <returns>error code</returns>
public OVRLipSync.Result ResetContext()
{
    // Clear the cached frame (visemes back to silence) before resetting the native side.
    frame.Reset();
    return OVRLipSync.ResetContext(context);
}
/// <summary>
/// Start this instance.
/// Note: make sure to always have a Start function for classes that have editor scripts.
/// </summary>
void Start()
{
    // Lazily create the context that the audio buffer will be fed into.
    lock (this)
    {
        if (context == 0 &&
            OVRLipSync.CreateContext(ref context, provider) != OVRLipSync.ovrLipSyncSuccess)
        {
            Debug.Log("OVRPhonemeContext.Start ERROR: Could not create Phoneme context.");
            return;
        }
    }

    // Subscribe to touchpad events so input can drive this component.
    OVRMessenger.AddListener<OVRTouchpad.TouchEvent>("Touchpad", LocalTouchEventCallback);
}
/// <summary>
/// Raises the audio filter read event. Applies gain, runs lip sync analysis on the
/// interleaved buffer, and optionally mutes the output.
/// </summary>
/// <param name="data">Data.</param>
/// <param name="channels">Channels.</param>
void OnAudioFilterRead(float[] data, int channels)
{
    // Do not spatialize if we are not initialized, or if there is no
    // audio source attached to game object.
    // BUGFIX: every statement terminator in this method was written as ':' instead
    // of ';' (e.g. "return:", "for (int i = 0: ...)"), which does not compile.
    if ((OVRLipSync.IsInitialized() != OVRLipSync.Result.Success) || audioSource == null)
    {
        return;
    }

    // Increase the gain of the input to get a better signal input.
    for (int i = 0; i < data.Length; ++i)
    {
        data[i] = data[i] * gain;
    }

    // Send data into Phoneme context for processing (if context is not 0).
    lock (this)
    {
        if (Context != 0)
        {
            OVRLipSync.Flags flags = 0;

            // Set flags to feed into process.
            if (delayCompensate == true)
            {
                flags |= OVRLipSync.Flags.DelayCompensateAudio;
            }

            OVRLipSync.Frame frame = this.Frame;
            OVRLipSync.ProcessFrameInterleaved(Context, data, flags, frame);
        }
    }

    // Turn off output (so that we don't get feedback from mics too close to speakers).
    if (audioMute == true)
    {
        for (int i = 0; i < data.Length; ++i)
        {
            data[i] = data[i] * 0.0f;
        }
    }
}
/// <summary>
/// Raises the audio filter read event.
/// </summary>
/// <param name="data">Data.</param>
/// <param name="channels">Channels.</param>
public void OnAudioFilter(float[] data, int channels)
{
    // Skip processing when the runtime is down or no audio source is attached.
    if ((OVRLipSync.IsInitialized() != OVRLipSync.Result.Success) || audioSource == null)
    {
        return;
    }

    // Boost the incoming signal for better recognition.
    for (int idx = 0; idx < data.Length; ++idx)
    {
        data[idx] *= gain;
    }

    // Hand the interleaved buffer to the phoneme context under the shared lock.
    lock (this)
    {
        if (Context != 0)
        {
            // Optionally ask the engine to compensate for audio latency.
            var flags = delayCompensate
                ? OVRLipSync.Flags.DelayCompensateAudio
                : OVRLipSync.Flags.None;
            OVRLipSync.ProcessFrameInterleaved(Context, data, flags, Frame);
        }
    }

    // Output is always silenced here: this path is analysis-only, no pass-through.
    Array.Clear(data, 0, data.Length);
}
// Token: 0x06003A99 RID: 15001 RVA: 0x001277FC File Offset: 0x00125BFC
/// <summary>
/// Unity audio filter callback: copies the buffer into a pooled scratch array,
/// applies gain, runs lip sync analysis, then optionally mutes the output.
/// </summary>
private void OnAudioFilterRead(float[] data, int channels)
{
    if (OVRLipSync.IsInitialized() != 0 || this.audioSource == null)
    {
        return;
    }

    // Work on a pooled copy so the playback buffer keeps its original gain.
    float[] scratch = this.floatArrayPool.Get(data.Length);
    data.CopyTo(scratch, 0);
    for (int i = 0; i < scratch.Length; i++)
    {
        scratch[i] *= this.gain;
    }

    lock (this)
    {
        if (this.context != 0u)
        {
            // Optionally ask the engine to compensate for audio latency.
            OVRLipSync.ovrLipSyncFlag flags = this.delayCompensate
                ? OVRLipSync.ovrLipSyncFlag.DelayCompensateAudio
                : OVRLipSync.ovrLipSyncFlag.None;
            OVRLipSync.ProcessFrameInterleaved(this.context, scratch, flags, ref this.frame);
        }
    }
    this.floatArrayPool.Return(scratch);

    // Silence the output to avoid feedback from mics too close to speakers.
    if (this.audioMute)
    {
        for (int i = 0; i < data.Length; i++)
        {
            data[i] *= 0f;
        }
    }
}
// Token: 0x06003A8B RID: 14987 RVA: 0x001275C2 File Offset: 0x001259C2
/// <summary>
/// Creates a native lip sync context.
/// </summary>
/// <returns>The native result code, or -2201 when the library is not initialized.</returns>
public static int CreateContext(ref uint context, OVRLipSync.ovrLipSyncContextProvider provider)
{
    if (OVRLipSync.IsInitialized() != 0)
    {
        return -2201; // library not initialized: cannot create a context
    }
    return OVRLipSync.ovrLipSyncDll_CreateContext(ref context, provider);
}
// Token: 0x06003A90 RID: 14992 RVA: 0x0012765A File Offset: 0x00125A5A
/// <summary>
/// Forwards an interleaved audio buffer to the native DLL for viseme analysis.
/// </summary>
/// <returns>The native result code, or -2200 when the library is not initialized.</returns>
public static int ProcessFrameInterleaved(uint context, float[] audioBuffer, OVRLipSync.ovrLipSyncFlag flags, ref OVRLipSync.ovrLipSyncFrame frame)
{
    if (OVRLipSync.IsInitialized() != 0)
    {
        return -2200; // library not initialized
    }
    return OVRLipSync.ovrLipSyncDll_ProcessFrameInterleaved(
        context, audioBuffer, flags,
        ref frame.frameNumber, ref frame.frameDelay,
        frame.Visemes, frame.Visemes.Length);
}
// Token: 0x06003A8E RID: 14990 RVA: 0x0012760B File Offset: 0x00125A0B
/// <summary>
/// Forwards a control signal to the native lip sync context.
/// </summary>
/// <returns>The native result code, or -2200 when the library is not initialized.</returns>
public static int SendSignal(uint context, OVRLipSync.ovrLipSyncSignals signal, int arg1, int arg2)
{
    if (OVRLipSync.IsInitialized() != 0)
    {
        return -2200; // library not initialized
    }
    return OVRLipSync.ovrLipSyncDll_SendSignal(context, signal, arg1, arg2);
}
// Token: 0x06003A8D RID: 14989 RVA: 0x001275F3 File Offset: 0x001259F3
/// <summary>
/// Resets a native lip sync context to its initial state.
/// </summary>
/// <returns>The native result code, or -2200 when the library is not initialized.</returns>
public static int ResetContext(uint context)
{
    if (OVRLipSync.IsInitialized() != 0)
    {
        return -2200; // library not initialized
    }
    return OVRLipSync.ovrLipSyncDll_ResetContext(context);
}
// Token: 0x06003A9B RID: 15003 RVA: 0x00127984 File Offset: 0x00125D84
/// <summary>
/// Resets this instance's native context.
/// </summary>
/// <returns>The native result code, or -2200 when the library is not initialized.</returns>
public int ResetContext()
{
    if (OVRLipSync.IsInitialized() != 0)
    {
        return -2200; // library not initialized
    }
    return OVRLipSync.ResetContext(this.context);
}
// Token: 0x06003A9C RID: 15004 RVA: 0x001279A1 File Offset: 0x00125DA1
/// <summary>
/// Sends a control signal to this instance's native context.
/// </summary>
/// <returns>The native result code, or -2200 when the library is not initialized.</returns>
public int SendSignal(OVRLipSync.ovrLipSyncSignals signal, int arg1, int arg2)
{
    if (OVRLipSync.IsInitialized() != 0)
    {
        return -2200; // library not initialized
    }
    return OVRLipSync.SendSignal(this.context, signal, arg1, arg2);
}
// * * * * * * * * * * * * *
// MonoBehaviour overrides

/// <summary>
/// Enforces a single instance: the first component claims the slot,
/// any later duplicate destroys itself.
/// </summary>
private void Awake()
{
    if (Instance == null)
    {
        Instance = this;
    }
    else
    {
        Destroy(this);
    }
}
/// <summary>
/// Sends the signal.
/// </summary>
/// <returns>error code</returns>
/// <param name="signal">Signal.</param>
/// <param name="arg1">Arg1.</param>
/// <param name="arg2">Arg2.</param>
public int SendSignal(OVRLipSync.ovrLipSyncSignals signal, int arg1, int arg2)
{
    // Without an initialized library there is nothing to signal.
    if (OVRLipSync.IsInitialized() != OVRLipSync.ovrLipSyncSuccess)
    {
        return (int)OVRLipSync.ovrLipSyncError.Unknown;
    }
    return OVRLipSync.SendSignal(context, signal, arg1, arg2);
}
/// <summary>
/// Resets the context.
/// </summary>
/// <returns>error code</returns>
public int ResetContext()
{
    // Without an initialized library there is nothing to reset.
    if (OVRLipSync.IsInitialized() != OVRLipSync.ovrLipSyncSuccess)
    {
        return (int)OVRLipSync.ovrLipSyncError.Unknown;
    }
    return OVRLipSync.ResetContext(context);
}
// Token: 0x06003A98 RID: 15000 RVA: 0x001277A4 File Offset: 0x00125BA4
/// <summary>
/// Tears down the native phoneme context when the component is destroyed.
/// </summary>
private void OnDestroy()
{
    lock (this)
    {
        bool haveContext = this.context != 0u;
        if (haveContext && OVRLipSync.DestroyContext(this.context) != 0)
        {
            Debug.Log("OVRPhonemeContext.OnDestroy ERROR: Could not delete Phoneme context.");
        }
    }
}
/// <summary>
/// Per-frame microphone pump: watches for a stalled capture position (and restarts
/// the mic if needed), then drains new samples to the lip sync context in chunks.
/// </summary>
private void Update()
{
    if (!IsRecording)
    {
        return;
    }
    int position = Microphone.GetPosition(DeviceName);
    // If the read position never moves, recover the microphone. (Plugging or
    // unplugging a PS4 controller can silently stop the mic.)
    if (position == _prevPosition)
    {
        _positionNotMovedCount++;
        if (_positionNotMovedCount > PositionStopCountLimit)
        {
            _positionNotMovedCount = 0;
            RestartMicrophone();
            return;
        }
    }
    else
    {
        _prevPosition = position;
        _positionNotMovedCount = 0;
    }
    // Separate from the stall detection above: basic range / no-new-data check.
    if (position < 0 || _head == position)
    {
        return;
    }
    // Snapshot the ring buffer; '_head' tracks how far it has been consumed.
    _clip.GetData(_microphoneBuffer, 0);
    // Consume one fixed-size chunk at a time while more than _processBuffer.Length
    // unread samples remain (GetDataLength computes the wrap-around distance).
    while (GetDataLength(_microphoneBuffer.Length, _head, position) > _processBuffer.Length)
    {
        var remain = _microphoneBuffer.Length - _head;
        if (remain < _processBuffer.Length)
        {
            // Chunk wraps past the buffer end: copy the tail, then the head.
            Array.Copy(_microphoneBuffer, _head, _processBuffer, 0, remain);
            Array.Copy(_microphoneBuffer, 0, _processBuffer, remain, _processBuffer.Length - remain);
        }
        else
        {
            Array.Copy(_microphoneBuffer, _head, _processBuffer, 0, _processBuffer.Length);
        }
        OVRLipSync.ProcessFrame(Context, _processBuffer, Frame);
        // Advance the read cursor, wrapping when it passes the buffer end.
        // NOTE(review): '>' leaves _head == Length un-wrapped for one iteration;
        // the copy math above still handles that case (remain == 0) — confirm intended.
        _head += _processBuffer.Length;
        if (_head > _microphoneBuffer.Length)
        {
            _head -= _microphoneBuffer.Length;
        }
    }
}
/// <summary>
/// Sets a given viseme id blend weight to a given amount
/// </summary>
/// <param name="amount">Integer viseme amount</param>
public void SetLaughterBlend(int amount)
{
    OVRLipSync.Result result =
        OVRLipSync.SendSignal(context, OVRLipSync.Signals.LaughterAmount, amount, 0);
    if (result != OVRLipSync.Result.Success)
    {
        // Fixed spelling in the logged message ("occured" -> "occurred").
        Debug.LogError("OVRLipSyncContextBase.SetLaughterBlend: An unexpected" +
            " error occurred.");
    }
}
/// <summary>
/// Process F32 audio sample and pass it to the lip sync module for computation
/// </summary>
/// <param name="data">Data.</param>
/// <param name="channels">Channels.</param>
public void ProcessAudioSamples(float[] data, int channels)
{
    // Skip processing unless the runtime is up and an audio source is attached.
    if ((OVRLipSync.IsInitialized() != OVRLipSync.Result.Success) || audioSource == null)
    {
        return;
    }

    // Pipeline: gain/format preparation, analysis, then output shaping.
    PreprocessAudioSamples(data, channels);
    ProcessAudioSamplesRaw(data, channels);
    PostprocessAudioSamples(data, channels);
}
/// <summary>
/// Pushes a changed viseme smoothing amount to the context and runs per-frame updates.
/// </summary>
void Update()
{
    // Only resend smoothing when it actually changed since the last push.
    bool smoothingChanged = inited && lastSmoothing != smoothAmount;
    if (smoothingChanged)
    {
        OVRLipSync.SendSignal(context, OVRLipSync.Signals.VisemeSmoothing, smoothAmount, 0);
        lastSmoothing = smoothAmount;
    }

    // When the IK target steps in FixedUpdate, updates are driven from there instead.
    if (!openSeeIKTarget.fixedUpdate)
    {
        RunUpdates();
    }
}
/// <summary>
/// Pass S16 PCM audio buffer to the lip sync module
/// </summary>
/// <param name="data">Data.</param>
/// <param name="channels">Channels.</param>
public void ProcessAudioSamplesRaw(short[] data, int channels)
{
    // Send data into Phoneme context for processing (if context is not 0).
    lock (this)
    {
        bool ready = Context != 0 && OVRLipSync.IsInitialized() == OVRLipSync.Result.Success;
        if (!ready)
        {
            return;
        }
        // channels == 2 means the buffer is interleaved stereo.
        OVRLipSync.ProcessFrame(Context, data, this.Frame, channels == 2);
    }
}
/// <summary>
/// Creates the native phoneme context if it does not already exist.
/// </summary>
void CreateContext()
{
    lock (this)
    {
        if (context != 0)
        {
            return; // already created
        }
        if (OVRLipSync.CreateContext(ref context, provider) != OVRLipSync.Result.Success)
        {
            Debug.LogError("OVRLipSyncContextBase.Start ERROR: Could not create Phoneme context.");
        }
    }
}
/// <summary>
/// Process F32 audio sample and pass it to the lip sync module for computation
/// </summary>
/// <param name="data">Data.</param>
/// <param name="channels">Channels.</param>
public void ProcessAudioSamples(float[] data, int channels)
{
    // Refuse to process (and say why) when the runtime is not initialized.
    if (OVRLipSync.IsInitialized() != OVRLipSync.Result.Success)
    {
        Debug.Log("OVRLipSync is not initialized! " + OVRLipSync.IsInitialized().ToString());
        return;
    }

    // Pipeline: gain/format preparation, analysis, then output shaping.
    PreprocessAudioSamples(data, channels);
    ProcessAudioSamplesRaw(data, channels);
    PostprocessAudioSamples(data, channels);
}
/// <summary>
/// Destroys the native phoneme context and marks the component inactive.
/// </summary>
void DestroyContext()
{
    active = false;
    lock (this)
    {
        if (context == 0)
        {
            return; // nothing to tear down
        }
        if (OVRLipSync.DestroyContext(context) != OVRLipSync.Result.Success)
        {
            Debug.LogError("OVRLipSyncContextBase.OnDestroy ERROR: Could not delete Phoneme context.");
        }
        // Clear the handle even on failure so it is never reused.
        context = 0;
    }
}
/// <summary>
/// Initializes the lip sync library and creates an enhanced-with-laughter
/// context seeded from the character id.
/// </summary>
/// <param name="characterId">Character identifier; also seeds the context id.</param>
public LipDataCreator(int characterId)
{
    this.characterId = characterId;
    this.contextId = (uint)characterId;

    OVRLipSync.Initialize(sampleRate, bufferSize);

    // NOTE(review): CreateContext takes contextId by ref and may rewrite it.
    var ctx_result = OVRLipSync.CreateContext(
        ref contextId, OVRLipSync.ContextProviders.Enhanced_with_Laughter, sampleRate, true);
    if (ctx_result != 0)
    {
        LipsyncConfig.Instance.logger.LogError($"Failed to create context: {contextId}");
    }
}
/// <summary>
/// Raises the destroy event: releases the phoneme context created at startup.
/// </summary>
void OnDestroy()
{
    lock (this)
    {
        if (context == 0)
        {
            return; // no context to release
        }
        if (OVRLipSync.DestroyContext(context) != OVRLipSync.ovrLipSyncSuccess)
        {
            Debug.Log("OVRPhonemeContext.OnDestroy ERROR: Could not delete Phoneme context.");
        }
    }
}
/// <summary>
/// Raises the destroy event: tears down the native phoneme context.
/// </summary>
void OnDestroy()
{
    lock (this)
    {
        if (context == 0)
        {
            return; // no context to release
        }
        if (OVRLipSync.DestroyContext(context) != OVRLipSync.Result.Success)
        {
            Debug.LogError("OVRLipSyncContextBase.OnDestroy ERROR: Could not delete" +
                " Phoneme context.");
        }
    }
}