示例#1
0
    /// <summary>
    /// Audio filter callback: boosts the buffer by <c>gain</c>, feeds it to the
    /// lip sync context, then zeroes it so the raw input is not played back.
    /// </summary>
    /// <param name="data">Interleaved audio sample buffer (modified in place).</param>
    /// <param name="channels">Channels.</param>
    public void OnAudioFilter(float[] data, int channels)
    {
        // Bail out unless the engine is initialized and an audio source
        // is attached to the game object.
        if ((OVRLipSync.IsInitialized() != OVRLipSync.Result.Success) || audioSource == null)
        {
            return;
        }

        // Amplify the input to get a stronger signal for analysis.
        for (int sample = 0; sample < data.Length; ++sample)
        {
            data[sample] *= gain;
        }

        lock (this)
        {
            // Only feed the engine when a valid context has been created.
            if (Context != 0)
            {
                var flags = delayCompensate
                    ? OVRLipSync.Flags.DelayCompensateAudio
                    : OVRLipSync.Flags.None;

                OVRLipSync.ProcessFrameInterleaved(Context, data, flags, Frame);
            }
        }

        // Silence the buffer so the microphone input is never audible.
        Array.Clear(data, 0, data.Length);
    }
示例#2
0
    /// <summary>
    /// Raises the audio filter read event: boosts the buffer by <c>gain</c>,
    /// feeds it to the lip sync context, and mutes the output when
    /// <c>audioMute</c> is set.
    /// </summary>
    /// <param name="data">Interleaved audio sample buffer (modified in place).</param>
    /// <param name="channels">Channels.</param>
    void OnAudioFilterRead(float[] data, int channels)
    {
        // Do not spatialize if we are not initialized, or if there is no
        // audio source attached to game object.
        if ((OVRLipSync.IsInitialized() != OVRLipSync.Result.Success) || audioSource == null)
        {
            return;
        }

        // Increase the gain of the input to get a better signal input.
        for (int i = 0; i < data.Length; ++i)
        {
            data[i] = data[i] * gain;
        }

        // Send data into Phoneme context for processing (if context is not 0).
        lock (this)
        {
            if (Context != 0)
            {
                OVRLipSync.Flags flags = 0;

                // Set flags to feed into process.
                if (delayCompensate)
                {
                    flags |= OVRLipSync.Flags.DelayCompensateAudio;
                }

                OVRLipSync.Frame frame = this.Frame;

                OVRLipSync.ProcessFrameInterleaved(Context, data, flags, frame);
            }
        }

        // Turn off output (so that we don't get feedback from mics too close to speakers).
        if (audioMute)
        {
            for (int i = 0; i < data.Length; ++i)
            {
                data[i] = data[i] * 0.0f;
            }
        }
    }
示例#3
0
 // Token: 0x06003A99 RID: 15001 RVA: 0x001277FC File Offset: 0x00125BFC
 // Audio filter callback: gain-boosts a pooled copy of the buffer, feeds it
 // to the lip sync context, then optionally zeroes the playback buffer.
 private void OnAudioFilterRead(float[] data, int channels)
 {
     // Require an initialized engine and an attached audio source.
     if (OVRLipSync.IsInitialized() != 0 || this.audioSource == null)
     {
         return;
     }
     // Work on a pooled copy so the playback buffer itself is not gain-boosted.
     float[] scratch = this.floatArrayPool.Get(data.Length);
     data.CopyTo(scratch, 0);
     for (int sample = 0; sample < scratch.Length; sample++)
     {
         scratch[sample] *= this.gain;
     }
     lock (this)
     {
         // Only feed the engine when a valid context has been created.
         if (this.context != 0u)
         {
             OVRLipSync.ovrLipSyncFlag flags = this.delayCompensate
                 ? OVRLipSync.ovrLipSyncFlag.DelayCompensateAudio
                 : OVRLipSync.ovrLipSyncFlag.None;
             OVRLipSync.ProcessFrameInterleaved(this.context, scratch, flags, ref this.frame);
         }
     }
     this.floatArrayPool.Return(scratch);
     // Zero the output when muted to avoid mic-to-speaker feedback.
     if (this.audioMute)
     {
         for (int sample = 0; sample < data.Length; sample++)
         {
             data[sample] *= 0f;
         }
     }
 }
示例#4
0
    /// <summary>
    /// Pass an audio sample to the lip sync module for computation.
    /// </summary>
    /// <param name="data">Interleaved audio sample buffer (modified in place).</param>
    /// <param name="channels">Channels.</param>
    public void ProcessAudioSamples(float[] data, int channels)
    {
        // Require an initialized engine and an attached audio source.
        if ((OVRLipSync.IsInitialized() != OVRLipSync.Result.Success) || audioSource == null)
        {
            return;
        }

        // Amplify the input signal before analysis.
        for (int sample = 0; sample < data.Length; ++sample)
        {
            data[sample] *= gain;
        }

        lock (this)
        {
            // Only feed the engine when a context has been created.
            if (Context != 0)
            {
                OVRLipSync.ProcessFrame(Context, data, this.Frame);
            }
        }

        // Without loopback, zero the buffer so the mic input is not played
        // back (avoids speaker-to-mic feedback).
        if (!audioLoopback)
        {
            for (int sample = 0; sample < data.Length; ++sample)
            {
                data[sample] *= 0.0f;
            }
        }
    }
示例#5
0
 // Token: 0x06003A90 RID: 14992 RVA: 0x0012765A File Offset: 0x00125A5A
 // Forwards an interleaved audio buffer to the native lip sync DLL;
 // returns -2200 when the engine is not initialized.
 public static int ProcessFrameInterleaved(uint context, float[] audioBuffer, OVRLipSync.ovrLipSyncFlag flags, ref OVRLipSync.ovrLipSyncFrame frame)
 {
     return OVRLipSync.IsInitialized() == 0
         ? OVRLipSync.ovrLipSyncDll_ProcessFrameInterleaved(context, audioBuffer, flags, ref frame.frameNumber, ref frame.frameDelay, frame.Visemes, frame.Visemes.Length)
         : -2200;
 }
示例#6
0
 // Token: 0x06003A8E RID: 14990 RVA: 0x0012760B File Offset: 0x00125A0B
 // Forwards a signal to the native lip sync DLL for the given context;
 // returns -2200 when the engine is not initialized.
 public static int SendSignal(uint context, OVRLipSync.ovrLipSyncSignals signal, int arg1, int arg2)
 {
     return OVRLipSync.IsInitialized() == 0
         ? OVRLipSync.ovrLipSyncDll_SendSignal(context, signal, arg1, arg2)
         : -2200;
 }
示例#7
0
 // Token: 0x06003A8D RID: 14989 RVA: 0x001275F3 File Offset: 0x001259F3
 // Resets the given native lip sync context; returns -2200 when the
 // engine is not initialized.
 public static int ResetContext(uint context)
 {
     return OVRLipSync.IsInitialized() == 0
         ? OVRLipSync.ovrLipSyncDll_ResetContext(context)
         : -2200;
 }
示例#8
0
 // Token: 0x06003A8B RID: 14987 RVA: 0x001275C2 File Offset: 0x001259C2
 // Creates a native lip sync context via the DLL; returns -2201 when the
 // engine is not initialized.
 public static int CreateContext(ref uint context, OVRLipSync.ovrLipSyncContextProvider provider)
 {
     return OVRLipSync.IsInitialized() == 0
         ? OVRLipSync.ovrLipSyncDll_CreateContext(ref context, provider)
         : -2201;
 }
示例#9
0
 // Token: 0x06003A9B RID: 15003 RVA: 0x00127984 File Offset: 0x00125D84
 // Resets this instance's context; returns -2200 when the engine is
 // not initialized.
 public int ResetContext()
 {
     return OVRLipSync.IsInitialized() == 0
         ? OVRLipSync.ResetContext(this.context)
         : -2200;
 }
示例#10
0
 // Token: 0x06003A9C RID: 15004 RVA: 0x001279A1 File Offset: 0x00125DA1
 // Forwards a signal to this instance's context; returns -2200 when the
 // engine is not initialized.
 public int SendSignal(OVRLipSync.ovrLipSyncSignals signal, int arg1, int arg2)
 {
     return OVRLipSync.IsInitialized() == 0
         ? OVRLipSync.SendSignal(this.context, signal, arg1, arg2)
         : -2200;
 }
    /// <summary>
    /// Sends the signal.
    /// </summary>
    /// <returns>error code</returns>
    /// <param name="signal">Signal.</param>
    /// <param name="arg1">Arg1.</param>
    /// <param name="arg2">Arg2.</param>
    public int SendSignal(OVRLipSync.ovrLipSyncSignals signal, int arg1, int arg2)
    {
        // Forward to the static helper only when the engine is up;
        // otherwise report an unknown error.
        if (OVRLipSync.IsInitialized() == OVRLipSync.ovrLipSyncSuccess)
        {
            return OVRLipSync.SendSignal(context, signal, arg1, arg2);
        }

        return (int)OVRLipSync.ovrLipSyncError.Unknown;
    }
    /// <summary>
    /// Resets the context.
    /// </summary>
    /// <returns>error code</returns>
    public int ResetContext()
    {
        // Forward to the static helper only when the engine is up;
        // otherwise report an unknown error.
        if (OVRLipSync.IsInitialized() == OVRLipSync.ovrLipSyncSuccess)
        {
            return OVRLipSync.ResetContext(context);
        }

        return (int)OVRLipSync.ovrLipSyncError.Unknown;
    }
示例#13
0
 /// <summary>
 /// Process F32 audio sample and pass it to the lip sync module for computation.
 /// </summary>
 /// <param name="data">Data.</param>
 /// <param name="channels">Channels.</param>
 public void ProcessAudioSamples(float[] data, int channels)
 {
     // Skip processing unless the engine is initialized and an audio
     // source is attached to the game object.
     bool notReady = OVRLipSync.IsInitialized() != OVRLipSync.Result.Success || audioSource == null;
     if (notReady)
     {
         return;
     }
     // Run the three processing stages in order.
     PreprocessAudioSamples(data, channels);
     ProcessAudioSamplesRaw(data, channels);
     PostprocessAudioSamples(data, channels);
 }
示例#14
0
 /// <summary>
 /// Pass S16 PCM audio buffer to the lip sync module.
 /// </summary>
 /// <param name="data">Data.</param>
 /// <param name="channels">Channels.</param>
 public void ProcessAudioSamplesRaw(short[] data, int channels)
 {
     // Feed the phoneme context under lock; do nothing when no context
     // exists or the engine is not initialized. A stereo buffer is
     // indicated by the final argument.
     lock (this)
     {
         if (Context != 0 && OVRLipSync.IsInitialized() == OVRLipSync.Result.Success)
         {
             OVRLipSync.ProcessFrame(Context, data, this.Frame, channels == 2);
         }
     }
 }
示例#15
0
    /// <summary>
    /// Process F32 audio sample and pass it to the lip sync module for computation.
    /// </summary>
    /// <param name="data">Data.</param>
    /// <param name="channels">Channels.</param>
    public void ProcessAudioSamples(float[] data, int channels)
    {
        // The engine must be initialized before anything else happens;
        // log the failure state for debugging.
        if (OVRLipSync.IsInitialized() != OVRLipSync.Result.Success)
        {
            Debug.Log("OVRLipSync is not initialized! " + OVRLipSync.IsInitialized().ToString());
            return;
        }

        // Run the three processing stages in order.
        PreprocessAudioSamples(data, channels);
        ProcessAudioSamplesRaw(data, channels);
        PostprocessAudioSamples(data, channels);
    }
示例#16
0
 // Token: 0x06003A9A RID: 15002 RVA: 0x001278F0 File Offset: 0x00125CF0
 // Copies the most recent phoneme frame into the caller's frame under lock.
 // Returns 0 on success, -2200 when the engine is not initialized.
 public int GetCurrentPhonemeFrame(ref OVRLipSync.ovrLipSyncFrame inFrame)
 {
     if (OVRLipSync.IsInitialized() != 0)
     {
         return -2200;
     }
     // Copy under lock so a concurrent update cannot produce a torn frame.
     lock (this)
     {
         inFrame.frameNumber = this.frame.frameNumber;
         inFrame.frameDelay = this.frame.frameDelay;
         int visemeCount = inFrame.Visemes.Length;
         for (int v = 0; v < visemeCount; v++)
         {
             inFrame.Visemes[v] = this.frame.Visemes[v];
         }
     }
     return 0;
 }
    // * * * * * * * * * * * * *
    // Public Functions

    /// <summary>
    /// Gets the current phoneme frame (lock and copy current frame to caller frame)
    /// </summary>
    /// <returns>error code</returns>
    /// <param name="inFrame">In frame.</param>
    public int GetCurrentPhonemeFrame(ref OVRLipSync.ovrLipSyncFrame inFrame)
    {
        if (OVRLipSync.IsInitialized() != OVRLipSync.ovrLipSyncSuccess)
        {
            return (int)OVRLipSync.ovrLipSyncError.Unknown;
        }

        // Copy under lock so a concurrent update cannot produce a torn frame.
        lock (this)
        {
            inFrame.frameNumber = frame.frameNumber;
            inFrame.frameDelay  = frame.frameDelay;
            int visemeCount = inFrame.Visemes.Length;
            for (int v = 0; v < visemeCount; v++)
            {
                inFrame.Visemes[v] = frame.Visemes[v];
            }
        }

        return OVRLipSync.ovrLipSyncSuccess;
    }
示例#18
0
    /// <summary>
    /// Pass F32 PCM audio buffer to the lip sync module.
    /// </summary>
    /// <param name="data">Data.</param>
    /// <param name="channels">Channels.</param>
    public void ProcessAudioSamplesRaw(float[] data, int channels)
    {
        // Hand the buffer to the phoneme context under lock; a stereo
        // buffer is indicated by the final argument.
        lock (this)
        {
            if (Context == 0 || OVRLipSync.IsInitialized() != OVRLipSync.Result.Success)
            {
                Debug.LogError("context is " + Context);

                Debug.LogError("OVRLipSync.IsInitialized() " + OVRLipSync.IsInitialized().ToString());

                return;
            }
            OVRLipSync.ProcessFrame(Context, data, this.Frame, channels == 2);
        }
    }
    /// <summary>
    /// Unity audio filter callback. In non-low-latency mode the buffer is
    /// gain-boosted and fed to the lip sync context; afterwards the buffer
    /// is zeroed when <c>AudioMute</c> is set.
    /// </summary>
    /// <param name="data">Interleaved audio sample buffer (modified in place).</param>
    /// <param name="channels">Channels.</param>
    void OnAudioFilterRead(float[] data, int channels)
    {
        if (!EnableLowLatency)
        {
            // Require an initialized engine and an attached audio source.
            if ((OVRLipSync.IsInitialized() != OVRLipSync.Result.Success) || audioSource == null)
            {
                return;
            }

            // Boost the input to get a stronger signal for analysis.
            for (int sample = 0; sample < data.Length; ++sample)
            {
                data[sample] *= Gain;
            }

            lock (this)
            {
                // Only feed the engine when a context has been created.
                // No flags are passed to ProcessFrameInterleaved here.
                if (Context != 0)
                {
                    OVRLipSync.ProcessFrameInterleaved(Context, data, this.Frame);
                }
            }
        }
        // Zero the output so nearby speakers do not feed back into the mic.
        if (AudioMute)
        {
            for (int sample = 0; sample < data.Length; ++sample)
            {
                data[sample] *= 0.0f;
            }
        }
    }
示例#20
0
 // Pulls the latest microphone samples and, for live (non-canned) input,
 // hands them to the processing pipeline.
 void ReadAudio()
 {
     // Nothing to read without a clip.
     if (clip == null)
     {
         return;
     }
     // The engine must be ready before any samples are consumed.
     if (OVRLipSync.IsInitialized() != OVRLipSync.Result.Success)
     {
         Debug.Log("OVRLipSync is not ready.");
         return;
     }
     float[] samples = ReadMic();
     if (samples == null)
     {
         return;
     }
     // Only live microphone input is processed here; canned clips are skipped.
     if (!isCanned)
     {
         ProcessBuffer(samples);
     }
 }
示例#21
0
    /// <summary>
    /// (Re)initializes lip sync capture: either binds to the AudioSource's
    /// canned clip or starts a looping microphone recording, and makes sure
    /// the lip sync engine and context exist at the chosen sample rate.
    /// </summary>
    public void InitializeLipSync()
    {
        active = false;
        if (catsData == null)
        {
            InitCatsData();
        }
        // Stop any previous live-mic recording before reconfiguring.
        if (!isCanned && clip != null)
        {
            Microphone.End(lastMic);
        }

        partialAudio = null;
        partialPos   = 0;

        audioSource = GetComponent <AudioSource>();

        // Canned-clip path: use the AudioSource's clip instead of a microphone.
        if (useCanned && audioSource != null && audioSource.clip != null)
        {
            isCanned     = true;
            clip         = audioSource.clip;
            channels     = clip.channels;
            partialAudio = new float[1024 * channels];
            freq         = audioSource.clip.frequency;
            EnsureLipSyncEngine();
            active = true;
            return;
        }

        // Microphone path: pick the best supported capture frequency.
        isCanned = false;

        int minFreq;
        int maxFreq = AudioSettings.outputSampleRate;

        freq = maxFreq;

        lastMic = mic;
        if (mic != null)
        {
            Microphone.GetDeviceCaps(lastMic, out minFreq, out maxFreq);
        }
        // A reported max of 0 means no specific cap; keep the output rate then.
        if (maxFreq > 0)
        {
            freq = maxFreq;
        }

        EnsureLipSyncEngine();

        clip         = Microphone.Start(lastMic, true, 1, freq);
        channels     = clip.channels;
        partialAudio = new float[1024 * channels];
        lastPos      = 0;
        active       = true;
    }

    // Tears down any previous engine instance and (re)creates the lip sync
    // context at the current sample rate. Runs at most once; `inited` guards
    // against repeated initialization. (Extracted from the duplicated blocks
    // in the canned and microphone paths.)
    private void EnsureLipSyncEngine()
    {
        if (inited)
        {
            return;
        }
        if (OVRLipSync.IsInitialized() == OVRLipSync.Result.Success)
        {
            DestroyContext();
            OVRLipSync.Shutdown();
        }
        OVRLipSync.Initialize(freq, 1024);
        CreateContext();
        OVRLipSync.SendSignal(context, OVRLipSync.Signals.VisemeSmoothing, smoothAmount, 0);
        inited = true;
    }