// Example #1
    /// <summary>
    /// Raises the audio filter read event: applies input gain, feeds the
    /// interleaved samples to the OVRLipSync phoneme context, and optionally
    /// mutes the output buffer.
    /// </summary>
    /// <param name="data">Interleaved audio sample buffer (modified in place).</param>
    /// <param name="channels">Number of interleaved channels in <paramref name="data"/>.</param>
    void OnAudioFilterRead(float[] data, int channels)
    {
        // Do not spatialize if we are not initialized, or if there is no
        // audio source attached to game object.
        // BUGFIX: the original used ':' instead of ';' as statement
        // terminators throughout this method, so it did not compile.
        if ((OVRLipSync.IsInitialized() != OVRLipSync.Result.Success) || audioSource == null)
        {
            return;
        }

        // Increase the gain of the input to get a better signal input.
        for (int i = 0; i < data.Length; ++i)
        {
            data[i] = data[i] * gain;
        }

        // Send data into phoneme context for processing (if context is not 0).
        lock (this)
        {
            if (Context != 0)
            {
                OVRLipSync.Flags flags = 0;

                // Set flags to feed into process.
                if (delayCompensate == true)
                {
                    flags |= OVRLipSync.Flags.DelayCompensateAudio;
                }

                OVRLipSync.Frame frame = this.Frame;

                OVRLipSync.ProcessFrameInterleaved(Context, data, flags, frame);
            }
        }

        // Turn off output (so that we don't get feedback from mics too close to speakers).
        if (audioMute == true)
        {
            for (int i = 0; i < data.Length; ++i)
            {
                data[i] = data[i] * 0.0f;
            }
        }
    }
// Example #2
    /// <summary>
    /// Pass an audio sample to the lip sync module for computation.
    /// </summary>
    /// <param name="data">Interleaved audio sample buffer (modified in place).</param>
    /// <param name="channels">Number of interleaved channels in <paramref name="data"/>.</param>
    void ProcessAudioSamples(float[] data, int channels)
    {
        // Bail out when the lip sync engine is unavailable or no audio
        // source is attached to this game object.
        bool engineReady = OVRLipSync.IsInitialized() == OVRLipSync.Result.Success;
        if (!engineReady || audioSource == null)
        {
            return;
        }

        // Boost the input signal.
        for (int sample = 0; sample < data.Length; ++sample)
        {
            data[sample] *= gain;
        }

        // Feed the samples to the phoneme context, if one has been created.
        lock (this)
        {
            if (Context != 0)
            {
                OVRLipSync.Frame currentFrame = this.Frame;
                OVRLipSync.ProcessFrameInterleaved(Context, data, currentFrame);
            }
        }

        // Silence the output unconditionally — the original loopback check,
        // "if (!audioLoopback)", was deliberately disabled — so mics close
        // to the speakers do not feed back.
        for (int sample = 0; sample < data.Length; ++sample)
        {
            data[sample] *= 0.0f;
        }
    }
// Example #3
    /// <summary>
    /// Raises the audio filter read event: boosts the input, runs lip sync
    /// analysis on it, then clears the buffer to suppress playback.
    /// </summary>
    /// <param name="data">Interleaved audio sample buffer (modified in place).</param>
    /// <param name="channels">Number of interleaved channels in <paramref name="data"/>.</param>
    public void OnAudioFilter(float[] data, int channels)
    {
        // Require an initialized engine and an attached audio source.
        if (OVRLipSync.IsInitialized() != OVRLipSync.Result.Success)
        {
            return;
        }
        if (audioSource == null)
        {
            return;
        }

        // Amplify the input to get a better signal for analysis.
        for (int index = 0; index < data.Length; ++index)
        {
            data[index] *= gain;
        }

        // Hand the samples to the phoneme context (skipped when no context exists).
        lock (this)
        {
            if (Context != 0)
            {
                // Optionally ask the engine to compensate for audio latency.
                var processFlags = delayCompensate == true
                    ? OVRLipSync.Flags.DelayCompensateAudio
                    : OVRLipSync.Flags.None;

                OVRLipSync.ProcessFrameInterleaved(Context, data, processFlags, Frame);
            }
        }

        // Zero the buffer so the microphone input is never audible.
        Array.Clear(data, 0, data.Length);
    }
// Example #4
 // Token: 0x06003A99 RID: 15001 RVA: 0x001277FC File Offset: 0x00125BFC
 /// <summary>
 /// Audio filter callback: runs lip sync analysis on a gain-adjusted pooled
 /// copy of the buffer, then optionally silences the playback output.
 /// </summary>
 /// <param name="data">Interleaved audio sample buffer (modified in place when muting).</param>
 /// <param name="channels">Number of interleaved channels in <paramref name="data"/>.</param>
 private void OnAudioFilterRead(float[] data, int channels)
 {
     // Skip processing when the engine is not initialized or there is no
     // audio source attached.
     if (OVRLipSync.IsInitialized() != 0 || this.audioSource == null)
     {
         return;
     }

     // Analyze a pooled copy so the playback buffer itself is not gain-boosted.
     float[] scratch = this.floatArrayPool.Get(data.Length);
     data.CopyTo(scratch, 0);
     for (int i = 0; i < scratch.Length; i++)
     {
         scratch[i] *= this.gain;
     }

     // Feed the boosted copy to the phoneme context, if one exists.
     lock (this)
     {
         if (this.context != 0u)
         {
             OVRLipSync.ovrLipSyncFlag processFlags = this.delayCompensate
                 ? OVRLipSync.ovrLipSyncFlag.DelayCompensateAudio
                 : OVRLipSync.ovrLipSyncFlag.None;
             OVRLipSync.ProcessFrameInterleaved(this.context, scratch, processFlags, ref this.frame);
         }
     }
     this.floatArrayPool.Return(scratch);

     // Silence playback so nearby microphones do not pick up the speakers.
     if (this.audioMute)
     {
         for (int i = 0; i < data.Length; i++)
         {
             data[i] *= 0f;
         }
     }
 }
    /// <summary>
    /// Raises the audio filter read event. When low-latency mode is enabled the
    /// lip sync processing is skipped here (handled elsewhere) and only muting
    /// is applied.
    /// </summary>
    /// <param name="data">Interleaved audio sample buffer (modified in place).</param>
    /// <param name="channels">Number of interleaved channels in <paramref name="data"/>.</param>
    void OnAudioFilterRead(float[] data, int channels)
    {
        if (!EnableLowLatency)
        {
            // Require an initialized engine and an attached audio source.
            if (OVRLipSync.IsInitialized() != OVRLipSync.Result.Success || audioSource == null)
            {
                return;
            }

            // Boost the input to get a better signal for analysis.
            for (int index = 0; index < data.Length; ++index)
            {
                data[index] *= Gain;
            }

            // Feed the samples to the phoneme context, if one exists.
            // (The delay-compensation flag was deliberately removed from
            // this code path.)
            lock (this)
            {
                if (Context != 0)
                {
                    OVRLipSync.Frame currentFrame = this.Frame;
                    OVRLipSync.ProcessFrameInterleaved(Context, data, currentFrame);
                }
            }
        }

        // Silence output so that mics close to the speakers do not feed back.
        if (AudioMute)
        {
            for (int index = 0; index < data.Length; ++index)
            {
                data[index] *= 0.0f;
            }
        }
    }
// Example #6
 /// <summary>
 /// Raises the audio filter read event. Optionally silences playback and,
 /// when not sourcing from a microphone, feeds the buffer to the lip sync
 /// context.
 /// </summary>
 /// <param name="data">Interleaved audio sample buffer (modified in place when muted).</param>
 /// <param name="channels">Number of interleaved channels in <paramref name="data"/>.</param>
 void OnAudioFilterRead(float[] data, int channels)
 {
     if (mute)
     {
         // Clear the buffer so the sound is not looped back to the speakers.
         for (int index = data.Length - 1; index >= 0; --index)
         {
             data[index] = 0.0f;
         }
     }

     // Note: when muted above, the context receives the zeroed buffer.
     if (!useMicrophone)
     {
         lock (this)
         {
             if (Context != 0)
             {
                 OVRLipSync.ProcessFrameInterleaved(Context, data, Frame);
             }
         }
     }
 }
// Example #7
    /// <summary>
    /// Bakes an OVRLipSyncSequence from an audio clip by running the lip sync
    /// engine over the clip in chunks of sSampleSize sample frames.
    /// </summary>
    /// <param name="clip">Clip to analyze; must use DecompressOnLoad and have at most 2 channels.</param>
    /// <returns>The baked sequence, or null when the clip cannot be processed.</returns>
    public static OVRLipSyncSequence CreateSequenceFromAudioClip(AudioClip clip)
    {
        OVRLipSyncSequence sequence = null;

        if (clip.loadType != AudioClipLoadType.DecompressOnLoad || clip.channels > 2)
        {
            // todo: just fix the clip
            Debug.LogError("Cannot process phonemes from an audio clip unless its load type is set to DecompressOnLoad.");
        }
        else
        {
            if (OVRLipSync.Initialize(clip.frequency, sSampleSize) != OVRLipSync.Result.Success)
            {
                Debug.LogError("Could not create Lip Sync engine.");
            }
            else
            {
                uint context             = 0;
                OVRLipSync.Result result = OVRLipSync.CreateContext(ref context, OVRLipSync.ContextProviders.Main);
                if (result != OVRLipSync.Result.Success)
                {
                    Debug.LogError("Could not create Phoneme context. (" + result + ")");
                    OVRLipSync.Shutdown();
                }
                else
                {
                    List <OVRLipSync.Frame> frames = new List <OVRLipSync.Frame>();
                    float[] samples      = new float[sSampleSize * clip.channels];
                    // clip.samples counts per-channel sample frames — the same
                    // unit as GetData's offset parameter and the loop index x.
                    int     totalSamples = clip.samples;
                    for (int x = 0; x < totalSamples; x += sSampleSize)
                    {
                        // GetData loops at the end of the read.  Prevent that when it happens.
                        // BUGFIX: compare in sample frames (x, sSampleSize,
                        // totalSamples), not floats — the previous test
                        // "x + samples.Length > totalSamples" used
                        // samples.Length == sSampleSize * channels and so
                        // truncated one chunk early on stereo clips.
                        if (x + sSampleSize > totalSamples)
                        {
                            samples = new float[(totalSamples - x) * clip.channels];
                        }
                        clip.GetData(samples, x);
                        OVRLipSync.Frame frame = new OVRLipSync.Frame();
                        if (clip.channels == 2)
                        {
                            // interleaved = stereo data, alternating floats
                            OVRLipSync.ProcessFrameInterleaved(context, samples, 0, frame);
                        }
                        else
                        {
                            // mono
                            OVRLipSync.ProcessFrame(context, samples, 0, frame);
                        }

                        frames.Add(frame);
                    }

                    Debug.Log(clip.name + " produced " + frames.Count + " viseme frames, playback rate is " + (frames.Count / clip.length) + " fps");
                    OVRLipSync.DestroyContext(context);
                    OVRLipSync.Shutdown();

                    sequence         = ScriptableObject.CreateInstance <OVRLipSyncSequence>();
                    sequence.entries = frames;
                    sequence.length  = clip.length;
                }
            }
        }
        return(sequence);
    }
// Example #8
    /// <summary>
    /// Accumulates incoming interleaved audio into fixed 1024-frame chunks
    /// (partialAudio) and runs OVRLipSync on a completed chunk, tracking
    /// average input amplitude in audioVolume/volume along the way.
    /// </summary>
    /// <param name="buffer">Interleaved audio samples; null is ignored.</param>
    void ProcessBuffer(float[] buffer)
    {
        if (buffer == null)
        {
            return;
        }

        // Mean absolute amplitude of the raw (pre-gain) input buffer.
        audioVolume = 0f;
        foreach (float v in buffer)
        {
            audioVolume += Mathf.Abs(v);
        }
        audioVolume /= buffer.Length;

        // NOTE(review): partialPos appears to count per-channel frames (see the
        // division by channels at the bottom) while buffer.Length counts floats
        // (frames * channels); summing them here mixes units — confirm intended.
        int totalLen  = partialPos + buffer.Length;
        int bufferPos = 0;

        // Process only when at least one full 1024-frame chunk is available.
        if (totalLen >= 1024 * channels)
        {
            volume = 0f;
            while (totalLen >= 1024 * channels)
            {
                // Frames still needed to fill the current partial chunk.
                int sliceLen = 1024 - partialPos;
                Array.Copy(buffer, bufferPos, partialAudio, partialPos, sliceLen * channels);
                totalLen -= 1024 * channels;
                // NOTE(review): only the LAST complete chunk in this callback is
                // gain-scaled and fed to OVRLipSync; earlier full chunks are
                // copied into partialAudio and then overwritten — verify this
                // drop-all-but-latest behavior is intentional.
                if (totalLen < 1024 * channels)
                {
                    for (int i = 0; i < partialAudio.Length; i++)
                    {
                        partialAudio[i] = partialAudio[i] * gain;//Mathf.Clamp(partialAudio[i] * gain, 0f, 1f);
                        volume         += Mathf.Abs(partialAudio[i]);
                    }
                    lock (this) {
                        if (context != 0)
                        {
                            OVRLipSync.Frame frame = this.visemeData;
                            if (channels == 2)
                            {
                                // Stereo: samples alternate L/R floats.
                                OVRLipSync.ProcessFrameInterleaved(context, partialAudio, frame);
                            }
                            else
                            {
                                // Mono path.
                                OVRLipSync.ProcessFrame(context, partialAudio, frame);
                            }
                        }
                        else
                        {
                            Debug.Log("OVRLipSync context is 0");
                        }
                    }
                }
                // NOTE(review): bufferPos advances by sliceLen (frames) but is
                // used as a float index in Array.Copy above — possible unit
                // mismatch for stereo input; confirm against the capture source.
                bufferPos += sliceLen;
                partialPos = 0;
            }
            // Average of the gain-scaled samples of the last processed chunk,
            // normalized by the incoming buffer length.
            volume /= (float)buffer.Length;
        }

        // Stash any leftover samples for the next callback.
        if (totalLen > 0)
        {
            Array.Copy(buffer, bufferPos, partialAudio, partialPos, buffer.Length - bufferPos);
            partialPos += (buffer.Length - bufferPos) / channels;
        }
    }