/**
 * Decode <paramref name="audioData"/> into a SoundFX using the decoder that
 * matches <paramref name="type"/>.
 *
 * Throws UnsupportedAudioException when the type is not in
 * supportedMemoryTypes or has no decoder branch below.
 */
public SoundFX loadAudio(byte[] audioData, AudioDataType type)
{
    // Reject types this platform cannot decode from memory before dispatching.
    if (!supportedMemoryTypes.Contains(type))
    {
        throw new UnsupportedAudioException(I18NString.Lookup(LoadAudioFailedTypeMsg));
    }

    // Dispatch to the format-specific decoder.
    switch (type)
    {
        case AudioDataType.AAC:  return loadAAC(audioData);
        case AudioDataType.AIFF: return loadAIFF(audioData);
        case AudioDataType.AU:   return loadAU(audioData);
        case AudioDataType.FLAC: return loadFLAC(audioData);
        case AudioDataType.M4A:  return loadM4A(audioData);
        case AudioDataType.MIDI: return loadMIDI(audioData);
        case AudioDataType.MP3:  return loadMP3(audioData);
        case AudioDataType.OGG:  return loadOGG(audioData);
        case AudioDataType.RAW:  return loadRAW(audioData);
        case AudioDataType.WAVE: return loadWAVE(audioData);
        case AudioDataType.WEBM: return loadWEBM(audioData);
        case AudioDataType.WMA:  return loadWMA(audioData);
        default:
            // Type passed the membership check but has no decoder here;
            // surface the same localized error as the guard above.
            throw new UnsupportedAudioException(I18NString.Lookup(LoadAudioFailedTypeMsg));
    }
}
// Native P/Invoke entry point for per-frame lip-sync processing.
// NOTE(review): an `extern` method must carry a [DllImport] attribute; it is
// presumably declared immediately above this line in the full file — confirm.
// audioBuffer is a raw pointer to the sample data, so callers must pin any
// managed array (e.g. via GCHandle) for the duration of the call.
// NOTE(review): the unit of bufferSize (bytes vs. samples vs. samples per
// channel) is not established by this chunk — verify against the native
// library's documentation before computing it.
// Returns a native status code that managed callers cast to Result.
private static extern int ovrLipSyncDll_ProcessFrameEx( uint context, IntPtr audioBuffer, uint bufferSize, AudioDataType dataType, ref int frameNumber, ref int frameDelay, float[] visemes, int visemeCount, ref float laughterScore, float[] laughterCategories, int laughterCategoriesLength);
/// <summary>
/// Runs one frame of audio through the native lip-sync engine, filling
/// <paramref name="frame"/> with viseme weights and laughter score.
/// </summary>
/// <param name="context">Native lip-sync context handle.</param>
/// <param name="audioBuffer">Interleaved float PCM samples to analyze.</param>
/// <param name="frame">Receives frame number, delay, visemes, and laughter score.</param>
/// <param name="dataType">Sample layout of <paramref name="audioBuffer"/>; stereo by default.</param>
/// <returns><see cref="Result.Unknown"/> if the engine is not initialized; otherwise the native result code.</returns>
/// <exception cref="ArgumentNullException">If <paramref name="audioBuffer"/> or <paramref name="frame"/> is null.</exception>
public static Result ProcessFrame( uint context, float[] audioBuffer, Frame frame, AudioDataType dataType = AudioDataType.F32_Stereo)
{
    if (IsInitialized() != Result.Success)
    {
        return Result.Unknown;
    }
    if (audioBuffer == null)
    {
        throw new ArgumentNullException(nameof(audioBuffer));
    }
    if (frame == null)
    {
        throw new ArgumentNullException(nameof(frame));
    }

    // BUG FIX: the original passed the managed float[] where the extern
    // declares IntPtr, and hard-coded bufferSize = 0 (telling the native
    // library the buffer is empty). Pin the array so its address is stable
    // for the duration of the native call, and pass the real sample count.
    var handle = System.Runtime.InteropServices.GCHandle.Alloc(
        audioBuffer, System.Runtime.InteropServices.GCHandleType.Pinned);
    try
    {
        // NOTE(review): assumes bufferSize is the per-channel sample count,
        // so interleaved stereo halves the array length — confirm against
        // the native ovrLipSyncDll_ProcessFrameEx contract.
        uint sampleCount = (uint)(dataType == AudioDataType.F32_Stereo
            ? audioBuffer.Length / 2
            : audioBuffer.Length);

        return (Result)ovrLipSyncDll_ProcessFrameEx(
            context,
            handle.AddrOfPinnedObject(),
            sampleCount,
            dataType,
            ref frame.frameNumber,
            ref frame.frameDelay,
            frame.Visemes,
            frame.Visemes.Length,
            ref frame.laughterScore,
            null,   // laughter categories not requested
            0);
    }
    finally
    {
        // Always unpin, even if the native call throws.
        handle.Free();
    }
}