Example #1
0
        /// <summary>
        /// Loads by copying the raw audio memory of a Unity-imported `AudioClip` over to the native side.
        /// Once loaded, you are free to unload the `AudioClip`'s audio data without affecting the native copy.
        ///
        /// Hard requirements :
        /// - Load type MUST be Decompress On Load so Native Audio could read raw PCM byte array from your compressed audio.
        /// - If you use Load In Background, you must call `audioClip.LoadAudioData()` beforehand and ensure that `audioClip.loadState` is `AudioDataLoadState.Loaded` before calling `NativeAudio.Load`. Otherwise it would throw an exception. If you are not using Load In Background but also not using Preload Audio Data, Native Audio can load for you if not yet loaded.
        /// - Must not be ambisonic.
        ///
        /// All compression formats, force to mono, sample-rate override, and the quality slider are supported.
        ///
        /// If this is the first time loading any audio it will call `NativeAudio.Initialize()` automatically which might take a bit more time.
        ///
        /// [iOS] Loads an audio into OpenAL's output audio buffer. (Max 256) This buffer will be paired to one of 16 OpenAL source when you play it.
        ///
        /// [Android] Loads an audio into a `short*` array at unmanaged native side. This array will be pushed into one of available `SLAndroidSimpleBufferQueue` when you play it.
        /// Resampling happens at this moment so the audio matches the device's native rate. The SLES audio player must be created to match the device rate
        /// to enable the special "fast path" audio; the resampler makes the loaded audio compatible with that fast path player.
        ///
        /// The sampling quality of the SRC (libsamplerate) library can be set per audio via the `LoadOptions` overload.
        /// </summary>
        /// <param name="audioClip">
        /// Hard requirements :
        /// - Load type MUST be Decompress On Load so Native Audio could read raw PCM byte array from your compressed audio.
        /// - If you use Load In Background, you must call `audioClip.LoadAudioData()` beforehand and ensure that `audioClip.loadState` is `AudioDataLoadState.Loaded` before calling `NativeAudio.Load`. Otherwise it would throw an exception. If you are not using Load In Background but also not using Preload Audio Data, Native Audio can load for you if not yet loaded.
        /// - Must not be ambisonic.
        /// </param>
        /// <returns> An object that stores a number. Native side can pair this number with an actual loaded audio data when you want to play it. You can `Play`, `Prepare`, or `Unload` with this object. `Load` returns null on error, for example : wrong name, or calling in Editor </returns>
        public static NativeAudioPointer Load(AudioClip audioClip, LoadOptions loadOptions)
        {
            AssertAudioClip(audioClip);
            if (!initialized)
            {
                NativeAudio.Initialize();
            }

            //This big array is reclaimed by the GC eventually; you could also call `GC.Collect()` right after this method returns.
            short[] pcm = AudioClipToShortArray(audioClip);

#if UNITY_IOS
            //Length is doubled because the native side reads the short array as a byte array.
            int nativeIndex = _SendByteArray(pcm, pcm.Length * 2, audioClip.channels, audioClip.frequency, loadOptions.resamplingQuality);
            if (nativeIndex == -1)
            {
                throw new Exception("Error loading NativeAudio with AudioClip named : " + audioClip.name);
            }
            return new NativeAudioPointer(audioClip.name, nativeIndex, _LengthBySource(nativeIndex));
#elif UNITY_ANDROID
            //Length is doubled because the native side reads the short array as a byte array.
            int nativeIndex = sendByteArray(pcm, pcm.Length * 2, audioClip.channels, audioClip.frequency, loadOptions.resamplingQuality);
            if (nativeIndex == -1)
            {
                throw new Exception("Error loading NativeAudio with AudioClip named : " + audioClip.name);
            }
            return new NativeAudioPointer(audioClip.name, nativeIndex, lengthBySource(nativeIndex));
#else
            //Editor stub : declared only so autocomplete shows up. If you mistakenly use the pointer in editor instead of forwarding to a normal sound playing method you will get a null reference error.
            return null;
#endif
        }
Example #2
0
        /// <summary>
        /// (**Advanced**) Loads an audio from `StreamingAssets` folder's destination at runtime. Most of the case you should use the `AudioClip` overload instead.
        /// It only supports .wav PCM 16-bit format, stereo or mono, in any sampling rate since it will be resampled to fit the device.
        ///
        /// If this is the first time loading any audio it will call `NativeAudio.Initialize()` automatically which might take a bit more time.
        ///
        /// [iOS] Loads an audio into OpenAL's output audio buffer. (Max 256) This buffer will be paired to one of 16 OpenAL source when you play it.
        ///
        /// [Android] Loads an audio into a `short*` array at unmanaged native side. This array will be pushed into one of available `SLAndroidSimpleBufferQueue` when you play it.
        /// The resampling of audio will occur at this moment to match your player's device native rate. The SLES audio player must be created to match the device rate
        /// to enable the special "fast path" audio. What's left is to make our audio compatible with that fast path player, which the resampler will take care of.
        ///
        /// You can change the sampling quality of SRC (libsamplerate) library per audio basis with the `LoadOptions` overload.
        ///
        /// If the audio is not found in the main app's persistent space (the destination of `StreamingAssets`) it will continue to search for the audio
        /// in all OBB packages you might have. (Often if your game is a split OBB, things in `StreamingAssets` will go there by default even if the main one is not that large.)
        /// </summary>
        /// <param name="audioPath">If the file is `StreamingAssets/Hit.wav` use "Hit.wav" (WITH the extension).</param>
        /// <param name="loadOptions">Per-audio load settings, e.g. the resampling quality of libsamplerate.</param>
        /// <returns> An object that stores a number. Native side can pair this number with an actual loaded audio data when you want to play it. You can `Play`, `Prepare`, or `Unload` with this object. `Load` returns null on error, for example : wrong name, not existing in StreamingAssets, calling in Editor </returns>
        /// <exception cref="Exception">When the file is an OGG, or the native side failed to load the audio.</exception>
        public static NativeAudioPointer Load(string audioPath, LoadOptions loadOptions)
        {
            if (!initialized)
            {
                NativeAudio.Initialize();
            }

            //Ordinal, case-insensitive comparison instead of ToLower() : culture-safe (no Turkish-I style surprises) and allocation-free.
            if (string.Equals(System.IO.Path.GetExtension(audioPath), ".ogg", StringComparison.OrdinalIgnoreCase))
            {
                throw new Exception("Loading via StreamingAssets does not support OGG. Please use the AudioClip overload and set the import settings to Vorbis.");
            }

#if UNITY_IOS
            int startingIndex = _LoadAudio(audioPath, loadOptions.resamplingQuality);
            if (startingIndex == -1)
            {
                throw new Exception("Error loading audio at path : " + audioPath);
            }
            else
            {
                float length = _LengthBySource(startingIndex);
                return(new NativeAudioPointer(audioPath, startingIndex, length));
            }
#elif UNITY_ANDROID
            int startingIndex = AndroidNativeAudio.CallStatic <int>(AndroidLoadAudio, audioPath, loadOptions.resamplingQuality);

            if (startingIndex == -1)
            {
                throw new Exception("Error loading audio at path : " + audioPath);
            }
            else
            {
                float length = lengthBySource(startingIndex);
                return(new NativeAudioPointer(audioPath, startingIndex, length));
            }
#else
            //Load is defined on editor so that autocomplete shows up, but it is a stub. If you mistakenly use the pointer in editor instead of forwarding to normal sound playing method you will get a null reference error.
            return(null);
#endif
        }
Example #3
0
        /// <summary>
        /// Loads audio at `audioPath`. If this is the first time loading any audio it will call `NativeAudio.Initialize()` automatically which might take a bit more time.
        /// [iOS] Loads an audio into OpenAL's output audio buffer. (Max 256) This buffer will be paired to one of 32 OpenAL source when you play it.
        /// [Android] Loads an audio into a `byte[]` array at native side. This array will be `write` into one of available `AudioTrack` when you play it.
        /// </summary>
        /// <param name="audioPath">The file must be in `StreamingAssets` folder and in .wav PCM 16-bit format with 44100 Hz sampling rate. If the file is `StreamingAssets/Hit.wav` use "Hit.wav" (WITH the extension).</param>
        /// <returns> An object that stores a number. Native side can pair this number with an actual loaded audio data when you want to play it. You can `Play`, `Prepare`, or `Unload` with this object. `Load` returns null on error, for example : wrong name, not existing in StreamingAssets, calling in Editor </returns>
        public static NativeAudioPointer Load(string audioPath)
        {
            if (!initialized)
            {
                NativeAudio.Initialize();
            }

#if UNITY_IOS
            //-1 means either a nonexistent audio file or the iOS OpenAL buffer amount hard limit was exceeded.
            //This "error" is expected and deliberately quiet; callers must handle the returned null (or `Unload()` to free up the quota).
            int sourceIndex = _LoadAudio(audioPath);
            if (sourceIndex == -1)
            {
                return null;
            }
            return new NativeAudioPointer(audioPath, sourceIndex);
#elif UNITY_ANDROID
            //-1 means either a nonexistent audio file or the Android AudioTrack hard limit was exceeded.
            //This "error" is expected and deliberately quiet; callers must handle the returned null (or `Unload()` to free up the quota).
            int sourceIndex = AndroidNativeAudio.CallStatic <int>(AndroidLoadAudio, audioPath);
            if (sourceIndex == -1)
            {
                return null;
            }
            return new NativeAudioPointer(audioPath, sourceIndex);
#else
            //Editor stub : declared only so autocomplete shows up. If you mistakenly use the pointer in editor instead of forwarding to a normal sound playing method you will get a null reference error.
            return null;
#endif
        }