Example No. 1
0
        /// <summary>
        /// Open the sound device, sets up an audio context, and makes the new context
        /// the current context. Note that this method will stop the playback of
        /// music that was running prior to the game start. If any error occurs, then
        /// the state of the controller is reset.
        /// </summary>
        /// <returns>True if the sound controller was setup, and false if not.</returns>
        /// <exception cref="DllNotFoundException">The native OpenAL library could not be loaded.</exception>
        /// <exception cref="AudioHardwareException">The OpenAL device failed to initialize for any other reason.</exception>
        private bool Open()
        {
            lock (InitMutex)
            {
                try
                {
                    Device = ALC.OpenDevice(string.Empty);
                }
                catch (DllNotFoundException)
                {
                    // Deliberately rethrown unwrapped: callers distinguish "OpenAL library
                    // missing" from a generic hardware initialization failure.
                    throw;
                }
                catch (Exception ex)
                {
                    throw new AudioHardwareException("OpenAL device could not be initialized.", ex);
                }

                ALCHelper.CheckError("Could not open OpenAL device.");

                if (Device == IntPtr.Zero)
                {
                    return false;
                }

#if ANDROID
                // Attach activity event handlers so we can pause and resume all playing sounds
                MonoGameAndroidGameView.OnGameThreadPause  += Activity_Paused;
                MonoGameAndroidGameView.OnGameThreadResume += Activity_Resumed;

                // Query the device for the ideal frequency and update buffer size so
                // we can get the low latency sound path.

                /*
                 * The recommended sequence is:
                 *
                 * Check for feature "android.hardware.audio.low_latency" using code such as this:
                 * import android.content.pm.PackageManager;
                 * ...
                 * PackageManager pm = getContext().getPackageManager();
                 * boolean claimsFeature = pm.hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
                 * Check for API level 17 or higher, to confirm use of android.media.AudioManager.getProperty().
                 * Get the native or optimal output sample rate and buffer size for this device's primary output stream, using code such as this:
                 * import android.media.AudioManager;
                 * ...
                 * AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
                 * String sampleRate = am.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
                 * String framesPerBuffer = am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
                 * Note that sampleRate and framesPerBuffer are Strings. First check for null and then convert to int using Integer.parseInt().
                 * Now use OpenSL ES to create an AudioPlayer with PCM buffer queue data locator.
                 *
                 * See http://stackoverflow.com/questions/14842803/low-latency-audio-playback-on-android
                 */

                int frequency     = DEFAULT_FREQUENCY;
                int updateSize    = DEFAULT_UPDATE_SIZE;
                int updateBuffers = DEFAULT_UPDATE_BUFFER_COUNT;
                if (Android.OS.Build.VERSION.SdkInt >= Android.OS.BuildVersionCodes.JellyBeanMr1)
                {
                    Android.Util.Log.Debug(
                        "OAL", AndroidGameActivity.Instance.PackageManager.HasSystemFeature(PackageManager.FeatureAudioLowLatency)
                        ? "Supports low latency audio playback."
                        : "Does not support low latency audio playback.");

                    if (AndroidGameActivity.Instance.GetSystemService(Context.AudioService) is AudioManager audioManager)
                    {
                        var result = audioManager.GetProperty(AudioManager.PropertyOutputSampleRate);
                        if (!string.IsNullOrEmpty(result))
                        {
                            frequency = int.Parse(result, CultureInfo.InvariantCulture);
                        }

                        result = audioManager.GetProperty(AudioManager.PropertyOutputFramesPerBuffer);
                        if (!string.IsNullOrEmpty(result))
                        {
                            updateSize = int.Parse(result, CultureInfo.InvariantCulture);
                        }
                    }

                    // If 4.4 or higher, then we don't need to double buffer on the application side.
                    // See http://stackoverflow.com/a/15006327
                    if (Android.OS.Build.VERSION.SdkInt >= Android.OS.BuildVersionCodes.Kitkat)
                    {
                        updateBuffers = 1;
                    }
                }
                else
                {
                    Android.Util.Log.Debug("OAL", "Android 4.2 or higher required for low latency audio playback.");
                }
                Android.Util.Log.Debug(
                    "OAL", "Using sample rate " + frequency + "Hz and " + updateBuffers + " buffers of " + updateSize + " frames.");

                // These are missing and non-standard ALC constants
                const int AlcFrequency     = 0x1007;
                const int AlcUpdateSize    = 0x1014;
                const int AlcUpdateBuffers = 0x1015;

                // Zero-terminated ALC attribute list: pairs of (attribute, value).
                Span<int> attribute = stackalloc int[]
                {
                    AlcFrequency, frequency,
                    AlcUpdateSize, updateSize,
                    AlcUpdateBuffers, updateBuffers,
                    0
                };
#elif IOS
                AVAudioSession.SharedInstance().Init();

                // NOTE: Do not override AVAudioSessionCategory set by the game developer:
                //       see https://github.com/MonoGame/MonoGame/issues/6595

                // Suspend the OpenAL context when an audio interruption (phone call,
                // alarm, etc.) begins and restore it when the interruption ends.
                EventHandler<AVAudioSessionInterruptionEventArgs> handler = (sender, e) =>
                {
                    switch (e.InterruptionType)
                    {
                    case AVAudioSessionInterruptionType.Began:
                        AVAudioSession.SharedInstance().SetActive(false);
                        ALC.MakeContextCurrent(IntPtr.Zero);
                        ALC.SuspendContext(_context);
                        break;

                    case AVAudioSessionInterruptionType.Ended:
                        AVAudioSession.SharedInstance().SetActive(true);
                        ALC.MakeContextCurrent(_context);
                        ALC.ProcessContext(_context);
                        break;
                    }
                };

                AVAudioSession.Notifications.ObserveInterruption(handler);

                // Activate the instance or else the interruption handler will not be called.
                AVAudioSession.SharedInstance().SetActive(true);

                int[] attribute = Array.Empty<int>();
#else
                int[] attribute = Array.Empty<int>();
#endif

                _context = ALC.CreateContext(Device, attribute);
                ALCHelper.CheckError("Could not create OpenAL context.");

                if (_context != IntPtr.Zero)
                {
                    ALC.MakeContextCurrent(_context);
                    ALCHelper.CheckError("Could not make OpenAL context current.");

                    // Probe optional AL extensions once, up front, so format support
                    // checks elsewhere are simple flag reads.
                    SupportsIma4    = AL.IsExtensionPresent("AL_EXT_IMA4");
                    SupportsAdpcm   = AL.IsExtensionPresent("AL_SOFT_MSADPCM");
                    SupportsEfx     = AL.IsExtensionPresent("AL_EXT_EFX");
                    SupportsFloat32 = AL.IsExtensionPresent("AL_EXT_float32");
                    return true;
                }
                return false;
            }
        }

#if ANDROID
        /// <summary>
        /// Android activity pause handler: pauses the OpenAL device so every
        /// currently playing sound is suspended by the mixer.
        /// </summary>
        void Activity_Paused(MonoGameAndroidGameView view) => ALC.DevicePause(Device);

        /// <summary>
        /// Android activity resume handler: resumes the OpenAL device so sounds
        /// that were playing when the activity was paused continue playback.
        /// </summary>
        void Activity_Resumed(MonoGameAndroidGameView view) => ALC.DeviceResume(Device);