Example #1
        // --- initialization and deinitialization ---

        /// <summary>Initializes audio. A call to Deinitialize must be made when terminating the program.</summary>
        /// <returns>Whether initializing audio was successful.</returns>
        public void Initialize(HostInterface host, SoundRange range)
        {
            if (host.Platform == HostPlatform.MicrosoftWindows)
            {
                /*
                 *  If shipping an AnyCPU build with OpenAL Soft / SDL, these are architecture-specific P/Invokes.
                 *  Add the appropriate search path so they resolve (common convention).
                 */
                string path = Path.GetDirectoryName(Assembly.GetEntryAssembly().Location);
                path = Path.Combine(path, IntPtr.Size == 4 ? "x86" : "x64");
                bool ok = SetDllDirectory(path);
                if (!ok)
                {
                    throw new System.ComponentModel.Win32Exception();
                }
            }
            Deinitialize();

            CurrentHost = host;

            switch (range)
            {
            case SoundRange.Low:
                OuterRadiusFactorMinimum      = 2.0;
                OuterRadiusFactorMaximum      = 8.0;
                OuterRadiusFactorMaximumSpeed = 1.0;
                break;

            case SoundRange.Medium:
                OuterRadiusFactorMinimum      = 4.0;
                OuterRadiusFactorMaximum      = 16.0;
                OuterRadiusFactorMaximumSpeed = 2.0;
                break;

            case SoundRange.High:
                OuterRadiusFactorMinimum      = 6.0;
                OuterRadiusFactorMaximum      = 24.0;
                OuterRadiusFactorMaximumSpeed = 3.0;
                break;
            }
            OuterRadiusFactor      = Math.Sqrt(OuterRadiusFactorMinimum * OuterRadiusFactorMaximum);
            OuterRadiusFactorSpeed = 0.0;
            OpenAlDevice           = Alc.OpenDevice(null);
            string deviceName = Alc.GetString(OpenAlDevice, AlcGetString.DefaultDeviceSpecifier);

            if ((Environment.OSVersion.Platform == PlatformID.Win32S || Environment.OSVersion.Platform == PlatformID.Win32Windows || Environment.OSVersion.Platform == PlatformID.Win32NT) && deviceName == "Generic Software")
            {
                /*
                 *  The Creative OpenAL implementation on Windows seems to be limited to a maximum of 16 simultaneous sounds.
                 *  We now ship OpenAL Soft, but detect this case so playback doesn't glitch.
                 *  Further note that the current version of OpenAL Soft (1.20.0 at the time of writing) does not play well with OpenTK.
                 *  The version in use is 1.17.0, found here: https://openal-soft.org/openal-binaries/
                 */
                systemMaxSounds = 16;
            }
            try
            {
                OpenAlMic = new AudioCapture(AudioCapture.DefaultDevice, SamplingRate, ALFormat.Mono16, BufferSize);
            }
            catch
            {
                OpenAlMic = null;
            }

            if (OpenAlDevice != IntPtr.Zero)
            {
                OpenAlContext = Alc.CreateContext(OpenAlDevice, (int[])null);
                if (OpenAlContext != ContextHandle.Zero)
                {
                    Alc.MakeContextCurrent(OpenAlContext);
                    try
                    {
                        AL.SpeedOfSound(343.0f);
                    }
                    catch
                    {
                        MessageBox.Show(Translations.GetInterfaceString("errors_sound_openal_version"), Translations.GetInterfaceString("program_title"), MessageBoxButtons.OK, MessageBoxIcon.Hand);
                    }
                    AL.DistanceModel(ALDistanceModel.None);
                    return;
                }
                Alc.CloseDevice(OpenAlDevice);
                OpenAlDevice = IntPtr.Zero;
                if (OpenAlMic != null)
                {
                    OpenAlMic.Dispose();
                    OpenAlMic = null;
                }
                MessageBox.Show(Translations.GetInterfaceString("errors_sound_openal_context"), Translations.GetInterfaceString("program_title"), MessageBoxButtons.OK, MessageBoxIcon.Hand);
                return;
            }
            OpenAlContext = ContextHandle.Zero;
            MessageBox.Show(Translations.GetInterfaceString("errors_sound_openal_device"), Translations.GetInterfaceString("program_title"), MessageBoxButtons.OK, MessageBoxIcon.Hand);
        }
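Example #1 calls SetDllDirectory, but its P/Invoke declaration is not part of the snippet. A minimal sketch of the declaration it presumably relies on is shown below; the exact attribute settings and placement in the original class are an assumption (requires System.Runtime.InteropServices):

        // Assumed P/Invoke for the SetDllDirectory call above. kernel32's SetDllDirectoryW
        // returns false on failure, which matches the Win32Exception thrown in the snippet.
        [DllImport("kernel32.dll", CharSet = CharSet.Unicode, SetLastError = true)]
        private static extern bool SetDllDirectory(string lpPathName);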
Example #2
        // --- initialization and deinitialization ---

        /// <summary>Initializes audio. A call to Deinitialize must be made when terminating the program.</summary>
        /// <returns>Whether initializing audio was successful.</returns>
        public void Initialize(HostInterface host, SoundRange range)
        {
            Deinitialize();

            CurrentHost = host;

            switch (range)
            {
            case SoundRange.Low:
                OuterRadiusFactorMinimum      = 2.0;
                OuterRadiusFactorMaximum      = 8.0;
                OuterRadiusFactorMaximumSpeed = 1.0;
                break;

            case SoundRange.Medium:
                OuterRadiusFactorMinimum      = 4.0;
                OuterRadiusFactorMaximum      = 16.0;
                OuterRadiusFactorMaximumSpeed = 2.0;
                break;

            case SoundRange.High:
                OuterRadiusFactorMinimum      = 6.0;
                OuterRadiusFactorMaximum      = 24.0;
                OuterRadiusFactorMaximumSpeed = 3.0;
                break;
            }
            OuterRadiusFactor      = Math.Sqrt(OuterRadiusFactorMinimum * OuterRadiusFactorMaximum);
            OuterRadiusFactorSpeed = 0.0;
            OpenAlDevice           = Alc.OpenDevice(null);
            try
            {
                OpenAlMic = new AudioCapture(AudioCapture.DefaultDevice, SamplingRate, ALFormat.Mono16, BufferSize);
            }
            catch
            {
            }

            if (OpenAlDevice != IntPtr.Zero)
            {
                OpenAlContext = Alc.CreateContext(OpenAlDevice, (int[])null);
                if (OpenAlContext != ContextHandle.Zero)
                {
                    Alc.MakeContextCurrent(OpenAlContext);
                    try
                    {
                        AL.SpeedOfSound(343.0f);
                    }
                    catch
                    {
                        MessageBox.Show(Translations.GetInterfaceString("errors_sound_openal_version"), Translations.GetInterfaceString("program_title"), MessageBoxButtons.OK, MessageBoxIcon.Hand);
                    }
                    AL.DistanceModel(ALDistanceModel.None);
                    return;
                }
                Alc.CloseDevice(OpenAlDevice);
                OpenAlDevice = IntPtr.Zero;
                if (OpenAlMic != null)
                {
                    OpenAlMic.Dispose();
                    OpenAlMic = null;
                }
                MessageBox.Show(Translations.GetInterfaceString("errors_sound_openal_context"), Translations.GetInterfaceString("program_title"), MessageBoxButtons.OK, MessageBoxIcon.Hand);
                return;
            }
            OpenAlContext = ContextHandle.Zero;
            MessageBox.Show(Translations.GetInterfaceString("errors_sound_openal_device"), Translations.GetInterfaceString("program_title"), MessageBoxButtons.OK, MessageBoxIcon.Hand);
        }
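The XML comment requires a matching Deinitialize call, but that method is not part of these snippets. A hedged sketch of what such a tear-down typically looks like with the same OpenTK types (the actual implementation in the source may differ):

        public void Deinitialize()
        {
            // Detach the current context before destroying it, then release the device and the microphone.
            Alc.MakeContextCurrent(ContextHandle.Zero);
            if (OpenAlContext != ContextHandle.Zero)
            {
                Alc.DestroyContext(OpenAlContext);
                OpenAlContext = ContextHandle.Zero;
            }
            if (OpenAlDevice != IntPtr.Zero)
            {
                Alc.CloseDevice(OpenAlDevice);
                OpenAlDevice = IntPtr.Zero;
            }
            if (OpenAlMic != null)
            {
                OpenAlMic.Dispose();
                OpenAlMic = null;
            }
        }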
Example #3
        public SoundManager()
        {
            loadedSounds      = new List <Sound>();
            streamingThread   = null;
            categoryModifiers = null;

            alcDevice = Alc.OpenDevice(null);
            if (alcDevice == null)
            {
                DebugConsole.ThrowError("Failed to open an ALC device! Disabling audio playback...");
                Disabled = true;
                return;
            }

            int alcError = Alc.GetError(alcDevice);

            if (alcError != Alc.NoError)
            {
                //The audio device probably wasn't ready, this happens quite often
                //Just wait a while and try again
                Thread.Sleep(100);

                alcDevice = Alc.OpenDevice(null);

                alcError = Alc.GetError(alcDevice);
                if (alcError != Alc.NoError)
                {
                    DebugConsole.ThrowError("Error initializing ALC device: " + alcError.ToString() + ". Disabling audio playback...");
                    Disabled = true;
                    return;
                }
            }

            int[] alcContextAttrs = new int[] { };
            alcContext = Alc.CreateContext(alcDevice, alcContextAttrs);
            if (alcContext == null)
            {
                DebugConsole.ThrowError("Failed to create an ALC context! (error code: " + Alc.GetError(alcDevice).ToString() + "). Disabling audio playback...");
                Disabled = true;
                return;
            }

            if (!Alc.MakeContextCurrent(alcContext))
            {
                DebugConsole.ThrowError("Failed to assign the current ALC context! (error code: " + Alc.GetError(alcDevice).ToString() + "). Disabling audio playback...");
                Disabled = true;
                return;
            }

            alcError = Alc.GetError(alcDevice);
            if (alcError != Alc.NoError)
            {
                DebugConsole.ThrowError("Error after assigning ALC context: " + alcError.ToString() + ". Disabling audio playback...");
                Disabled = true;
                return;
            }

            int alError = Al.NoError;

            sourcePools = new SoundSourcePool[2];
            sourcePools[(int)SourcePoolIndex.Default]     = new SoundSourcePool(SOURCE_COUNT);
            playingChannels[(int)SourcePoolIndex.Default] = new SoundChannel[SOURCE_COUNT];

            sourcePools[(int)SourcePoolIndex.Voice]     = new SoundSourcePool(8);
            playingChannels[(int)SourcePoolIndex.Voice] = new SoundChannel[8];

            Al.DistanceModel(Al.LinearDistanceClamped);

            alError = Al.GetError();
            if (alError != Al.NoError)
            {
                DebugConsole.ThrowError("Error setting distance model: " + Al.GetErrorString(alError) + ". Disabling audio playback...");
                Disabled = true;
                return;
            }

            ListenerPosition     = Vector3.Zero;
            ListenerTargetVector = new Vector3(0.0f, 0.0f, 1.0f);
            ListenerUpVector     = new Vector3(0.0f, -1.0f, 0.0f);
        }
Example #4
        /// <summary>
        /// Open the sound device, sets up an audio context, and makes the new context
        /// the current context. Note that this method will stop the playback of
        /// music that was running prior to the game start. If any error occurs, then
        /// the state of the controller is reset.
        /// </summary>
        /// <returns>True if the sound controller was setup, and false if not.</returns>
        private bool OpenSoundController()
        {
            try
            {
                _device = Alc.OpenDevice(string.Empty);
                EffectsExtension.device = _device;
            }
            catch (DllNotFoundException)
            {
                // Rethrow without resetting the stack trace; a missing OpenAL native library should propagate unchanged.
                throw;
            }
            catch (Exception ex)
            {
                throw new NoAudioHardwareException("OpenAL device could not be initialized.", ex);
            }

            AlcHelper.CheckError("Could not open OpenAL device");

            if (_device != IntPtr.Zero)
            {
#if ANDROID
                // Attach activity event handlers so we can pause and resume all playing sounds
                MonoGameAndroidGameView.OnPauseGameThread  += Activity_Paused;
                MonoGameAndroidGameView.OnResumeGameThread += Activity_Resumed;

                // Query the device for the ideal frequency and update buffer size so
                // we can get the low latency sound path.

                /*
                 * The recommended sequence is:
                 *
                 * Check for feature "android.hardware.audio.low_latency" using code such as this:
                 * import android.content.pm.PackageManager;
                 * ...
                 * PackageManager pm = getContext().getPackageManager();
                 * boolean claimsFeature = pm.hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
                 * Check for API level 17 or higher, to confirm use of android.media.AudioManager.getProperty().
                 * Get the native or optimal output sample rate and buffer size for this device's primary output stream, using code such as this:
                 * import android.media.AudioManager;
                 * ...
                 * AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
                 * String sampleRate = am.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE));
                 * String framesPerBuffer = am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER));
                 * Note that sampleRate and framesPerBuffer are Strings. First check for null and then convert to int using Integer.parseInt().
                 * Now use OpenSL ES to create an AudioPlayer with PCM buffer queue data locator.
                 *
                 * See http://stackoverflow.com/questions/14842803/low-latency-audio-playback-on-android
                 */

                int frequency     = DEFAULT_FREQUENCY;
                int updateSize    = DEFAULT_UPDATE_SIZE;
                int updateBuffers = DEFAULT_UPDATE_BUFFER_COUNT;
                if (Android.OS.Build.VERSION.SdkInt >= Android.OS.BuildVersionCodes.JellyBeanMr1)
                {
                    Android.Util.Log.Debug("OAL", Game.Activity.PackageManager.HasSystemFeature(PackageManager.FeatureAudioLowLatency) ? "Supports low latency audio playback." : "Does not support low latency audio playback.");

                    var audioManager = Game.Activity.GetSystemService(Context.AudioService) as AudioManager;
                    if (audioManager != null)
                    {
                        var result = audioManager.GetProperty(AudioManager.PropertyOutputSampleRate);
                        if (!string.IsNullOrEmpty(result))
                        {
                            frequency = int.Parse(result, CultureInfo.InvariantCulture);
                        }
                        result = audioManager.GetProperty(AudioManager.PropertyOutputFramesPerBuffer);
                        if (!string.IsNullOrEmpty(result))
                        {
                            updateSize = int.Parse(result, CultureInfo.InvariantCulture);
                        }
                    }

                    // If 4.4 or higher, then we don't need to double buffer on the application side.
                    // See http://stackoverflow.com/a/15006327
                    if (Android.OS.Build.VERSION.SdkInt >= Android.OS.BuildVersionCodes.Kitkat)
                    {
                        updateBuffers = 1;
                    }
                }
                else
                {
                    Android.Util.Log.Debug("OAL", "Android 4.2 or higher required for low latency audio playback.");
                }
                Android.Util.Log.Debug("OAL", "Using sample rate " + frequency + "Hz and " + updateBuffers + " buffers of " + updateSize + " frames.");

                // These are missing and non-standard ALC constants
                const int AlcFrequency     = 0x1007;
                const int AlcUpdateSize    = 0x1014;
                const int AlcUpdateBuffers = 0x1015;

                int[] attribute = new[]
                {
                    AlcFrequency, frequency,
                    AlcUpdateSize, updateSize,
                    AlcUpdateBuffers, updateBuffers,
                    0
                };
#elif IOS
                EventHandler <AVAudioSessionInterruptionEventArgs> handler = delegate(object sender, AVAudioSessionInterruptionEventArgs e) {
                    switch (e.InterruptionType)
                    {
                    case AVAudioSessionInterruptionType.Began:
                        AVAudioSession.SharedInstance().SetActive(false);
                        Alc.MakeContextCurrent(IntPtr.Zero);
                        Alc.SuspendContext(_context);
                        break;

                    case AVAudioSessionInterruptionType.Ended:
                        AVAudioSession.SharedInstance().SetActive(true);
                        Alc.MakeContextCurrent(_context);
                        Alc.ProcessContext(_context);
                        break;
                    }
                };
                AVAudioSession.Notifications.ObserveInterruption(handler);

                int[] attribute = new int[0];
#else
                int[] attribute = new int[0];
#endif

                _context = Alc.CreateContext(_device, attribute);
#if DESKTOPGL
                _oggstreamer = new OggStreamer();
#endif

                AlcHelper.CheckError("Could not create OpenAL context");

                if (_context != NullContext)
                {
                    Alc.MakeContextCurrent(_context);
                    AlcHelper.CheckError("Could not make OpenAL context current");
                    SupportsIma4  = AL.IsExtensionPresent("AL_EXT_IMA4");
                    SupportsAdpcm = AL.IsExtensionPresent("AL_SOFT_MSADPCM");
                    SupportsEfx   = AL.IsExtensionPresent("AL_EXT_EFX");
                    SupportsIeee  = AL.IsExtensionPresent("AL_EXT_float32");
                    return(true);
                }
            }
            return(false);
        }
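AlcHelper.CheckError is internal to MonoGame and not shown here. A minimal error-check helper in the same spirit might look like the sketch below; the class name and signature are assumptions, not MonoGame's actual code:

        internal static class AlcErrorCheck
        {
            // Query the last ALC error for the given device and surface it as an exception.
            public static void CheckError(IntPtr device, string message)
            {
                AlcError error = Alc.GetError(device);
                if (error != AlcError.NoError)
                {
                    throw new InvalidOperationException(message + " (ALC error: " + error + ")");
                }
            }
        }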
Example #5
        public SoundManager()
        {
            loadedSounds    = new List <Sound>();
            playingChannels = new SoundChannel[SOURCE_COUNT];

            streamingThread = null;

            categoryModifiers = null;

            alcDevice = Alc.OpenDevice(null);
            if (alcDevice == null)
            {
                throw new Exception("Failed to open an ALC device!");
            }

            AlcError alcError = Alc.GetError(alcDevice);

            if (alcError != AlcError.NoError)
            {
                //The audio device probably wasn't ready, this happens quite often
                //Just wait a while and try again
                Thread.Sleep(100);

                alcDevice = Alc.OpenDevice(null);

                alcError = Alc.GetError(alcDevice);
                if (alcError != AlcError.NoError)
                {
                    throw new Exception("Error initializing ALC device: " + alcError.ToString());
                }
            }

            int[] alcContextAttrs = new int[] { };
            alcContext = Alc.CreateContext(alcDevice, alcContextAttrs);
            if (alcContext == null)
            {
                throw new Exception("Failed to create an ALC context! (error code: " + Alc.GetError(alcDevice).ToString() + ")");
            }

            if (!Alc.MakeContextCurrent(alcContext))
            {
                throw new Exception("Failed to assign the current ALC context! (error code: " + Alc.GetError(alcDevice).ToString() + ")");
            }

            alcError = Alc.GetError(alcDevice);
            if (alcError != AlcError.NoError)
            {
                throw new Exception("Error after assigning ALC context: " + alcError.ToString());
            }

            ALError alError = ALError.NoError;

            alSources = new uint[SOURCE_COUNT];
            for (int i = 0; i < SOURCE_COUNT; i++)
            {
                AL.GenSource(out alSources[i]);
                alError = AL.GetError();
                if (alError != ALError.NoError)
                {
                    throw new Exception("Error generating alSource[" + i.ToString() + "]: " + AL.GetErrorString(alError));
                }

                if (!AL.IsSource(alSources[i]))
                {
                    throw new Exception("Generated alSource[" + i.ToString() + "] is invalid!");
                }

                AL.SourceStop(alSources[i]);
                alError = AL.GetError();
                if (alError != ALError.NoError)
                {
                    throw new Exception("Error stopping newly generated alSource[" + i.ToString() + "]: " + AL.GetErrorString(alError));
                }

                AL.Source(alSources[i], ALSourcef.MinGain, 0.0f);
                alError = AL.GetError();
                if (alError != ALError.NoError)
                {
                    throw new Exception("Error setting min gain: " + AL.GetErrorString(alError));
                }

                AL.Source(alSources[i], ALSourcef.MaxGain, 1.0f);
                alError = AL.GetError();
                if (alError != ALError.NoError)
                {
                    throw new Exception("Error setting max gain: " + AL.GetErrorString(alError));
                }

                AL.Source(alSources[i], ALSourcef.RolloffFactor, 1.0f);
                alError = AL.GetError();
                if (alError != ALError.NoError)
                {
                    throw new Exception("Error setting rolloff factor: " + AL.GetErrorString(alError));
                }
            }

            AL.DistanceModel(ALDistanceModel.LinearDistanceClamped);

            alError = AL.GetError();
            if (alError != ALError.NoError)
            {
                throw new Exception("Error setting distance model: " + AL.GetErrorString(alError));
            }

            if (Alc.IsExtensionPresent(IntPtr.Zero, "ALC_EXT_CAPTURE"))
            {
                alcCaptureDeviceNames = new List <string>(Alc.GetString(IntPtr.Zero, AlcGetStringList.CaptureDeviceSpecifier));
            }
            else
            {
                alcCaptureDeviceNames = null;
            }

            listenerOrientation  = new float[6];
            ListenerPosition     = Vector3.Zero;
            ListenerTargetVector = new Vector3(0.0f, 0.0f, 1.0f);
            ListenerUpVector     = new Vector3(0.0f, -1.0f, 0.0f);
        }
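The constructor above allocates a six-element listenerOrientation array, but the code that feeds it to OpenAL is not part of the snippet. A hedged sketch of how the listener fields are typically pushed to AL (the method name is an assumption):

        private void UpdateListener()
        {
            // OpenAL expects the orientation as six floats: the "at" vector followed by the "up" vector.
            listenerOrientation[0] = ListenerTargetVector.X;
            listenerOrientation[1] = ListenerTargetVector.Y;
            listenerOrientation[2] = ListenerTargetVector.Z;
            listenerOrientation[3] = ListenerUpVector.X;
            listenerOrientation[4] = ListenerUpVector.Y;
            listenerOrientation[5] = ListenerUpVector.Z;
            AL.Listener(ALListenerfv.Orientation, ref listenerOrientation);
            AL.Listener(ALListener3f.Position, ListenerPosition.X, ListenerPosition.Y, ListenerPosition.Z);
        }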
Example #6
        // Methods

        public unsafe CSoundDeviceOpenAL(IntPtr hWnd, long n遅延時間ms, bool bUseOSTimer)
        {
            Trace.TraceInformation("OpenAL の初期化を開始します。");

            this.e出力デバイス     = ESoundDeviceType.Unknown;
            this.n実バッファサイズms = this.n実出力遅延ms = n遅延時間ms;
            this.tmシステムタイマ   = new CTimer(CTimer.E種別.MultiMedia);

            #region [ Create the OpenAL sound device ]

            //Initialize
            this.device  = Alc.OpenDevice(null);
            this.context = Alc.CreateContext(this.device, (int *)null);
            Alc.MakeContextCurrent(this.context);

            // Check the version
            var version  = AL.Get(ALGetString.Version);
            var vendor   = AL.Get(ALGetString.Vendor);
            var renderer = AL.Get(ALGetString.Renderer);
            Console.WriteLine("OpenAL Version=" + version);
            Console.WriteLine("OpenAL Vendor=" + vendor);
            Console.WriteLine("OpenAL Renderer=" + renderer);

            // Device creation is complete.
            this.e出力デバイス = ESoundDeviceType.OpenAL;
            #endregion

            if (!bUseOSTimer)
            {
                #region [ Create the sound buffer used for measuring elapsed time and start looped playback. ]
                //-----------------

                // Create a silent sound whose length equals the unit carry-over interval [seconds].

                uint nデータサイズbyte = n単位繰り上げ間隔sec * 44100 * 2 * 2;
                var  ms          = new MemoryStream();
                var  bw          = new BinaryWriter(ms);
                bw.Write((uint)0x46464952);                                     // 'RIFF'
                bw.Write((uint)(44 + nデータサイズbyte - 8));                         // file size - 8
                bw.Write((uint)0x45564157);                                     // 'WAVE'
                bw.Write((uint)0x20746d66);                                     // 'fmt '
                bw.Write((uint)16);                                             // chunk size in bytes
                bw.Write((ushort)1);                                            // format ID (linear PCM)
                bw.Write((ushort)2);                                            // number of channels
                bw.Write((uint)44100);                                          // sampling frequency
                bw.Write((uint)(44100 * 2 * 2));                                // bytes/sec
                bw.Write((ushort)(2 * 2));                                      // block size
                bw.Write((ushort)16);                                           // bits/sample
                bw.Write((uint)0x61746164);                                     // 'data'
                bw.Write((uint)nデータサイズbyte);                                    // data length
                for (int i = 0; i < nデータサイズbyte / sizeof(long); i++)            // PCM data
                {
                    bw.Write((long)0);
                }
                var byArrWaveFleImage = ms.ToArray();
                bw.Close();
                ms = null;
                bw = null;
                this.sd経過時間計測用サウンドバッファ = this.tサウンドを作成する(byArrWaveFleImage, ESoundGroup.Unknown);

                CSound.listインスタンス.Remove(this.sd経過時間計測用サウンドバッファ);                       // special-purpose sound, so exclude it from the instance list.

                // Start looped playback of the sound.

                this.nループ回数 = 0;
                this.n前回の位置 = 0;
                AL.Source(this.sd経過時間計測用サウンドバッファ.SourceOpen[0], ALSourceb.Looping, true);
                AL.SourcePlay(this.sd経過時間計測用サウンドバッファ.SourceOpen[0]);
                this.n前に経過時間を測定したシステム時刻ms = this.tmシステムタイマ.nシステム時刻ms;
                //-----------------
                #endregion
            }
            else
            {
                ctimer = new CTimer(CTimer.E種別.MultiMedia);
            }
            Trace.TraceInformation("OpenAL を初期化しました。({0})", bUseOSTimer? "OStimer" : "FDKtimer");
        }
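As a quick check of the data-chunk size used above: the formula is interval [s] × 44100 samples/s × 2 channels × 2 bytes/sample. For a hypothetical 10-second carry-over interval (the actual value of n単位繰り上げ間隔sec is defined elsewhere in the class):

        const uint intervalSec    = 10;                                            // assumed value, for illustration only
        const uint sampleRate     = 44100;
        const uint channels       = 2;
        const uint bytesPerSample = 2;
        uint dataSizeBytes = intervalSec * sampleRate * channels * bytesPerSample;  // 1,764,000 bytes
        uint riffSizeField = 44 + dataSizeBytes - 8;                                // the value written after 'RIFF'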
Example #7

        // Loads all available audio devices into the available_*_devices lists.
        static AudioDeviceEnumerator()
        {
            IntPtr        dummy_device  = IntPtr.Zero;
            ContextHandle dummy_context = ContextHandle.Zero;

            try
            {
                Debug.WriteLine("Enumerating audio devices.");
                Debug.Indent();

                // need a dummy context for correct results
                dummy_device  = Alc.OpenDevice(null);
                dummy_context = Alc.CreateContext(dummy_device, (int[])null);
                bool     dummy_success = Alc.MakeContextCurrent(dummy_context);
                AlcError dummy_error   = Alc.GetError(dummy_device);
                if (!dummy_success || dummy_error != AlcError.NoError)
                {
                    throw new AudioContextException("Failed to create dummy Context. Device (" + dummy_device.ToString() +
                                                    ") Context (" + dummy_context.Handle.ToString() +
                                                    ") MakeContextCurrent " + (dummy_success ? "succeeded" : "failed") +
                                                    ", Alc Error (" + dummy_error.ToString() + ") " + Alc.GetString(IntPtr.Zero, (AlcGetString)dummy_error));
                }

                // Get a list of all known playback devices, using best extension available
                if (Alc.IsExtensionPresent(IntPtr.Zero, "ALC_ENUMERATION_EXT"))
                {
                    Version = AlcVersion.Alc1_1;
                    if (Alc.IsExtensionPresent(IntPtr.Zero, "ALC_ENUMERATE_ALL_EXT"))
                    {
                        available_playback_devices.AddRange(Alc.GetString(IntPtr.Zero, AlcGetStringList.AllDevicesSpecifier));
                        DefaultPlaybackDevice = Alc.GetString(IntPtr.Zero, AlcGetString.DefaultAllDevicesSpecifier);
                    }
                    else
                    {
                        available_playback_devices.AddRange(Alc.GetString(IntPtr.Zero, AlcGetStringList.DeviceSpecifier));
                        DefaultPlaybackDevice = Alc.GetString(IntPtr.Zero, AlcGetString.DefaultDeviceSpecifier);
                    }
                }
                else
                {
                    Version = AlcVersion.Alc1_0;
                    Debug.Print("Device enumeration extension not available. Failed to enumerate playback devices.");
                }
                AlcError playback_err = Alc.GetError(dummy_device);
                if (playback_err != AlcError.NoError)
                {
                    throw new AudioContextException("Alc Error occured when querying available playback devices. " + playback_err.ToString());
                }

                // Get a list of all known recording devices, at least ALC_ENUMERATION_EXT is needed too
                if (Version == AlcVersion.Alc1_1 && Alc.IsExtensionPresent(IntPtr.Zero, "ALC_EXT_CAPTURE"))
                {
                    available_recording_devices.AddRange(Alc.GetString(IntPtr.Zero, AlcGetStringList.CaptureDeviceSpecifier));
                    DefaultRecordingDevice = Alc.GetString(IntPtr.Zero, AlcGetString.CaptureDefaultDeviceSpecifier);
                }
                else
                {
                    Debug.Print("Capture extension not available. Failed to enumerate recording devices.");
                }
                AlcError record_err = Alc.GetError(dummy_device);
                if (record_err != AlcError.NoError)
                {
                    throw new AudioContextException("Alc Error occured when querying available recording devices. " + record_err.ToString());
                }

#if DEBUG
                Debug.WriteLine("Found playback devices:");
                foreach (string s in available_playback_devices)
                {
                    Debug.WriteLine(s);
                }

                Debug.WriteLine("Default playback device: " + DefaultPlaybackDevice);

                Debug.WriteLine("Found recording devices:");
                foreach (string s in available_recording_devices)
                {
                    Debug.WriteLine(s);
                }

                Debug.WriteLine("Default recording device: " + DefaultRecordingDevice);
#endif
            }
            catch (DllNotFoundException e)
            {
                Trace.WriteLine(e.ToString());
                IsOpenALSupported = false;
            }
            catch (AudioContextException ace)
            {
                Trace.WriteLine(ace.ToString());
                IsOpenALSupported = false;
            }
            finally
            {
                Debug.Unindent();

                if (IsOpenALSupported)
                {
                    try
                    {
                        // clean up the dummy context
                        Alc.MakeContextCurrent(ContextHandle.Zero);
                        if (dummy_context != ContextHandle.Zero && dummy_context.Handle != IntPtr.Zero)
                        {
                            Alc.DestroyContext(dummy_context);
                        }
                        if (dummy_device != IntPtr.Zero)
                        {
                            Alc.CloseDevice(dummy_device);
                        }
                    }
                    catch
                    {
                        IsOpenALSupported = false;
                    }
                }
            }
        }
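The enumerator above is an internal OpenTK class; application code usually reads the results through the public API instead. A small hedged usage sketch:

        // Enumerate playback and capture devices via OpenTK's public surface,
        // which is backed by the static constructor shown above.
        foreach (string playback in AudioContext.AvailableDevices)
        {
            Console.WriteLine("Playback device: " + playback);
        }
        foreach (string capture in AudioCapture.AvailableDevices)
        {
            Console.WriteLine("Capture device: " + capture);
        }
        Console.WriteLine("Default playback device: " + AudioContext.DefaultDevice);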
Example #8
 public static string GetDevice()
 {
     return(Alc.GetString(Alc.OpenDevice(null), AlcGetString.DeviceSpecifier));
 }
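Note that this one-liner opens a device solely to read its name and never closes it. A hedged variant that releases the handle, assuming the same OpenTK Alc binding:

 public static string GetDeviceName()
 {
     // Open the default device only long enough to query its specifier, then close it again.
     IntPtr device = Alc.OpenDevice(null);
     try
     {
         return Alc.GetString(device, AlcGetString.DeviceSpecifier);
     }
     finally
     {
         if (device != IntPtr.Zero)
         {
             Alc.CloseDevice(device);
         }
     }
 }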
Example #9
        public SoundManager()
        {
            loadedSounds      = new List <Sound>();
            streamingThread   = null;
            categoryModifiers = null;

            int alcError = Alc.NoError;

            string deviceName = Alc.GetString(IntPtr.Zero, Alc.DefaultDeviceSpecifier);

            DebugConsole.NewMessage($"Attempting to open ALC device \"{deviceName}\"");

            alcDevice = IntPtr.Zero;
            for (int i = 0; i < 3; i++)
            {
                alcDevice = Alc.OpenDevice(deviceName);
                if (alcDevice == IntPtr.Zero)
                {
                    DebugConsole.NewMessage($"ALC device initialization attempt #{i + 1} failed: device is null");
                }
                else
                {
                    alcError = Alc.GetError(alcDevice);
                    if (alcError != Alc.NoError)
                    {
                        DebugConsole.NewMessage($"ALC device initialization attempt #{i + 1} failed: error code {Alc.GetErrorString(alcError)}");
                        bool closed = Alc.CloseDevice(alcDevice);
                        if (!closed)
                        {
                            DebugConsole.NewMessage($"Failed to close ALC device");
                        }
                        alcDevice = IntPtr.Zero;
                    }
                }
            }
            if (alcDevice == IntPtr.Zero)
            {
                DebugConsole.ThrowError("ALC device creation failed too many times!");
                Disabled = true;
                return;
            }

            int[] alcContextAttrs = new int[] { };
            alcContext = Alc.CreateContext(alcDevice, alcContextAttrs);
            if (alcContext == null)
            {
                DebugConsole.ThrowError("Failed to create an ALC context! (error code: " + Alc.GetError(alcDevice).ToString() + "). Disabling audio playback...");
                Disabled = true;
                return;
            }

            if (!Alc.MakeContextCurrent(alcContext))
            {
                DebugConsole.ThrowError("Failed to assign the current ALC context! (error code: " + Alc.GetError(alcDevice).ToString() + "). Disabling audio playback...");
                Disabled = true;
                return;
            }

            alcError = Alc.GetError(alcDevice);
            if (alcError != Alc.NoError)
            {
                DebugConsole.ThrowError("Error after assigning ALC context: " + Alc.GetErrorString(alcError) + ". Disabling audio playback...");
                Disabled = true;
                return;
            }

            sourcePools = new SoundSourcePool[2];
            sourcePools[(int)SourcePoolIndex.Default]     = new SoundSourcePool(SOURCE_COUNT);
            playingChannels[(int)SourcePoolIndex.Default] = new SoundChannel[SOURCE_COUNT];

            sourcePools[(int)SourcePoolIndex.Voice]     = new SoundSourcePool(16);
            playingChannels[(int)SourcePoolIndex.Voice] = new SoundChannel[16];

            Al.DistanceModel(Al.LinearDistanceClamped);

            int alError = Al.GetError();

            if (alError != Al.NoError)
            {
                DebugConsole.ThrowError("Error setting distance model: " + Al.GetErrorString(alError) + ". Disabling audio playback...");
                Disabled = true;
                return;
            }

            ListenerPosition     = Vector3.Zero;
            ListenerTargetVector = new Vector3(0.0f, 0.0f, 1.0f);
            ListenerUpVector     = new Vector3(0.0f, -1.0f, 0.0f);

            CompressionDynamicRangeGain = 1.0f;
        }
Example #10

        /// <summary>
        /// Open the sound device, sets up an audio context, and makes the new context
        /// the current context. Note that this method will stop the playback of
        /// music that was running prior to the game start. If any error occurs, then
        /// the state of the controller is reset.
        /// </summary>
        /// <returns>True if the sound controller was setup, and false if not.</returns>
        private bool OpenSoundController()
        {
#if MONOMAC
            alcMacOSXMixerOutputRate(PREFERRED_MIX_RATE);
#endif
            try
            {
                _device = Alc.OpenDevice(string.Empty);
            }
            catch (Exception ex)
            {
                _SoundInitException = ex;
                return(false);
            }
            if (CheckALError("Could not open AL device"))
            {
                return(false);
            }
            if (_device != IntPtr.Zero)
            {
#if ANDROID
                // Attach activity event handlers so we can pause and resume all playing sounds
                AndroidGameActivity.Paused  += Activity_Paused;
                AndroidGameActivity.Resumed += Activity_Resumed;

                // Query the device for the ideal frequency and update buffer size so
                // we can get the low latency sound path.

                /*
                 * The recommended sequence is:
                 *
                 * Check for feature "android.hardware.audio.low_latency" using code such as this:
                 * import android.content.pm.PackageManager;
                 * ...
                 * PackageManager pm = getContext().getPackageManager();
                 * boolean claimsFeature = pm.hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
                 * Check for API level 17 or higher, to confirm use of android.media.AudioManager.getProperty().
                 * Get the native or optimal output sample rate and buffer size for this device's primary output stream, using code such as this:
                 * import android.media.AudioManager;
                 * ...
                 * AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
                 * String sampleRate = am.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE));
                 * String framesPerBuffer = am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER));
                 * Note that sampleRate and framesPerBuffer are Strings. First check for null and then convert to int using Integer.parseInt().
                 * Now use OpenSL ES to create an AudioPlayer with PCM buffer queue data locator.
                 *
                 * See http://stackoverflow.com/questions/14842803/low-latency-audio-playback-on-android
                 */

                int frequency     = DEFAULT_FREQUENCY;
                int updateSize    = DEFAULT_UPDATE_SIZE;
                int updateBuffers = DEFAULT_UPDATE_BUFFER_COUNT;
                if (Android.OS.Build.VERSION.SdkInt >= Android.OS.BuildVersionCodes.JellyBeanMr1)
                {
                    Android.Util.Log.Debug("OAL", Game.Activity.PackageManager.HasSystemFeature(PackageManager.FeatureAudioLowLatency) ? "Supports low latency audio playback." : "Does not support low latency audio playback.");

                    var audioManager = Game.Activity.GetSystemService(Context.AudioService) as AudioManager;
                    if (audioManager != null)
                    {
                        var result = audioManager.GetProperty(AudioManager.PropertyOutputSampleRate);
                        if (!string.IsNullOrEmpty(result))
                        {
                            frequency = int.Parse(result, CultureInfo.InvariantCulture);
                        }
                        result = audioManager.GetProperty(AudioManager.PropertyOutputFramesPerBuffer);
                        if (!string.IsNullOrEmpty(result))
                        {
                            updateSize = int.Parse(result, CultureInfo.InvariantCulture);
                        }
                    }

                    // If 4.4 or higher, then we don't need to double buffer on the application side.
                    // See http://stackoverflow.com/a/15006327
                    // Use the explicit value rather than a constant as the 4.2 SDK (the build SDK) does not define a constant for 4.4.
                    if ((int)Android.OS.Build.VERSION.SdkInt >= 19)
                    {
                        updateBuffers = 1;
                    }
                }
                else
                {
                    Android.Util.Log.Debug("OAL", "Android 4.2 or higher required for low latency audio playback.");
                }
                Android.Util.Log.Debug("OAL", "Using sample rate " + frequency + "Hz and " + updateBuffers + " buffers of " + updateSize + " frames.");

                // These are missing and non-standard ALC constants
                const int AlcFrequency     = 0x1007;
                const int AlcUpdateSize    = 0x1014;
                const int AlcUpdateBuffers = 0x1015;

                int[] attribute = new[]
                {
                    AlcFrequency, frequency,
                    AlcUpdateSize, updateSize,
                    AlcUpdateBuffers, updateBuffers,
                    0
                };
#elif IOS
                AudioSession.Initialize();

                AudioSession.Interrupted += (sender, e) => {
                    AudioSession.SetActive(false);
                    Alc.MakeContextCurrent(ContextHandle.Zero);
                    Alc.SuspendContext(_context);
                };
                AudioSession.Resumed += (sender, e) => {
                    AudioSession.SetActive(true);
                    Alc.MakeContextCurrent(_context);
                    Alc.ProcessContext(_context);
                };

                int[] attribute = new int[0];
#else
                int[] attribute = new int[0];
#endif
                _context = Alc.CreateContext(_device, attribute);
                if (CheckALError("Could not create AL context"))
                {
                    CleanUpOpenAL();
                    return(false);
                }

                if (_context != ContextHandle.Zero)
                {
                    Alc.MakeContextCurrent(_context);
                    if (CheckALError("Could not make AL context current"))
                    {
                        CleanUpOpenAL();
                        return(false);
                    }
                    return(true);
                }
            }
            return(false);
        }
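CleanUpOpenAL is called on the failure paths above but is not included in the snippet. A hedged sketch of the tear-down it presumably performs, using the fields from the snippet (the real MonoGame method may differ):

        private void CleanUpOpenAL()
        {
            // Release the context first, then the device, reversing OpenSoundController.
            Alc.MakeContextCurrent(ContextHandle.Zero);
            if (_context != ContextHandle.Zero)
            {
                Alc.DestroyContext(_context);
                _context = ContextHandle.Zero;
            }
            if (_device != IntPtr.Zero)
            {
                Alc.CloseDevice(_device);
                _device = IntPtr.Zero;
            }
        }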
Example #11
        /// \internal
        /// <summary>Creates the audio context using the specified device.</summary>
        /// <param name="device">The device descriptor obtained through AudioContext.AvailableDevices, or null for the default device.</param>
        /// <param name="freq">Frequency for mixing output buffer, in units of Hz. Pass 0 for driver default.</param>
        /// <param name="refresh">Refresh intervals, in units of Hz. Pass 0 for driver default.</param>
        /// <param name="sync">Flag, indicating a synchronous context.</param>
        /// <param name="enableEfx">Indicates whether the EFX extension should be initialized, if present.</param>
        /// <param name="efxAuxiliarySends">Requires EFX enabled. The number of desired Auxiliary Sends per source.</param>
        /// <exception cref="ArgumentOutOfRangeException">Occurs when a specified parameter is invalid.</exception>
        /// <exception cref="AudioDeviceException">
        /// Occurs when the specified device is not available, or is in use by another program.
        /// </exception>
        /// <exception cref="AudioContextException">
        /// Occurs when an audio context could not be created with the specified parameters.
        /// </exception>
        /// <exception cref="NotSupportedException">
        /// Occurs when an AudioContext already exists.</exception>
        /// <remarks>
        /// <para>For maximum compatibility, you are strongly recommended to use the default constructor.</para>
        /// <para>Multiple AudioContexts are not supported at this point.</para>
        /// <para>
        /// The number of auxiliary EFX sends depends on the audio hardware and drivers. Most Realtek devices, as well
        /// as the Creative SB Live!, support 1 auxiliary send. Creative's Audigy and X-Fi series support 4 sends.
        /// Values higher than supported will be clamped by the driver.
        /// </para>
        /// </remarks>
        void CreateContext(string device, int freq, int refresh, bool sync, bool enableEfx, MaxAuxiliarySends efxAuxiliarySends)
        {
            if (!AudioDeviceEnumerator.IsOpenALSupported)
            {
                throw new DllNotFoundException(AL.Lib);
            }

            if (AudioDeviceEnumerator.Version == AudioDeviceEnumerator.AlcVersion.Alc1_1 && AudioDeviceEnumerator.AvailablePlaybackDevices.Count == 0)    // Alc 1.0 does not support device enumeration.
            {
                throw new NotSupportedException("No audio hardware is available.");
            }
            if (context_exists)
            {
                throw new NotSupportedException("Multiple AudioContexts are not supported.");
            }
            if (freq < 0)
            {
                throw new ArgumentOutOfRangeException("freq", freq, "Should be greater than zero.");
            }
            if (refresh < 0)
            {
                throw new ArgumentOutOfRangeException("refresh", refresh, "Should be greater than zero.");
            }


            if (!String.IsNullOrEmpty(device))
            {
                device_name   = device;
                device_handle = Alc.OpenDevice(device); // try to open device by name
            }
            if (device_handle == IntPtr.Zero)
            {
                device_name   = "IntPtr.Zero (null string)";
                device_handle = Alc.OpenDevice(null); // try to open unnamed default device
            }
            if (device_handle == IntPtr.Zero)
            {
                device_name   = AudioContext.DefaultDevice;
                device_handle = Alc.OpenDevice(AudioContext.DefaultDevice); // try to open named default device
            }
            if (device_handle == IntPtr.Zero)
            {
                device_name = "None";
                throw new AudioDeviceException(String.Format("Audio device '{0}' does not exist or is tied up by another application.",
                                                             String.IsNullOrEmpty(device) ? "default" : device));
            }

            CheckErrors();

            // Build the attribute list
            List <int> attributes = new List <int>();

            if (freq != 0)
            {
                attributes.Add((int)AlcContextAttributes.Frequency);
                attributes.Add(freq);
            }

            if (refresh != 0)
            {
                attributes.Add((int)AlcContextAttributes.Refresh);
                attributes.Add(refresh);
            }

            attributes.Add((int)AlcContextAttributes.Sync);
            attributes.Add(sync ? 1 : 0);

            if (enableEfx && Alc.IsExtensionPresent(device_handle, "ALC_EXT_EFX"))
            {
                int num_slots;
                switch (efxAuxiliarySends)
                {
                case MaxAuxiliarySends.One:
                case MaxAuxiliarySends.Two:
                case MaxAuxiliarySends.Three:
                case MaxAuxiliarySends.Four:
                    num_slots = (int)efxAuxiliarySends;
                    break;

                default:
                case MaxAuxiliarySends.UseDriverDefault:
                    Alc.GetInteger(device_handle, AlcGetInteger.EfxMaxAuxiliarySends, 1, out num_slots);
                    break;
                }

                attributes.Add((int)AlcContextAttributes.EfxMaxAuxiliarySends);
                attributes.Add(num_slots);
            }
            attributes.Add(0);

            context_handle = Alc.CreateContext(device_handle, attributes.ToArray());

            if (context_handle == ContextHandle.Zero)
            {
                Alc.CloseDevice(device_handle);
                throw new AudioContextException("The audio context could not be created with the specified parameters.");
            }

            CheckErrors();

            // HACK: OpenAL SI on Linux/ALSA crashes on MakeCurrent. This hack avoids calling MakeCurrent when
            // an old OpenAL version is detected. It may affect outdated OpenAL versions other than OpenAL SI,
            // but it looks like a good compromise for now.
            if (AudioDeviceEnumerator.AvailablePlaybackDevices.Count > 0)
            {
                MakeCurrent();
            }

            CheckErrors();

            device_name = Alc.GetString(device_handle, AlcGetString.DeviceSpecifier);


            lock (audio_context_lock)
            {
                available_contexts.Add(this.context_handle, this);
                context_exists = true;
            }

            // Register this resource as a disposable resource.
            ResourcesManager.AddDisposableResource(this);
        }
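The remarks above recommend the default constructor for maximum compatibility; typical consumption of the class therefore looks like this minimal usage sketch:

        // Create a context on the default device with driver-default frequency and refresh,
        // make it current, and dispose it when done.
        using (AudioContext context = new AudioContext())
        {
            Console.WriteLine("Using device: " + AudioContext.DefaultDevice);
            // AL buffer, source and listener calls can be issued here while the context is current.
        }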
Example #12
        // --- initialization and deinitialization ---

        /// <summary>Initializes audio. A call to Deinitialize must be made when terminating the program.</summary>
        /// <returns>Whether initializing audio was successful.</returns>
        public void Initialize(HostInterface host, SoundRange range)
        {
            Deinitialize();

            CurrentHost = host;

            switch (range)
            {
            case SoundRange.Low:
                OuterRadiusFactorMinimum      = 2.0;
                OuterRadiusFactorMaximum      = 8.0;
                OuterRadiusFactorMaximumSpeed = 1.0;
                break;

            case SoundRange.Medium:
                OuterRadiusFactorMinimum      = 4.0;
                OuterRadiusFactorMaximum      = 16.0;
                OuterRadiusFactorMaximumSpeed = 2.0;
                break;

            case SoundRange.High:
                OuterRadiusFactorMinimum      = 6.0;
                OuterRadiusFactorMaximum      = 24.0;
                OuterRadiusFactorMaximumSpeed = 3.0;
                break;
            }
            OuterRadiusFactor      = Math.Sqrt(OuterRadiusFactorMinimum * OuterRadiusFactorMaximum);
            OuterRadiusFactorSpeed = 0.0;
            OpenAlDevice           = Alc.OpenDevice(null);
            string deviceName = Alc.GetString(OpenAlDevice, AlcGetString.DefaultDeviceSpecifier);

            if ((Environment.OSVersion.Platform == PlatformID.Win32S || Environment.OSVersion.Platform == PlatformID.Win32Windows || Environment.OSVersion.Platform == PlatformID.Win32NT) && deviceName == "Generic Software")
            {
                /*
                 *  The Creative OpenAL implementation on Windows seems to be limited to a maximum of 16 simultaneous sounds.
                 *  We now ship OpenAL Soft, but detect this case so playback doesn't glitch.
                 *  Further note that the current version of OpenAL Soft (1.20.0 at the time of writing) does not play well with OpenTK.
                 *  The version in use is 1.17.0, found here: https://openal-soft.org/openal-binaries/
                 */
                systemMaxSounds = 16;
            }
            try
            {
                OpenAlMic = new AudioCapture(AudioCapture.DefaultDevice, SamplingRate, ALFormat.Mono16, BufferSize);
            }
            catch
            {
            }

            if (OpenAlDevice != IntPtr.Zero)
            {
                OpenAlContext = Alc.CreateContext(OpenAlDevice, (int[])null);
                if (OpenAlContext != ContextHandle.Zero)
                {
                    Alc.MakeContextCurrent(OpenAlContext);
                    try
                    {
                        AL.SpeedOfSound(343.0f);
                    }
                    catch
                    {
                        MessageBox.Show(Translations.GetInterfaceString("errors_sound_openal_version"), Translations.GetInterfaceString("program_title"), MessageBoxButtons.OK, MessageBoxIcon.Hand);
                    }
                    AL.DistanceModel(ALDistanceModel.None);
                    return;
                }
                Alc.CloseDevice(OpenAlDevice);
                OpenAlDevice = IntPtr.Zero;
                if (OpenAlMic != null)
                {
                    OpenAlMic.Dispose();
                    OpenAlMic = null;
                }
                MessageBox.Show(Translations.GetInterfaceString("errors_sound_openal_context"), Translations.GetInterfaceString("program_title"), MessageBoxButtons.OK, MessageBoxIcon.Hand);
                return;
            }
            OpenAlContext = ContextHandle.Zero;
            MessageBox.Show(Translations.GetInterfaceString("errors_sound_openal_device"), Translations.GetInterfaceString("program_title"), MessageBoxButtons.OK, MessageBoxIcon.Hand);
        }
Example #13

        private OpenALSoundController()
        {
#if IPHONE
            AudioSession.Initialize();

            // NOTE: iOS 5.1 simulator throws an exception when setting the category
            // to SoloAmbientSound.  This could be removed if that bug gets fixed.
            try
            {
                if (AudioSession.OtherAudioIsPlaying)
                {
                    AudioSession.Category = AudioSessionCategory.AmbientSound;
                }
                else
                {
                    AudioSession.Category = AudioSessionCategory.SoloAmbientSound;
                }
            }
            catch (AudioSessionException) { }
#endif
            alcMacOSXMixerOutputRate(PREFERRED_MIX_RATE);
            _device = Alc.OpenDevice(string.Empty);
            CheckALError("Could not open AL device");
            if (_device != IntPtr.Zero)
            {
                int[] attribute = new int[0];
                _context = Alc.CreateContext(_device, attribute);
                CheckALError("Could not open AL context");

                if (_context != ContextHandle.Zero)
                {
                    Alc.MakeContextCurrent(_context);
                    CheckALError("Could not make AL context current");
                }
            }
            else
            {
                return;
            }

            allSourcesArray = new int[MAX_NUMBER_OF_SOURCES];
            AL.GenSources(allSourcesArray);

            availableSourcesCollection = new HashSet <int> ();
            inUseSourcesCollection     = new HashSet <OALSoundBuffer> ();
            playingSourcesCollection   = new HashSet <OALSoundBuffer> ();


            for (int x = 0; x < MAX_NUMBER_OF_SOURCES; x++)
            {
                availableSourcesCollection.Add(allSourcesArray [x]);
            }
#if IPHONE
            AudioSession.Interrupted += (sender, e) =>
            {
                AudioSession.SetActive(false);

                Alc.MakeContextCurrent(ContextHandle.Zero);
                Alc.SuspendContext(_context);
            };

            AudioSession.Resumed += (sender, e) =>
            {
                // That is, without this, the code won't work :(
                // It will fail on the next line of code.
                // Maybe you could ask someone at Xamarin for an explanation.
                System.Threading.Thread.Sleep(100);

                AudioSession.SetActive(true);
                AudioSession.Category = AudioSessionCategory.SoloAmbientSound;

                Alc.MakeContextCurrent(_context);
                Alc.ProcessContext(_context);
            };
#endif
        }
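The constructor above pre-generates MAX_NUMBER_OF_SOURCES sources and tracks them in three collections. A hedged sketch of the borrow/return bookkeeping those collections imply (method names are assumptions; the real controller's API differs):

        public int ReserveSource()
        {
            if (availableSourcesCollection.Count == 0)
            {
                throw new InvalidOperationException("No free OpenAL sources available.");
            }
            int source = availableSourcesCollection.First();    // requires System.Linq
            availableSourcesCollection.Remove(source);
            return source;
        }

        public void RecycleSource(int source)
        {
            AL.SourceStop(source);                               // ensure the source is idle before reuse
            availableSourcesCollection.Add(source);
        }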