/// <summary>
/// Suspends processing of audio events.
/// </summary>
/// <remarks>
/// <para>
/// To avoid audio artifacts when calling this function, set audio gain to zero before
/// suspending an AudioContext.
/// </para>
/// <para>
/// In some implementations, it can be faster to suspend processing before changing
/// AudioContext state.
/// </para>
/// <para>
/// In some implementations this function may have no effect.
/// </para>
/// </remarks>
/// <exception cref="ObjectDisposedException">Occurs when this function is called after the AudioContext has been disposed.</exception>
/// <seealso cref="Process"/>
/// <seealso cref="IsProcessing"/>
/// <seealso cref="IsSynchronized"/>
public void Suspend()
{
    if (disposed)
    {
        throw new ObjectDisposedException(this.GetType().FullName);
    }

    Alc.SuspendContext(this.context_handle);
    IsProcessing = false;
}
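// Usage sketch (illustrative, not part of AudioContext): per the remarks above,
// mute output before suspending to avoid audible artifacts. This assumes OpenTK's
// AL listener-gain API and an AudioContext instance named 'context'.
float previousGain;
AL.GetListener(ALListenerf.Gain, out previousGain);

AL.Listener(ALListenerf.Gain, 0f);  // silence output first
context.Suspend();                  // then stop processing audio events

// ... reconfigure context state while suspended ...

context.Process();                           // resume processing
AL.Listener(ALListenerf.Gain, previousGain); // restore the old gain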
/// <summary>
/// Opens the sound device, sets up an audio context, and makes the new context
/// the current context. Note that this method will stop the playback of
/// music that was running prior to the game start. If any error occurs, then
/// the state of the controller is reset.
/// </summary>
/// <returns>True if the sound controller was set up, and false if not.</returns>
private bool OpenSoundController()
{
#if MONOMAC || IOS
    alcMacOSXMixerOutputRate(PREFERRED_MIX_RATE);
#endif
    try
    {
        _device = Alc.OpenDevice(string.Empty);
    }
    catch (Exception ex)
    {
        _SoundInitException = ex;
        return false;
    }

    if (CheckALError("Could not open AL device"))
    {
        return false;
    }

    if (_device != IntPtr.Zero)
    {
#if ANDROID
        // Attach activity event handlers so we can pause and resume all playing sounds
        AndroidGameActivity.Paused += Activity_Paused;
        AndroidGameActivity.Resumed += Activity_Resumed;

        // Query the device for the ideal frequency and update buffer size so
        // we can get the low latency sound path.
        /*
         * The recommended sequence is:
         *
         * Check for feature "android.hardware.audio.low_latency" using code such as this:
         * import android.content.pm.PackageManager;
         * ...
         * PackageManager pm = getContext().getPackageManager();
         * boolean claimsFeature = pm.hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
         *
         * Check for API level 17 or higher, to confirm use of android.media.AudioManager.getProperty().
         * Get the native or optimal output sample rate and buffer size for this device's primary
         * output stream, using code such as this:
         * import android.media.AudioManager;
         * ...
         * AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
         * String sampleRate = am.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
         * String framesPerBuffer = am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
         *
         * Note that sampleRate and framesPerBuffer are Strings. First check for null and then
         * convert to int using Integer.parseInt().
         *
         * Now use OpenSL ES to create an AudioPlayer with PCM buffer queue data locator.
         *
         * See http://stackoverflow.com/questions/14842803/low-latency-audio-playback-on-android
         */
        int frequency = DEFAULT_FREQUENCY;
        int updateSize = DEFAULT_UPDATE_SIZE;
        int updateBuffers = DEFAULT_UPDATE_BUFFER_COUNT;

        if (Android.OS.Build.VERSION.SdkInt >= Android.OS.BuildVersionCodes.JellyBeanMr1)
        {
            Android.Util.Log.Debug("OAL", Game.Activity.PackageManager.HasSystemFeature(PackageManager.FeatureAudioLowLatency)
                ? "Supports low latency audio playback."
                : "Does not support low latency audio playback.");

            var audioManager = Game.Activity.GetSystemService(Context.AudioService) as AudioManager;
            if (audioManager != null)
            {
                var result = audioManager.GetProperty(AudioManager.PropertyOutputSampleRate);
                if (!string.IsNullOrEmpty(result))
                {
                    frequency = int.Parse(result, CultureInfo.InvariantCulture);
                }

                result = audioManager.GetProperty(AudioManager.PropertyOutputFramesPerBuffer);
                if (!string.IsNullOrEmpty(result))
                {
                    updateSize = int.Parse(result, CultureInfo.InvariantCulture);
                }
            }

            // If 4.4 or higher, then we don't need to double buffer on the application side.
            // See http://stackoverflow.com/a/15006327
            // Use the explicit value rather than a constant as the 4.2 SDK (the build SDK)
            // does not define a constant for 4.4.
            if ((int)Android.OS.Build.VERSION.SdkInt >= 19)
            {
                updateBuffers = 1;
            }
        }
        else
        {
            Android.Util.Log.Debug("OAL", "Android 4.2 or higher required for low latency audio playback.");
        }

        Android.Util.Log.Debug("OAL", "Using sample rate " + frequency + "Hz and " + updateBuffers + " buffers of " + updateSize + " frames.");

        // These are missing and non-standard ALC constants
        const int AlcFrequency = 0x1007;
        const int AlcUpdateSize = 0x1014;
        const int AlcUpdateBuffers = 0x1015;

        // Zero-terminated list of ALC attribute key/value pairs.
        int[] attribute = new[]
        {
            AlcFrequency, frequency,
            AlcUpdateSize, updateSize,
            AlcUpdateBuffers, updateBuffers,
            0
        };
#elif IOS
        AudioSession.Initialize();

        AudioSession.Interrupted += (sender, e) =>
        {
            AudioSession.SetActive(false);
            Alc.MakeContextCurrent(ContextHandle.Zero);
            Alc.SuspendContext(_context);
        };
        AudioSession.Resumed += (sender, e) =>
        {
            AudioSession.SetActive(true);
            Alc.MakeContextCurrent(_context);
            Alc.ProcessContext(_context);
        };

        int[] attribute = new int[0];
#elif !DESKTOPGL
        int[] attribute = new int[0];
#endif

#if DESKTOPGL
        _acontext = new AudioContext();
        _context = Alc.GetCurrentContext();
        _oggstreamer = new OggStreamer();
#else
        _context = Alc.CreateContext(_device, attribute);
#endif
        if (CheckALError("Could not create AL context"))
        {
            CleanUpOpenAL();
            return false;
        }

        if (_context != ContextHandle.Zero)
        {
            Alc.MakeContextCurrent(_context);
            if (CheckALError("Could not make AL context current"))
            {
                CleanUpOpenAL();
                return false;
            }
            return true;
        }
    }

    return false;
}
/// <summary>
/// Opens the sound device, sets up an audio context, and makes the new context
/// the current context. Note that this method will stop the playback of
/// music that was running prior to the game start. If any error occurs, then
/// the state of the controller is reset.
/// </summary>
/// <returns>True if the sound controller was set up, and false if not.</returns>
private bool OpenSoundController()
{
    try
    {
        _device = Alc.OpenDevice(string.Empty);
        EffectsExtension.device = _device;
    }
    catch (Exception ex)
    {
        throw new NoAudioHardwareException("OpenAL device could not be initialized.", ex);
    }

    AlcHelper.CheckError("Could not open OpenAL device");

    if (_device != IntPtr.Zero)
    {
#if ANDROID
        // Attach activity event handlers so we can pause and resume all playing sounds
        MonoGameAndroidGameView.OnPauseGameThread += Activity_Paused;
        MonoGameAndroidGameView.OnResumeGameThread += Activity_Resumed;

        // Query the device for the ideal frequency and update buffer size so
        // we can get the low latency sound path.
        /*
         * The recommended sequence is:
         *
         * Check for feature "android.hardware.audio.low_latency" using code such as this:
         * import android.content.pm.PackageManager;
         * ...
         * PackageManager pm = getContext().getPackageManager();
         * boolean claimsFeature = pm.hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
         *
         * Check for API level 17 or higher, to confirm use of android.media.AudioManager.getProperty().
         * Get the native or optimal output sample rate and buffer size for this device's primary
         * output stream, using code such as this:
         * import android.media.AudioManager;
         * ...
         * AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
         * String sampleRate = am.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
         * String framesPerBuffer = am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
         *
         * Note that sampleRate and framesPerBuffer are Strings. First check for null and then
         * convert to int using Integer.parseInt().
         *
         * Now use OpenSL ES to create an AudioPlayer with PCM buffer queue data locator.
         *
         * See http://stackoverflow.com/questions/14842803/low-latency-audio-playback-on-android
         */
        int frequency = DEFAULT_FREQUENCY;
        int updateSize = DEFAULT_UPDATE_SIZE;
        int updateBuffers = DEFAULT_UPDATE_BUFFER_COUNT;

        if (Android.OS.Build.VERSION.SdkInt >= Android.OS.BuildVersionCodes.JellyBeanMr1)
        {
            Android.Util.Log.Debug("OAL", Game.Activity.PackageManager.HasSystemFeature(PackageManager.FeatureAudioLowLatency)
                ? "Supports low latency audio playback."
                : "Does not support low latency audio playback.");

            var audioManager = Game.Activity.GetSystemService(Context.AudioService) as AudioManager;
            if (audioManager != null)
            {
                var result = audioManager.GetProperty(AudioManager.PropertyOutputSampleRate);
                if (!string.IsNullOrEmpty(result))
                {
                    frequency = int.Parse(result, CultureInfo.InvariantCulture);
                }

                result = audioManager.GetProperty(AudioManager.PropertyOutputFramesPerBuffer);
                if (!string.IsNullOrEmpty(result))
                {
                    updateSize = int.Parse(result, CultureInfo.InvariantCulture);
                }
            }

            // If 4.4 or higher, then we don't need to double buffer on the application side.
            // See http://stackoverflow.com/a/15006327
            if (Android.OS.Build.VERSION.SdkInt >= Android.OS.BuildVersionCodes.Kitkat)
            {
                updateBuffers = 1;
            }
        }
        else
        {
            Android.Util.Log.Debug("OAL", "Android 4.2 or higher required for low latency audio playback.");
        }

        Android.Util.Log.Debug("OAL", "Using sample rate " + frequency + "Hz and " + updateBuffers + " buffers of " + updateSize + " frames.");

        // These are missing and non-standard ALC constants
        const int AlcFrequency = 0x1007;
        const int AlcUpdateSize = 0x1014;
        const int AlcUpdateBuffers = 0x1015;

        // Zero-terminated list of ALC attribute key/value pairs.
        int[] attribute = new[]
        {
            AlcFrequency, frequency,
            AlcUpdateSize, updateSize,
            AlcUpdateBuffers, updateBuffers,
            0
        };
#elif IOS
        AVAudioSession.SharedInstance().Init();

        // NOTE: Do not override the AVAudioSessionCategory set by the game developer:
        // see https://github.com/MonoGame/MonoGame/issues/6595
        EventHandler<AVAudioSessionInterruptionEventArgs> handler = delegate(object sender, AVAudioSessionInterruptionEventArgs e)
        {
            switch (e.InterruptionType)
            {
                case AVAudioSessionInterruptionType.Began:
                    AVAudioSession.SharedInstance().SetActive(false);
                    Alc.MakeContextCurrent(IntPtr.Zero);
                    Alc.SuspendContext(_context);
                    break;
                case AVAudioSessionInterruptionType.Ended:
                    AVAudioSession.SharedInstance().SetActive(true);
                    Alc.MakeContextCurrent(_context);
                    Alc.ProcessContext(_context);
                    break;
            }
        };
        AVAudioSession.Notifications.ObserveInterruption(handler);

        // Activate the instance or else the interruption handler will not be called.
        AVAudioSession.SharedInstance().SetActive(true);

        int[] attribute = new int[0];
#else
        int[] attribute = new int[0];
#endif

        _context = Alc.CreateContext(_device, attribute);
#if DESKTOPGL
        _oggstreamer = new OggStreamer();
#endif

        AlcHelper.CheckError("Could not create OpenAL context");

        if (_context != NullContext)
        {
            Alc.MakeContextCurrent(_context);
            AlcHelper.CheckError("Could not make OpenAL context current");

            SupportsIma4 = AL.IsExtensionPresent("AL_EXT_IMA4");
            SupportsAdpcm = AL.IsExtensionPresent("AL_SOFT_MSADPCM");
            SupportsEfx = AL.IsExtensionPresent("AL_EXT_EFX");
            SupportsIeee = AL.IsExtensionPresent("AL_EXT_float32");

            return true;
        }
    }

    return false;
}
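// Illustrative sketch (not the library's actual call sites): the Supports*
// flags queried above let callers pick a buffer format the OpenAL
// implementation can actually handle. ChooseFormat is a hypothetical helper
// placed in the same controller class; the ALFormat values are OpenTK's.
private ALFormat ChooseFormat(bool preferIma4, bool preferFloat)
{
    if (preferIma4 && SupportsIma4)
        return ALFormat.StereoIma4Ext;    // compressed IMA4 path (AL_EXT_IMA4)
    if (preferFloat && SupportsIeee)
        return ALFormat.StereoFloat32Ext; // 32-bit float path (AL_EXT_float32)
    return ALFormat.Stereo16;             // plain 16-bit PCM always works
}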
public static void AlcUnitTestFunc()
{
    AudioContext context = new AudioContext();

    Trace.WriteLine("Testing AudioContext functions.");
    Trace.Indent();

    // Trace.WriteLine("Suspend()...");
    // context.Suspend();
    // Trace.Assert(!context.IsProcessing);
    //
    // Trace.WriteLine("Process()...");
    // context.Process();
    // Trace.Assert(context.IsProcessing);

    //Trace.WriteLine("MakeCurrent()...");
    //context.MakeCurrent();
    //Trace.Assert(context.IsCurrent);

    //Trace.WriteLine("IsCurrent = false...");
    //context.IsCurrent = false;
    //Trace.Assert(!context.IsCurrent);

    //Trace.WriteLine("IsCurrent = true...");
    //context.IsCurrent = true;
    //Trace.Assert(context.IsCurrent);

    Trace.WriteLine("AudioContext.CurrentContext...");
    Trace.Assert(AudioContext.CurrentContext == context);

    #region Get Attribs

    //int AttribCount;
    //Alc.GetInteger(context.Device, AlcGetInteger.AttributesSize, sizeof(int), out AttribCount);
    //Trace.WriteLine("AttributeSize: " + AttribCount);
    //if (AttribCount > 0)
    //{
    //    int[] Attribs = new int[AttribCount];
    //    Alc.GetInteger(context.Device, AlcGetInteger.AllAttributes, AttribCount, out Attribs[0]);
    //    for (int i = 0; i < Attribs.Length; i++)
    //    {
    //        Trace.Write(Attribs[i]);
    //        Trace.Write(" ");
    //    }
    //    Trace.WriteLine();
    //}

    #endregion Get Attribs

#if false
    AlDevice MyDevice;
    AlContext MyContext;

    // Initialize Open AL
    MyDevice = Alc.OpenDevice(null); // open default device
    if (MyDevice != Al.Null)
    {
        Trace.WriteLine("Device allocation succeeded.");
        MyContext = Alc.CreateContext(MyDevice, Al.Null); // create context
        if (MyContext != Al.Null)
        {
            Trace.WriteLine("Context allocation succeeded.");
            GetOpenALErrors(MyDevice);

            Alc.SuspendContext(MyContext); // disable context
            Alc.ProcessContext(MyContext); // enable context. The default state of a context
                                           // created by alcCreateContext is that it is processing.
            Al.Bool result = Alc.MakeContextCurrent(MyContext); // set active context
            Trace.WriteLine("MakeContextCurrent succeeded? " + result);
            GetOpenALErrors(MyDevice);

            Trace.WriteLine("Default: " + Alc.GetString(MyDevice, Enums.AlcGetString.DefaultDeviceSpecifier));
            Trace.WriteLine("Device: " + Alc.GetString(MyDevice, Enums.AlcGetString.DeviceSpecifier));
            Trace.WriteLine("Extensions: " + Alc.GetString(MyDevice, Enums.AlcGetString.Extensions));
            GetOpenALErrors(MyDevice);

            #region Get Attribs
            int AttribCount;
            Alc.GetInteger(MyDevice, Enums.AlcGetInteger.AttributesSize, sizeof(int), out AttribCount);
            Trace.WriteLine("AttributeSize: " + AttribCount);
            if (AttribCount > 0)
            {
                int[] Attribs = new int[AttribCount];
                Alc.GetInteger(MyDevice, Enums.AlcGetInteger.AllAttributes, AttribCount, out Attribs[0]);
                for (int i = 0; i < Attribs.Length; i++)
                {
                    Trace.Write(", " + Attribs[i]);
                }
                Trace.WriteLine();
            }
            #endregion Get Attribs

            GetOpenALErrors(MyDevice);

            AlDevice currdev = Alc.GetContextsDevice(MyContext);
            AlContext currcon = Alc.GetCurrentContext();

            if (MyDevice == currdev)
            {
                Trace.WriteLine("Devices match.");
            }
            else
            {
                Trace.WriteLine("Error: Devices do not match.");
            }

            if (MyContext == currcon)
            {
                Trace.WriteLine("Contexts match.");
            }
            else
            {
                Trace.WriteLine("Error: Contexts do not match.");
            }

            // exit
            Alc.MakeContextCurrent(Al.Null); // results in no context being current
            Alc.DestroyContext(MyContext);
            result = Alc.CloseDevice(MyDevice);
            Trace.WriteLine("Result: " + result);
            Trace.ReadLine();
        }
        else
        {
            Trace.WriteLine("Context creation failed.");
        }
    }
    else
    {
        Trace.WriteLine("Failed to find suitable Device.");
    }
#endif

    /* ALUT "Hello World" example:
     *
     * #include <stdlib.h>
     * #include <AL/alut.h>
     *
     * int
     * main (int argc, char **argv)
     * {
     *     ALuint helloBuffer, helloSource;
     *     alutInit (&argc, argv);
     *     helloBuffer = alutCreateBufferHelloWorld ();
     *     alGenSources (1, &helloSource);
     *     alSourcei (helloSource, AL_BUFFER, helloBuffer);
     *     alSourcePlay (helloSource);
     *     alutSleep (1);
     *     alutExit ();
     *     return EXIT_SUCCESS;
     * }
     */

    /* Processing Loop Example:
     *
     * // PlaceCamera - places OpenGL camera & updates OpenAL listener buffer
     * void AVEnvironment::PlaceCamera()
     * {
     *     // update OpenGL camera position
     *     glMatrixMode(GL_PROJECTION);
     *     glLoadIdentity();
     *     glFrustum(-0.1333, 0.1333, -0.1, 0.1, 0.2, 50.0);
     *     gluLookAt(listenerPos[0], listenerPos[1], listenerPos[2],
     *               (listenerPos[0] + sin(listenerAngle)), listenerPos[1],
     *               (listenerPos[2] - cos(listenerAngle)),
     *               0.0, 1.0, 0.0);
     *
     *     // update OpenAL: place listener at camera
     *     alListener3f(AL_POSITION, listenerPos[0], listenerPos[1], listenerPos[2]);
     *     float directionvect[6];
     *     directionvect[0] = (float) sin(listenerAngle);
     *     directionvect[1] = 0;
     *     directionvect[2] = (float) cos(listenerAngle);
     *     directionvect[3] = 0;
     *     directionvect[4] = 1;
     *     directionvect[5] = 0;
     *     alListenerfv(AL_ORIENTATION, directionvect);
     * }
     */
}
private OpenALSoundController()
{
#if IPHONE
    AudioSession.Initialize();

    // NOTE: iOS 5.1 simulator throws an exception when setting the category
    // to SoloAmbientSound. This could be removed if that bug gets fixed.
    try
    {
        if (AudioSession.OtherAudioIsPlaying)
        {
            AudioSession.Category = AudioSessionCategory.AmbientSound;
        }
        else
        {
            AudioSession.Category = AudioSessionCategory.SoloAmbientSound;
        }
    }
    catch (AudioSessionException)
    {
    }
#endif
    alcMacOSXMixerOutputRate(PREFERRED_MIX_RATE);
    _device = Alc.OpenDevice(string.Empty);
    CheckALError("Could not open AL device");

    if (_device != IntPtr.Zero)
    {
        int[] attribute = new int[0];
        _context = Alc.CreateContext(_device, attribute);
        CheckALError("Could not create AL context");

        if (_context != ContextHandle.Zero)
        {
            Alc.MakeContextCurrent(_context);
            CheckALError("Could not make AL context current");
        }
    }
    else
    {
        return;
    }

    allSourcesArray = new int[MAX_NUMBER_OF_SOURCES];
    AL.GenSources(allSourcesArray);

    availableSourcesCollection = new HashSet<int>();
    inUseSourcesCollection = new HashSet<OALSoundBuffer>();
    playingSourcesCollection = new HashSet<OALSoundBuffer>();

    for (int x = 0; x < MAX_NUMBER_OF_SOURCES; x++)
    {
        availableSourcesCollection.Add(allSourcesArray[x]);
    }

#if IPHONE
    AudioSession.Interrupted += (sender, e) =>
    {
        AudioSession.SetActive(false);
        Alc.MakeContextCurrent(ContextHandle.Zero);
        Alc.SuspendContext(_context);
    };
    AudioSession.Resumed += (sender, e) =>
    {
        // Without this sleep, the code won't work: it fails on the next line.
        // Maybe someone at Xamarin could explain why.
        System.Threading.Thread.Sleep(100);
        AudioSession.SetActive(true);
        AudioSession.Category = AudioSessionCategory.SoloAmbientSound;
        Alc.MakeContextCurrent(_context);
        Alc.ProcessContext(_context);
    };
#endif
}
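// Hypothetical pool-management sketch (these helpers are not in the snippet
// above): the collections initialized in the constructor implement a simple
// source pool. Reserving a source takes a raw AL source id out of
// 'availableSourcesCollection'; recycling detaches its buffer and returns it.
// Requires 'using System.Linq;' for First().
private int ReserveSource()
{
    if (availableSourcesCollection.Count == 0)
        throw new InvalidOperationException("No free OpenAL sources.");

    int source = availableSourcesCollection.First();
    availableSourcesCollection.Remove(source);
    return source; // caller wraps this id in an OALSoundBuffer and tracks it
}

private void RecycleSource(int source)
{
    AL.Source(source, ALSourcei.Buffer, 0); // detach any bound buffer
    availableSourcesCollection.Add(source); // back into the free pool
}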