internal int GetQueuedSampleCount()
{
    if (_state == MicrophoneState.Stopped || BufferReady == null)
        return 0;

    int[] values = new int[1];
    Alc.GetInteger(_captureDevice, AlcGetInteger.CaptureSamples, 1, values);

    CheckALCError("Failed to query capture samples.");

    return values[0];
}
//public static void DestroyInstance()
//{
//    if (_instance != null)
//    {
//        _instance.Dispose();
//        _instance = null;
//    }
//}

/// <summary>
/// Destroys the AL context and closes the device, when they exist.
/// </summary>
private void CleanUpOpenAL()
{
    Alc.MakeContextCurrent(NullContext);

    if (_context != NullContext)
    {
        Alc.DestroyContext(_context);
        _context = NullContext;
    }
    if (_device != IntPtr.Zero)
    {
        Alc.CloseDevice(_device);
        _device = IntPtr.Zero;
    }
}
internal void CheckALCError(string operation)
{
    AlcError error = Alc.GetError(_captureDevice);

    if (error == AlcError.NoError)
        return;

    string errorFmt = "OpenAL Error: {0}";
    throw new NoMicrophoneConnectedException(
        String.Format("{0} - {1}", operation, String.Format(errorFmt, error)));
}
internal int PlatformGetData(byte[] buffer, int offset, int count)
{
    int sampleCount = GetQueuedSampleCount();
    sampleCount = Math.Min(count / 2, sampleCount); // 16-bit adjust

    if (sampleCount > 0)
    {
        // Pin the managed buffer so the native capture call can write into it;
        // free the handle even if the capture call throws.
        GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        try
        {
            Alc.CaptureSamples(_captureDevice, handle.AddrOfPinnedObject() + offset, sampleCount);
        }
        finally
        {
            handle.Free();
        }

        CheckALCError("Failed to capture samples.");

        return sampleCount * 2; // 16-bit adjust
    }

    return 0;
}
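// Usage sketch (illustrative only): a caller could drain the capture device by
// polling PlatformGetData with a byte buffer. "OnSamples" is a hypothetical
// consumer; in the XNA/MonoGame API the public entry point is Microphone.GetData,
// which wraps this method. Counts are in bytes, and samples are 16-bit.
//
//     byte[] captureBuffer = new byte[4096];
//     int bytesRead = microphone.PlatformGetData(captureBuffer, 0, captureBuffer.Length);
//     if (bytesRead > 0)
//         OnSamples(captureBuffer, bytesRead); // hypothetical consumer of the PCM data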
internal static void PopulateCaptureDevices()
{
    // clear microphones
    if (_allMicrophones != null)
        _allMicrophones.Clear();
    else
        _allMicrophones = new List<Microphone>();
    _default = null;

    // default device
    string defaultDevice = Alc.GetString(IntPtr.Zero, AlcGetString.CaptureDefaultDeviceSpecifier);

#if true //DESKTOPGL
    // enumerate capture devices
    IntPtr deviceList = Alc.alGetString(IntPtr.Zero, (int)AlcGetString.CaptureDeviceSpecifier);

    // The device list is a sequence of NUL-terminated ANSI strings ending with an
    // empty string, so we marshal one entry at a time.
    string deviceIdentifier = Marshal.PtrToStringAnsi(deviceList);

    while (!String.IsNullOrEmpty(deviceIdentifier))
    {
        Microphone microphone = new Microphone(deviceIdentifier);
        _allMicrophones.Add(microphone);

        if (deviceIdentifier == defaultDevice)
            _default = microphone;

        // advance past this entry and its NUL terminator
        deviceList += deviceIdentifier.Length + 1;
        deviceIdentifier = Marshal.PtrToStringAnsi(deviceList);
    }
#else
    // Xamarin platforms don't provide a handle to alGetString that allows marshaling
    // string arrays, so we only add the default microphone.
    Microphone microphone = new Microphone(defaultDevice);
    _allMicrophones.Add(microphone);
    _default = microphone;
#endif
}
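// Usage sketch (illustrative only): once PopulateCaptureDevices has run, game code
// typically enumerates the public Microphone.All collection and falls back to
// Microphone.Default. The member names below follow the XNA Microphone API.
//
//     foreach (Microphone mic in Microphone.All)
//         Console.WriteLine("Capture device: " + mic.Name);
//
//     Microphone selected = Microphone.Default ??
//         (Microphone.All.Count > 0 ? Microphone.All[0] : null);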
/// <summary>
/// Sets up the hardware resources used by the controller.
/// </summary>
private OpenALSoundController()
{
#if WINDOWS
    // On Windows, set the DLL search path for correct native binaries
    NativeHelper.InitDllDirectory();
#endif

    if (!OpenSoundController())
    {
        throw new NoAudioHardwareException("OpenAL device could not be initialized, see console output for details.");
    }

    if (Alc.IsExtensionPresent(_device, "ALC_EXT_CAPTURE"))
        Microphone.PopulateCaptureDevices();

    // We have hardware here and it is ready
    allSourcesArray = new int[MAX_NUMBER_OF_SOURCES];
    AL.GenSources(allSourcesArray);
    ALHelper.CheckError("Failed to generate sources.");
    Filter = 0;
    //if (Efx.IsInitialized)
    //{
    //    Filter = Efx.GenFilter();
    //}

    availableSourcesCollection = new List<int>(allSourcesArray);
    inUseSourcesCollection = new List<int>();
    for (var i = 0; i < availableSourcesCollection.Count; i++)
        sourcePool.Add(availableSourcesCollection[i], new PoolSlot() { IsActive = false });
}
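// Sketch (illustrative, simplified): a pool like the one built above is normally
// consumed by a reserve/recycle pair. MonoGame's controller exposes ReserveSource
// and RecycleSource for this purpose; the bodies below are a minimal sketch under
// that assumption, not the shipped implementation.
//
//     int ReserveSourceSketch()
//     {
//         foreach (var slot in sourcePool)
//         {
//             if (!slot.Value.IsActive)
//             {
//                 slot.Value.IsActive = true;
//                 return slot.Key;          // hand out a free AL source id
//             }
//         }
//         throw new InstancePlayLimitException(); // no free sources left
//     }
//
//     void RecycleSourceSketch(int sourceId)
//     {
//         // The real implementation also detaches any bound buffer before reuse.
//         sourcePool[sourceId].IsActive = false;
//     }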
internal EffectsExtension()
{
    IsInitialized = false;
    if (!Alc.IsExtensionPresent(device, "ALC_EXT_EFX"))
        return;

    alGenEffects = (alGenEffectsDelegate)Marshal.GetDelegateForFunctionPointer(AL.GetProcAddress("alGenEffects"), typeof(alGenEffectsDelegate));
    alDeleteEffects = (alDeleteEffectsDelegate)Marshal.GetDelegateForFunctionPointer(AL.GetProcAddress("alDeleteEffects"), typeof(alDeleteEffectsDelegate));
    alEffectf = (alEffectfDelegate)Marshal.GetDelegateForFunctionPointer(AL.GetProcAddress("alEffectf"), typeof(alEffectfDelegate));
    alEffecti = (alEffectiDelegate)Marshal.GetDelegateForFunctionPointer(AL.GetProcAddress("alEffecti"), typeof(alEffectiDelegate));
    alGenAuxiliaryEffectSlots = (alGenAuxiliaryEffectSlotsDelegate)Marshal.GetDelegateForFunctionPointer(AL.GetProcAddress("alGenAuxiliaryEffectSlots"), typeof(alGenAuxiliaryEffectSlotsDelegate));
    alDeleteAuxiliaryEffectSlots = (alDeleteAuxiliaryEffectSlotsDelegate)Marshal.GetDelegateForFunctionPointer(AL.GetProcAddress("alDeleteAuxiliaryEffectSlots"), typeof(alDeleteAuxiliaryEffectSlotsDelegate));
    alAuxiliaryEffectSloti = (alAuxiliaryEffectSlotiDelegate)Marshal.GetDelegateForFunctionPointer(AL.GetProcAddress("alAuxiliaryEffectSloti"), typeof(alAuxiliaryEffectSlotiDelegate));
    alAuxiliaryEffectSlotf = (alAuxiliaryEffectSlotfDelegate)Marshal.GetDelegateForFunctionPointer(AL.GetProcAddress("alAuxiliaryEffectSlotf"), typeof(alAuxiliaryEffectSlotfDelegate));
    alGenFilters = (alGenFiltersDelegate)Marshal.GetDelegateForFunctionPointer(AL.GetProcAddress("alGenFilters"), typeof(alGenFiltersDelegate));
    alFilteri = (alFilteriDelegate)Marshal.GetDelegateForFunctionPointer(AL.GetProcAddress("alFilteri"), typeof(alFilteriDelegate));
    alFilterf = (alFilterfDelegate)Marshal.GetDelegateForFunctionPointer(AL.GetProcAddress("alFilterf"), typeof(alFilterfDelegate));
    alDeleteFilters = (alDeleteFiltersDelegate)Marshal.GetDelegateForFunctionPointer(AL.GetProcAddress("alDeleteFilters"), typeof(alDeleteFiltersDelegate));

    IsInitialized = true;
}
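// Sketch (illustrative only): the repeated GetDelegateForFunctionPointer casts
// above could be collapsed with a small generic helper such as the one below.
// "LoadEntryPoint" is a hypothetical name, not part of this class.
//
//     private static T LoadEntryPoint<T>(string name)
//     {
//         IntPtr proc = AL.GetProcAddress(name);
//         if (proc == IntPtr.Zero)
//             throw new EntryPointNotFoundException(name);
//         return (T)(object)Marshal.GetDelegateForFunctionPointer(proc, typeof(T));
//     }
//
//     // e.g. alGenEffects = LoadEntryPoint<alGenEffectsDelegate>("alGenEffects");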
void Activity_Resumed(object sender, EventArgs e)
{
    // Resume all sounds that were playing when the activity was paused
    Alc.DeviceResume(_device);
}
void Activity_Paused(object sender, EventArgs e)
{
    // Pause all currently playing sounds by pausing the mixer
    Alc.DevicePause(_device);
}
/// <summary>
/// Opens the sound device, sets up an audio context, and makes the new context
/// the current context. Note that this method will stop the playback of
/// music that was running prior to the game start. If any error occurs, then
/// the state of the controller is reset.
/// </summary>
/// <returns>True if the sound controller was setup, and false if not.</returns>
private bool OpenSoundController()
{
    try
    {
        _device = Alc.OpenDevice(string.Empty);
        EffectsExtension.device = _device;
    }
    catch (DllNotFoundException)
    {
        // Rethrow without resetting the stack trace.
        throw;
    }
    catch (Exception ex)
    {
        throw new NoAudioHardwareException("OpenAL device could not be initialized.", ex);
    }

    AlcHelper.CheckError("Could not open OpenAL device");

    if (_device != IntPtr.Zero)
    {
#if ANDROID
        // Attach activity event handlers so we can pause and resume all playing sounds
        AndroidGameView.OnPauseGameThread += Activity_Paused;
        AndroidGameView.OnResumeGameThread += Activity_Resumed;

        // Query the device for the ideal frequency and update buffer size so
        // we can get the low latency sound path.

        /*
         * The recommended sequence is:
         *
         * Check for feature "android.hardware.audio.low_latency" using code such as this:
         *     import android.content.pm.PackageManager;
         *     ...
         *     PackageManager pm = getContext().getPackageManager();
         *     boolean claimsFeature = pm.hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
         * Check for API level 17 or higher, to confirm use of android.media.AudioManager.getProperty().
         * Get the native or optimal output sample rate and buffer size for this device's primary output stream, using code such as this:
         *     import android.media.AudioManager;
         *     ...
         *     AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
         *     String sampleRate = am.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
         *     String framesPerBuffer = am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
         * Note that sampleRate and framesPerBuffer are Strings. First check for null and then convert to int using Integer.parseInt().
         * Now use OpenSL ES to create an AudioPlayer with PCM buffer queue data locator.
         *
         * See http://stackoverflow.com/questions/14842803/low-latency-audio-playback-on-android
         */

        int frequency = DEFAULT_FREQUENCY;
        int updateSize = DEFAULT_UPDATE_SIZE;
        int updateBuffers = DEFAULT_UPDATE_BUFFER_COUNT;

        if (Android.OS.Build.VERSION.SdkInt >= Android.OS.BuildVersionCodes.JellyBeanMr1)
        {
            Android.Util.Log.Debug("OAL", Game.Activity.PackageManager.HasSystemFeature(PackageManager.FeatureAudioLowLatency)
                ? "Supports low latency audio playback."
                : "Does not support low latency audio playback.");

            var audioManager = Game.Activity.GetSystemService(Context.AudioService) as AudioManager;
            if (audioManager != null)
            {
                var result = audioManager.GetProperty(AudioManager.PropertyOutputSampleRate);
                if (!string.IsNullOrEmpty(result))
                    frequency = int.Parse(result, CultureInfo.InvariantCulture);

                result = audioManager.GetProperty(AudioManager.PropertyOutputFramesPerBuffer);
                if (!string.IsNullOrEmpty(result))
                    updateSize = int.Parse(result, CultureInfo.InvariantCulture);
            }

            // If 4.4 or higher, then we don't need to double buffer on the application side.
            // See http://stackoverflow.com/a/15006327
            //if (Android.OS.Build.VERSION.SdkInt >= Android.OS.BuildVersionCodes.Kitkat)
            //{
            //    updateBuffers = 1;
            //}
        }
        else
        {
            Android.Util.Log.Debug("OAL", "Android 4.2 or higher required for low latency audio playback.");
        }
        Android.Util.Log.Debug("OAL", "Using sample rate " + frequency + "Hz and " + updateBuffers + " buffers of " + updateSize + " frames.");

        // These are missing and non-standard ALC constants
        const int AlcFrequency = 0x1007;
        const int AlcUpdateSize = 0x1014;
        const int AlcUpdateBuffers = 0x1015;

        int[] attribute = new[]
        {
            AlcFrequency, frequency,
            AlcUpdateSize, updateSize,
            AlcUpdateBuffers, updateBuffers,
            0
        };
#elif IOS
        EventHandler<AVAudioSessionInterruptionEventArgs> handler = delegate(object sender, AVAudioSessionInterruptionEventArgs e)
        {
            switch (e.InterruptionType)
            {
                case AVAudioSessionInterruptionType.Began:
                    AVAudioSession.SharedInstance().SetActive(false);
                    Alc.MakeContextCurrent(IntPtr.Zero);
                    Alc.SuspendContext(_context);
                    break;
                case AVAudioSessionInterruptionType.Ended:
                    AVAudioSession.SharedInstance().SetActive(true);
                    Alc.MakeContextCurrent(_context);
                    Alc.ProcessContext(_context);
                    break;
            }
        };
        AVAudioSession.Notifications.ObserveInterruption(handler);

        int[] attribute = new int[0];
#else
        int[] attribute = new int[0];
#endif

        _context = Alc.CreateContext(_device, attribute);
#if DESKTOPGL
        _oggstreamer = new OggStreamer();
#endif

        AlcHelper.CheckError("Could not create OpenAL context");

        if (_context != NullContext)
        {
            Alc.MakeContextCurrent(_context);
            AlcHelper.CheckError("Could not make OpenAL context current");
            SupportsIma4 = AL.IsExtensionPresent("AL_EXT_IMA4");
            SupportsAdpcm = AL.IsExtensionPresent("AL_SOFT_MSADPCM");
            SupportsEfx = AL.IsExtensionPresent("AL_EXT_EFX");
            SupportsIeee = AL.IsExtensionPresent("AL_EXT_float32");
            return true;
        }
    }
    return false;
}
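// Note (sketch, illustrative only): Alc.CreateContext takes a flat attribute list
// of key/value pairs terminated by 0; the ANDROID branch above builds exactly such
// a list. A platform-neutral equivalent using only the standard ALC_FREQUENCY key
// (0x1007) might look like this; the update size/buffer keys used above are
// non-standard, as the comment in the method notes.
//
//     int[] attribs = new[]
//     {
//         0x1007, 44100,   // ALC_FREQUENCY: request a 44.1 kHz mixer
//         0                // terminator
//     };
//     IntPtr ctx = Alc.CreateContext(_device, attribs);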