static void Main()
{
#if TRACE
    Trace.Listeners.Add(new ConsoleTraceListener());
#endif
    Console.WindowHeight = StreamFiles.Length + 12;

    Console.WriteLine("Pr[e]pare, [P]lay, [S]top, Pa[u]se, [R]esume, [L]oop toggle, [Q]uit");
    Console.WriteLine("Faders (in/out) : Low-pass filter [F]/[G], Volume [V]/[B]");
    Console.WriteLine("[Up], [Down] : Change current sample");
    Console.WriteLine("[Shift] + Action : Do for all " + StreamFiles.Length + " streams");

    var logger = new ConsoleLogger();
    logger.Write(" # FX Buffering", 0, 8);

    using (new AudioContext())
    using (var streamer = new OggStreamer(65536))
    {
        streamer.Logger = logger;
        ALHelper.CheckCapabilities(logger);

        bool quit = false;

        var streams = new OggStream[StreamFiles.Length];
        for (int i = 0; i < StreamFiles.Length; i++)
        {
            streams[i] = new OggStream(StreamFiles[i]) { Logger = logger };
            logger.SetStreamIndex(streams[i], i);
            logger.Write((i + 1).ToString(), 1, 10 + i);
        }
        logger.Write(">", 0, 10);

        foreach (var s in streams)
            s.Prepare();

        int sIdx = 0;
        var activeSet = new List<OggStream>();

        while (!quit)
        {
            var input = Console.ReadKey(true);

            // [Shift] applies the action to every stream; otherwise only the
            // currently selected one.
            activeSet.Clear();
            if ((input.Modifiers & ConsoleModifiers.Shift) == ConsoleModifiers.Shift)
                activeSet.AddRange(streams);
            else
                activeSet.Add(streams[sIdx]);

            var lower = char.ToLower(input.KeyChar);
            if (input.Key == ConsoleKey.UpArrow) lower = '-';
            if (input.Key == ConsoleKey.DownArrow) lower = '+';

            switch (lower)
            {
                case 'e': activeSet.ForEach(x => x.Prepare()); break;
                case 'p': activeSet.ForEach(x => x.Play()); break;
                case 'u': activeSet.ForEach(x => x.Pause()); break;
                case 's': activeSet.ForEach(x => x.Stop()); break;
                case 'r': activeSet.ForEach(x => x.Resume()); break;

                case 'l':
                    // Start at the selected stream's row when toggling a single
                    // stream, so the loop flag lands on the right line.
                    int index = activeSet.Count == 1 ? sIdx : 0;
                    activeSet.ForEach(s =>
                    {
                        s.IsLooped = !s.IsLooped;
                        logger.Write(s.IsLooped ? "L" : " ", 3, 10 + index++);
                    });
                    break;

                case 'v': FadeVolume(activeSet, true, 1, logger); break;
                case 'b': FadeVolume(activeSet, false, 1, logger); break;
                case 'f': FadeFilter(activeSet, true, 1, logger); break;
                case 'g': FadeFilter(activeSet, false, 1, logger); break;

                case '+':
                    logger.Write(" ", 0, 10 + sIdx);
                    sIdx++;
                    if (sIdx > streams.Length - 1) sIdx = 0;
                    logger.Write(">", 0, 10 + sIdx);
                    break;

                case '-':
                    logger.Write(" ", 0, 10 + sIdx);
                    sIdx--;
                    if (sIdx < 0) sIdx = streams.Length - 1;
                    logger.Write(">", 0, 10 + sIdx);
                    break;

                case 'q':
                    quit = true;
                    foreach (var cts in filterFades.Values) cts.Cancel();
                    foreach (var cts in volumeFades.Values) cts.Cancel();
                    foreach (var s in streams) s.Stop(); // nicer and more effective
                    foreach (var s in streams) s.Dispose();
                    break;
            }
        }
    }
}
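// The FadeVolume/FadeFilter helpers and the filterFades/volumeFades
// dictionaries used above are not shown in this excerpt. Below is a minimal
// sketch of what FadeVolume could look like: the signature is taken from the
// call sites above, but the body (step count, timing, the Volume property)
// is an assumption, not the actual implementation.
// (Requires System.Collections.Generic, System.Threading, System.Threading.Tasks.)
static readonly Dictionary<OggStream, CancellationTokenSource> volumeFades =
    new Dictionary<OggStream, CancellationTokenSource>();

static void FadeVolume(List<OggStream> streams, bool fadeIn, float duration, ConsoleLogger logger)
{
    // logger is unused in this sketch; the real helper presumably reports progress.
    foreach (var stream in streams)
    {
        // Cancel any fade already running on this stream, then start a new one.
        CancellationTokenSource existing;
        if (volumeFades.TryGetValue(stream, out existing))
            existing.Cancel();

        var cts = new CancellationTokenSource();
        volumeFades[stream] = cts;

        var s = stream;
        Task.Run(() =>
        {
            const int steps = 20;
            for (int i = 0; i <= steps && !cts.IsCancellationRequested; i++)
            {
                float t = i / (float)steps;
                s.Volume = fadeIn ? t : 1 - t;
                Thread.Sleep((int)(duration * 1000 / steps));
            }
        });
    }
}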
public void Dispose()
{
    lock (singletonMutex)
    {
        Debug.Assert(Instance == this, "Two instances running, somehow...?");

        cancelled = true;
        lock (iterationMutex)
            streams.Clear();

        Instance = null;
    }
}
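// For context: the synchronization fields referenced by Dispose above and by
// the OggStreamer constructor shown later. The names are taken from those two
// members; the exact declarations are assumptions.
static readonly object singletonMutex = new object();
readonly object iterationMutex = new object();
readonly List<OggStream> streams = new List<OggStream>();
volatile bool cancelled;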
/// <summary>
/// Opens the sound device, sets up an audio context, and makes the new context
/// the current context. Note that this method will stop the playback of
/// music that was running prior to the game start. If any error occurs,
/// the state of the controller is reset.
/// </summary>
/// <returns>True if the sound controller was set up, and false if not.</returns>
private bool OpenSoundController()
{
#if MONOMAC
    alcMacOSXMixerOutputRate(PREFERRED_MIX_RATE);
#endif
    try
    {
        _device = Alc.OpenDevice(string.Empty);
    }
    catch (Exception ex)
    {
        _SoundInitException = ex;
        return false;
    }

    if (CheckALError("Could not open AL device"))
        return false;

    if (_device != IntPtr.Zero)
    {
#if ANDROID
        // Attach activity event handlers so we can pause and resume all playing sounds
        AndroidGameActivity.Paused += Activity_Paused;
        AndroidGameActivity.Resumed += Activity_Resumed;

        // Query the device for the ideal frequency and update buffer size so
        // we can get the low latency sound path.

        /*
        The recommended sequence is:

        Check for feature "android.hardware.audio.low_latency" using code such as this:

        import android.content.pm.PackageManager;
        ...
        PackageManager pm = getContext().getPackageManager();
        boolean claimsFeature = pm.hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);

        Check for API level 17 or higher, to confirm use of
        android.media.AudioManager.getProperty().

        Get the native or optimal output sample rate and buffer size for this
        device's primary output stream, using code such as this:

        import android.media.AudioManager;
        ...
        AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
        String sampleRate = am.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
        String framesPerBuffer = am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);

        Note that sampleRate and framesPerBuffer are Strings. First check for null
        and then convert to int using Integer.parseInt().

        Now use OpenSL ES to create an AudioPlayer with PCM buffer queue data locator.

        See http://stackoverflow.com/questions/14842803/low-latency-audio-playback-on-android
        */

        int frequency = DEFAULT_FREQUENCY;
        int updateSize = DEFAULT_UPDATE_SIZE;
        int updateBuffers = DEFAULT_UPDATE_BUFFER_COUNT;

        if (Android.OS.Build.VERSION.SdkInt >= Android.OS.BuildVersionCodes.JellyBeanMr1)
        {
            Android.Util.Log.Debug("OAL", Game.Activity.PackageManager.HasSystemFeature(PackageManager.FeatureAudioLowLatency)
                ? "Supports low latency audio playback."
                : "Does not support low latency audio playback.");

            var audioManager = Game.Activity.GetSystemService(Context.AudioService) as AudioManager;
            if (audioManager != null)
            {
                var result = audioManager.GetProperty(AudioManager.PropertyOutputSampleRate);
                if (!string.IsNullOrEmpty(result))
                    frequency = int.Parse(result, CultureInfo.InvariantCulture);

                result = audioManager.GetProperty(AudioManager.PropertyOutputFramesPerBuffer);
                if (!string.IsNullOrEmpty(result))
                    updateSize = int.Parse(result, CultureInfo.InvariantCulture);
            }

            // If 4.4 or higher, then we don't need to double buffer on the application side.
            // See http://stackoverflow.com/a/15006327
            // Use the explicit value rather than a constant as the 4.2 SDK
            // (the build SDK) does not define a constant for 4.4.
            if ((int)Android.OS.Build.VERSION.SdkInt >= 19)
            {
                updateBuffers = 1;
            }
        }
        else
        {
            Android.Util.Log.Debug("OAL", "Android 4.2 or higher required for low latency audio playback.");
        }

        Android.Util.Log.Debug("OAL", "Using sample rate " + frequency + "Hz and " + updateBuffers + " buffers of " + updateSize + " frames.");

        // These are missing and non-standard ALC constants
        const int AlcFrequency = 0x1007;
        const int AlcUpdateSize = 0x1014;
        const int AlcUpdateBuffers = 0x1015;

        int[] attribute = new[]
        {
            AlcFrequency, frequency,
            AlcUpdateSize, updateSize,
            AlcUpdateBuffers, updateBuffers,
            0
        };
#elif IOS
        EventHandler<AVAudioSessionInterruptionEventArgs> handler = delegate(object sender, AVAudioSessionInterruptionEventArgs e)
        {
            switch (e.InterruptionType)
            {
                case AVAudioSessionInterruptionType.Began:
                    AVAudioSession.SharedInstance().SetActive(false);
                    Alc.MakeContextCurrent(ContextHandle.Zero);
                    Alc.SuspendContext(_context);
                    break;
                case AVAudioSessionInterruptionType.Ended:
                    AVAudioSession.SharedInstance().SetActive(true);
                    Alc.MakeContextCurrent(_context);
                    Alc.ProcessContext(_context);
                    break;
            }
        };
        AVAudioSession.Notifications.ObserveInterruption(handler);

        int[] attribute = new int[0];
#elif !DESKTOPGL
        int[] attribute = new int[0];
#endif

#if DESKTOPGL
        _acontext = new AudioContext();
        _context = Alc.GetCurrentContext();
        _oggstreamer = new OggStreamer();
#else
        _context = Alc.CreateContext(_device, attribute);
#endif
        if (CheckALError("Could not create AL context"))
        {
            CleanUpOpenAL();
            return false;
        }

        if (_context != ContextHandle.Zero)
        {
            Alc.MakeContextCurrent(_context);
            if (CheckALError("Could not make AL context current"))
            {
                CleanUpOpenAL();
                return false;
            }
            return true;
        }
    }
    return false;
}
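// CheckALError, used throughout OpenSoundController above, is not shown in
// this excerpt. A plausible minimal shape follows, assuming it reports the
// pending OpenAL error and returns true when one occurred; the real
// implementation may log differently. (For device-level failures,
// Alc.GetError(_device) would be the ALC-side equivalent.)
private bool CheckALError(string operation)
{
    var error = AL.GetError();
    if (error == ALError.NoError)
        return false;

    Console.WriteLine("OpenAL error during '{0}': {1}", operation, AL.GetErrorString(error));
    return true;
}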
public OggStreamer(int bufferSize = DefaultBufferSize, float updateRate = DefaultUpdateRate)
{
    UpdateRate = updateRate;
    BufferSize = bufferSize;

    // Allocate the staging buffers before the worker thread starts, so the
    // thread can never observe them as null.
    readSampleBuffer = new float[bufferSize];
    castBuffer = new short[bufferSize];

    lock (singletonMutex)
    {
        if (instance != null)
            throw new InvalidOperationException("Already running");

        Instance = this;
        underlyingThread = new Thread(EnsureBuffersFilled) { Priority = ThreadPriority.Lowest };
        underlyingThread.Start();
    }
}
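// Usage sketch (assumes the Instance property writes the instance field the
// constructor checks): only one OggStreamer may exist at a time.
using (var streamer = new OggStreamer(65536))
{
    // new OggStreamer(); // would throw InvalidOperationException("Already running")
}
// Once the first streamer is disposed, Instance is cleared and a new one is allowed.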
/// <summary>
/// Creates a new instance of <see cref="AudioManager" />.
/// </summary>
public AudioManager()
{
    _context = new AudioContext();
    _oggStreamer = new OggStreamer();
}
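// A hypothetical companion Dispose for AudioManager, assuming it implements
// IDisposable and owns both objects; the OggStreamer should be torn down
// before the AudioContext it plays through.
public void Dispose()
{
    _oggStreamer.Dispose();
    _context.Dispose();
}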