public void SetBufferData( IALBuffer buffer, IntPtr data, int offset, int count )
{
    // Upload raw PCM from an unmanaged pointer into the AL buffer.
    // The format is picked from XNAToShort based on the buffer's channel count.
    OpenALBuffer alBuffer = buffer as OpenALBuffer;
    IntPtr src = data + offset;
    AL10.alBufferData(
        alBuffer.Handle,
        XNAToShort[alBuffer.Channels],
        src,
        count,
        alBuffer.SampleRate
    );
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif
}
public void SetBufferFloatData( IALBuffer buffer, AudioChannels channels, IntPtr data, int offset, int count, int sampleRate )
{
    // Upload 32-bit float PCM; offset/count are in samples, AL wants bytes (x4).
    uint handle = (buffer as OpenALBuffer).Handle;
    IntPtr src = data + (offset * 4);
    AL10.alBufferData(
        handle,
        XNAToFloat[(int) channels],
        src,
        count * 4,
        sampleRate
    );
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif
}
public void PauseSound(ISound sound, bool paused)
{
    // Tolerate null handles so callers don't have to guard.
    if (sound == null)
        return;

    var source = ((OpenAlSound)sound).Source;
    int state;
    AL10.alGetSourcei(source, AL10.AL_SOURCE_STATE, out state);

    // Only transition when the source is currently in the opposite state.
    if (paused && state == AL10.AL_PLAYING)
        AL10.alSourcePause(source);
    else if (!paused && state == AL10.AL_PAUSED)
        AL10.alSourcePlay(source);
}
public void Shutdown()
{
    // Stop playback and release AL resources before tearing down the context.
    StopAllSounds();
    Channel.Shutdown();
    AL10.AlDeleteBuffers(buffers);
    ExitOpenAL();

    // Unregister the console commands installed at init time.
    Cmd.RemoveCommand("play");
    Cmd.RemoveCommand("stopsound");
    Cmd.RemoveCommand("soundlist");
    Cmd.RemoveCommand("soundinfo");

    // Clear every sfx slot that actually holds an entry.
    for (int i = 0; i < num_sfx; i++)
    {
        if (known_sfx[i].name != null)
        {
            known_sfx[i].Clear();
        }
    }
    num_sfx = 0;
}
bool TryGetSourceFromPool(out uint source)
{
    // Fast path: hand out a pooled source already marked inactive.
    foreach (var kvp in sourcePool)
    {
        if (kvp.Value.IsActive)
        {
            continue;
        }

        sourcePool[kvp.Key].IsActive = true;
        source = kvp.Key;
        return true;
    }

    // Slow path: ask AL which sources are neither playing nor paused.
    var idle = new List<uint>();
    foreach (var key in sourcePool.Keys)
    {
        int state;
        AL10.alGetSourcei(key, AL10.AL_SOURCE_STATE, out state);
        if (state != AL10.AL_PLAYING && state != AL10.AL_PAUSED)
        {
            idle.Add(key);
        }
    }

    if (idle.Count == 0)
    {
        source = 0;
        return false;
    }

    // Mark all idle sources inactive, then immediately claim the first one.
    foreach (var key in idle)
    {
        sourcePool[key].IsActive = false;
    }

    var claimed = idle[0];
    sourcePool[claimed].IsActive = true;
    source = claimed;
    return true;
}
static string[] QueryDevices(string label, int type)
{
    // Reset the AL error state before querying.
    AL10.alGetError();

    // alcGetString returns null-separated names terminated by a double null.
    var listPtr = ALC10.alcGetString(IntPtr.Zero, type);
    if (listPtr == IntPtr.Zero || AL10.alGetError() != AL10.AL_NO_ERROR)
    {
        Log.Write("sound", "Failed to query OpenAL device list using {0}", label);
        return new string[0];
    }

    var results = new List<string>();
    var bytes = new List<byte>();
    var position = 0;
    while (true)
    {
        var value = Marshal.ReadByte(listPtr, position++);
        if (value != 0)
        {
            bytes.Add(value);
            continue;
        }

        // A single null terminates one device name.
        results.Add(Encoding.UTF8.GetString(bytes.ToArray()));
        bytes.Clear();

        // A second consecutive null terminates the whole list.
        if (Marshal.ReadByte(listPtr, position) == 0)
        {
            break;
        }
    }

    return results.ToArray();
}
// Queues one block of samples on the AL source via a pooled direct buffer,
// then kicks playback and recycles any processed buffers.
// NOTE(review): assumes 'buffer' holds 16-bit sample data matching the
// FormatStereo flag — confirm against the caller's decode path.
private void alSourceQueueBuffer(sbyte[] buffer) {
    int alBuffer = soundBufferManager.Buffer;
    // Borrow a direct buffer large enough for the payload and copy the samples in.
    ByteBuffer directBuffer = soundBufferManager.getDirectBuffer(buffer.Length);
    directBuffer.clear();
    directBuffer.limit(buffer.Length);
    directBuffer.put(buffer);
    directBuffer.rewind();
    // Pick the AL format from the current channel layout.
    int alFormat = FormatStereo ? AL10.AL_FORMAT_STEREO16 : AL10.AL_FORMAT_MONO16;
    AL10.alBufferData(alBuffer, alFormat, directBuffer, SampleRate);
    AL10.alSourceQueueBuffers(alSource, alBuffer);
    // Return the direct buffer to the pool; AL has copied the data by now.
    soundBufferManager.releaseDirectBuffer(directBuffer);
    alSourcePlay();
    checkFreeBuffers();
    //if (log.DebugEnabled) { Console.WriteLine(string.Format("alSourceQueueBuffer buffer={0:D}, {1}", alBuffer, ToString())); }
}
// Feeds one block of streamed audio into the stream channel's AL buffer
// queue, growing the queue on startup/underrun and recycling processed
// buffers during steady-state playback.
public static void UpdateStream(ByteBuffer samples, int count, int format, int rate) {
    EnableStreaming();
    int source = channels[numChannels].sourceId;
    int processed = AL10.AlGetSourcei(source, AL10.AL_BUFFERS_PROCESSED);
    bool playing = (AL10.AlGetSourcei(source, AL10.AL_SOURCE_STATE) == AL10.AL_PLAYING);
    // Not playing while several buffers are queued: playback was interrupted.
    bool interupted = !playing && streamQueue > 2;
    Int32Buffer buffer = tmp;
    if (interupted) {
        // Restart: drop the stale queue and take a fresh stream buffer slot
        // (stream buffers live past the static sfx buffers, at MAX_SFX+).
        UnqueueStreams();
        buffer.Put(0, buffers.Get(Sound.MAX_SFX + streamQueue++));
        Com.DPrintf("queue " + (streamQueue - 1) + '\\');
    } else if (processed < 2) {
        // Not enough processed buffers to recycle yet: grow the queue if allowed.
        if (streamQueue >= Sound.STREAM_QUEUE) {
            return;
        }
        buffer.Put(0, buffers.Get(Sound.MAX_SFX + streamQueue++));
        Com.DPrintf("queue " + (streamQueue - 1) + '\\');
    } else {
        // Steady state: recycle an already-processed buffer from the source.
        AL10.AlSourceUnqueueBuffers(source, buffer);
    }
    // Upload the sample window into the chosen buffer and queue it.
    samples.Position = 0;
    samples.Limit = count;
    AL10.AlBufferData(buffer.Get(0), format, samples, rate);
    AL10.AlSourceQueueBuffers(source, buffer);
    // Kick playback once at least two buffers are queued.
    if (streamQueue > 1 && !playing) {
        Com.DPrintf("start sound\\n");
        AL10.AlSourcePlay(source);
    }
}
void PauseSound(uint source, bool paused)
{
    AL10.alGetSourcei(source, AL10.AL_SOURCE_STATE, out var state);
    if (paused)
    {
        if (state == AL10.AL_INITIAL)
        {
            // A source that never started can't be paused directly; bounce it
            // through play/stop so it lands in the stopped ("won't play") state.
            AL10.alSourcePlay(source);
            AL10.alSourceStop(source);
        }
        else if (state == AL10.AL_PLAYING)
        {
            AL10.alSourcePause(source);
        }
    }
    else if (state != AL10.AL_PLAYING)
    {
        // Resume anything that isn't already playing.
        AL10.alSourcePlay(source);
    }
}
public SoundState GetSourceState(IALSource source)
{
    // Map the AL source state onto XNA's three-state model;
    // AL_INITIAL and AL_STOPPED both read back as Stopped.
    int alState;
    AL10.alGetSourcei(
        (source as OpenALSource).Handle,
        AL10.AL_SOURCE_STATE,
        out alState
    );
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif
    if (alState == AL10.AL_PAUSED)
    {
        return SoundState.Paused;
    }
    return alState == AL10.AL_PLAYING ? SoundState.Playing : SoundState.Stopped;
}
public IALSource GenSource(IALBuffer buffer)
{
    // Ask AL for a single source handle; 0 signals allocation failure.
    uint handle;
    AL10.alGenSources(1, out handle);
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif
    if (handle == 0)
    {
        return null;
    }

    // Bind the provided buffer to the freshly created source.
    AL10.alSourcei(
        handle,
        AL10.AL_BUFFER,
        (int) (buffer as OpenALBuffer).Handle
    );
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif
    return new OpenALSource(handle);
}
// Allocates and configures up to MAX_CHANNELS OpenAL sources, stopping early
// once source generation fails or yields an invalid id.
// Returns the number of channels actually created.
public static int Init(Int32Buffer buffers) {
    Channel.buffers = buffers;
    int sourceId;
    numChannels = 0;
    for (int i = 0; i < MAX_CHANNELS; i++) {
        try {
            AL10.AlGenSources(tmp);
            sourceId = tmp.Get(0);
            if (sourceId <= 0) {
                break;
            }
        } catch (OpenALException) {
            // FIX: the exception was previously bound to an unused local
            // (compiler warning CS0168); a failure simply ends allocation.
            break;
        }
        sources.Put(i, sourceId);
        channels[i] = new Channel(sourceId);
        numChannels++;
        // Per-source defaults: unity gain/pitch, world-relative, no looping.
        AL10.AlSourcef(sourceId, AL10.AL_GAIN, 1F);
        AL10.AlSourcef(sourceId, AL10.AL_PITCH, 1F);
        AL10.AlSourcei(sourceId, AL10.AL_SOURCE_RELATIVE, AL10.AL_FALSE);
        AL10.AlSource(sourceId, AL10.AL_VELOCITY, NULLVECTOR);
        AL10.AlSourcei(sourceId, AL10.AL_LOOPING, AL10.AL_FALSE);
        // Attenuation parameters tuned for the game's world scale.
        AL10.AlSourcef(sourceId, AL10.AL_REFERENCE_DISTANCE, 200F);
        AL10.AlSourcef(sourceId, AL10.AL_MIN_GAIN, 0.0005F);
        AL10.AlSourcef(sourceId, AL10.AL_MAX_GAIN, 1F);
    }
    sources.Limit = numChannels;
    return numChannels;
}
// Opens the default AL playback device, creates and activates a context, and
// configures a centered, unattenuated listener to match XNA's audio model.
// Throws if any step of device/context setup fails.
private OpenALDevice() {
    // An empty device name requests the implementation's default device.
    alDevice = ALC10.alcOpenDevice(string.Empty);
    if (CheckALCError("Could not open AL device") || alDevice == IntPtr.Zero) {
        throw new Exception("Could not open audio device!");
    }
    // No context attributes: accept the device defaults.
    int[] attribute = new int[0];
    alContext = ALC10.alcCreateContext(alDevice, attribute);
    if (CheckALCError("Could not create OpenAL context") || alContext == IntPtr.Zero) {
        Dispose();
        throw new Exception("Could not create OpenAL context");
    }
    ALC10.alcMakeContextCurrent(alContext);
    if (CheckALCError("Could not make OpenAL context current")) {
        Dispose();
        throw new Exception("Could not make OpenAL context current");
    }
    // Listener faces -Z with +Y up, sits at the origin, stationary, full gain.
    float[] ori = new float[] { 0.0f, 0.0f, -1.0f, 0.0f, 1.0f, 0.0f };
    AL10.alListenerfv(AL10.AL_ORIENTATION, ori);
    AL10.alListener3f(AL10.AL_POSITION, 0.0f, 0.0f, 0.0f);
    AL10.alListener3f(AL10.AL_VELOCITY, 0.0f, 0.0f, 0.0f);
    AL10.alListenerf(AL10.AL_GAIN, 1.0f);
    // We do NOT use automatic attenuation! XNA does not do this!
    AL10.alDistanceModel(AL10.AL_NONE);
    instancePool = new List<SoundEffectInstance>();
    dynamicInstancePool = new List<DynamicSoundEffectInstance>();
}
// Submits a block of PCM data for streaming playback.
// offset/count are byte positions within 'buffer'. Queues immediately when
// already playing, otherwise defers queuing until Play().
public void SubmitBuffer(byte[] buffer, int offset, int count)
{
    // Generate a buffer if we don't have any to use.
    if (availableBuffers.Count == 0)
    {
        uint buf;
        AL10.alGenBuffers((IntPtr) 1, out buf);
        availableBuffers.Enqueue(buf);
    }

    // Push the data to OpenAL.
    uint newBuf = availableBuffers.Dequeue();
    // FIX: honor 'offset' (previously ignored, see the old TODO) by pinning
    // the array and passing a pointer advanced by the byte offset, the same
    // technique the float SetBufferData path uses.
    GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
    try
    {
        AL10.alBufferData(
            newBuf,
            XNAToShort[channels],
            handle.AddrOfPinnedObject() + offset,
            (IntPtr) count,
            (IntPtr) sampleRate
        );
    }
    finally
    {
        handle.Free();
    }

    // If we're already playing, queue immediately.
    if (State == SoundState.Playing)
    {
        AL10.alSourceQueueBuffers(
            INTERNAL_alSource,
            (IntPtr) 1,
            ref newBuf
        );
        queuedBuffers.Enqueue(newBuf);
    }
    else
    {
        buffersToQueue.Enqueue(newBuf);
    }

    PendingBufferCount += 1;
}
// Tears down the audio engine: deletes all sources, destroys the AL context,
// and closes the device. Order matters: sources, then context, then device.
public static void Shutdown() {
    // Destroy the sources
    AL10.alDeleteSources(MAX_SOURCE_COUNT, s_allSources);
    ALUtils.CheckALError("unable to free audio sources");
    s_availableSources.Clear();
    s_usedSources.Clear();
    // Destroy the context, and then close the device
    // (the context must be detached before it can be destroyed).
    ALC10.alcMakeContextCurrent(IntPtr.Zero);
    ALUtils.CheckALCError();
    ALC10.alcDestroyContext(Context);
    ALUtils.CheckALCError();
    Context = IntPtr.Zero;
    ALC10.alcCloseDevice(Device);
    ALUtils.CheckALCError();
    Device = IntPtr.Zero;
    IsShutdown = true;
    // Report
    LINFO("Shutdown OpenAL audio engine.");
}
public void SetBufferData( IALBuffer buffer, AudioChannels channels, float[] data, int offset, int count, int sampleRate )
{
    // Pin the managed array so AL can read it directly, then upload float PCM.
    // offset/count are in float samples; AL sizes are in bytes (x4).
    GCHandle pin = GCHandle.Alloc(data, GCHandleType.Pinned);
    IntPtr src = pin.AddrOfPinnedObject() + (offset * 4);
    AL10.alBufferData(
        (buffer as OpenALBuffer).Handle,
        XNAToFloat[(int) channels],
        src,
        (IntPtr) (count * 4),
        (IntPtr) sampleRate
    );
    pin.Free();
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif
}
public void SetSourcePitch(IALSource source, float pitch, bool clamp)
{
    /* XNA pitch lives in [-1.0f, 1.0f], one octave either way, while
     * AL_PITCH is a ratio on (0.0f, INF). Mapping x -> 2^x converts the
     * XNA domain onto the AL range: f(0) = 1 (unchanged), f(1) = 2 (up an
     * octave), f(-1) = 0.5 (down an octave). The XNA limit is arbitrarily
     * two octaves on the domain.
     * -flibit
     */
    if (clamp && (pitch < -1.0f || pitch > 1.0f))
    {
        throw new Exception("XNA PITCH MUST BE WITHIN [-1.0f, 1.0f]!");
    }

    float alPitch = (float) Math.Pow(2, pitch);
    AL10.alSourcef(
        (source as OpenALSource).Handle,
        AL10.AL_PITCH,
        alPitch
    );
}
// Queries a null-separated, double-null-terminated device name list from ALC.
// Returns an empty array (and logs) if the query fails.
static string[] QueryDevices(string label, int type)
{
    // Clear error bit
    AL10.alGetError();

    var devices = new List<string>();
    var next = ALC10.alcGetString(IntPtr.Zero, type);
    if (next == IntPtr.Zero || AL10.alGetError() != AL10.AL_NO_ERROR)
    {
        Log.Write("sound", "Failed to query OpenAL device list using {0}", label);
        return new string[] { };
    }

    // FIX: the previous implementation used Marshal.PtrToStringAuto (which
    // reads UTF-16 on Windows) and advanced the pointer by
    // Encoding.Default byte counts, desyncing the walk on non-ASCII device
    // names. Read the raw bytes and decode each name as UTF-8 instead.
    var bytes = new List<byte>();
    var offset = 0;
    while (true)
    {
        var b = Marshal.ReadByte(next, offset++);
        if (b != 0)
        {
            bytes.Add(b);
            continue;
        }

        // A single null terminates one device name...
        devices.Add(Encoding.UTF8.GetString(bytes.ToArray()));
        bytes.Clear();

        // ...two in a row terminate the whole list.
        if (Marshal.ReadByte(next, offset) == 0)
            break;
    }

    return devices.ToArray();
}
public void DequeueSourceBuffers( IALSource source, int buffersToDequeue, Queue <IALBuffer> errorCheck )
{
    // Pull the processed buffer handles off the source's queue.
    uint[] dequeued = new uint[buffersToDequeue];
    AL10.alSourceUnqueueBuffers(
        (source as OpenALSource).Handle,
        (IntPtr) buffersToDequeue,
        dequeued
    );
#if DEBUG
    // Sanity-check that AL returned the buffers in the order we tracked them.
    IALBuffer[] expected = errorCheck.ToArray();
    for (int i = 0; i < buffersToDequeue; i += 1)
    {
        if (dequeued[i] != (expected[i] as OpenALBuffer).Handle)
        {
            throw new Exception("Buffer desync!");
        }
    }
#endif
}
// Downmixes a stereo AL buffer to mono by averaging each left/right sample
// pair, returning a freshly generated mono buffer at the same sample rate.
// Handles 8-bit (sbyte) and 16-bit (short) integer sample formats.
public IALBuffer ConvertStereoToMono(IALBuffer buffer) {
    OpenALBuffer buf = buffer as OpenALBuffer;
    int bufLen, bits;
    AL10.alGetBufferi(
        buf.Handle,
        AL10.AL_SIZE,
        out bufLen
    );
    AL10.alGetBufferi(
        buf.Handle,
        AL10.AL_BITS,
        out bits
    );
    // Convert bit depth to bytes per sample (1 or 2).
    bits /= 8;
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif
    // Read the interleaved stereo samples back out of the AL buffer.
    byte[] data = new byte[bufLen];
    GCHandle dataHandle = GCHandle.Alloc(data, GCHandleType.Pinned);
    IntPtr dataPtr = dataHandle.AddrOfPinnedObject();
    ALEXT.alGetBufferSamplesSOFT(
        buf.Handle,
        0,
        bufLen / bits / 2, // frame count: bytes / bytesPerSample / 2 channels
        ALEXT.AL_STEREO_SOFT,
        bits == 2 ? ALEXT.AL_SHORT_SOFT : ALEXT.AL_BYTE_SOFT,
        dataPtr
    );
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif
    // The mono output is half the size; average L/R pairs into it.
    byte[] monoData = new byte[bufLen / 2];
    GCHandle monoHandle = GCHandle.Alloc(monoData, GCHandleType.Pinned);
    IntPtr monoPtr = monoHandle.AddrOfPinnedObject();
    unsafe {
        if (bits == 2) {
            short *src = (short *)dataPtr;
            short *dst = (short *)monoPtr;
            // monoData.Length / 2 == number of 16-bit mono samples.
            for (int i = 0; i < monoData.Length / 2; i += 1) {
                dst[i] = (short)(((int)src[0] + (int)src[1]) / 2);
                src += 2;
            }
        } else {
            sbyte *src = (sbyte *)dataPtr;
            sbyte *dst = (sbyte *)monoPtr;
            for (int i = 0; i < monoData.Length; i += 1) {
                dst[i] = (sbyte)(((short)src[0] + (short)src[1]) / 2);
                src += 2;
            }
        }
    }
    monoHandle.Free();
    dataHandle.Free();
    data = null;
    // NOTE(review): formatParameter = bits - 1 yields 1 for 16-bit and 0 for
    // 8-bit, matching GenBuffer's PCM16/PCM8 selection.
    return(GenBuffer(
        monoData,
        (uint)buf.SampleRate,
        1,
        0,
        0,
        false,
        (uint)bits - 1
    ));
}
// Creates an OpenAL buffer from raw audio data and returns it wrapped with
// its computed duration. Supports MSADPCM (via AL_SOFT extensions) and
// 8/16-bit PCM; formatParameter is the ADPCM block alignment when isADPCM,
// otherwise 1 selects PCM16 and any other value PCM8. loopStart/loopEnd
// (in samples) are applied via AL_SOFT_loop_points when nonzero.
public IALBuffer GenBuffer( byte[] data, uint sampleRate, uint channels, uint loopStart, uint loopEnd, bool isADPCM, uint formatParameter ) {
    uint result;
    // Generate the buffer now, in case we need to perform alBuffer ops.
    AL10.alGenBuffers(1, out result);
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif
    int format;
    int length = data.Length;
    if (isADPCM) {
        format = (channels == 2) ?
            ALEXT.AL_FORMAT_STEREO_MSADPCM_SOFT :
            ALEXT.AL_FORMAT_MONO_MSADPCM_SOFT;
        // ADPCM needs the block alignment before the data upload.
        AL10.alBufferi(
            result,
            ALEXT.AL_UNPACK_BLOCK_ALIGNMENT_SOFT,
            (int)formatParameter
        );
    } else {
        if (formatParameter == 1) {
            format = (channels == 2) ? AL10.AL_FORMAT_STEREO16: AL10.AL_FORMAT_MONO16;
            /* We have to perform extra data validation on
             * PCM16 data, as the MS SoundEffect builder will
             * leave extra bytes at the end which will confuse
             * alBufferData and throw an AL_INVALID_VALUE.
             * -flibit
             */
            length &= 0x7FFFFFFE;
        } else {
            format = (channels == 2) ? AL10.AL_FORMAT_STEREO8: AL10.AL_FORMAT_MONO8;
        }
    }
    // Load it!
    AL10.alBufferData(
        result,
        format,
        data,
        length,
        (int)sampleRate
    );
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif
    // Calculate the duration now, after we've unpacked the buffer
    int bufLen, bits;
    AL10.alGetBufferi(
        result,
        AL10.AL_SIZE,
        out bufLen
    );
    AL10.alGetBufferi(
        result,
        AL10.AL_BITS,
        out bits
    );
    // Zero size/bits means the upload failed; bail before dividing by zero.
    if (bufLen == 0 || bits == 0) {
        throw new InvalidOperationException(
            "OpenAL buffer allocation failed!"
        );
    }
    // bytes / bytesPerSample / channels = frames; divide by rate for seconds.
    TimeSpan resultDur = TimeSpan.FromSeconds(
        bufLen / (bits / 8) / channels / ((double)sampleRate)
    );
    // Set the loop points, if applicable
    if (loopStart > 0 || loopEnd > 0) {
        AL10.alBufferiv(
            result,
            ALEXT.AL_LOOP_POINTS_SOFT,
            new int[] { (int)loopStart, (int)loopEnd }
        );
    }
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif
    // Finally.
    return(new OpenALBuffer(result, resultDur, (int)channels, (int)sampleRate));
}
// Applies the global Doppler factor for the current AL context.
public void SetDopplerScale(float scale) {
    AL10.alDopplerFactor(scale);
}
// Uploads raw audio data into a new AL buffer (INTERNAL_buffer), computes
// Duration from the unpacked size, and applies optional loop points.
// Supports MSADPCM (via AL_SOFT extensions) and 8/16-bit PCM; formatParameter
// is the ADPCM block alignment, or 1 for PCM16 / other for PCM8.
private void INTERNAL_bufferData( byte[] data, uint sampleRate, uint channels, uint loopStart, uint loopEnd, bool isADPCM, uint formatParameter )
{
    if (OpenALDevice.Instance == null)
    {
        throw new NoAudioHardwareException();
    }

    // Generate the buffer now, in case we need to perform alBuffer ops.
    AL10.alGenBuffers((IntPtr) 1, out INTERNAL_buffer);

    int format;
    if (isADPCM)
    {
        format = (channels == 2) ?
            ALEXT.AL_FORMAT_STEREO_MSADPCM_SOFT :
            ALEXT.AL_FORMAT_MONO_MSADPCM_SOFT;
        // ADPCM needs the block alignment set before the data upload.
        AL10.alBufferi(
            INTERNAL_buffer,
            ALEXT.AL_UNPACK_BLOCK_ALIGNMENT_SOFT,
            (int) formatParameter
        );
    }
    else
    {
        if (formatParameter == 1)
        {
            format = (channels == 2) ? AL10.AL_FORMAT_STEREO16 : AL10.AL_FORMAT_MONO16;
        }
        else
        {
            format = (channels == 2) ? AL10.AL_FORMAT_STEREO8 : AL10.AL_FORMAT_MONO8;
        }
    }

    // Load it!
    AL10.alBufferData(
        INTERNAL_buffer,
        format,
        data,
        (IntPtr) data.Length,
        (IntPtr) sampleRate
    );

    // Calculate the duration now, after we've unpacked the buffer
    int bufLen, bits;
    AL10.alGetBufferi(
        INTERNAL_buffer,
        AL10.AL_SIZE,
        out bufLen
    );
    AL10.alGetBufferi(
        INTERNAL_buffer,
        AL10.AL_BITS,
        out bits
    );
    // FIX: guard against a failed upload (zero size/bits), which previously
    // produced a DivideByZeroException in the duration math below. This
    // mirrors the validation in the GenBuffer path.
    if (bufLen == 0 || bits == 0)
    {
        throw new InvalidOperationException(
            "OpenAL buffer allocation failed!"
        );
    }
    Duration = TimeSpan.FromSeconds(
        bufLen / (bits / 8) / channels / ((double) sampleRate)
    );

    // Set the loop points, if applicable
    if (loopStart > 0 || loopEnd > 0)
    {
        AL10.alBufferiv(
            INTERNAL_buffer,
            ALEXT.AL_LOOP_POINTS_SOFT,
            new int[] { (int) loopStart, (int) loopEnd }
        );
    }
}
// Allocates a single OpenAL buffer handle into _handle, reporting any AL
// error through the audio system's error check.
public AudioBuffer() {
    AL10.alGenBuffers(1, out _handle);
    AudioSystem.alCheckError();
}
// Copies float sample data out of the queued buffers into 'samples',
// starting at the source's current sample offset and spilling into
// buffer[1] when the read range crosses the end of buffer[0].
// NOTE(review): sizing assumes 16-bit samples (see FIXME below).
public void GetBufferData( IALSource source, IALBuffer[] buffer, IntPtr samples, int samplesLen, AudioChannels channels ) {
    // Sample counts per channel for each of the two possible copies.
    int copySize1 = samplesLen / (int)channels;
    int copySize2 = 0;
    // Where are we now?
    int offset;
    AL10.alGetSourcei(
        (source as OpenALSource).Handle,
        AL11.AL_SAMPLE_OFFSET,
        out offset
    );
    // Is that longer than what the active buffer has left...?
    uint buf = (buffer[0] as OpenALBuffer).Handle;
    int len;
    AL10.alGetBufferi(
        buf,
        AL10.AL_SIZE,
        out len
    );
    len /= 2; // FIXME: Assuming 16-bit!
    len /= (int)channels;
    if (offset > len) {
        // Entirely past buffer[0]: read everything from buffer[1].
        copySize2 = copySize1;
        copySize1 = 0;
        offset -= len;
    } else if (offset + copySize1 > len) {
        // Straddles the boundary: split the copy across both buffers.
        copySize2 = copySize1 - (len - offset);
        copySize1 = (len - offset);
    }
    // Copy!
    if (copySize1 > 0) {
        ALEXT.alGetBufferSamplesSOFT(
            buf,
            offset,
            copySize1,
            channels == AudioChannels.Stereo ? ALEXT.AL_STEREO_SOFT : ALEXT.AL_MONO_SOFT,
            ALEXT.AL_FLOAT_SOFT,
            samples
        );
        offset = 0;
    }
    if (buffer.Length > 1 && copySize2 > 0) {
        // Second copy lands immediately after the first in the output.
        ALEXT.alGetBufferSamplesSOFT(
            (buffer[1] as OpenALBuffer).Handle,
            0,
            copySize2,
            channels == AudioChannels.Stereo ? ALEXT.AL_STEREO_SOFT : ALEXT.AL_MONO_SOFT,
            ALEXT.AL_FLOAT_SOFT,
            samples + (copySize1 * (int)channels)
        );
    }
}
// Starts (or restarts) playback, honoring an optional start delay tracked by
// INTERNAL_timer. Each play creates a fresh AL source, binds the parent
// effect's buffer, and re-applies pan/position, volume, loop, pitch and EFX.
public virtual void Play() {
    if (State != SoundState.Stopped && INTERNAL_alSource != 0) // FIXME: alSource check part of timer hack!
    {
        // FIXME: Is this XNA4 behavior?
        Stop();
    }
    // Delay handling: start the timer on the first call, then return early
    // until the configured delay has elapsed.
    if (INTERNAL_delayMS != 0 && !INTERNAL_timer.IsRunning) {
        INTERNAL_timer.Start();
    }
    if (INTERNAL_timer.ElapsedMilliseconds < INTERNAL_delayMS) {
        return; // We'll be back...
    }
    INTERNAL_timer.Stop();
    INTERNAL_timer.Reset();
    if (INTERNAL_alSource != 0) {
        // The sound has stopped, but hasn't cleaned up yet...
        AL10.alSourceStop(INTERNAL_alSource);
        AL10.alDeleteSources((IntPtr)1, ref INTERNAL_alSource);
        INTERNAL_alSource = 0;
    }
    AL10.alGenSources((IntPtr)1, out INTERNAL_alSource);
    if (INTERNAL_alSource == 0) {
        System.Console.WriteLine("WARNING: AL SOURCE WAS NOT AVAILABLE. SKIPPING.");
        return;
    }
    // Attach the buffer to this source
    AL10.alSourcei(
        INTERNAL_alSource,
        AL10.AL_BUFFER,
        (int)INTERNAL_parentEffect.INTERNAL_buffer
    );
    // Apply Pan/Position
    if (INTERNAL_positionalAudio) {
        // Position wins over pan; the flag is one-shot and reset here.
        INTERNAL_positionalAudio = false;
        AL10.alSource3f(
            INTERNAL_alSource,
            AL10.AL_POSITION,
            position.X,
            position.Y,
            position.Z
        );
    } else {
        // Self-assignment pushes the cached property onto the new AL source.
        Pan = Pan;
    }
    // Reassign Properties, in case the AL properties need to be applied.
    Volume = Volume;
    IsLooped = IsLooped;
    Pitch = Pitch;
    // Apply EFX
    if (INTERNAL_alEffectSlot != 0) {
        AL10.alSource3i(
            INTERNAL_alSource,
            EFX.AL_AUXILIARY_SEND_FILTER,
            (int)INTERNAL_alEffectSlot,
            0,
            0
        );
    }
    AL10.alSourcePlay(INTERNAL_alSource);
}
// Silences the listener and resets all playback bookkeeping.
public void StopAllSounds() {
    // Mute the listener so nothing is audible while state resets.
    AL10.AlListenerf(AL10.AL_GAIN, 0);
    PlaySound.Reset();
    Channel.Reset();
}
// Stops playback and detaches the current buffer from the source
// (AL_BUFFER = 0 unbinds it, freeing the buffer for reuse or deletion).
public virtual void Stop() {
    StopSource();
    AL10.alSourcei(Source, AL10.AL_BUFFER, 0);
}
// Starts streaming playback: rebuilds the AL source, requeues pending
// buffers, re-applies cached properties, registers with the device's dynamic
// instance pool, and requests more data if the queue is running low.
public override void Play() {
    if (State != SoundState.Stopped) {
        return; // No-op if we're already playing.
    }
    if (INTERNAL_alSource != 0) {
        // The sound has stopped, but hasn't cleaned up yet...
        AL10.alSourceStop(INTERNAL_alSource);
        AL10.alDeleteSources((IntPtr)1, ref INTERNAL_alSource);
        INTERNAL_alSource = 0;
    }
    // Recycle previously-queued buffers back into the available pool.
    while (queuedBuffers.Count > 0) {
        availableBuffers.Enqueue(queuedBuffers.Dequeue());
    }
    AL10.alGenSources((IntPtr)1, out INTERNAL_alSource);
    if (INTERNAL_alSource == 0) {
        System.Console.WriteLine("WARNING: AL SOURCE WAS NOT AVAILABLE. SKIPPING.");
        return;
    }
    // Queue the buffers to this source
    while (buffersToQueue.Count > 0) {
        uint nextBuf = buffersToQueue.Dequeue();
        queuedBuffers.Enqueue(nextBuf);
        AL10.alSourceQueueBuffers(
            INTERNAL_alSource,
            (IntPtr)1,
            ref nextBuf
        );
    }
    // Apply Pan/Position
    if (INTERNAL_positionalAudio) {
        // Position wins over pan; the flag is one-shot and reset here.
        INTERNAL_positionalAudio = false;
        AL10.alSource3f(
            INTERNAL_alSource,
            AL10.AL_POSITION,
            position.X,
            position.Y,
            position.Z
        );
    } else {
        // Self-assignment pushes the cached property onto the new AL source.
        Pan = Pan;
    }
    // Reassign Properties, in case the AL properties need to be applied.
    Volume = Volume;
    IsLooped = IsLooped;
    Pitch = Pitch;
    // Finally.
    AL10.alSourcePlay(INTERNAL_alSource);
    OpenALDevice.Instance.dynamicInstancePool.Add(this);
    // ... but wait! What if we need moar buffers?
    if (PendingBufferCount <= 2 && BufferNeeded != null) {
        BufferNeeded(this, null);
    }
}
// Streams 'stream' into memory on a background task, then swaps the loaded
// audio onto the AL source and monitors playback until it finishes or is
// cancelled via cts. A silent buffer is bound first so state changes work
// while loading.
public OpenAlAsyncLoadSound(uint source, bool looping, bool relative, WPos pos, float volume, int channels, int sampleBits, int sampleRate, Stream stream)
    : base(source, looping, relative, pos, volume, sampleRate) {
    // Load a silent buffer into the source. Without this,
    // attempting to change the state (i.e. play/pause) the source fails on some systems.
    var silentSource = new OpenAlSoundSource(SilentData, SilentData.Length, channels, sampleBits, sampleRate);
    AL10.alSourcei(source, AL10.AL_BUFFER, (int)silentSource.Buffer);
    playTask = Task.Run(async() => {
        MemoryStream memoryStream;
        using (stream) {
            try {
                // Presize when the stream reports a Length to avoid regrowth.
                memoryStream = new MemoryStream((int)stream.Length);
            } catch (NotSupportedException) {
                // Fallback for stream types that don't support Length.
                memoryStream = new MemoryStream();
            }
            try {
                await stream.CopyToAsync(memoryStream, 81920, cts.Token);
            } catch (TaskCanceledException) {
                // Sound was stopped early, cleanup the unused buffer and exit.
                AL10.alSourceStop(source);
                AL10.alSourcei(source, AL10.AL_BUFFER, 0);
                silentSource.Dispose();
                return;
            }
        }
        var data = memoryStream.GetBuffer();
        var dataLength = (int)memoryStream.Length;
        var bytesPerSample = sampleBits / 8f;
        var lengthInSecs = dataLength / (channels * bytesPerSample * sampleRate);
        using (var soundSource = new OpenAlSoundSource(data, dataLength, channels, sampleBits, sampleRate)) {
            // Need to stop the source, before attaching the real input and deleting the silent one.
            AL10.alSourceStop(source);
            AL10.alSourcei(source, AL10.AL_BUFFER, (int)soundSource.Buffer);
            silentSource.Dispose();
            lock (cts) {
                if (!cts.IsCancellationRequested) {
                    // TODO: A race condition can happen between the state check and playing/rewinding if a
                    // user pauses/resumes at the right moment. The window of opportunity is small and the
                    // consequences are minor, so for now we'll ignore it.
                    int state;
                    AL10.alGetSourcei(Source, AL10.AL_SOURCE_STATE, out state);
                    if (state != AL10.AL_STOPPED) {
                        AL10.alSourcePlay(source);
                    } else {
                        // A stopped sound indicates it was paused before we finishing loaded.
                        // We don't want to start playing it right away.
                        // We rewind the source so when it is started, it plays from the beginning.
                        AL10.alSourceRewind(source);
                    }
                }
            }
            while (!cts.IsCancellationRequested) {
                // Need to check seek before state. Otherwise, the music can stop after our state check at
                // which point the seek will be zero, meaning we'll wait the full track length before seeing it
                // has stopped.
                var currentSeek = SeekPosition;
                int state;
                AL10.alGetSourcei(Source, AL10.AL_SOURCE_STATE, out state);
                if (state == AL10.AL_STOPPED) {
                    break;
                }
                try {
                    // Wait until the track is due to complete, and at most 60 times a second to prevent a
                    // busy-wait.
                    var delaySecs = Math.Max(lengthInSecs - currentSeek, 1 / 60f);
                    await Task.Delay(TimeSpan.FromSeconds(delaySecs), cts.Token);
                } catch (TaskCanceledException) {
                    // Sound was stopped early, allow normal cleanup to occur.
                }
            }
            // Detach the buffer so it can be disposed with the sound source.
            AL10.alSourcei(Source, AL10.AL_BUFFER, 0);
        }
    });
}