/// <summary>
/// Creates a SoundEffect from a region of a raw audio buffer, with a loop
/// region expressed as start sample and length in samples.
/// </summary>
public SoundEffect( byte[] buffer, int offset, int count, int sampleRate, AudioChannels channels, int loopStart, int loopLength )
{
	byte[] sendBuf;
	if (offset == 0 && count == buffer.Length)
	{
		// Whole array requested; pass it through untouched.
		sendBuf = buffer;
	}
	else
	{
		// GenBuffer has no offset/length parameters, so trim into a fresh array.
		sendBuf = new byte[count];
		Array.Copy(buffer, offset, sendBuf, 0, count);
	}
	// GenBuffer wants an absolute loop end, not a length.
	INTERNAL_buffer = AudioDevice.GenBuffer(
		sendBuf,
		(uint) sampleRate,
		(uint) channels,
		(uint) loopStart,
		(uint) (loopStart + loopLength),
		false,
		1
	);
}
/// <summary>
/// Creates an AL source statically attached to the given buffer. Returns null
/// if alGenSources fails (id 0). The reference distance is always set from
/// AudioDevice.DistanceScale; XACT sources additionally get AL_MAX_GAIN
/// raised to AudioDevice.MAX_GAIN_VALUE.
/// </summary>
public IALSource GenSource(IALBuffer buffer, bool isXACT) { uint result; AL10.alGenSources(1, out result); #if VERBOSE_AL_DEBUGGING CheckALError(); #endif if (result == 0) { return(null); } AL10.alSourcei( result, AL10.AL_BUFFER, (int)(buffer as OpenALBuffer).Handle ); AL10.alSourcef( result, AL10.AL_REFERENCE_DISTANCE, AudioDevice.DistanceScale ); if (isXACT) { AL10.alSourcef( result, AL10.AL_MAX_GAIN, AudioDevice.MAX_GAIN_VALUE ); } #if VERBOSE_AL_DEBUGGING CheckALError(); #endif return(new OpenALSource(result)); }
/// <summary>
/// No-op: the null audio driver has no storage to fill with float samples.
/// </summary>
public void SetBufferData( IALBuffer buffer, AudioChannels channels, float[] data, int sampleRate )
{
	// Null device: sample data is discarded on purpose.
}
/// <summary>
/// Releases the AL buffer object backing the given IALBuffer.
/// </summary>
public void DeleteBuffer(IALBuffer buffer)
{
	// alDeleteBuffers takes the id by ref, so copy it into a local first.
	uint bufId = (buffer as OpenALBuffer).Handle;
	AL10.alDeleteBuffers(1, ref bufId);
#if VERBOSE_AL_DEBUGGING
	CheckALError();
#endif
}
/// <summary>
/// No-op: the null audio driver discards unmanaged float sample data.
/// </summary>
public void SetBufferFloatData( IALBuffer buffer, IntPtr data, int offset, int count )
{
	// Null device: nothing to upload to.
}
/// <summary>
/// Appends one AL buffer to the source's streaming queue.
/// NOTE(review): the count is passed as IntPtr here while other revisions pass
/// a plain int — confirm which matches this binding's alSourceQueueBuffers
/// signature.
/// </summary>
public void QueueSourceBuffer(IALSource source, IALBuffer buffer) { uint buf = (buffer as OpenALBuffer).Handle; AL10.alSourceQueueBuffers( (source as OpenALSource).Handle, (IntPtr)1, ref buf ); }
/// <summary>
/// No-op: the null audio driver discards unmanaged sample data.
/// </summary>
public void SetBufferData( IALBuffer buffer, AudioChannels channels, IntPtr data, int offset, int count, int sampleRate )
{
	// Null device: nothing to upload to.
}
/// <summary>
/// Appends one AL buffer to the source's streaming queue. The id is copied to
/// a local because the binding takes it by ref.
/// </summary>
public void QueueSourceBuffer(IALSource source, IALBuffer buffer) { uint buf = (buffer as OpenALBuffer).Handle; AL10.alSourceQueueBuffers( (source as OpenALSource).Handle, 1, ref buf ); #if VERBOSE_AL_DEBUGGING CheckALError(); #endif }
/// <summary>
/// Queues a block of float samples for streaming playback. Reuses a pooled AL
/// buffer (generating one if the pool is empty), pins the managed array to
/// upload [offset, offset+count), then either queues it on the live source —
/// restarting the source if it starved and stopped — or stashes it until
/// Play() creates a source. Increments PendingBufferCount by one.
/// </summary>
public void SubmitFloatBufferEXT(float[] buffer, int offset, int count) { /* Float samples are the typical format received from decoders. * We currently use this for the VideoPlayer. * -flibit */ // Generate a buffer if we don't have any to use. if (availableBuffers.Count == 0) { availableBuffers.Enqueue(AudioDevice.ALDevice.GenBuffer()); } // Push buffer to the AL. IALBuffer newBuf = availableBuffers.Dequeue(); GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned); AudioDevice.ALDevice.SetBufferFloatData( newBuf, channels, handle.AddrOfPinnedObject(), offset, count, sampleRate ); handle.Free(); // If we're already playing, queue immediately. if (INTERNAL_alSource != null) { AudioDevice.ALDevice.QueueSourceBuffer( INTERNAL_alSource, newBuf ); queuedBuffers.Enqueue(newBuf); // If the source stopped, reboot it now. if (AudioDevice.ALDevice.GetSourceState(INTERNAL_alSource) == SoundState.Stopped) { AudioDevice.ALDevice.PlaySource(INTERNAL_alSource); } } else { buffersToQueue.Enqueue(newBuf); } PendingBufferCount += 1; }
/// <summary>
/// Uploads a full float sample array to the AL buffer. The byte size is
/// data.Length * 4 (4 bytes per float); XNAToFloat maps the XNA channel count
/// to the matching AL float format.
/// </summary>
public void SetBufferData( IALBuffer buffer, AudioChannels channels, float[] data, int sampleRate ) { AL10.alBufferData( (buffer as OpenALBuffer).Handle, XNAToFloat[channels], data, (IntPtr)(data.Length * 4), (IntPtr)sampleRate ); }
/// <summary>
/// Creates a SoundEffect from a raw audio buffer with no loop region.
/// </summary>
public SoundEffect( byte[] buffer, int sampleRate, AudioChannels channels )
{
	// Loop points are zeroed (no loop); not ADPCM; format parameter 1
	// (presumably 16-bit per GenBuffer's convention — TODO confirm).
	INTERNAL_buffer = AudioDevice.GenBuffer(
		buffer,
		(uint) sampleRate,
		(uint) channels,
		0,
		0,
		false,
		1
	);
}
/// <summary>
/// Wraps a raw audio buffer in a new AL buffer; no loop region is defined.
/// </summary>
public SoundEffect( byte[] buffer, int sampleRate, AudioChannels channels )
{
	uint rate = (uint) sampleRate;
	uint chans = (uint) channels;
	// Zero loop points, not ADPCM, format parameter 1.
	INTERNAL_buffer = AudioDevice.GenBuffer(buffer, rate, chans, 0, 0, false, 1);
}
/// <summary>
/// Creates an AL source statically bound to the given buffer. Returns null if
/// alGenSources fails (id 0).
/// </summary>
public IALSource GenSource(IALBuffer buffer) { uint result; AL10.alGenSources((IntPtr)1, out result); if (result == 0) { return(null); } AL10.alSourcei( result, AL10.AL_BUFFER, (int)(buffer as OpenALBuffer).Handle ); return(new OpenALSource(result)); }
/// <summary>
/// Uploads count bytes of sample data to the AL buffer; XNAToShort maps the
/// XNA channel count to a 16-bit AL format. Note the outstanding TODO: the
/// data is always read from index 0, an offset parameter is not supported.
/// </summary>
public void SetBufferData( IALBuffer buffer, AudioChannels channels, byte[] data, int count, int sampleRate ) { AL10.alBufferData( (buffer as OpenALBuffer).Handle, XNAToShort[channels], data, // TODO: offset -flibit (IntPtr)count, (IntPtr)sampleRate ); }
/// <summary>
/// Queues a block of byte sample data for streaming playback. Reuses a pooled
/// AL buffer (generating one if the pool is empty), pins the array to upload
/// [offset, offset+count), then either queues it on the live source —
/// restarting the source if it starved and stopped — or stashes it until
/// Play() creates a source. Increments PendingBufferCount by one.
/// </summary>
public void SubmitBuffer(byte[] buffer, int offset, int count) { // Generate a buffer if we don't have any to use. if (availableBuffers.Count == 0) { availableBuffers.Enqueue( AudioDevice.ALDevice.GenBuffer() ); } // Push the data to OpenAL. IALBuffer newBuf = availableBuffers.Dequeue(); GCHandle handle = GCHandle.Alloc(buffer, GCHandleType.Pinned); AudioDevice.ALDevice.SetBufferData( newBuf, channels, handle.AddrOfPinnedObject(), offset, count, sampleRate ); handle.Free(); // If we're already playing, queue immediately. if (INTERNAL_alSource != null) { AudioDevice.ALDevice.QueueSourceBuffer( INTERNAL_alSource, newBuf ); queuedBuffers.Enqueue(newBuf); // If the source stopped, reboot it now. if (AudioDevice.ALDevice.GetSourceState(INTERNAL_alSource) == SoundState.Stopped) { AudioDevice.ALDevice.PlaySource(INTERNAL_alSource); } } else { buffersToQueue.Enqueue(newBuf); } PendingBufferCount += 1; }
/// <summary>
/// Uploads a full float sample array to the AL buffer (byte size is
/// data.Length * 4). XNAToFloat maps the XNA channel count to an AL float
/// format.
/// </summary>
public void SetBufferData( IALBuffer buffer, AudioChannels channels, float[] data, int sampleRate ) { AL10.alBufferData( (buffer as OpenALBuffer).Handle, XNAToFloat[channels], data, (IntPtr)(data.Length * 4), (IntPtr)sampleRate ); #if VERBOSE_AL_DEBUGGING CheckALError(); #endif }
/// <summary>
/// Uploads count bytes of sample data to the AL buffer as 16-bit samples
/// (XNAToShort). The outstanding TODO notes data is always read from index 0;
/// there is no offset parameter.
/// </summary>
public void SetBufferData( IALBuffer buffer, AudioChannels channels, byte[] data, int count, int sampleRate ) { AL10.alBufferData( (buffer as OpenALBuffer).Handle, XNAToShort[channels], data, // TODO: offset -flibit (IntPtr)count, (IntPtr)sampleRate ); #if VERBOSE_AL_DEBUGGING CheckALError(); #endif }
/// <summary>
/// Uploads count float samples from unmanaged memory into the AL buffer.
/// offset and count are in float samples, converted to bytes with * 4.
/// </summary>
public void SetBufferFloatData( IALBuffer buffer, AudioChannels channels, IntPtr data, int offset, int count, int sampleRate ) { AL10.alBufferData( (buffer as OpenALBuffer).Handle, XNAToFloat[(int)channels], data + (offset * 4), count * 4, sampleRate ); #if VERBOSE_AL_DEBUGGING CheckALError(); #endif }
/// <summary>
/// Uploads count float samples from unmanaged memory into the AL buffer,
/// taking the channel layout and sample rate from the buffer object itself.
/// offset/count are in float samples (* 4 converts to bytes).
/// </summary>
public void SetBufferFloatData( IALBuffer buffer, IntPtr data, int offset, int count ) { OpenALBuffer buf = buffer as OpenALBuffer; AL10.alBufferData( buf.Handle, XNAToFloat[buf.Channels], data + (offset * 4), count * 4, buf.SampleRate ); #if VERBOSE_AL_DEBUGGING CheckALError(); #endif }
/// <summary>
/// Creates an AL source statically bound to the given buffer. Returns null if
/// alGenSources fails (id 0).
/// </summary>
public IALSource GenSource(IALBuffer buffer) { uint result; AL10.alGenSources(1, out result); #if VERBOSE_AL_DEBUGGING CheckALError(); #endif if (result == 0) { return(null); } AL10.alSourcei( result, AL10.AL_BUFFER, (int)(buffer as OpenALBuffer).Handle ); #if VERBOSE_AL_DEBUGGING CheckALError(); #endif return(new OpenALSource(result)); }
/// <summary>
/// XNA4 API extension: queues a full float sample array for streaming
/// playback. Reuses a pooled AL buffer, uploads the data, then queues it on
/// the live source or stashes it for Play(). Increments PendingBufferCount.
/// NOTE(review): unlike other submit paths, this does not restart a source
/// that stopped from buffer starvation.
/// </summary>
/* THIS IS AN EXTENSION OF THE XNA4 API! */ public void SubmitFloatBufferEXT(float[] buffer) { /* Float samples are the typical format received from decoders. * We currently use this for the VideoPlayer. * -flibit */ // Generate a buffer if we don't have any to use. if (availableBuffers.Count == 0) { availableBuffers.Enqueue(AudioDevice.ALDevice.GenBuffer()); } // Push buffer to the AL. IALBuffer newBuf = availableBuffers.Dequeue(); AudioDevice.ALDevice.SetBufferData( newBuf, channels, buffer, sampleRate ); // If we're already playing, queue immediately. if (INTERNAL_alSource != null) { AudioDevice.ALDevice.QueueSourceBuffer( INTERNAL_alSource, newBuf ); queuedBuffers.Enqueue(newBuf); } else { buffersToQueue.Enqueue(newBuf); } PendingBufferCount += 1; }
/// <summary>
/// Internal constructor used by content loading: stores the asset name and
/// uploads the raw buffer with explicit loop points and format information.
/// </summary>
internal SoundEffect( string name, byte[] buffer, uint sampleRate, uint channels, uint loopStart, uint loopLength, bool isADPCM, uint formatParameter )
{
	Name = name;
	// GenBuffer expects an absolute loop end rather than a length.
	uint loopEnd = loopStart + loopLength;
	INTERNAL_buffer = AudioDevice.GenBuffer(
		buffer,
		sampleRate,
		channels,
		loopStart,
		loopEnd,
		isADPCM,
		formatParameter
	);
}
/// <summary>
/// Uploads count float samples starting at a float offset from a managed
/// array. The array is pinned for the duration of the AL call; offset/count
/// are in float samples (* 4 converts to bytes).
/// </summary>
public void SetBufferData( IALBuffer buffer, AudioChannels channels, float[] data, int offset, int count, int sampleRate ) { GCHandle handle = GCHandle.Alloc(data, GCHandleType.Pinned); AL10.alBufferData( (buffer as OpenALBuffer).Handle, XNAToFloat[(int)channels], handle.AddrOfPinnedObject() + (offset * 4), (IntPtr)(count * 4), (IntPtr)sampleRate ); handle.Free(); #if VERBOSE_AL_DEBUGGING CheckALError(); #endif }
/// <summary>
/// Queues a block of byte sample data for streaming playback, reusing a
/// pooled AL buffer when possible. The TODO notes the offset parameter is
/// currently ignored — data is always read from index 0. Queues on the live
/// source or stashes until Play(); increments PendingBufferCount.
/// NOTE(review): this revision does not restart a starved/stopped source.
/// </summary>
public void SubmitBuffer(byte[] buffer, int offset, int count) { // Generate a buffer if we don't have any to use. if (availableBuffers.Count == 0) { availableBuffers.Enqueue( AudioDevice.ALDevice.GenBuffer() ); } // Push the data to OpenAL. IALBuffer newBuf = availableBuffers.Dequeue(); AudioDevice.ALDevice.SetBufferData( newBuf, channels, buffer, // TODO: offset -flibit count, sampleRate ); // If we're already playing, queue immediately. if (INTERNAL_alSource != null) { AudioDevice.ALDevice.QueueSourceBuffer( INTERNAL_alSource, newBuf ); queuedBuffers.Enqueue(newBuf); } else { buffersToQueue.Enqueue(newBuf); } PendingBufferCount += 1; }
/// <summary>
/// Creates a SoundEffect from a sub-range of a raw audio buffer with a sample
/// loop region.
/// </summary>
public SoundEffect( byte[] buffer, int offset, int count, int sampleRate, AudioChannels channels, int loopStart, int loopLength )
{
	// GenBuffer cannot take an offset/length, so copy out the region unless
	// the caller handed us the entire array.
	byte[] sendBuf = buffer;
	if (offset != 0 || count != buffer.Length)
	{
		sendBuf = new byte[count];
		Array.Copy(buffer, offset, sendBuf, 0, count);
	}
	INTERNAL_buffer = AudioDevice.GenBuffer(
		sendBuf,
		(uint) sampleRate,
		(uint) channels,
		(uint) loopStart,
		(uint) (loopStart + loopLength), // absolute loop end
		false,
		1
	);
}
/// <summary>
/// Creates an AL source statically bound to the given buffer. Returns null if
/// alGenSources fails (id 0).
/// </summary>
public IALSource GenSource(IALBuffer buffer) { uint result; AL10.alGenSources((IntPtr) 1, out result); if (result == 0) { return null; } AL10.alSourcei( result, AL10.AL_BUFFER, (int) (buffer as OpenALBuffer).Handle ); return new OpenALSource(result); }
/// <summary>
/// Uploads a full float sample array to the AL buffer; byte size is
/// data.Length * 4, format comes from the XNAToFloat channel mapping.
/// </summary>
public void SetBufferData( IALBuffer buffer, AudioChannels channels, float[] data, int sampleRate ) { AL10.alBufferData( (buffer as OpenALBuffer).Handle, XNAToFloat[channels], data, (IntPtr) (data.Length * 4), (IntPtr) sampleRate ); }
/// <summary>
/// Uploads count float samples from unmanaged memory into the AL buffer.
/// offset and count are in float samples, converted to bytes with * 4.
/// </summary>
public void SetBufferFloatData( IALBuffer buffer, AudioChannels channels, IntPtr data, int offset, int count, int sampleRate ) { AL10.alBufferData( (buffer as OpenALBuffer).Handle, XNAToFloat[(int) channels], data + (offset * 4), count * 4, sampleRate ); #if VERBOSE_AL_DEBUGGING CheckALError(); #endif }
/// <summary>
/// No-op: the null audio driver has no sample data to read back; the samples
/// array is left untouched.
/// </summary>
public void GetBufferData( IALSource source, IALBuffer[] buffer, float[] samples, AudioChannels channels )
{
	// Null device: nothing to copy.
}
/// <summary>
/// Deletes the underlying AL buffer object.
/// </summary>
public void DeleteBuffer(IALBuffer buffer)
{
	// The binding takes the id by ref, so copy it to a local first.
	uint bufId = (buffer as OpenALBuffer).Handle;
	AL10.alDeleteBuffers((IntPtr) 1, ref bufId);
}
/// <summary>
/// No-op: the null audio driver has no source queue to append to.
/// </summary>
public void QueueSourceBuffer(IALSource source, IALBuffer buffer)
{
	// Null device: nothing to queue.
}
/// <summary>
/// Appends one AL buffer to the source's streaming queue.
/// NOTE(review): the count is passed as IntPtr here while other revisions pass
/// a plain int — confirm against this binding's signature.
/// </summary>
public void QueueSourceBuffer(IALSource source, IALBuffer buffer) { uint buf = (buffer as OpenALBuffer).Handle; AL10.alSourceQueueBuffers( (source as OpenALSource).Handle, (IntPtr) 1, ref buf ); }
/// <summary>
/// Parses a RIFF/WAVE stream and uploads its sample data to a new AL buffer.
/// Supports PCM (wFormatTag 1, 8- or 16-bit) and MS ADPCM (wFormatTag 2);
/// any other format tag throws NotSupportedException.
/// </summary>
/// <param name="s">Stream positioned at the start of a RIFF WAVE file.</param>
/// <exception cref="NotSupportedException">
/// Thrown for non-RIFF/non-WAVE streams, unsupported format tags, or a
/// missing lowercase "data" chunk.
/// </exception>
private void INTERNAL_loadAudioStream(Stream s)
{
	byte[] data;
	uint sampleRate = 0;
	uint numChannels = 0;
	bool isADPCM = false;
	uint formatParameter = 0;
	using (BinaryReader reader = new BinaryReader(s))
	{
		// RIFF Signature
		string signature = new string(reader.ReadChars(4));
		if (signature != "RIFF")
		{
			throw new NotSupportedException("Specified stream is not a wave file.");
		}
		reader.ReadUInt32(); // Riff Chunk Size
		string wformat = new string(reader.ReadChars(4));
		if (wformat != "WAVE")
		{
			throw new NotSupportedException("Specified stream is not a wave file.");
		}
		// WAVE Header: skip any chunks that precede "fmt "
		string format_signature = new string(reader.ReadChars(4));
		while (format_signature != "fmt ")
		{
			reader.ReadBytes(reader.ReadInt32());
			format_signature = new string(reader.ReadChars(4));
		}
		int format_chunk_size = reader.ReadInt32();
		// Header Information
		uint audio_format = reader.ReadUInt16();	// 2
		numChannels = reader.ReadUInt16();		// 4
		sampleRate = reader.ReadUInt32();		// 8
		reader.ReadUInt32();				// 12, Byte Rate
		ushort blockAlign = reader.ReadUInt16();	// 14, Block Align
		ushort bitDepth = reader.ReadUInt16();		// 16, Bits Per Sample
		if (audio_format == 1)
		{
			// PCM: formatParameter is 1 for 16-bit, 0 for 8-bit samples.
			System.Diagnostics.Debug.Assert(bitDepth == 8 || bitDepth == 16);
			formatParameter = (uint)(bitDepth / 16); // 1 for 16, 0 for 8
		}
		else if (audio_format == 2)
		{
			/* FIXED: this previously tested (audio_format != 2), which
			 * rejected real MS ADPCM (wFormatTag 2) data and treated every
			 * other non-PCM format as ADPCM. formatParameter is the MS ADPCM
			 * samples-per-block count derived from the block alignment.
			 */
			isADPCM = true;
			formatParameter = (((blockAlign / numChannels) - 6) * 2);
		}
		else
		{
			throw new NotSupportedException("Wave format is not supported.");
		}
		// Reads residual bytes of an extended fmt chunk
		if (format_chunk_size > 16)
		{
			reader.ReadBytes(format_chunk_size - 16);
		}
		// data Signature: skip chunks case-insensitively until "data"
		string data_signature = new string(reader.ReadChars(4));
		while (data_signature.ToLowerInvariant() != "data")
		{
			reader.ReadBytes(reader.ReadInt32());
			data_signature = new string(reader.ReadChars(4));
		}
		if (data_signature != "data")
		{
			// The loop exit was case-insensitive; require the canonical lowercase id.
			throw new NotSupportedException("Specified wave file is not supported.");
		}
		int waveDataLength = reader.ReadInt32();
		data = reader.ReadBytes(waveDataLength);
	}
	INTERNAL_buffer = AudioDevice.GenBuffer(
		data,
		sampleRate,
		numChannels,
		0,
		0,
		isADPCM,
		formatParameter
	);
}
/// <summary>
/// Uploads count float samples starting at a float offset from a managed
/// array, pinning it for the duration of the AL call. offset/count are in
/// float samples (* 4 converts to bytes).
/// </summary>
public void SetBufferData( IALBuffer buffer, AudioChannels channels, float[] data, int offset, int count, int sampleRate ) { GCHandle handle = GCHandle.Alloc(data, GCHandleType.Pinned); AL10.alBufferData( (buffer as OpenALBuffer).Handle, XNAToFloat[(int) channels], handle.AddrOfPinnedObject() + (offset * 4), (IntPtr) (count * 4), (IntPtr) sampleRate ); handle.Free(); #if VERBOSE_AL_DEBUGGING CheckALError(); #endif }
/// <summary>
/// No-op: the null audio driver has no sample data to read back; the target
/// memory is left untouched.
/// </summary>
public void GetBufferData( IALSource source, IALBuffer[] buffer, IntPtr samples, int samplesLen, AudioChannels channels )
{
	// Null device: nothing to copy.
}
/// <summary>
/// No-op: null sources have no streaming queue.
/// </summary>
public void QueueSourceBuffer(IALSource source, IALBuffer buffer)
{
	// Null device: intentionally empty.
}
/// <summary>
/// Null driver: always hands back a NullSource; the buffer is ignored since
/// there is no real AL state to attach it to.
/// </summary>
public IALSource GenSource(IALBuffer buffer)
{
	return new NullSource();
}
/// <summary>
/// Internal constructor for content loading: records the asset name, then
/// uploads the raw buffer with explicit loop points and format info.
/// </summary>
internal SoundEffect( string name, byte[] buffer, uint sampleRate, uint channels, uint loopStart, uint loopLength, bool isADPCM, uint formatParameter )
{
	Name = name;
	INTERNAL_buffer = AudioDevice.GenBuffer(
		buffer,
		sampleRate,
		channels,
		loopStart,
		loopStart + loopLength, // GenBuffer takes an absolute loop end
		isADPCM,
		formatParameter
	);
}
/// <summary>
/// Null driver: every generated source is a NullSource regardless of buffer.
/// </summary>
public IALSource GenSource(IALBuffer buffer)
{
	// No AL state exists; the buffer argument is deliberately unused.
	return new NullSource();
}
/// <summary>
/// Uploads count bytes of sample data to the AL buffer as 16-bit samples
/// (XNAToShort mapping). The TODO notes there is no offset parameter — data
/// is always read from index 0.
/// </summary>
public void SetBufferData( IALBuffer buffer, AudioChannels channels, byte[] data, int count, int sampleRate ) { AL10.alBufferData( (buffer as OpenALBuffer).Handle, XNAToShort[channels], data, // TODO: offset -flibit (IntPtr) count, (IntPtr) sampleRate ); #if VERBOSE_AL_DEBUGGING CheckALError(); #endif }
/// <summary>
/// No-op: null buffers hold no native resources to release.
/// </summary>
public void DeleteBuffer(IALBuffer buffer)
{
	// Null device: nothing to free.
}
/// <summary>
/// Uploads a full float sample array to the AL buffer; byte size is
/// data.Length * 4, format comes from the XNAToFloat channel mapping.
/// </summary>
public void SetBufferData( IALBuffer buffer, AudioChannels channels, float[] data, int sampleRate ) { AL10.alBufferData( (buffer as OpenALBuffer).Handle, XNAToFloat[channels], data, (IntPtr) (data.Length * 4), (IntPtr) sampleRate ); #if VERBOSE_AL_DEBUGGING CheckALError(); #endif }
/// <summary>
/// Begins playback of a dynamic sound instance; no-ops when State is not
/// Stopped. Tears down any leftover source, recycles still-queued buffers
/// back into the available pool (decrementing PendingBufferCount), creates a
/// fresh AL source (logs a warning and bails if none is available), re-queues
/// the pending buffers, and reapplies spatial state: position when
/// INTERNAL_positionalAudio was set, otherwise Pan. The Pan/Volume/Pitch
/// self-assignments deliberately push cached property values down to the new
/// source. It then fires BufferNeeded until MINIMUM_BUFFER_CHECK buffers are
/// pending, starts the source, and — when isManaged — registers the instance
/// in AudioDevice.DynamicInstancePool for automatic upkeep.
/// </summary>
internal void Play(bool isManaged) { if (State != SoundState.Stopped) { return; // No-op if we're already playing. } if (INTERNAL_alSource != null) { // The sound has stopped, but hasn't cleaned up yet... AudioDevice.ALDevice.StopAndDisposeSource(INTERNAL_alSource); INTERNAL_alSource = null; } while (queuedBuffers.Count > 0) { availableBuffers.Enqueue(queuedBuffers.Dequeue()); PendingBufferCount -= 1; } INTERNAL_alSource = AudioDevice.ALDevice.GenSource(); if (INTERNAL_alSource == null) { FNAPlatform.Log("WARNING: AL SOURCE WAS NOT AVAILABLE. SKIPPING."); return; } // Queue the buffers to this source while (buffersToQueue.Count > 0) { IALBuffer nextBuf = buffersToQueue.Dequeue(); queuedBuffers.Enqueue(nextBuf); AudioDevice.ALDevice.QueueSourceBuffer(INTERNAL_alSource, nextBuf); } // Apply Pan/Position if (INTERNAL_positionalAudio) { INTERNAL_positionalAudio = false; AudioDevice.ALDevice.SetSourcePosition( INTERNAL_alSource, position ); } else { Pan = Pan; } // Reassign Properties, in case the AL properties need to be applied. Volume = Volume; Pitch = Pitch; // ... but wait! What if we need moar buffers? for ( int i = MINIMUM_BUFFER_CHECK - PendingBufferCount; (i > 0) && BufferNeeded != null; i -= 1 ) { BufferNeeded(this, null); } // Finally. AudioDevice.ALDevice.PlaySource(INTERNAL_alSource); if (isManaged) { AudioDevice.DynamicInstancePool.Add(this); } }
/// <summary>
/// Downmixes a stereo AL buffer to mono by averaging each left/right sample
/// pair, returning a new buffer at the same sample rate. bits is converted
/// from bits-per-sample to bytes-per-sample (/ 8); 16-bit data is averaged as
/// shorts, 8-bit as signed bytes. Samples are pulled out of the AL via the
/// AL_SOFT_buffer_samples extension (alGetBufferSamplesSOFT) using fixed
/// pointers into managed arrays. The resulting GenBuffer formatParameter is
/// bytesPerSample - 1 (1 for 16-bit, 0 for 8-bit).
/// </summary>
public unsafe IALBuffer ConvertStereoToMono(IALBuffer buffer) { OpenALBuffer buf = buffer as OpenALBuffer; int bufLen, bits; AL10.alGetBufferi( buf.Handle, AL10.AL_SIZE, out bufLen ); AL10.alGetBufferi( buf.Handle, AL10.AL_BITS, out bits ); bits /= 8; #if VERBOSE_AL_DEBUGGING CheckALError(); #endif byte[] data = new byte[bufLen]; byte[] monoData = new byte[bufLen / 2]; fixed(byte *dataPtr = &data[0]) fixed(byte *monoPtr = &monoData[0]) { ALEXT.alGetBufferSamplesSOFT( buf.Handle, 0, bufLen / bits / 2, ALEXT.AL_STEREO_SOFT, bits == 2 ? ALEXT.AL_SHORT_SOFT : ALEXT.AL_BYTE_SOFT, (IntPtr)dataPtr ); #if VERBOSE_AL_DEBUGGING CheckALError(); #endif if (bits == 2) { short *src = (short *)dataPtr; short *dst = (short *)monoPtr; for (int i = 0; i < monoData.Length / 2; i += 1) { dst[i] = (short)(((int)src[0] + (int)src[1]) / 2); src += 2; } } else { sbyte *src = (sbyte *)dataPtr; sbyte *dst = (sbyte *)monoPtr; for (int i = 0; i < monoData.Length; i += 1) { dst[i] = (sbyte)(((short)src[0] + (short)src[1]) / 2); src += 2; } } } return(GenBuffer( monoData, (uint)buf.SampleRate, 1, 0, 0, false, (uint)bits - 1 )); }
/// <summary>
/// No-op: the null audio driver silently drops float sample uploads.
/// </summary>
public void SetBufferData( IALBuffer buffer, AudioChannels channels, float[] data, int sampleRate )
{
	// Null device: intentionally empty.
}
/// <summary>
/// Parses a RIFF/WAVE stream and uploads its sample data to a new AL buffer.
/// Supports PCM (wFormatTag 1) and MS ADPCM (wFormatTag 2); any other format
/// tag throws NotSupportedException.
/// </summary>
/// <param name="s">Stream positioned at the start of a RIFF WAVE file.</param>
/// <exception cref="NotSupportedException">
/// Thrown for non-RIFF/non-WAVE streams, unsupported format tags, or a
/// missing lowercase "data" chunk.
/// </exception>
private void INTERNAL_loadAudioStream(Stream s)
{
	byte[] data;
	uint sampleRate = 0;
	uint numChannels = 0;
	bool isADPCM = false;
	uint formatParameter = 0;
	using (BinaryReader reader = new BinaryReader(s))
	{
		// RIFF Signature
		string signature = new string(reader.ReadChars(4));
		if (signature != "RIFF")
		{
			throw new NotSupportedException("Specified stream is not a wave file.");
		}
		reader.ReadUInt32(); // Riff Chunk Size
		string wformat = new string(reader.ReadChars(4));
		if (wformat != "WAVE")
		{
			throw new NotSupportedException("Specified stream is not a wave file.");
		}
		// WAVE Header: skip any chunks that precede "fmt "
		string format_signature = new string(reader.ReadChars(4));
		while (format_signature != "fmt ")
		{
			reader.ReadBytes(reader.ReadInt32());
			format_signature = new string(reader.ReadChars(4));
		}
		int format_chunk_size = reader.ReadInt32();
		// Header Information
		uint audio_format = reader.ReadUInt16();	// 2
		numChannels = reader.ReadUInt16();		// 4
		sampleRate = reader.ReadUInt32();		// 8
		reader.ReadUInt32();				// 12, Byte Rate
		ushort blockAlign = reader.ReadUInt16();	// 14, Block Align
		ushort bitDepth = reader.ReadUInt16();		// 16, Bits Per Sample
		if (audio_format == 1)
		{
			// PCM: formatParameter is 1 for 16-bit, 0 for 8-bit samples.
			formatParameter = (uint) (bitDepth / 16); // 1 for 16, 0 for 8
		}
		else if (audio_format == 2)
		{
			/* FIXED: this previously tested (audio_format != 2), which
			 * rejected real MS ADPCM (wFormatTag 2) data and treated every
			 * other non-PCM format as ADPCM. formatParameter is the MS ADPCM
			 * samples-per-block count derived from the block alignment.
			 */
			isADPCM = true;
			formatParameter = (((blockAlign / numChannels) - 6) * 2);
		}
		else
		{
			throw new NotSupportedException("Wave format is not supported.");
		}
		// Reads residual bytes of an extended fmt chunk
		if (format_chunk_size > 16)
		{
			reader.ReadBytes(format_chunk_size - 16);
		}
		// data Signature: skip chunks case-insensitively until "data"
		string data_signature = new string(reader.ReadChars(4));
		while (data_signature.ToLowerInvariant() != "data")
		{
			reader.ReadBytes(reader.ReadInt32());
			data_signature = new string(reader.ReadChars(4));
		}
		if (data_signature != "data")
		{
			// The loop exit was case-insensitive; require the canonical lowercase id.
			throw new NotSupportedException("Specified wave file is not supported.");
		}
		int waveDataLength = reader.ReadInt32();
		data = reader.ReadBytes(waveDataLength);
	}
	INTERNAL_buffer = AudioDevice.GenBuffer(
		data,
		sampleRate,
		numChannels,
		0,
		0,
		isADPCM,
		formatParameter
	);
}
/// <summary>
/// No-op: the null audio driver silently drops unmanaged sample uploads.
/// </summary>
public void SetBufferData( IALBuffer buffer, AudioChannels channels, IntPtr data, int offset, int count, int sampleRate )
{
	// Null device: intentionally empty.
}
/// <summary>
/// Uploads count bytes of sample data to the AL buffer as 16-bit samples
/// (XNAToShort mapping). The TODO notes there is no offset parameter — data
/// is always read from index 0.
/// </summary>
public void SetBufferData( IALBuffer buffer, AudioChannels channels, byte[] data, int count, int sampleRate ) { AL10.alBufferData( (buffer as OpenALBuffer).Handle, XNAToShort[channels], data, // TODO: offset -flibit (IntPtr) count, (IntPtr) sampleRate ); }
/// <summary>
/// Copies up to samplesLen floats of queued sample data, starting at the
/// source's current playback offset (AL_SAMPLE_OFFSET), into unmanaged memory
/// via the AL_SOFT_buffer_samples extension. The read is split into
/// copySize1 (remainder of the current buffer) and copySize2 (spill into the
/// next queued buffer, if any). Per the inline FIXME, the per-channel frame
/// count is derived from the buffer byte size assuming 16-bit storage.
/// </summary>
public void GetBufferData( IALSource source, IALBuffer[] buffer, IntPtr samples, int samplesLen, AudioChannels channels ) { int copySize1 = samplesLen / (int) channels; int copySize2 = 0; // Where are we now? int offset; AL10.alGetSourcei( (source as OpenALSource).Handle, AL11.AL_SAMPLE_OFFSET, out offset ); // Is that longer than what the active buffer has left...? uint buf = (buffer[0] as OpenALBuffer).Handle; int len; AL10.alGetBufferi( buf, AL10.AL_SIZE, out len ); len /= 2; // FIXME: Assuming 16-bit! len /= (int) channels; if (offset > len) { copySize2 = copySize1; copySize1 = 0; offset -= len; } else if (offset + copySize1 > len) { copySize2 = copySize1 - (len - offset); copySize1 = (len - offset); } // Copy! if (copySize1 > 0) { ALEXT.alGetBufferSamplesSOFT( buf, offset, copySize1, channels == AudioChannels.Stereo ? ALEXT.AL_STEREO_SOFT : ALEXT.AL_MONO_SOFT, ALEXT.AL_FLOAT_SOFT, samples ); offset = 0; } if (buffer.Length > 1 && copySize2 > 0) { ALEXT.alGetBufferSamplesSOFT( (buffer[1] as OpenALBuffer).Handle, 0, copySize2, channels == AudioChannels.Stereo ? ALEXT.AL_STEREO_SOFT : ALEXT.AL_MONO_SOFT, ALEXT.AL_FLOAT_SOFT, samples + (copySize1 * (int) channels) ); } }
/// <summary>
/// Deletes the underlying AL buffer object. The id is copied to a local
/// because the binding takes it by ref.
/// </summary>
public void DeleteBuffer(IALBuffer buffer) { uint handle = (buffer as OpenALBuffer).Handle; AL10.alDeleteBuffers((IntPtr) 1, ref handle); #if VERBOSE_AL_DEBUGGING CheckALError(); #endif }
/// <summary>
/// Downmixes a stereo AL buffer to mono by averaging each left/right sample
/// pair, returning a new buffer at the same sample rate. bits is converted to
/// bytes-per-sample (/ 8); 16-bit data is averaged as shorts, 8-bit as signed
/// bytes. This revision pins managed arrays with GCHandle (instead of fixed
/// blocks) around alGetBufferSamplesSOFT (AL_SOFT_buffer_samples extension).
/// The final GenBuffer formatParameter is bytesPerSample - 1: 1 for 16-bit,
/// 0 for 8-bit.
/// </summary>
public IALBuffer ConvertStereoToMono(IALBuffer buffer) { OpenALBuffer buf = buffer as OpenALBuffer; int bufLen, bits; AL10.alGetBufferi( buf.Handle, AL10.AL_SIZE, out bufLen ); AL10.alGetBufferi( buf.Handle, AL10.AL_BITS, out bits ); bits /= 8; #if VERBOSE_AL_DEBUGGING CheckALError(); #endif byte[] data = new byte[bufLen]; GCHandle dataHandle = GCHandle.Alloc(data, GCHandleType.Pinned); IntPtr dataPtr = dataHandle.AddrOfPinnedObject(); ALEXT.alGetBufferSamplesSOFT( buf.Handle, 0, bufLen / bits / 2, ALEXT.AL_STEREO_SOFT, bits == 2 ? ALEXT.AL_SHORT_SOFT : ALEXT.AL_BYTE_SOFT, dataPtr ); #if VERBOSE_AL_DEBUGGING CheckALError(); #endif byte[] monoData = new byte[bufLen / 2]; GCHandle monoHandle = GCHandle.Alloc(monoData, GCHandleType.Pinned); IntPtr monoPtr = monoHandle.AddrOfPinnedObject(); unsafe { if (bits == 2) { short *src = (short *)dataPtr; short *dst = (short *)monoPtr; for (int i = 0; i < monoData.Length / 2; i += 1) { dst[i] = (short)(((int)src[0] + (int)src[1]) / 2); src += 2; } } else { sbyte *src = (sbyte *)dataPtr; sbyte *dst = (sbyte *)monoPtr; for (int i = 0; i < monoData.Length; i += 1) { dst[i] = (sbyte)(((short)src[0] + (short)src[1]) / 2); src += 2; } } } monoHandle.Free(); dataHandle.Free(); data = null; return(GenBuffer( monoData, (uint)buf.SampleRate, 1, 0, 0, false, (uint)bits - 1 )); }
/// <summary>
/// Creates an AL source statically bound to the given buffer. Returns null if
/// alGenSources fails (id 0).
/// </summary>
public IALSource GenSource(IALBuffer buffer) { uint result; AL10.alGenSources((IntPtr) 1, out result); #if VERBOSE_AL_DEBUGGING CheckALError(); #endif if (result == 0) { return null; } AL10.alSourcei( result, AL10.AL_BUFFER, (int) (buffer as OpenALBuffer).Handle ); #if VERBOSE_AL_DEBUGGING CheckALError(); #endif return new OpenALSource(result); }
/// <summary>
/// No-op: there is no native buffer behind a null-device IALBuffer.
/// </summary>
public void DeleteBuffer(IALBuffer buffer)
{
	// Null device: intentionally empty.
}
/// <summary>
/// Appends one AL buffer to the source's streaming queue; the id is copied to
/// a local because the binding takes it by ref.
/// </summary>
public void QueueSourceBuffer(IALSource source, IALBuffer buffer) { uint buf = (buffer as OpenALBuffer).Handle; AL10.alSourceQueueBuffers( (source as OpenALSource).Handle, (IntPtr) 1, ref buf ); #if VERBOSE_AL_DEBUGGING CheckALError(); #endif }