/// <summary>
/// Replaces the buffer contents with new interleaved 16-bit stereo sample data.
/// </summary>
/// <param name="samples">Signed 16-bit interleaved stereo PCM samples.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="samples"/> is null.</exception>
public void Update(short[] samples)
{
    if (samples == null)
    {
        // Robustness: fail with a clear argument error instead of an NRE
        // on samples.Length below.
        throw new ArgumentNullException(nameof(samples));
    }
    // alBufferData takes the size in bytes, hence Length * sizeof(short).
    AL10.alBufferData(Buffer, AL10.AL_FORMAT_STEREO16, samples, samples.Length * sizeof(short), SamplingRate);
    Check();
    LastUpdatedDateTime = DateTime.Now;
    LastSize = samples.Length;
}
/// <summary>
/// Creates a buffer preloaded with interleaved 16-bit stereo sample data.
/// </summary>
/// <param name="samples">Signed 16-bit interleaved stereo PCM samples.</param>
/// <param name="samplingRate">Playback rate in samples per second.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="samples"/> is null.</exception>
public AudioBufferInt16Stereo(short[] samples, int samplingRate) : base(samplingRate)
{
    if (samples == null)
    {
        // Robustness: reject null explicitly rather than NRE on samples.Length.
        throw new ArgumentNullException(nameof(samples));
    }
    // alBufferData takes the size in bytes, hence Length * sizeof(short).
    AL10.alBufferData(Buffer, AL10.AL_FORMAT_STEREO16, samples, samples.Length * sizeof(short), SamplingRate);
    Check();
    LastUpdatedDateTime = DateTime.Now;
    LastSize = samples.Length;
}
/// <summary>
/// Fill a buffer with audio data in IEEE floating-point format.
/// </summary>
/// <param name="name">Name of the buffer.</param>
/// <param name="channels">Indicates if the data is mono or stereo.</param>
/// <param name="data">Floating-point data.</param>
/// <param name="count">Number of samples to buffer.</param>
/// <param name="freq">Playback frequency in samples per second.</param>
public static void BufferData(uint name, Channels channels, float[] data, int count, int freq)
{
    var format = channels == Channels.Mono ? AlBufferFormat.MonoFloat32 : AlBufferFormat.StereoFloat32;
    // sizeof(float) instead of the magic constant 4: alBufferData takes a byte count.
    AL10.alBufferData(name, (int)format, data, count * sizeof(float), freq);
    AlHelper.AlAlwaysCheckError("alBufferData call failed.");
}
/// <summary>
/// Replaces the buffer contents with new 8-bit mono sample data.
/// </summary>
/// <param name="samples">Unsigned 8-bit mono PCM samples.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="samples"/> is null.</exception>
public void Update(byte[] samples)
{
    if (samples == null)
    {
        // Robustness: fail with a clear argument error instead of an NRE
        // on samples.Length below.
        throw new ArgumentNullException(nameof(samples));
    }
    // 8-bit mono: one byte per sample, so the byte count is just Length.
    AL10.alBufferData(Buffer, AL10.AL_FORMAT_MONO8, samples, samples.Length, SamplingRate);
    Check();
    LastUpdatedDateTime = DateTime.Now;
    LastSize = samples.Length;
}
/// <summary>
/// Builds a buffer from interleaved 16-bit stereo samples, recording the
/// upload time and sample count for later bookkeeping.
/// </summary>
/// <param name="samples">Signed 16-bit interleaved stereo PCM samples.</param>
/// <param name="samplingRate">Playback rate in samples per second.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="samples"/> is null.</exception>
public AudioBufferInt16Stereo(short[] samples, int samplingRate) : base(samplingRate)
{
    if (samples is null)
    {
        throw new ArgumentNullException(nameof(samples));
    }

    // alBufferData wants the payload size in bytes.
    int byteCount = samples.Length * sizeof(short);
    AL10.alBufferData(Buffer, AL10.AL_FORMAT_STEREO16, samples, byteCount, SamplingRate);
    Check();

    LastUpdatedDateTime = DateTime.Now;
    LastSize = samples.Length;
}
/// <summary>
/// Uploads raw audio data to the buffer and caches its format, size,
/// and duration as reported back by OpenAL.
/// </summary>
/// <param name="data">Pointer to the raw sample data.</param>
/// <param name="fmt">Audio format of the data.</param>
/// <param name="hz">Playback frequency in samples per second.</param>
/// <param name="size">Size of the data in bytes.</param>
/// <exception cref="InvalidOperationException">
/// Thrown when OpenAL reports zero bits or channels after the upload,
/// which indicates the buffer data was not accepted.
/// </exception>
internal void SetData(IntPtr data, AudioFormat fmt, uint hz, uint size)
{
    AL10.alBufferData(_handle, (int)fmt, data, (int)size, (int)hz);
    ALUtils.CheckALError("unable to set audio buffer data");

    // Query what OpenAL actually stored (the unpacked size may differ
    // from the input size for compressed formats).
    AL10.alGetBufferi(_handle, AL10.AL_BITS, out int bits);
    AL10.alGetBufferi(_handle, AL10.AL_CHANNELS, out int channels);
    AL10.alGetBufferi(_handle, AL10.AL_SIZE, out int unpackedSize);

    // BUGFIX: a failed upload can leave bits/channels at 0, which previously
    // produced an unhelpful DivideByZeroException in the duration math.
    int bytesPerFrame = (bits / 8) * channels;
    if (bytesPerFrame == 0)
    {
        throw new InvalidOperationException("OpenAL reported zero bits or channels; buffer data upload failed.");
    }

    Format = fmt;
    DataSize = (uint)unpackedSize;
    // Frame count (integer division) over the sample rate gives seconds.
    Duration = TimeSpan.FromSeconds((double)(unpackedSize / bytesPerFrame) / hz);
}
/// <summary>
/// Creates a buffer preloaded with 8-bit mono sample data from a span.
/// </summary>
/// <param name="samples">Unsigned 8-bit mono PCM samples.</param>
/// <param name="samplingRate">Playback rate in samples per second.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="samples"/> is a default (null-backed) span.</exception>
public AudioBufferUInt8(ReadOnlySpan<byte> samples, int samplingRate) : base(samplingRate)
{
    if (samples == null)
    {
        throw new ArgumentNullException(nameof(samples));
    }
    unsafe
    {
        // BUGFIX: pin the span itself instead of taking &samples[0], which
        // threw IndexOutOfRangeException for a non-null empty span. A fixed
        // statement over an empty span simply yields a null pointer.
        fixed (byte* samplePtr = samples)
        {
            AL10.alBufferData(Buffer, AL10.AL_FORMAT_MONO8, (IntPtr)samplePtr, samples.Length, SamplingRate);
        }
    }
    Check();
}
/// <summary>
/// Uploads 32-bit float sample data into the given OpenAL buffer.
/// </summary>
/// <param name="buffer">Target buffer; must be an <c>OpenALBuffer</c>.</param>
/// <param name="channels">Mono or stereo layout of the data.</param>
/// <param name="data">Float samples; the whole array is uploaded.</param>
/// <param name="sampleRate">Playback rate in samples per second.</param>
public void SetBufferData(
    IALBuffer buffer,
    AudioChannels channels,
    float[] data,
    int sampleRate
) {
    // Direct cast instead of 'as': a wrong buffer type now fails fast with
    // InvalidCastException rather than a NullReferenceException on .Handle.
    AL10.alBufferData(
        ((OpenALBuffer) buffer).Handle,
        XNAToFloat[channels],
        data,
        (IntPtr)(data.Length * sizeof(float)),
        (IntPtr)sampleRate
    );
}
/// <summary>
/// Replaces the buffer contents with new 8-bit mono sample data from a span.
/// </summary>
/// <param name="samples">Unsigned 8-bit mono PCM samples.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="samples"/> is a default (null-backed) span.</exception>
public void Update(ReadOnlySpan<byte> samples)
{
    if (samples == null)
    {
        throw new ArgumentNullException(nameof(samples));
    }
    unsafe
    {
        // BUGFIX: pin the span itself instead of taking &samples[0], which
        // threw IndexOutOfRangeException for a non-null empty span. A fixed
        // statement over an empty span simply yields a null pointer.
        fixed (byte* samplePtr = samples)
        {
            AL10.alBufferData(Buffer, AL10.AL_FORMAT_MONO8, (IntPtr)samplePtr, samples.Length, SamplingRate);
        }
    }
    Check();
    LastUpdatedDateTime = DateTime.Now;
    LastSize = samples.Length;
}
/// <summary>
/// Uploads 8/16-bit sample data into the given OpenAL buffer.
/// </summary>
/// <param name="buffer">Target buffer; must be an <c>OpenALBuffer</c>.</param>
/// <param name="channels">Mono or stereo layout of the data.</param>
/// <param name="data">Raw PCM bytes.</param>
/// <param name="count">Number of bytes to upload.</param>
/// <param name="sampleRate">Playback rate in samples per second.</param>
public void SetBufferData(
    IALBuffer buffer,
    AudioChannels channels,
    byte[] data,
    int count,
    int sampleRate
) {
    // Direct cast instead of 'as': a wrong buffer type now fails fast with
    // InvalidCastException rather than a NullReferenceException on .Handle.
    AL10.alBufferData(
        ((OpenALBuffer) buffer).Handle,
        XNAToShort[channels],
        data, // TODO: offset -flibit
        (IntPtr)count,
        (IntPtr)sampleRate
    );
}
/// <summary>
/// Creates an OpenAL buffer filled from a decoded WAV file.
/// Supports 8/16-bit mono and stereo PCM only.
/// </summary>
/// <param name="wavFile">Decoded WAV file providing format info and sample data.</param>
/// <exception cref="NotSupportedException">
/// Thrown for unsupported channel counts or bit depths.
/// </exception>
public AudioBuffer(WavFile wavFile) : this()
{
    int format;
    switch (wavFile.Channels)
    {
        case 1:
            switch (wavFile.BitsPerSample)
            {
                case 8: format = AL10.AL_FORMAT_MONO8; break;
                case 16: format = AL10.AL_FORMAT_MONO16; break;
                default: throw new NotSupportedException("Invalid audio format!");
            }
            break;
        case 2:
            switch (wavFile.BitsPerSample)
            {
                case 8: format = AL10.AL_FORMAT_STEREO8; break;
                case 16: format = AL10.AL_FORMAT_STEREO16; break;
                default: throw new NotSupportedException("Invalid audio format!");
            }
            break;
        default:
            // BUGFIX: previously any other channel count fell through with
            // format == 0 and passed an invalid format to alBufferData.
            throw new NotSupportedException("Invalid audio format!");
    }
    AL10.alBufferData(_handle, format, wavFile.Buffer, wavFile.Size, wavFile.Fequency);
    AudioSystem.alCheckError();
}
/// <summary>
/// Uploads 32-bit float sample data into the given OpenAL buffer.
/// </summary>
/// <param name="buffer">Target buffer; must be an <c>OpenALBuffer</c>.</param>
/// <param name="channels">Mono or stereo layout of the data.</param>
/// <param name="data">Float samples; the whole array is uploaded.</param>
/// <param name="sampleRate">Playback rate in samples per second.</param>
public void SetBufferData(
    IALBuffer buffer,
    AudioChannels channels,
    float[] data,
    int sampleRate
) {
    // Direct cast instead of 'as': a wrong buffer type now fails fast with
    // InvalidCastException rather than a NullReferenceException on .Handle.
    AL10.alBufferData(
        ((OpenALBuffer) buffer).Handle,
        XNAToFloat[channels],
        data,
        (IntPtr)(data.Length * sizeof(float)),
        (IntPtr)sampleRate
    );
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif
}
/* THIS IS AN EXTENSION OF THE XNA4 API! */
/// <summary>
/// Submits a block of 32-bit float samples to the streaming source.
/// Float samples are the typical format received from decoders; currently
/// used by the VideoPlayer. -flibit
/// </summary>
public void SubmitFloatBufferEXT(float[] buffer)
{
    // Make sure at least one reusable AL buffer name exists.
    if (availableBuffers.Count == 0)
    {
        uint generated;
        AL10.alGenBuffers((IntPtr)1, out generated);
        availableBuffers.Enqueue(generated);
    }

    // Upload the sample data into the next free buffer (4 bytes per float).
    uint target = availableBuffers.Dequeue();
    AL10.alBufferData(
        target,
        XNAToFloat[channels],
        buffer,
        (IntPtr)(buffer.Length * 4),
        (IntPtr)sampleRate
    );

    // A playing source takes the buffer immediately; otherwise defer it
    // until playback starts.
    if (State == SoundState.Playing)
    {
        AL10.alSourceQueueBuffers(
            INTERNAL_alSource,
            (IntPtr)1,
            ref target
        );
        queuedBuffers.Enqueue(target);
    }
    else
    {
        buffersToQueue.Enqueue(target);
    }
    PendingBufferCount += 1;
}
/// <summary>
/// Uploads 8/16-bit sample data into the given OpenAL buffer.
/// </summary>
/// <param name="buffer">Target buffer; must be an <c>OpenALBuffer</c>.</param>
/// <param name="channels">Mono or stereo layout of the data.</param>
/// <param name="data">Raw PCM bytes.</param>
/// <param name="count">Number of bytes to upload.</param>
/// <param name="sampleRate">Playback rate in samples per second.</param>
public void SetBufferData(
    IALBuffer buffer,
    AudioChannels channels,
    byte[] data,
    int count,
    int sampleRate
) {
    // Direct cast instead of 'as': a wrong buffer type now fails fast with
    // InvalidCastException rather than a NullReferenceException on .Handle.
    AL10.alBufferData(
        ((OpenALBuffer) buffer).Handle,
        XNAToShort[channels],
        data, // TODO: offset -flibit
        (IntPtr)count,
        (IntPtr)sampleRate
    );
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif
}
/// <summary>
/// Uploads a slice of 32-bit float samples from unmanaged memory into the
/// given OpenAL buffer, using the buffer's own channel count and sample rate.
/// </summary>
/// <param name="buffer">Target buffer; must be an <c>OpenALBuffer</c>.</param>
/// <param name="data">Pointer to the start of the float sample data.</param>
/// <param name="offset">Offset into the data, in float samples.</param>
/// <param name="count">Number of float samples to upload.</param>
public void SetBufferFloatData(
    IALBuffer buffer,
    IntPtr data,
    int offset,
    int count
) {
    // Direct cast instead of 'as': a wrong buffer type now fails fast with
    // InvalidCastException rather than a NullReferenceException below.
    OpenALBuffer buf = (OpenALBuffer) buffer;
    AL10.alBufferData(
        buf.Handle,
        XNAToFloat[buf.Channels],
        data + (offset * 4), // offset/count are in float samples, 4 bytes each
        count * 4,
        buf.SampleRate
    );
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif
}
/// <summary>
/// Uploads a slice of 32-bit float samples from unmanaged memory into the
/// given OpenAL buffer.
/// </summary>
/// <param name="buffer">Target buffer; must be an <c>OpenALBuffer</c>.</param>
/// <param name="channels">Mono or stereo layout of the data.</param>
/// <param name="data">Pointer to the start of the float sample data.</param>
/// <param name="offset">Offset into the data, in float samples.</param>
/// <param name="count">Number of float samples to upload.</param>
/// <param name="sampleRate">Playback rate in samples per second.</param>
public void SetBufferFloatData(
    IALBuffer buffer,
    AudioChannels channels,
    IntPtr data,
    int offset,
    int count,
    int sampleRate
) {
    // Direct cast instead of 'as': a wrong buffer type now fails fast with
    // InvalidCastException rather than a NullReferenceException on .Handle.
    AL10.alBufferData(
        ((OpenALBuffer) buffer).Handle,
        XNAToFloat[(int) channels],
        data + (offset * 4), // offset/count are in float samples, 4 bytes each
        count * 4,
        sampleRate
    );
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif
}
// Queues one block of decoded 16-bit PCM on the OpenAL source and (re)starts
// playback. Ported-from-Java style (ByteBuffer, camelCase wrappers).
// NOTE(review): this AL10.alBufferData overload takes no explicit size —
// presumably the binding derives it from the direct buffer's limit; confirm.
private void alSourceQueueBuffer(sbyte[] buffer)
{
    int alBuffer = soundBufferManager.Buffer;
    // Borrow a pooled direct buffer and copy the managed samples into it.
    ByteBuffer directBuffer = soundBufferManager.getDirectBuffer(buffer.Length);
    directBuffer.clear();
    directBuffer.limit(buffer.Length);
    directBuffer.put(buffer);
    directBuffer.rewind();
    // 16-bit samples; channel layout selected by the FormatStereo flag.
    int alFormat = FormatStereo ? AL10.AL_FORMAT_STEREO16 : AL10.AL_FORMAT_MONO16;
    AL10.alBufferData(alBuffer, alFormat, directBuffer, SampleRate);
    AL10.alSourceQueueBuffers(alSource, alBuffer);
    // The direct buffer can be returned as soon as AL has copied the data.
    soundBufferManager.releaseDirectBuffer(directBuffer);
    alSourcePlay();
    checkFreeBuffers();
    // NOTE(review): the debug guard below is commented out, so the trace
    // line inside the bare block runs unconditionally — likely unintended
    // leftover from the Java-to-C# conversion.
    //if (log.DebugEnabled)
    {
        Console.WriteLine(string.Format("alSourceQueueBuffer buffer={0:D}, {1}", alBuffer, ToString()));
    }
}
/// <summary>
/// Submits a block of raw PCM bytes to the streaming source, queueing it
/// immediately when playing or deferring it until playback starts.
/// </summary>
public void SubmitBuffer(byte[] buffer, int offset, int count)
{
    // Make sure at least one reusable AL buffer name exists.
    if (availableBuffers.Count == 0)
    {
        uint generated;
        AL10.alGenBuffers((IntPtr)1, out generated);
        availableBuffers.Enqueue(generated);
    }

    // Upload the sample data into the next free buffer.
    uint target = availableBuffers.Dequeue();
    AL10.alBufferData(
        target,
        XNAToShort[channels],
        buffer, // TODO: offset -flibit
        (IntPtr)count,
        (IntPtr)sampleRate
    );

    // A playing source takes the buffer immediately; otherwise defer it.
    if (State == SoundState.Playing)
    {
        AL10.alSourceQueueBuffers(
            INTERNAL_alSource,
            (IntPtr)1,
            ref target
        );
        queuedBuffers.Enqueue(target);
    }
    else
    {
        buffersToQueue.Enqueue(target);
    }
    PendingBufferCount += 1;
}
/// <summary>
/// Pins a managed float array and uploads a slice of it into the given
/// OpenAL buffer.
/// </summary>
/// <param name="buffer">Target buffer; must be an <c>OpenALBuffer</c>.</param>
/// <param name="channels">Mono or stereo layout of the data.</param>
/// <param name="data">Float samples to pin and upload from.</param>
/// <param name="offset">Offset into the data, in float samples.</param>
/// <param name="count">Number of float samples to upload.</param>
/// <param name="sampleRate">Playback rate in samples per second.</param>
public void SetBufferData(
    IALBuffer buffer,
    AudioChannels channels,
    float[] data,
    int offset,
    int count,
    int sampleRate
) {
    GCHandle handle = GCHandle.Alloc(data, GCHandleType.Pinned);
    try
    {
        // Direct cast instead of 'as': fail fast on a wrong buffer type.
        AL10.alBufferData(
            ((OpenALBuffer) buffer).Handle,
            XNAToFloat[(int) channels],
            handle.AddrOfPinnedObject() + (offset * 4),
            (IntPtr)(count * 4),
            (IntPtr)sampleRate
        );
    }
    finally
    {
        // BUGFIX: free the pinned handle even if the call above throws;
        // otherwise the array stays pinned for the process lifetime.
        handle.Free();
    }
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif
}
/// <summary>
/// Creates and fills an OpenAL buffer from raw sample data (PCM8/PCM16 or
/// MSADPCM blocks), computes its duration, and applies optional loop points.
/// </summary>
/// <param name="data">Raw sample bytes.</param>
/// <param name="sampleRate">Playback rate in samples per second.</param>
/// <param name="channels">Channel count (2 selects the stereo formats).</param>
/// <param name="loopStart">Loop start point; 0 means no custom start.</param>
/// <param name="loopEnd">Loop end point; 0 means no custom end.</param>
/// <param name="isADPCM">True for MSADPCM-compressed data.</param>
/// <param name="formatParameter">Block alignment for ADPCM; for PCM, 1 selects 16-bit, otherwise 8-bit.</param>
public IALBuffer GenBuffer(
    byte[] data,
    uint sampleRate,
    uint channels,
    uint loopStart,
    uint loopEnd,
    bool isADPCM,
    uint formatParameter
) {
    uint result;

    // Generate the buffer now, in case we need to perform alBuffer ops.
    AL10.alGenBuffers(1, out result);
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif

    int format;
    int length = data.Length;
    if (isADPCM)
    {
        format = (channels == 2) ?
            ALEXT.AL_FORMAT_STEREO_MSADPCM_SOFT :
            ALEXT.AL_FORMAT_MONO_MSADPCM_SOFT;
        // ADPCM needs its block alignment set before the data upload.
        AL10.alBufferi(
            result,
            ALEXT.AL_UNPACK_BLOCK_ALIGNMENT_SOFT,
            (int)formatParameter
        );
    }
    else
    {
        if (formatParameter == 1)
        {
            format = (channels == 2) ?
                AL10.AL_FORMAT_STEREO16:
                AL10.AL_FORMAT_MONO16;
            /* We have to perform extra data validation on
             * PCM16 data, as the MS SoundEffect builder will
             * leave extra bytes at the end which will confuse
             * alBufferData and throw an AL_INVALID_VALUE.
             * -flibit
             */
            // Clearing the low bit rounds the byte length down to a
            // whole number of 16-bit samples.
            length &= 0x7FFFFFFE;
        }
        else
        {
            format = (channels == 2) ?
                AL10.AL_FORMAT_STEREO8:
                AL10.AL_FORMAT_MONO8;
        }
    }

    // Load it!
    AL10.alBufferData(
        result,
        format,
        data,
        length,
        (int)sampleRate
    );
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif

    // Calculate the duration now, after we've unpacked the buffer
    // (ADPCM unpacks to a larger PCM size, so query AL rather than
    // trusting the input length).
    int bufLen, bits;
    AL10.alGetBufferi(
        result,
        AL10.AL_SIZE,
        out bufLen
    );
    AL10.alGetBufferi(
        result,
        AL10.AL_BITS,
        out bits
    );
    if (bufLen == 0 || bits == 0)
    {
        // Guard: also prevents divide-by-zero in the duration math below.
        throw new InvalidOperationException(
            "OpenAL buffer allocation failed!"
        );
    }
    // Integer divisions yield the whole frame count before the final
    // double division by the sample rate.
    TimeSpan resultDur = TimeSpan.FromSeconds(
        bufLen / (bits / 8) / channels / ((double)sampleRate)
    );

    // Set the loop points, if applicable
    // NOTE(review): AL_LOOP_POINTS_SOFT values are presumably in sample
    // frames per the AL_SOFT_loop_points extension — confirm.
    if (loopStart > 0 || loopEnd > 0)
    {
        AL10.alBufferiv(
            result,
            ALEXT.AL_LOOP_POINTS_SOFT,
            new int[] { (int)loopStart, (int)loopEnd }
        );
    }
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif

    // Finally.
    return (new OpenALBuffer(result, resultDur, (int)channels, (int)sampleRate));
}
/// <summary>
/// Creates and fills the internal OpenAL buffer from raw sample data
/// (PCM8/PCM16 or MSADPCM), computes the clip duration, and applies
/// optional loop points.
/// </summary>
/// <param name="data">Raw sample bytes; the whole array is uploaded.</param>
/// <param name="sampleRate">Playback rate in samples per second.</param>
/// <param name="channels">Channel count (2 selects the stereo formats).</param>
/// <param name="loopStart">Loop start point; 0 means no custom start.</param>
/// <param name="loopEnd">Loop end point; 0 means no custom end.</param>
/// <param name="isADPCM">True for MSADPCM-compressed data.</param>
/// <param name="formatParameter">Block alignment for ADPCM; for PCM, 1 selects 16-bit, otherwise 8-bit.</param>
/// <exception cref="NoAudioHardwareException">Thrown when no OpenAL device exists.</exception>
/// <exception cref="InvalidOperationException">Thrown when the buffer upload fails.</exception>
private void INTERNAL_bufferData(
    byte[] data,
    uint sampleRate,
    uint channels,
    uint loopStart,
    uint loopEnd,
    bool isADPCM,
    uint formatParameter
) {
    if (OpenALDevice.Instance == null)
    {
        throw new NoAudioHardwareException();
    }

    // Generate the buffer now, in case we need to perform alBuffer ops.
    AL10.alGenBuffers((IntPtr)1, out INTERNAL_buffer);

    int format;
    if (isADPCM)
    {
        format = (channels == 2) ?
            ALEXT.AL_FORMAT_STEREO_MSADPCM_SOFT :
            ALEXT.AL_FORMAT_MONO_MSADPCM_SOFT;
        // ADPCM needs its block alignment set before the data upload.
        AL10.alBufferi(
            INTERNAL_buffer,
            ALEXT.AL_UNPACK_BLOCK_ALIGNMENT_SOFT,
            (int)formatParameter
        );
    }
    else
    {
        if (formatParameter == 1)
        {
            format = (channels == 2) ?
                AL10.AL_FORMAT_STEREO16:
                AL10.AL_FORMAT_MONO16;
        }
        else
        {
            format = (channels == 2) ?
                AL10.AL_FORMAT_STEREO8:
                AL10.AL_FORMAT_MONO8;
        }
    }

    // Load it!
    AL10.alBufferData(
        INTERNAL_buffer,
        format,
        data,
        (IntPtr)data.Length,
        (IntPtr)sampleRate
    );

    // Calculate the duration now, after we've unpacked the buffer
    int bufLen, bits;
    AL10.alGetBufferi(INTERNAL_buffer, AL10.AL_SIZE, out bufLen);
    AL10.alGetBufferi(INTERNAL_buffer, AL10.AL_BITS, out bits);
    // BUGFIX: a failed upload leaves AL_SIZE/AL_BITS at 0, which previously
    // crashed with DivideByZeroException below instead of a clear error.
    if (bufLen == 0 || bits == 0)
    {
        throw new InvalidOperationException("OpenAL buffer allocation failed!");
    }
    Duration = TimeSpan.FromSeconds(
        bufLen / (bits / 8) / channels / ((double)sampleRate)
    );

    // Set the loop points, if applicable
    if (loopStart > 0 || loopEnd > 0)
    {
        AL10.alBufferiv(
            INTERNAL_buffer,
            ALEXT.AL_LOOP_POINTS_SOFT,
            new int[] { (int)loopStart, (int)loopEnd }
        );
    }
}
// Wraps a raw PCM clip in a single static OpenAL buffer.
// NOTE(review): no argument validation — a byteCount larger than data.Length
// would be a caller bug; confirm callers always satisfy byteCount <= data.Length.
public OpenAlSoundSource(byte[] data, int byteCount, int channels, int sampleBits, int sampleRate)
{
    SampleRate = sampleRate;
    AL10.alGenBuffers(1, out buffer);
    // MakeALFormat presumably maps (channels, bits) to the matching
    // AL_FORMAT_* constant — defined elsewhere in the engine.
    AL10.alBufferData(buffer, OpenAlSoundEngine.MakeALFormat(channels, sampleBits), data, byteCount, sampleRate);
}
/// <summary>
/// Creates a buffer preloaded with 8-bit mono sample data.
/// </summary>
/// <param name="samples">Unsigned 8-bit mono PCM samples.</param>
/// <param name="samplingRate">Playback rate in samples per second.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="samples"/> is null.</exception>
public AudioBufferUInt8(byte[] samples, int samplingRate) : base(samplingRate)
{
    if (samples == null)
    {
        // Robustness: reject null explicitly rather than NRE on samples.Length.
        throw new ArgumentNullException(nameof(samples));
    }
    // 8-bit mono: one byte per sample, so the byte count is just Length.
    AL10.alBufferData(Buffer, AL10.AL_FORMAT_MONO8, samples, samples.Length, SamplingRate);
    Check();
}
/// <summary>
/// Fill a buffer with data.
/// </summary>
/// <param name="name">Name of the buffer.</param>
/// <param name="format">Format of data in the buffer.</param>
/// <param name="data">Data as a byte array; the whole array is uploaded.</param>
/// <param name="freq">Playback frequency in samples per second.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="data"/> is null.</exception>
public static void BufferData(uint name, AlBufferFormat format, byte[] data, int freq)
{
    if (data == null)
    {
        // Robustness: fail with a clear argument error instead of an NRE
        // on data.Length below.
        throw new ArgumentNullException(nameof(data));
    }
    AL10.alBufferData(name, (int)format, data, data.Length, freq);
    AlHelper.AlAlwaysCheckError("alBufferData call failed.");
}
/// <summary>
/// Creates and fills an OpenAL buffer from raw sample data (PCM8/PCM16 or
/// MSADPCM), computes its duration, and applies optional loop points.
/// </summary>
/// <param name="data">Raw sample bytes; the whole array is uploaded.</param>
/// <param name="sampleRate">Playback rate in samples per second.</param>
/// <param name="channels">Channel count (2 selects the stereo formats).</param>
/// <param name="loopStart">Loop start point; 0 means no custom start.</param>
/// <param name="loopEnd">Loop end point; 0 means no custom end.</param>
/// <param name="isADPCM">True for MSADPCM-compressed data.</param>
/// <param name="formatParameter">Block alignment for ADPCM; for PCM, 1 selects 16-bit, otherwise 8-bit.</param>
/// <exception cref="InvalidOperationException">Thrown when the buffer upload fails.</exception>
public IALBuffer GenBuffer(
    byte[] data,
    uint sampleRate,
    uint channels,
    uint loopStart,
    uint loopEnd,
    bool isADPCM,
    uint formatParameter
) {
    uint result;

    // Generate the buffer now, in case we need to perform alBuffer ops.
    AL10.alGenBuffers((IntPtr)1, out result);

    int format;
    if (isADPCM)
    {
        format = (channels == 2) ?
            ALEXT.AL_FORMAT_STEREO_MSADPCM_SOFT :
            ALEXT.AL_FORMAT_MONO_MSADPCM_SOFT;
        // ADPCM needs its block alignment set before the data upload.
        AL10.alBufferi(
            result,
            ALEXT.AL_UNPACK_BLOCK_ALIGNMENT_SOFT,
            (int)formatParameter
        );
    }
    else
    {
        if (formatParameter == 1)
        {
            format = (channels == 2) ?
                AL10.AL_FORMAT_STEREO16:
                AL10.AL_FORMAT_MONO16;
        }
        else
        {
            format = (channels == 2) ?
                AL10.AL_FORMAT_STEREO8:
                AL10.AL_FORMAT_MONO8;
        }
    }

    // Load it!
    AL10.alBufferData(
        result,
        format,
        data,
        (IntPtr)data.Length,
        (IntPtr)sampleRate
    );

    // Calculate the duration now, after we've unpacked the buffer
    int bufLen, bits;
    AL10.alGetBufferi(result, AL10.AL_SIZE, out bufLen);
    AL10.alGetBufferi(result, AL10.AL_BITS, out bits);
    // BUGFIX: a failed upload leaves AL_SIZE/AL_BITS at 0, which previously
    // crashed with DivideByZeroException below instead of a clear error.
    if (bufLen == 0 || bits == 0)
    {
        throw new InvalidOperationException("OpenAL buffer allocation failed!");
    }
    TimeSpan resultDur = TimeSpan.FromSeconds(
        bufLen / (bits / 8) / channels / ((double)sampleRate)
    );

    // Set the loop points, if applicable
    if (loopStart > 0 || loopEnd > 0)
    {
        AL10.alBufferiv(
            result,
            ALEXT.AL_LOOP_POINTS_SOFT,
            new int[] { (int)loopStart, (int)loopEnd }
        );
    }

    // Finally.
    return (new OpenALBuffer(result, resultDur));
}
/// <summary>
/// Creates and fills the internal OpenAL buffer from raw sample data
/// (PCM8/PCM16 or MSADPCM), computes the clip duration, and applies loop
/// points only when they differ from the full-buffer defaults.
/// </summary>
/// <param name="data">Raw sample bytes; the whole array is uploaded.</param>
/// <param name="sampleRate">Playback rate in samples per second.</param>
/// <param name="channels">Channel count (2 selects the stereo formats).</param>
/// <param name="loopStart">Loop start point; 0 means no custom start.</param>
/// <param name="loopEnd">Loop end point.</param>
/// <param name="isADPCM">True for MSADPCM-compressed data.</param>
/// <param name="formatParameter">Block alignment for ADPCM; for PCM, 1 selects 16-bit, otherwise 8-bit.</param>
private void INTERNAL_bufferData(
    byte[] data,
    uint sampleRate,
    uint channels,
    uint loopStart,
    uint loopEnd,
    bool isADPCM,
    uint formatParameter
) {
    if (OpenALDevice.Instance == null)
    {
        throw new NoAudioHardwareException();
    }

    // Generate the buffer now, in case we need to perform alBuffer ops.
    AL10.alGenBuffers((IntPtr)1, out INTERNAL_buffer);

    int format;
    if (isADPCM)
    {
        Platform.AssertSupported("ADPCM");
        format = (channels == 2) ?
            ALEXT.AL_FORMAT_STEREO_MSADPCM_SOFT :
            ALEXT.AL_FORMAT_MONO_MSADPCM_SOFT;
        // ADPCM needs its block alignment set before the data upload.
        AL10.alBufferi(
            INTERNAL_buffer,
            ALEXT.AL_UNPACK_BLOCK_ALIGNMENT_SOFT,
            (int)formatParameter
        );
    }
    else
    {
        if (formatParameter == 1)
        {
            format = (channels == 2) ?
                AL10.AL_FORMAT_STEREO16:
                AL10.AL_FORMAT_MONO16;
        }
        else
        {
            format = (channels == 2) ?
                AL10.AL_FORMAT_STEREO8:
                AL10.AL_FORMAT_MONO8;
        }
    }

    // Load it!
    AL10.alBufferData(
        INTERNAL_buffer,
        format,
        data,
        (IntPtr)data.Length,
        (IntPtr)sampleRate
    );

    // Calculate the duration now, after we've unpacked the buffer
    int bufLen, bits;
    AL10.alGetBufferi(INTERNAL_buffer, AL10.AL_SIZE, out bufLen);
    AL10.alGetBufferi(INTERNAL_buffer, AL10.AL_BITS, out bits);
    // NOTE(review): no guard here — if AL reported bits or channels as 0
    // after a failed upload, this division would throw DivideByZeroException.
    Duration = TimeSpan.FromSeconds(
        bufLen / (bits / 8) / channels / ((double)sampleRate)
    );

    // Compute the default loop end point (end of the buffer), because
    // some content builders automatically set a loop endpoint here instead of at 0
    int defaultEndPoint = (bufLen / (bits / 8)) / (int)channels;
    var hasCustomStartPoint = (loopStart > 0);
    var hasCustomEndPoint = (
        (loopEnd > loopStart) &&
        (loopEnd < defaultEndPoint)
    );
    if (hasCustomStartPoint || hasCustomEndPoint)
    {
        // Set the loop points, if applicable
        Platform.AssertSupported("CustomLoopPoints");
        AL10.alBufferiv(
            INTERNAL_buffer,
            ALEXT.AL_LOOP_POINTS_SOFT,
            new int[] { (int)loopStart, (int)loopEnd }
        );
    }
}
// Wraps a raw PCM clip in a single static OpenAL buffer; the entire array
// is uploaded.
// NOTE(review): no null/argument validation — a null data array would fail
// inside the native call path; confirm callers guarantee non-null input.
public OpenAlSoundSource(byte[] data, int channels, int sampleBits, int sampleRate)
{
    AL10.alGenBuffers(new IntPtr(1), out Buffer);
    // MakeALFormat presumably maps (channels, bits) to the matching
    // AL_FORMAT_* constant — defined elsewhere in this class.
    AL10.alBufferData(Buffer, MakeALFormat(channels, sampleBits), data, new IntPtr(data.Length), new IntPtr(sampleRate));
}