public IALBuffer GenBuffer()
{
    uint result;
    AL10.alGenBuffers((IntPtr) 1, out result);
    return new OpenALBuffer(result, TimeSpan.Zero);
}
protected AudioBuffer(int samplingRate)
{
    SamplingRate = samplingRate;
    AL10.alGenBuffers(1, out Buffer);
    Check();
    LastUpdatedDateTime = DateTime.Now;
}
public SoundBuffer()
{
    AL10.alGenBuffers(1, out _handle);
    ALUtils.CheckALError("unable to create sound buffer");
    if (_handle == 0)
    {
        throw new AudioException("Unable to create OpenAL sound buffer");
    }
}
public IALBuffer GenBuffer(int sampleRate, AudioChannels channels)
{
    uint result;
    AL10.alGenBuffers(1, out result);
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif
    return new OpenALBuffer(result, TimeSpan.Zero, (int) channels, sampleRate);
}
public IALBuffer GenBuffer()
{
    uint result;
    AL10.alGenBuffers(1, out result);
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif
    return new OpenALBuffer(result, TimeSpan.Zero);
}
/// <summary>
/// Create a number of OpenAL buffers for this device.
/// </summary>
/// <param name="buffers">Array to fill with buffer names.</param>
/// <param name="n">Number of buffers to generate.</param>
public void CreateBuffers(uint[] buffers, int n)
{
    CheckDisposed();
    ALC10.alcMakeContextCurrent(MainContext.Handle);
    AL10.alGenBuffers(n, buffers);
    AlHelper.AlAlwaysCheckError("Failed to generate buffers.");
    for (var i = 0; i < n; i++)
    {
        _buffers.Add(buffers[i]);
    }
}
/// <summary>
/// Create an OpenAL buffer for this device.
/// </summary>
public uint CreateBuffer()
{
    CheckDisposed();
    ALC10.alcMakeContextCurrent(MainContext.Handle);
    AL10.alGenBuffers(1, out var name);
    if (name == 0)
    {
        AlHelper.AlCheckError("alGenBuffer call failed.");
        throw new Exception("Failed to create buffer.");
    }
    _buffers.Add(name);
    return name;
}
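// Usage sketch (hypothetical caller): assuming an instance of the device wrapper
// above is reachable as `device`, buffer names can be requested singly or in bulk.
// The variable names here are illustrative and not part of the original snippets;
// the wrapper appears to track every generated name so it can release them later.
uint single = device.CreateBuffer();
uint[] names = new uint[4];
device.CreateBuffers(names, names.Length);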
/* THIS IS AN EXTENSION OF THE XNA4 API! */
public void SubmitFloatBufferEXT(float[] buffer)
{
    /* Float samples are the typical format received from decoders.
     * We currently use this for the VideoPlayer.
     * -flibit
     */

    // Generate a buffer if we don't have any to use.
    if (availableBuffers.Count == 0)
    {
        uint buf;
        AL10.alGenBuffers((IntPtr) 1, out buf);
        availableBuffers.Enqueue(buf);
    }

    // Push the data to OpenAL.
    uint newBuf = availableBuffers.Dequeue();
    AL10.alBufferData(newBuf, XNAToFloat[channels], buffer, (IntPtr) (buffer.Length * 4), (IntPtr) sampleRate);

    // If we're already playing, queue immediately.
    if (State == SoundState.Playing)
    {
        AL10.alSourceQueueBuffers(INTERNAL_alSource, (IntPtr) 1, ref newBuf);
        queuedBuffers.Enqueue(newBuf);
    }
    else
    {
        buffersToQueue.Enqueue(newBuf);
    }
    PendingBufferCount += 1;
}
public void SubmitBuffer(byte[] buffer, int offset, int count)
{
    // Generate a buffer if we don't have any to use.
    if (availableBuffers.Count == 0)
    {
        uint buf;
        AL10.alGenBuffers((IntPtr) 1, out buf);
        availableBuffers.Enqueue(buf);
    }

    // Push the data to OpenAL.
    uint newBuf = availableBuffers.Dequeue();
    AL10.alBufferData(
        newBuf,
        XNAToShort[channels],
        buffer, // TODO: offset -flibit
        (IntPtr) count,
        (IntPtr) sampleRate
    );

    // If we're already playing, queue immediately.
    if (State == SoundState.Playing)
    {
        AL10.alSourceQueueBuffers(INTERNAL_alSource, (IntPtr) 1, ref newBuf);
        queuedBuffers.Enqueue(newBuf);
    }
    else
    {
        buffersToQueue.Enqueue(newBuf);
    }
    PendingBufferCount += 1;
}
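// Companion reclaim step (hedged sketch): a streaming submitter like the two methods
// above typically polls for processed buffers and recycles them into availableBuffers.
// The exact binding signatures (alGetSourcei with `out int`, alSourceUnqueueBuffers with
// an IntPtr count and `ref uint`) are assumptions modeled on the calls already shown.
int processed;
AL10.alGetSourcei(INTERNAL_alSource, AL10.AL_BUFFERS_PROCESSED, out processed);
while (processed > 0)
{
    // Queued buffers come back in FIFO order, matching queuedBuffers.
    uint reclaimed = queuedBuffers.Dequeue();
    AL10.alSourceUnqueueBuffers(INTERNAL_alSource, (IntPtr) 1, ref reclaimed);
    availableBuffers.Enqueue(reclaimed);
    PendingBufferCount -= 1;
    processed -= 1;
}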
public IALBuffer GenBuffer(
    byte[] data,
    uint sampleRate,
    uint channels,
    uint loopStart,
    uint loopEnd,
    bool isADPCM,
    uint formatParameter
) {
    uint result;

    // Generate the buffer now, in case we need to perform alBuffer ops.
    AL10.alGenBuffers(1, out result);
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif

    int format;
    int length = data.Length;
    if (isADPCM)
    {
        format = (channels == 2) ?
            ALEXT.AL_FORMAT_STEREO_MSADPCM_SOFT :
            ALEXT.AL_FORMAT_MONO_MSADPCM_SOFT;
        AL10.alBufferi(result, ALEXT.AL_UNPACK_BLOCK_ALIGNMENT_SOFT, (int) formatParameter);
    }
    else
    {
        if (formatParameter == 1)
        {
            format = (channels == 2) ? AL10.AL_FORMAT_STEREO16 : AL10.AL_FORMAT_MONO16;

            /* We have to perform extra data validation on
             * PCM16 data, as the MS SoundEffect builder will
             * leave extra bytes at the end which will confuse
             * alBufferData and throw an AL_INVALID_VALUE.
             * -flibit
             */
            length &= 0x7FFFFFFE;
        }
        else
        {
            format = (channels == 2) ? AL10.AL_FORMAT_STEREO8 : AL10.AL_FORMAT_MONO8;
        }
    }

    // Load it!
    AL10.alBufferData(result, format, data, length, (int) sampleRate);
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif

    // Calculate the duration now, after we've unpacked the buffer
    int bufLen, bits;
    AL10.alGetBufferi(result, AL10.AL_SIZE, out bufLen);
    AL10.alGetBufferi(result, AL10.AL_BITS, out bits);
    if (bufLen == 0 || bits == 0)
    {
        throw new InvalidOperationException("OpenAL buffer allocation failed!");
    }
    TimeSpan resultDur = TimeSpan.FromSeconds(bufLen / (bits / 8) / channels / ((double) sampleRate));

    // Set the loop points, if applicable
    if (loopStart > 0 || loopEnd > 0)
    {
        AL10.alBufferiv(result, ALEXT.AL_LOOP_POINTS_SOFT, new int[] { (int) loopStart, (int) loopEnd });
    }
#if VERBOSE_AL_DEBUGGING
    CheckALError();
#endif

    // Finally.
    return new OpenALBuffer(result, resultDur, (int) channels, (int) sampleRate);
}
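// Worked example of the PCM16 length mask above (illustrative values, not from the
// snippet): `length &= 0x7FFFFFFE` clears the lowest bit, so an odd byte count such
// as 88201 becomes 88200, i.e. a whole number of 16-bit samples, and alBufferData
// no longer rejects the trailing stray byte with AL_INVALID_VALUE.
int oddLength = 88201;
int evenLength = oddLength & 0x7FFFFFFE; // 88200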
private void INTERNAL_bufferData(
    byte[] data,
    uint sampleRate,
    uint channels,
    uint loopStart,
    uint loopEnd,
    bool isADPCM,
    uint formatParameter
) {
    if (OpenALDevice.Instance == null)
    {
        throw new NoAudioHardwareException();
    }

    // Generate the buffer now, in case we need to perform alBuffer ops.
    AL10.alGenBuffers((IntPtr) 1, out INTERNAL_buffer);

    int format;
    if (isADPCM)
    {
        format = (channels == 2) ?
            ALEXT.AL_FORMAT_STEREO_MSADPCM_SOFT :
            ALEXT.AL_FORMAT_MONO_MSADPCM_SOFT;
        AL10.alBufferi(INTERNAL_buffer, ALEXT.AL_UNPACK_BLOCK_ALIGNMENT_SOFT, (int) formatParameter);
    }
    else
    {
        if (formatParameter == 1)
        {
            format = (channels == 2) ? AL10.AL_FORMAT_STEREO16 : AL10.AL_FORMAT_MONO16;
        }
        else
        {
            format = (channels == 2) ? AL10.AL_FORMAT_STEREO8 : AL10.AL_FORMAT_MONO8;
        }
    }

    // Load it!
    AL10.alBufferData(INTERNAL_buffer, format, data, (IntPtr) data.Length, (IntPtr) sampleRate);

    // Calculate the duration now, after we've unpacked the buffer
    int bufLen, bits;
    AL10.alGetBufferi(INTERNAL_buffer, AL10.AL_SIZE, out bufLen);
    AL10.alGetBufferi(INTERNAL_buffer, AL10.AL_BITS, out bits);
    Duration = TimeSpan.FromSeconds(bufLen / (bits / 8) / channels / ((double) sampleRate));

    // Set the loop points, if applicable
    if (loopStart > 0 || loopEnd > 0)
    {
        AL10.alBufferiv(INTERNAL_buffer, ALEXT.AL_LOOP_POINTS_SOFT, new int[] { (int) loopStart, (int) loopEnd });
    }
}
public AudioBuffer()
{
    AL10.alGenBuffers(1, out _handle);
    AudioSystem.alCheckError();
}
public OpenAlSoundSource(byte[] data, int byteCount, int channels, int sampleBits, int sampleRate)
{
    SampleRate = sampleRate;
    AL10.alGenBuffers(1, out buffer);
    AL10.alBufferData(buffer, OpenAlSoundEngine.MakeALFormat(channels, sampleBits), data, byteCount, sampleRate);
}
public OpenAlSoundSource(byte[] data, int channels, int sampleBits, int sampleRate)
{
    AL10.alGenBuffers(new IntPtr(1), out Buffer);
    AL10.alBufferData(Buffer, MakeALFormat(channels, sampleBits), data, new IntPtr(data.Length), new IntPtr(sampleRate));
}
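// Hedged sketch of what a MakeALFormat-style helper in the two constructors above
// typically maps to. The real implementation is not shown in these snippets, so this
// mapping is an assumption built only from the standard AL10 format constants.
static int MakeALFormatSketch(int channels, int sampleBits)
{
    if (channels == 1)
    {
        return (sampleBits == 16) ? AL10.AL_FORMAT_MONO16 : AL10.AL_FORMAT_MONO8;
    }
    return (sampleBits == 16) ? AL10.AL_FORMAT_STEREO16 : AL10.AL_FORMAT_STEREO8;
}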
public IALBuffer GenBuffer(
    byte[] data,
    uint sampleRate,
    uint channels,
    uint loopStart,
    uint loopEnd,
    bool isADPCM,
    uint formatParameter
) {
    uint result;

    // Generate the buffer now, in case we need to perform alBuffer ops.
    AL10.alGenBuffers((IntPtr) 1, out result);

    int format;
    if (isADPCM)
    {
        format = (channels == 2) ?
            ALEXT.AL_FORMAT_STEREO_MSADPCM_SOFT :
            ALEXT.AL_FORMAT_MONO_MSADPCM_SOFT;
        AL10.alBufferi(result, ALEXT.AL_UNPACK_BLOCK_ALIGNMENT_SOFT, (int) formatParameter);
    }
    else
    {
        if (formatParameter == 1)
        {
            format = (channels == 2) ? AL10.AL_FORMAT_STEREO16 : AL10.AL_FORMAT_MONO16;
        }
        else
        {
            format = (channels == 2) ? AL10.AL_FORMAT_STEREO8 : AL10.AL_FORMAT_MONO8;
        }
    }

    // Load it!
    AL10.alBufferData(result, format, data, (IntPtr) data.Length, (IntPtr) sampleRate);

    // Calculate the duration now, after we've unpacked the buffer
    int bufLen, bits;
    AL10.alGetBufferi(result, AL10.AL_SIZE, out bufLen);
    AL10.alGetBufferi(result, AL10.AL_BITS, out bits);
    TimeSpan resultDur = TimeSpan.FromSeconds(bufLen / (bits / 8) / channels / ((double) sampleRate));

    // Set the loop points, if applicable
    if (loopStart > 0 || loopEnd > 0)
    {
        AL10.alBufferiv(result, ALEXT.AL_LOOP_POINTS_SOFT, new int[] { (int) loopStart, (int) loopEnd });
    }

    // Finally.
    return new OpenALBuffer(result, resultDur);
}
private void INTERNAL_bufferData(
    byte[] data,
    uint sampleRate,
    uint channels,
    uint loopStart,
    uint loopEnd,
    bool isADPCM,
    uint formatParameter
) {
    if (OpenALDevice.Instance == null)
    {
        throw new NoAudioHardwareException();
    }

    // Generate the buffer now, in case we need to perform alBuffer ops.
    AL10.alGenBuffers((IntPtr) 1, out INTERNAL_buffer);

    int format;
    if (isADPCM)
    {
        Platform.AssertSupported("ADPCM");
        format = (channels == 2) ?
            ALEXT.AL_FORMAT_STEREO_MSADPCM_SOFT :
            ALEXT.AL_FORMAT_MONO_MSADPCM_SOFT;
        AL10.alBufferi(INTERNAL_buffer, ALEXT.AL_UNPACK_BLOCK_ALIGNMENT_SOFT, (int) formatParameter);
    }
    else
    {
        if (formatParameter == 1)
        {
            format = (channels == 2) ? AL10.AL_FORMAT_STEREO16 : AL10.AL_FORMAT_MONO16;
        }
        else
        {
            format = (channels == 2) ? AL10.AL_FORMAT_STEREO8 : AL10.AL_FORMAT_MONO8;
        }
    }

    // Load it!
    AL10.alBufferData(INTERNAL_buffer, format, data, (IntPtr) data.Length, (IntPtr) sampleRate);

    // Calculate the duration now, after we've unpacked the buffer
    int bufLen, bits;
    AL10.alGetBufferi(INTERNAL_buffer, AL10.AL_SIZE, out bufLen);
    AL10.alGetBufferi(INTERNAL_buffer, AL10.AL_BITS, out bits);
    Duration = TimeSpan.FromSeconds(bufLen / (bits / 8) / channels / ((double) sampleRate));

    // Compute the default loop end point (end of the buffer), because
    // some content builders automatically set a loop endpoint here instead of at 0
    int defaultEndPoint = (bufLen / (bits / 8)) / (int) channels;
    var hasCustomStartPoint = (loopStart > 0);
    var hasCustomEndPoint = ((loopEnd > loopStart) && (loopEnd < defaultEndPoint));
    if (hasCustomStartPoint || hasCustomEndPoint)
    {
        // Set the loop points, if applicable
        Platform.AssertSupported("CustomLoopPoints");
        AL10.alBufferiv(INTERNAL_buffer, ALEXT.AL_LOOP_POINTS_SOFT, new int[] { (int) loopStart, (int) loopEnd });
    }
}
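// Worked example of the duration and default-loop-end arithmetic used in the last
// two snippets (illustrative numbers only): 16-bit stereo at 44100 Hz with a 176400
// byte buffer gives 176400 / (16 / 8) / 2 / 44100.0 = 1.0 second, and a default end
// point of (176400 / 2) / 2 = 44100 sample frames.
int bufLenExample = 176400, bitsExample = 16, channelsExample = 2, sampleRateExample = 44100;
double seconds = bufLenExample / (bitsExample / 8) / channelsExample / (double) sampleRateExample; // 1.0
int defaultEndPointExample = (bufLenExample / (bitsExample / 8)) / channelsExample;                // 44100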