// XAudio2-backed stream: mono 16-bit PCM at the requested rate, fed through
// a ring of fixed-size native buffers gated by a counting semaphore.
public XAudio2Stream(int rate, int bufferSize, int numBuffers, GetBufferDataCallback bufferFillCallback)
{
    xaudio2 = new XAudio2();
    //xaudio2 = new XAudio2(XAudio2Version.Version27); // To simulate Windows 7 behavior.
    //xaudio2.CriticalError += Xaudio2_CriticalError;

    // TODO : We need to decouple the number of emulated buffered frames and the
    // size of the low-level audio buffers.
    masteringVoice = new MasteringVoice(xaudio2);
    waveFormat = new WaveFormat(rate, 16, 1);

    // One native memory block per ring slot; freed elsewhere (presumably in
    // Dispose -- not visible here).
    audioBuffersRing = new AudioBuffer[numBuffers];
    memBuffers = new DataPointer[numBuffers];
    for (int idx = 0; idx < numBuffers; idx++)
    {
        audioBuffersRing[idx] = new AudioBuffer();
        memBuffers[idx].Size = bufferSize;
        memBuffers[idx].Pointer = Utilities.AllocateMemory(bufferSize);
    }

    bufferFill = bufferFillCallback;

    // Semaphore starts full: every ring slot is initially available for filling.
    bufferSemaphore = new Semaphore(numBuffers, numBuffers);
    quitEvent = new ManualResetEvent(false);
}
// PortAudio-backed stream (no native callback registered in this variant).
//
// rate               : output sample rate in Hz.
// channels           : number of output channels to open.
// bufferSize         : unused here -- TODO confirm against other backends.
// numBuffers         : unused here -- TODO confirm against other backends.
// bufferFillCallback : delegate used to obtain sample data.
public PortAudioStream(int rate, int channels, int bufferSize, int numBuffers, GetBufferDataCallback bufferFillCallback)
{
    // Initialize PortAudio once, but count EVERY stream so a reference-counted
    // Pa_Terminate (elsewhere) balances correctly.
    // BUG FIX: refCount++ was previously inside the guard, so only the first
    // stream was counted; the stream was also opened at a hard-coded 44100 Hz
    // with 1 channel, ignoring the 'rate' and 'channels' parameters.
    if (refCount == 0)
    {
        Pa_Initialize();
    }
    refCount++;

    // framesPerBuffer = 0 lets PortAudio pick; null callback keeps the
    // original blocking/pull behavior of this variant.
    Pa_OpenDefaultStream(out stream, 0, channels, PaSampleFormat.Int16, rate, 0, null, IntPtr.Zero);

    bufferFill = bufferFillCallback;
}
// OpenAL-backed stream: one source plus a pool of queued buffers.
// The AudioContext is shared (static-looking field) and created lazily here.
public OpenALStream(int rate, int bufferSize, int numBuffers, GetBufferDataCallback bufferFillCallback)
{
    context ??= new AudioContext();

    freq = rate;
    quit = false;
    bufferFill = bufferFillCallback;

    source = AL.GenSource();
    buffers = AL.GenBuffers(numBuffers);
}
// PortAudio-backed stream using the native callback API.
// PortAudio itself is initialized once and reference-counted across streams.
public PortAudioStream(int rate, int bufferSize, int numBuffers, GetBufferDataCallback bufferFillCallback)
{
    if (refCount == 0)
    {
        Pa_Initialize();
    }
    refCount++;

    // NOTE(review): delegates are held in fields -- presumably to keep them
    // alive while native code holds the function pointers; confirm.
    streamCallback = new PaStreamCallback(StreamCallback);
    immediateStreamCallback = new PaStreamCallback(ImmediateStreamCallback);

    // Mono 16-bit output at the requested rate; framesPerBuffer = 0 lets
    // PortAudio choose its own buffer granularity.
    Pa_OpenDefaultStream(out stream, 0, 1, PaSampleFormat.Int16, rate, 0, streamCallback, IntPtr.Zero);

    bufferFill = bufferFillCallback;
}
// OpenAL-backed stream; logs the default device the first time the shared
// context is created.
public OpenALStream(int rate, int bufferSize, int numBuffers, GetBufferDataCallback bufferFillCallback)
{
    if (context is null)
    {
        context = new AudioContext();
        Console.WriteLine($"Default OpenAL audio device is '{AudioContext.DefaultDevice}'");
    }

    // TODO : We need to decouple the number of emulated buffered frames and the
    // size of the low-level audio buffers.
    freq = rate;
    quit = false;
    bufferFill = bufferFillCallback;

    source = AL.GenSource();
    buffers = AL.GenBuffers(numBuffers);
}
// Android AudioTrack-backed stream: low-latency streaming-mode track,
// mono 16-bit PCM at the requested rate.
public AndroidAudioStream(int rate, int bufferSizeInBytes, int numBuffers, GetBufferDataCallback bufferFillCallback)
{
    // Probably not needed, but things about effects seen in the log were
    // worrying. Doesn't hurt.
    var audioManager = (AudioManager)Application.Context.GetSystemService(Context.AudioService);
    audioManager.UnloadSoundEffects();

    bufferFill = bufferFillCallback;

    var attributes = new AudioAttributes.Builder()
        .SetContentType(AudioContentType.Music)
        .SetUsage(AudioUsageKind.Media)
        .Build();

    var format = new AudioFormat.Builder()
        .SetSampleRate(rate)
        .SetEncoding(Encoding.Pcm16bit)
        .SetChannelMask(ChannelOut.Mono)
        .Build();

    audioTrack = new AudioTrack.Builder()
        .SetAudioAttributes(attributes)
        .SetAudioFormat(format)
        .SetTransferMode(AudioTrackMode.Stream)
        .SetPerformanceMode(AudioTrackPerformanceMode.LowLatency)
        .SetBufferSizeInBytes(bufferSizeInBytes)
        .Build();

    // The OS may silently refuse low-latency mode; surface that in debug builds.
    Debug.Assert(audioTrack.PerformanceMode == AudioTrackPerformanceMode.LowLatency);
}
// XAudio2-backed stream: 16-bit PCM at the requested rate and channel count,
// fed through a ring of fixed-size native buffers gated by a counting semaphore.
public XAudio2Stream(int rate, int channels, int bufferSize, int numBuffers, GetBufferDataCallback bufferFillCallback)
{
    xaudio2 = new XAudio2();
    masteringVoice = new MasteringVoice(xaudio2);
    waveFormat = new WaveFormat(rate, 16, channels);

    // One native memory block per ring slot; freed elsewhere (presumably in
    // Dispose -- not visible here).
    audioBuffersRing = new AudioBuffer[numBuffers];
    memBuffers = new DataPointer[numBuffers];
    for (int idx = 0; idx < numBuffers; idx++)
    {
        audioBuffersRing[idx] = new AudioBuffer();
        memBuffers[idx].Size = bufferSize;
        memBuffers[idx].Pointer = Utilities.AllocateMemory(bufferSize);
    }

    bufferFill = bufferFillCallback;

    // Semaphore starts full: every ring slot is initially available for filling.
    bufferSemaphore = new Semaphore(numBuffers, numBuffers);
    quitEvent = new ManualResetEvent(false);
}