/// <summary>
/// Stop playback without flushing buffers
/// </summary>
public void Pause()
{
    // Pausing is only meaningful while playing; wake the play thread so it
    // observes the new state and stops feeding the device.
    if (playbackState != WasapiOutState.Playing)
    {
        return;
    }

    playbackState = WasapiOutState.Paused;
    playThreadEvent.Set();
}
/// <summary>
/// Dispose
/// </summary>
public void Dispose()
{
    // The play thread owns the audio client and performs the real cleanup;
    // here we only request teardown. Nothing to do if the client is gone.
    if (audioClient == null)
    {
        return;
    }

    playbackState = WasapiOutState.Disposing;
    playThreadEvent.Set();
}
/// <summary>
/// Begin Playback
/// </summary>
public void Play()
{
    // Already playing: nothing to do.
    if (playbackState == WasapiOutState.Playing)
    {
        return;
    }

    playbackState = WasapiOutState.Playing;
    playThreadEvent.Set(); // wake the play thread so it starts rendering
}
/// <summary>
/// Stop playback and flush buffers
/// </summary>
public void Stop()
{
    // Stopping only applies to an active (playing or paused) stream.
    bool isActive = playbackState == WasapiOutState.Playing
                 || playbackState == WasapiOutState.Paused;
    if (isActive)
    {
        playbackState = WasapiOutState.Stopping;
        playThreadEvent.Set();
    }
}
/// <summary>
/// Reads up to <paramref name="frameCount"/> frames from the playback
/// provider and hands them to the WASAPI render client.
/// </summary>
/// <param name="playbackProvider">Source of PCM audio data.</param>
/// <param name="frameCount">Number of frames of space in the device buffer.</param>
private void FillBuffer(IWaveProvider playbackProvider, int frameCount)
{
    IntPtr buffer = renderClient.GetBuffer(frameCount);
    int bytesRequested = frameCount * bytesPerFrame;
    int bytesRead = playbackProvider.Read(readBuffer, 0, bytesRequested);
    if (bytesRead == 0)
    {
        // Provider is exhausted - ask the play thread to wind down.
        playbackState = WasapiOutState.Stopping;
    }
    Marshal.Copy(readBuffer, 0, buffer, bytesRead);
    // Release only the whole frames that were actually filled; a short read
    // simply yields a smaller count than requested.
    renderClient.ReleaseBuffer(bytesRead / bytesPerFrame, AudioClientBufferFlags.None);
}
/// <summary>
/// Main playback loop. Activates the audio client, wraps the source in a
/// resampler when needed, then runs a state machine that fills the device
/// buffer while Playing, drains and resets on Stopping, and tears everything
/// down (on this thread) on Disposing.
/// </summary>
private async void PlayThread()
{
    await Activate();
    var playbackProvider = Init();
    bool isClientRunning = false;
    try
    {
        // Resample to the device's rate when the source rate differs.
        if (this.resamplerNeeded)
        {
            var resampler = new WdlResamplingSampleProvider(playbackProvider.ToSampleProvider(), outputFormat.SampleRate);
            playbackProvider = new SampleToWaveProvider(resampler);
        }
        // fill a whole buffer
        bufferFrameCount = audioClient.BufferSize;
        bytesPerFrame = outputFormat.Channels * outputFormat.BitsPerSample / 8;
        readBuffer = new byte[bufferFrameCount * bytesPerFrame];
        FillBuffer(playbackProvider, bufferFrameCount);
        int timeout = 3 * latencyMilliseconds;
        while (playbackState != WasapiOutState.Disposed)
        {
            // Not playing: park until Play/Stop/Dispose signals the event
            // (500 ms cap so state changes are never missed indefinitely).
            if (playbackState != WasapiOutState.Playing)
            {
                playThreadEvent.WaitOne(500);
            }
            // If still playing and notification is ok
            if (playbackState == WasapiOutState.Playing)
            {
                if (!isClientRunning)
                {
                    audioClient.Start();
                    isClientRunning = true;
                }
                // If using Event Sync, Wait for notification from AudioClient or Sleep half latency
                var r = NativeMethods.WaitForSingleObjectEx(frameEventWaitHandle, timeout, true);
                if (r != 0)
                {
                    throw new InvalidOperationException("Timed out waiting for event");
                }
                // See how much buffer space is available.
                int numFramesPadding = 0;
                // In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize
                numFramesPadding = (shareMode == AudioClientShareMode.Shared) ? audioClient.CurrentPadding : 0;
                int numFramesAvailable = bufferFrameCount - numFramesPadding;
                if (numFramesAvailable > 0)
                {
                    FillBuffer(playbackProvider, numFramesAvailable);
                }
            }
            if (playbackState == WasapiOutState.Stopping)
            {
                // play the buffer out
                while (audioClient.CurrentPadding > 0)
                {
                    await Task.Delay(latencyMilliseconds / 2);
                }
                audioClient.Stop();
                isClientRunning = false;
                audioClient.Reset();
                playbackState = WasapiOutState.Stopped;
                RaisePlaybackStopped(null);
            }
            if (playbackState == WasapiOutState.Disposing)
            {
                // Disposing skips the drain: stop immediately and discard.
                audioClient.Stop();
                isClientRunning = false;
                audioClient.Reset();
                playbackState = WasapiOutState.Disposed;
                var disposablePlaybackProvider = playbackProvider as IDisposable;
                if (disposablePlaybackProvider != null)
                {
                    disposablePlaybackProvider.Dispose(); // do everything on this thread, even dispose in case it is Media Foundation
                }
                RaisePlaybackStopped(null);
            }
        }
    }
    catch (Exception e)
    {
        // Any failure surfaces through the PlaybackStopped event.
        RaisePlaybackStopped(e);
    }
    finally
    {
        // Release native resources regardless of how the loop exited.
        audioClient.Dispose();
        audioClient = null;
        renderClient = null;
        NativeMethods.CloseHandle(frameEventWaitHandle);
    }
}
/// <summary>
/// Reads audio from the playback provider and writes it to the render client,
/// remapping between the provider's channel layout and the device's requested
/// channel layout when they differ.
/// </summary>
/// <param name="playbackProvider">Source of PCM audio data.</param>
/// <param name="frameCount">Frames of space available in the device buffer.</param>
/// <param name="requestedChannelCount">Channels per frame expected by the device (output format).</param>
/// <param name="providedChannelCount">Channels per frame delivered by the provider.</param>
/// <param name="providedBitsPerSample">Bits per sample delivered by the provider.</param>
private void FillBuffer(IWaveProvider playbackProvider, int frameCount, int requestedChannelCount, int providedChannelCount, int providedBitsPerSample)
{
    IntPtr buffer = renderClient.GetBuffer(frameCount);
    int bytesPerSample = providedBitsPerSample / 8;
    int providedFrameSize = providedChannelCount * bytesPerSample;
    int providableReadLength = frameCount * providedFrameSize;
    int read = playbackProvider.Read(readBuffer, 0, providableReadLength);
    int actualFrameCount;
    if (read == 0)
    {
        // Provider is exhausted; release zero frames and wind playback down.
        playbackState = WasapiOutState.Stopping;
        actualFrameCount = 0;
    }
    else if (requestedChannelCount != providedChannelCount)
    {
        // Channel layouts differ: remap frame by frame in whole samples.
        // (The previous byte-wise loops never advanced the channel counter
        // in the downmix branch and conflated byte offsets with channel
        // indices, so the remap was effectively a straight copy.)
        int requestedFrameSize = requestedChannelCount * bytesPerSample;
        int framesRead = read / providedFrameSize;
        // Copy the channels common to both layouts; when upmixing, the extra
        // output channels stay zero (silence) because newReadBuffer is
        // cleared at the end of every call.
        int bytesToCopyPerFrame = Math.Min(requestedChannelCount, providedChannelCount) * bytesPerSample;
        for (int frame = 0; frame < framesRead; frame++)
        {
            Array.Copy(readBuffer, frame * providedFrameSize,
                       newReadBuffer, frame * requestedFrameSize,
                       bytesToCopyPerFrame);
        }
        int written = framesRead * requestedFrameSize;
        Marshal.Copy(newReadBuffer, 0, buffer, written);
        actualFrameCount = written / bytesPerFrame;
    }
    else
    {
        // Layouts match: hand the data straight to the device buffer.
        Marshal.Copy(readBuffer, 0, buffer, read);
        actualFrameCount = read / bytesPerFrame;
    }
    renderClient.ReleaseBuffer(actualFrameCount, AudioClientBufferFlags.None);
    // Zero the scratch buffers so stale samples never leak into later calls
    // (the upmix path above relies on newReadBuffer being all zeros).
    Array.Clear(readBuffer, 0, read);
    Array.Clear(newReadBuffer, 0, newReadBuffer.Length);
}
/// <summary>
/// Reads up to <paramref name="frameCount"/> frames from the playback
/// provider into the shared read buffer and hands them to the WASAPI
/// render client.
/// </summary>
/// <param name="playbackProvider">Source of PCM audio data.</param>
/// <param name="frameCount">Frames of space available in the device buffer.</param>
private void FillBuffer(IWaveProvider playbackProvider, int frameCount)
{
    IntPtr buffer = renderClient.GetBuffer(frameCount);
    int readLength = frameCount*bytesPerFrame;
    int read = playbackProvider.Read(readBuffer, 0, readLength);
    if (read == 0)
    {
        // Provider has no more data - ask the play thread to wind down.
        playbackState = WasapiOutState.Stopping;
    }
    Marshal.Copy(readBuffer, 0, buffer, read);
    // A short read yields fewer whole frames than requested; release only
    // the frames actually filled.
    int actualFrameCount = read/bytesPerFrame;
    /*if (actualFrameCount != frameCount)
    {
        Debug.WriteLine(String.Format("WASAPI wanted {0} frames, supplied {1}", frameCount, actualFrameCount ));
    }*/
    renderClient.ReleaseBuffer(actualFrameCount, AudioClientBufferFlags.None);
}
/// <summary>
/// Main playback loop. Activates the audio client, wraps the source in a
/// resampler when needed, then runs a state machine that fills the device
/// buffer while Playing, drains and resets on Stopping, and tears everything
/// down (on this thread) on Disposing.
/// </summary>
private async void PlayThread()
{
    await Activate();
    var playbackProvider = Init();
    bool isClientRunning = false;
    try
    {
        // Resample to the device's rate when the source rate differs.
        if (this.resamplerNeeded)
        {
            var resampler = new WdlResamplingSampleProvider(playbackProvider.ToSampleProvider(), outputFormat.SampleRate);
            playbackProvider = new SampleToWaveProvider(resampler);
        }
        // fill a whole buffer
        bufferFrameCount = audioClient.BufferSize;
        bytesPerFrame = outputFormat.Channels*outputFormat.BitsPerSample/8;
        readBuffer = new byte[bufferFrameCount*bytesPerFrame];
        FillBuffer(playbackProvider, bufferFrameCount);
        int timeout = 3 * latencyMilliseconds;
        while (playbackState != WasapiOutState.Disposed)
        {
            // Not playing: park until Play/Stop/Dispose signals the event
            // (500 ms cap so state changes are never missed indefinitely).
            if (playbackState != WasapiOutState.Playing)
            {
                playThreadEvent.WaitOne(500);
            }
            // If still playing and notification is ok
            if (playbackState == WasapiOutState.Playing)
            {
                if (!isClientRunning)
                {
                    audioClient.Start();
                    isClientRunning = true;
                }
                // If using Event Sync, Wait for notification from AudioClient or Sleep half latency
                var r = NativeMethods.WaitForSingleObjectEx(frameEventWaitHandle, timeout, true);
                if (r != 0) throw new InvalidOperationException("Timed out waiting for event");
                // See how much buffer space is available.
                int numFramesPadding = 0;
                // In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize
                numFramesPadding = (shareMode == AudioClientShareMode.Shared) ? audioClient.CurrentPadding : 0;
                int numFramesAvailable = bufferFrameCount - numFramesPadding;
                if (numFramesAvailable > 0)
                {
                    FillBuffer(playbackProvider, numFramesAvailable);
                }
            }
            if (playbackState == WasapiOutState.Stopping)
            {
                // play the buffer out
                while (audioClient.CurrentPadding > 0)
                {
                    await Task.Delay(latencyMilliseconds / 2);
                }
                audioClient.Stop();
                isClientRunning = false;
                audioClient.Reset();
                playbackState = WasapiOutState.Stopped;
                RaisePlaybackStopped(null);
            }
            if (playbackState == WasapiOutState.Disposing)
            {
                // Disposing skips the drain: stop immediately and discard.
                audioClient.Stop();
                isClientRunning = false;
                audioClient.Reset();
                playbackState = WasapiOutState.Disposed;
                var disposablePlaybackProvider = playbackProvider as IDisposable;
                if (disposablePlaybackProvider!=null) disposablePlaybackProvider.Dispose(); // do everything on this thread, even dispose in case it is Media Foundation
                RaisePlaybackStopped(null);
            }
        }
    }
    catch (Exception e)
    {
        // Any failure surfaces through the PlaybackStopped event.
        RaisePlaybackStopped(e);
    }
    finally
    {
        // Release native resources regardless of how the loop exited.
        audioClient.Dispose();
        audioClient = null;
        renderClient = null;
        NativeMethods.CloseHandle(frameEventWaitHandle);
    }
}