/// <summary>
/// Halts playback: disables the timer, stops the audio client, notifies
/// listeners via PlaybackStopped, then resets the client for reuse.
/// </summary>
/// <param name="e">The exception that caused the stop, or null for a normal stop.</param>
private void PerformStop(Exception e)
{
    timer.Enabled = false;

    // Stop before raising the event so listeners observe a quiesced client.
    audioClient?.Stop();
    RaisePlaybackStopped(e);
    audioClient?.Reset();
}
/// <summary>
/// Form-closing handler: never blocks the close, but attempts to shut down
/// the server and both clients, reporting each failure in its own message box.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Closing arguments; Cancel is always cleared.</param>
private void MainForm_FormClosing(object sender, FormClosingEventArgs e)
{
    e.Cancel = false;

    // Each component is stopped independently so one failure cannot
    // prevent the others from shutting down.
    void SafeStop(Action stop, string errorPrefix)
    {
        try
        {
            stop();
        }
        catch (Exception ex)
        {
            MessageBox.Show(errorPrefix + ex.Message);
        }
    }

    SafeStop(() => _audioServer.Stop(), "服务器关闭异常,Error:");
    SafeStop(() => _audioClient1.Stop(), "客户端一关闭异常,Error:");
    SafeStop(() => _audioClient2.Stop(), "客户端二关闭异常,Error:");
}
/// <summary>
/// Stops capture: signals cancellation, blocks until the capture task has
/// finished, stops both audio clients, then releases the completed task.
/// </summary>
public void StopCapture()
{
    // Request cancellation and wait for the capture loop to exit.
    cancelTokenSource.Cancel();
    captureTask.Wait();

    micAudioClient.Stop();
    speakAudioClient.Stop();

    // Safe to dispose: the task has already completed above.
    captureTask.Dispose();
}
/// <summary>
/// Releases the audio client. Idempotent: subsequent calls are no-ops.
/// </summary>
public void Dispose()
{
    var client = _audioClient;
    if (client == null)
    {
        return;
    }

    client.Stop();
    client.Dispose();
    _audioClient = null;
    // Not disposing the device as it may be in use in a recording.
}
/// <summary>
/// Stops peak-level monitoring by stopping and disposing the audio client.
/// No-op when monitoring is not active.
/// </summary>
public void StopListeningForPeakLevel()
{
    if (_audioClient == null)
    {
        return;
    }

    _audioClient.Stop();
    _audioClient.Dispose();
    // BUG FIX: the original assigned null to _audioClient twice in a row;
    // the duplicate statement has been removed.
    _audioClient = null;
}
/// <summary>
/// Stops peak-level monitoring for both the microphone and speaker clients,
/// stopping and disposing each one that is active.
/// </summary>
public void StopListeningForPeakLevel()
{
    // BUG FIX: the original returned early when the mic client was null,
    // which skipped cleanup of the speaker client entirely and leaked it.
    // Each client is now cleaned up independently.
    if (_audioClientMic != null)
    {
        _audioClientMic.Stop();
        _audioClientMic.Dispose();
        _audioClientMic = null;
    }

    if (_audioClientSpeak != null)
    {
        _audioClientSpeak.Stop();
        _audioClientSpeak.Dispose();
        _audioClientSpeak = null;
    }
}
/// <summary>
/// Capture thread entry point: runs the recording loop, guarantees the
/// client is stopped afterwards, and reports how the recording ended.
/// </summary>
/// <param name="client">The audio client driving the capture loop.</param>
private void CaptureThread(AudioClient client)
{
    Exception error = null;
    try
    {
        DoRecording(client);
    }
    catch (Exception e)
    {
        error = e;
    }
    finally
    {
        // don't dispose - the AudioClient only gets disposed when WasapiCapture is disposed
        client.Stop();
    }

    captureThread = null;
    RaiseRecordingStopped(error);
    Debug.WriteLine("Stop wasapi");
}
/// <summary>
/// Dedicated playback loop (async void: fire-and-forget entry point).
/// Activates the client, optionally inserts a WDL resampler, pre-fills the
/// device buffer, then services the device event until the state machine
/// reaches Disposed. Handles the Playing / Stopping / Disposing transitions
/// inline so all client teardown happens on this loop.
/// </summary>
private async void PlayThread()
{
    await Activate();
    var playbackProvider = Init();
    bool isClientRunning = false;
    try
    {
        if (this.resamplerNeeded)
        {
            // Sample-rate conversion: source -> WDL resampler -> wave provider.
            var resampler = new WdlResamplingSampleProvider(playbackProvider.ToSampleProvider(), outputFormat.SampleRate);
            playbackProvider = new SampleToWaveProvider(resampler);
        }
        // fill a whole buffer
        bufferFrameCount = audioClient.BufferSize;
        bytesPerFrame = outputFormat.Channels * outputFormat.BitsPerSample / 8;
        readBuffer = new byte[bufferFrameCount * bytesPerFrame];
        FillBuffer(playbackProvider, bufferFrameCount);
        int timeout = 3 * latencyMilliseconds;
        while (playbackState != WasapiOutState.Disposed)
        {
            if (playbackState != WasapiOutState.Playing)
            {
                // Idle: poll the control event at most every 500 ms so
                // state changes are noticed without busy-waiting.
                playThreadEvent.WaitOne(500);
            }
            // If still playing and notification is ok
            if (playbackState == WasapiOutState.Playing)
            {
                // Start the client lazily on the first Playing iteration
                // (and after a Stop, since Stopping clears the flag).
                if (!isClientRunning)
                {
                    audioClient.Start();
                    isClientRunning = true;
                }
                // If using Event Sync, Wait for notification from AudioClient or Sleep half latency
                var r = NativeMethods.WaitForSingleObjectEx(frameEventWaitHandle, timeout, true);
                if (r != 0)
                {
                    throw new InvalidOperationException("Timed out waiting for event");
                }
                // See how much buffer space is available.
                int numFramesPadding = 0;
                // In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize
                numFramesPadding = (shareMode == AudioClientShareMode.Shared) ? audioClient.CurrentPadding : 0;
                int numFramesAvailable = bufferFrameCount - numFramesPadding;
                if (numFramesAvailable > 0)
                {
                    FillBuffer(playbackProvider, numFramesAvailable);
                }
            }
            if (playbackState == WasapiOutState.Stopping)
            {
                // play the buffer out
                while (audioClient.CurrentPadding > 0)
                {
                    await Task.Delay(latencyMilliseconds / 2);
                }
                audioClient.Stop();
                isClientRunning = false;
                audioClient.Reset();
                playbackState = WasapiOutState.Stopped;
                RaisePlaybackStopped(null);
            }
            if (playbackState == WasapiOutState.Disposing)
            {
                audioClient.Stop();
                isClientRunning = false;
                audioClient.Reset();
                playbackState = WasapiOutState.Disposed;
                var disposablePlaybackProvider = playbackProvider as IDisposable;
                if (disposablePlaybackProvider != null)
                {
                    disposablePlaybackProvider.Dispose(); // do everything on this thread, even dispose in case it is Media Foundation
                }
                RaisePlaybackStopped(null);
            }
        }
    }
    catch (Exception e)
    {
        RaisePlaybackStopped(e);
    }
    finally
    {
        // Final teardown: the client and the event handle are owned by this loop.
        audioClient.Dispose();
        audioClient = null;
        renderClient = null;
        NativeMethods.CloseHandle(frameEventWaitHandle);
    }
}
/// <summary>
/// Capture loop: pulls packets from the capture client into an accumulation
/// buffer and raises DataAvailable roughly every 100 ms worth of audio.
/// Runs until the stop flag is set; always stops and disposes the client.
/// </summary>
/// <param name="client">The activated audio client to record from.</param>
private void DoRecording(AudioClient client)
{
    Debug.WriteLine(client.BufferSize);
    var buf = new Byte[client.BufferSize * bytesPerFrame];
    int bufLength = 0;
    int minPacketSize = waveFormat.AverageBytesPerSecond / 100; //100ms
    IntPtr hEvent = NativeMethods.CreateEventEx(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS);
    client.SetEventHandle(hEvent);
    Exception error = null;
    try
    {
        AudioCaptureClient capture = client.AudioCaptureClient;
        client.Start();
        int packetSize = capture.GetNextPacketSize();
        while (!this.stop)
        {
            IntPtr pData = IntPtr.Zero;
            int numFramesToRead = 0;
            AudioClientBufferFlags dwFlags = 0;
            if (packetSize == 0)
            {
                // No data queued: block on the device event (100 ms cap).
                if (NativeMethods.WaitForSingleObjectEx(hEvent, 100, true) != 0)
                {
                    throw new Exception("Capture event timeout");
                }
            }
            pData = capture.GetBuffer(out numFramesToRead, out dwFlags);
            if ((int)(dwFlags & AudioClientBufferFlags.Silent) > 0)
            {
                pData = IntPtr.Zero;
            }
            if (numFramesToRead == 0) { continue; }
            int capturedBytes = numFramesToRead * bytesPerFrame;
            if (pData == IntPtr.Zero)
            {
                // BUG FIX: a Silent packet nulls pData above, and the
                // original then passed IntPtr.Zero to Marshal.Copy, which
                // throws. Emit zeroed samples instead (matches the
                // sibling DoRecording() implementation).
                Array.Clear(buf, bufLength, capturedBytes);
            }
            else
            {
                System.Runtime.InteropServices.Marshal.Copy(pData, buf, bufLength, capturedBytes);
            }
            bufLength += capturedBytes;
            capture.ReleaseBuffer(numFramesToRead);
            if (bufLength >= minPacketSize)
            {
                if (DataAvailable != null)
                {
                    DataAvailable(this, new WaveInEventArgs(buf, bufLength));
                }
                bufLength = 0;
            }
            packetSize = capture.GetNextPacketSize();
        }
    }
    catch (Exception ex)
    {
        error = ex;
        Debug.WriteLine("stop wasapi");
    }
    finally
    {
        // BUG FIX: raise RecordingStopped exactly once. The original raised
        // it in the catch block AND unconditionally in finally, so listeners
        // received the event twice when the loop failed.
        RaiseRecordingStopped(error);
        NativeMethods.CloseHandle(hEvent);
        client.Stop();
        client.Dispose();
    }
}
/// <summary>
/// Begin Playback. Resumes from Paused immediately; otherwise spawns a
/// render task that primes the device buffer and keeps it filled until the
/// state becomes Stopped. Completion (or failure) is reported via
/// RaisePlaybackStopped.
/// </summary>
public void Play()
{
    if (PlaybackState == EPlaybackState.Playing)
    {
        return;
    }
    if (PlaybackState == EPlaybackState.Paused)
    {
        PlaybackState = EPlaybackState.Playing;
        return;
    }
    Debug.WriteLine("[render]Task starting...");
    playTask = Task.Run(() =>
    {
        IWaveProvider playbackProvider = this.sourceProvider;
        AudioClient client = this.audioClient;
        Exception exception = null;
        PlaybackState = EPlaybackState.Playing;
        try
        {
            // fill a whole buffer
            var bufferFrameCount = client.BufferSize;
            var bytesPerFrame = outputFormat.Channels * outputFormat.BitsPerSample / 8;
            readBuffer = new byte[bufferFrameCount * bytesPerFrame];
            //FillBuffer(playbackProvider, bufferFrameCount);
            client.Start();
            while (PlaybackState != EPlaybackState.Stopped)
            {
                // If using Event Sync, Wait for notification from AudioClient or Sleep half latency
                if (isUsingEventSync)
                {
                    //indexHandle = WaitHandle.WaitAny(waitHandles, 3 * latencyMilliseconds, false);
                    frameEventWaitHandle.WaitOne(3 * latencyMilliseconds);
                }
                else
                {
                    // BUG FIX: the original called Task.Delay(...) without
                    // awaiting or waiting on it, so the returned task was
                    // discarded and this loop spun at full CPU speed.
                    // Blocking this dedicated worker for the delay restores
                    // the intended half-latency pacing.
                    Task.Delay(latencyMilliseconds / 2).Wait();
                }
                // If still playing and notification is ok
                if (PlaybackState != EPlaybackState.Playing)
                {
                    continue;
                }
                // See how much buffer space is available.
                int numFramesPadding = 0;
                if (isUsingEventSync)
                {
                    // In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize
                    numFramesPadding = (shareMode == EAudioClientShareMode.Shared) ? client.CurrentPadding : 0;
                }
                else
                {
                    numFramesPadding = client.CurrentPadding;
                }
                int numFramesAvailable = bufferFrameCount - numFramesPadding;
                if (numFramesAvailable > 0)
                {
                    FillBuffer(playbackProvider, numFramesAvailable);
                }
            }
        }
        catch (Exception e)
        {
            Debug.WriteLine("[render]Task catch Exception.");
            Debug.WriteLine(e.Message);
            Debug.WriteLine(e.Source);
            Debug.WriteLine(e.StackTrace);
            exception = e;
        }
        finally
        {
            client.Stop();
            client.Reset();
            Debug.WriteLine("[render]Task stop detected.");
            RaisePlaybackStopped(exception);
        }
    });
    Debug.WriteLine("[render]Task started");
}
/// <summary>
/// Render thread: pre-fills the device with one buffer, then runs one of two
/// loops — an exclusive/event-sync loop that writes one whole AudioBuffer per
/// device event, or a shared/timer loop that drip-feeds partial buffers using
/// an offset cursor. On failure, both pooled read buffers are released and
/// RaisePlaybackException is fired instead of RaisePlaybackStopped.
/// </summary>
private void PlayThread()
{
    try
    {
        AudioBuffer buff = GetBuffer(false);
        if (buff == null)
        {
            // Nothing to play at all: report a clean stop immediately.
            RaisePlaybackStopped();
            return;
        }
        audioClient.Reset();
        // fill a whole buffer
        IntPtr buffer = renderClient.GetBuffer(buff.Length);
        Marshal.Copy(buff.Bytes, 0, buffer, buff.ByteLength);
        renderClient.ReleaseBuffer(buff.Length, AudioClientBufferFlags.None);
        ReleaseBuffer(buff, false, 0);
        // Create WaitHandle for sync
        if (frameEventWaitHandle != null)
        {
            frameEventWaitHandle.Reset();
        }
        audioClient.Start();
        if (isUsingEventSync && shareMode == AudioClientShareMode.Exclusive)
        {
            // Exclusive + event sync: each device event consumes exactly one
            // full AudioBuffer from the source.
            while (playbackState != PlaybackState.Stopped)
            {
                int indexHandle = WaitHandle.WaitAny(waitHandles, 10 * latencyMilliseconds, false);
                if (playbackState == PlaybackState.Playing && indexHandle != WaitHandle.WaitTimeout)
                {
                    // In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize
                    buff = GetBuffer(false);
                    if (buff == null)
                    {
                        // Source exhausted: leave the loop and drain below.
                        break;
                    }
                    buffer = renderClient.GetBuffer(buff.Length);
                    Marshal.Copy(buff.Bytes, 0, buffer, buff.ByteLength);
                    renderClient.ReleaseBuffer(buff.Length, AudioClientBufferFlags.None);
                    ReleaseBuffer(buff, false, 0);
                }
            }
        }
        else
        {
            // Shared (or timer-driven) path: a source buffer may be larger
            // than the free device space, so `offs` tracks how many frames of
            // the current AudioBuffer have been submitted so far.
            buff = null;
            int offs = 0;
            while (playbackState != PlaybackState.Stopped)
            {
                // If using Event Sync, Wait for notification from AudioClient or Sleep half latency
                int indexHandle = 0;
                if (isUsingEventSync)
                {
                    indexHandle = WaitHandle.WaitAny(waitHandles, 3 * latencyMilliseconds, false);
                }
                else
                {
                    Thread.Sleep(latencyMilliseconds / 2);
                }
                // If still playing and notification is ok
                if (playbackState == PlaybackState.Playing && indexHandle != WaitHandle.WaitTimeout)
                {
                    // See how much buffer space is available.
                    int numFramesAvailable = bufferFrameCount - audioClient.CurrentPadding;
                    if (numFramesAvailable > 0)
                    {
                        if (buff == null)
                        {
                            // Previous buffer fully consumed: fetch the next one.
                            buff = GetBuffer(false);
                            offs = 0;
                        }
                        if (buff == null)
                        {
                            // Source exhausted.
                            break;
                        }
                        // Submit only as many frames as both the device and the
                        // remaining portion of the current buffer allow.
                        numFramesAvailable = Math.Min(numFramesAvailable, buff.Length - offs);
                        buffer = renderClient.GetBuffer(numFramesAvailable);
                        Marshal.Copy(buff.Bytes, offs * Settings.PCM.BlockAlign, buffer, numFramesAvailable * Settings.PCM.BlockAlign);
                        renderClient.ReleaseBuffer(numFramesAvailable, AudioClientBufferFlags.None);
                        offs += numFramesAvailable;
                        if (offs == buff.Length)
                        {
                            // Whole buffer submitted: recycle it.
                            ReleaseBuffer(buff, false, 0);
                            buff = null;
                        }
                    }
                }
            }
        }
        //Thread.Sleep(isUsingEventSync ? latencyMilliseconds : latencyMilliseconds / 2);
        audioClient.Stop();
        if (playbackState == PlaybackState.Stopped)
        {
            audioClient.Reset();
        }
    }
    catch (Exception ex)
    {
        // Failure path: force the Stopped state, recycle both pooled buffers,
        // best-effort stop the client, and report the exception (note: the
        // stopped event below is NOT raised in this case).
        playbackState = PlaybackState.Stopped;
        ReleaseBuffer(readBuffers[0], false, 0);
        ReleaseBuffer(readBuffers[1], false, 0);
        playThread = null;
        try { audioClient.Stop(); } catch { }
        RaisePlaybackException(ex);
        return;
    }
    ReleaseBuffer(readBuffers[0], false, 0);
    ReleaseBuffer(readBuffers[1], false, 0);
    RaisePlaybackStopped();
}
/// <summary>
/// Capture loop over the shared audioClient field: pulls packets from the
/// capture client into an accumulation buffer and raises DataAvailable
/// roughly every 100 ms worth of audio. Runs while captureState is Recording.
/// </summary>
private void DoRecording()
{
    Debug.WriteLine("Recording buffer size: " + audioClient.BufferSize);
    var buf = new Byte[audioClient.BufferSize * bytesPerFrame];
    int bufLength = 0;
    int minPacketSize = waveFormat.AverageBytesPerSecond / 100; //100ms
    Exception error = null;
    try
    {
        AudioCaptureClient capture = audioClient.AudioCaptureClient;
        audioClient.Start();
        int packetSize = capture.GetNextPacketSize();
        while (captureState == WasapiCaptureState.Recording)
        {
            IntPtr pData = IntPtr.Zero;
            int numFramesToRead = 0;
            AudioClientBufferFlags dwFlags = 0;
            if (packetSize == 0)
            {
                // No data queued: block on the device event (100 ms cap).
                if (NativeMethods.WaitForSingleObjectEx(hEvent, 100, true) != 0)
                {
                    throw new Exception("Capture event timeout");
                }
            }
            pData = capture.GetBuffer(out numFramesToRead, out dwFlags);
            if ((int)(dwFlags & AudioClientBufferFlags.Silent) > 0)
            {
                pData = IntPtr.Zero;
            }
            if (numFramesToRead == 0) { continue; }
            int capturedBytes = numFramesToRead * bytesPerFrame;
            if (pData == IntPtr.Zero)
            {
                // Silent packet: emit zeroed samples rather than reading memory.
                Array.Clear(buf, bufLength, capturedBytes);
            }
            else
            {
                Marshal.Copy(pData, buf, bufLength, capturedBytes);
            }
            bufLength += capturedBytes;
            capture.ReleaseBuffer(numFramesToRead);
            if (bufLength >= minPacketSize)
            {
                if (DataAvailable != null)
                {
                    DataAvailable(this, new WaveInEventArgs(buf, bufLength));
                }
                bufLength = 0;
            }
            packetSize = capture.GetNextPacketSize();
        }
    }
    catch (Exception ex)
    {
        error = ex;
        Debug.WriteLine("stop wasapi");
    }
    finally
    {
        // BUG FIX: raise RecordingStopped exactly once. The original raised
        // it in the catch block AND unconditionally in finally, so consumers
        // saw the stopped event twice whenever the loop failed.
        RaiseRecordingStopped(error);
        audioClient.Stop();
    }
    Debug.WriteLine("stop wasapi");
}
/// <summary>
/// Render thread with timing instrumentation: optionally wraps the source in
/// a DMO resampler, pre-fills the device buffer, then keeps it topped up
/// until the state leaves Playing. Runs at highest thread priority and logs
/// fill timings / stalls (&gt;15 ms gaps) to the console for diagnosis.
/// </summary>
private void PlayThread()
{
    ResamplerDmoStream resamplerDmoStream = null;
    IWaveProvider playbackProvider = sourceProvider;
    Exception exception = null;
    // Stopwatch drives the diagnostic "time since last successful fill" logging.
    Stopwatch sw = new Stopwatch();
    sw.Start();
    Thread.CurrentThread.Priority = ThreadPriority.Highest;
    var lastOutput = sw.ElapsedMilliseconds;
    try
    {
        if (dmoResamplerNeeded)
        {
            // Convert the source to the device's output format via DMO.
            resamplerDmoStream = new ResamplerDmoStream(sourceProvider, outputFormat);
            playbackProvider = resamplerDmoStream;
        }
        // fill a whole buffer
        bufferFrameCount = audioClient.BufferSize;
        bytesPerFrame = outputFormat.Channels * outputFormat.BitsPerSample / 8;
        readBuffer = new byte[bufferFrameCount * bytesPerFrame];
        FillBuffer(playbackProvider, bufferFrameCount);
        // Create WaitHandle for sync
        var waitHandles = new WaitHandle[] { frameEventWaitHandle };
        audioClient.Start();
        started = true;
        while (playbackState != PlaybackState.Stopped)
        {
            // If using Event Sync, Wait for notification from AudioClient or Sleep half latency
            int indexHandle = 0;
            if (isUsingEventSync)
            {
                indexHandle = WaitHandle.WaitAny(waitHandles, 3 * latencyMilliseconds, false);
            }
            else
            {
                Thread.Sleep(latencyMilliseconds / 2);
            }
            var now = sw.ElapsedMilliseconds;
            // If still playing and notification is ok
            if (playbackState == PlaybackState.Playing && indexHandle != WaitHandle.WaitTimeout)
            {
                // See how much buffer space is available.
                int numFramesPadding;
                if (isUsingEventSync)
                {
                    // In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize
                    numFramesPadding = (shareMode == AudioClientShareMode.Shared) ? audioClient.CurrentPadding : 0;
                }
                else
                {
                    numFramesPadding = audioClient.CurrentPadding;
                }
                int numFramesAvailable = bufferFrameCount - numFramesPadding;
                if (numFramesAvailable > 10) // see https://naudio.codeplex.com/workitem/16363
                {
                    // Diagnostic: log when the fill happened and how much was filled.
                    Console.WriteLine("time:" + now + ", count:" + numFramesAvailable);
                    FillBuffer(playbackProvider, numFramesAvailable);
                    Console.WriteLine("fill buffer time:" + now + ", count:" + numFramesAvailable);
                    lastOutput = now;
                }
                else
                {
                    // Diagnostic: flag when no meaningful fill has happened for >15 ms.
                    if (now - lastOutput > 15)
                    {
                        Console.WriteLine("not finished:" + (now - lastOutput) + ", CurrentPadding:" + audioClient.CurrentPadding);
                    }
                }
            }
            if (now - lastOutput > 15)
            {
                // Diagnostic: blank line marks a stalled iteration in the log.
                Console.WriteLine();
            }
        }
        // Give queued audio a moment to drain before stopping.
        Thread.Sleep(latencyMilliseconds / 2);
        audioClient.Stop();
        if (playbackState == PlaybackState.Stopped)
        {
            audioClient.Reset();
        }
    }
    catch (Exception e)
    {
        exception = e;
    }
    finally
    {
        if (resamplerDmoStream != null)
        {
            resamplerDmoStream.Dispose();
        }
        RaisePlaybackStopped(exception);
    }
}
/// <summary>
/// Playback loop (async void entry point). Optionally wraps the source in a
/// Media Foundation resampler, pre-fills the device buffer, then refills on
/// each device event until playbackState becomes Stopped. Drains the device
/// buffer asynchronously before stopping, and always reports completion (or
/// the failure) via RaisePlaybackStopped.
/// </summary>
private async void PlayThread()
{
    MediaFoundationResampler mediaFoundationResampler = null;
    IWaveProvider playbackProvider = this.sourceProvider;
    Exception exception = null;
    try
    {
        if (this.resamplerNeeded)
        {
            // Convert the source to the device's output format.
            mediaFoundationResampler = new MediaFoundationResampler(sourceProvider, outputFormat);
            playbackProvider = mediaFoundationResampler;
        }
        // fill a whole buffer
        bufferFrameCount = audioClient.BufferSize;
        bytesPerFrame = outputFormat.Channels * outputFormat.BitsPerSample / 8;
        readBuffer = new byte[bufferFrameCount * bytesPerFrame];
        FillBuffer(playbackProvider, bufferFrameCount);
        audioClient.Start();
        while (playbackState != PlaybackState.Stopped)
        {
            // If using Event Sync, Wait for notification from AudioClient or Sleep half latency
            int timeout = 3 * latencyMilliseconds;
            var r = NativeMethods.WaitForSingleObjectEx(frameEventWaitHandle, timeout, true);
            if (r != 0)
            {
                throw new InvalidOperationException("Timed out waiting for event");
            }
            // If still playing and notification is ok
            if (playbackState == PlaybackState.Playing)
            {
                // See how much buffer space is available.
                int numFramesPadding = 0;
                // In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize
                numFramesPadding = (shareMode == AudioClientShareMode.Shared) ? audioClient.CurrentPadding : 0;
                int numFramesAvailable = bufferFrameCount - numFramesPadding;
                if (numFramesAvailable > 0)
                {
                    FillBuffer(playbackProvider, numFramesAvailable);
                }
            }
        }
        // play the buffer out
        while (audioClient.CurrentPadding > 0)
        {
            await Task.Delay(latencyMilliseconds / 2);
        }
        audioClient.Stop();
        if (playbackState == PlaybackState.Stopped)
        {
            audioClient.Reset();
        }
    }
    catch (Exception e)
    {
        exception = e;
    }
    finally
    {
        if (mediaFoundationResampler != null)
        {
            mediaFoundationResampler.Dispose();
        }
        RaisePlaybackStopped(exception);
    }
}
//based on http://msdn.microsoft.com/en-us/library/windows/desktop/dd370800(v=vs.85).aspx
/// <summary>
/// Capture thread procedure: starts the audio client, signals the supplied
/// wait handle once recording is running, then reads device data until the
/// recording state becomes Stopped.
/// NOTE(review): there is no catch block here, so an exception escapes this
/// thread after the finally block runs — confirm that is intended (a sibling
/// CaptureProc variant in this file captures the exception and forwards it
/// to its stopped callback).
/// </summary>
/// <param name="playbackStartedEventWaitHandle">
/// Optional EventWaitHandle to signal once capture has actually started.
/// </param>
private void CaptureProc(object playbackStartedEventWaitHandle)
{
    try
    {
        int bufferSize;
        int frameSize;
        long actualDuration;
        int actualLatency;
        int sleepDuration;
        byte[] buffer;
        int eventWaitHandleIndex;
        WaitHandle[] eventWaitHandleArray;
        bufferSize = _audioClient.BufferSize;
        frameSize = WaveFormat.Channels * WaveFormat.BytesPerSample;
        // Duration of one full device buffer, expressed in REFERENCE_TIME
        // (100-ns) units, then converted to milliseconds.
        actualDuration = (long)((double)REFTIMES_PER_SEC * bufferSize / WaveFormat.SampleRate);
        actualLatency = (int)(actualDuration / REFTIMES_PER_MILLISEC);
        sleepDuration = actualLatency / 8;
        buffer = new byte[bufferSize * frameSize];
        eventWaitHandleIndex = WaitHandle.WaitTimeout;
        eventWaitHandleArray = new WaitHandle[] { _eventWaitHandle };
        _audioClient.Start();
        _recordingState = SoundIn.RecordingState.Recording;
        // Signal the starter that capture is running; null the reference so
        // the finally block does not signal a second time.
        if (playbackStartedEventWaitHandle is EventWaitHandle)
        {
            ((EventWaitHandle)playbackStartedEventWaitHandle).Set();
            playbackStartedEventWaitHandle = null;
        }
        while (RecordingState != SoundIn.RecordingState.Stopped)
        {
            if (_eventSync)
            {
                // Event-driven: wait for the device event; on timeout just retry.
                eventWaitHandleIndex = WaitHandle.WaitAny(eventWaitHandleArray, actualLatency, false);
                if (eventWaitHandleIndex == WaitHandle.WaitTimeout)
                {
                    continue;
                }
            }
            else
            {
                // Timer-driven: poll at one eighth of the buffer latency.
                Thread.Sleep(sleepDuration);
            }
            if (RecordingState == SoundIn.RecordingState.Recording)
            {
                ReadData(buffer, _audioCaptureClient, (uint)frameSize);
            }
        }
        // Let the final packets drain before shutting the client down.
        Thread.Sleep(actualLatency / 2);
        _audioClient.Stop();
        _audioClient.Reset();
    }
    finally
    {
        // Unblock the starter even when startup failed mid-way.
        if (playbackStartedEventWaitHandle is EventWaitHandle)
        {
            ((EventWaitHandle)playbackStartedEventWaitHandle).Set();
        }
        RaiseStopped();
    }
}
//based on http://msdn.microsoft.com/en-us/library/windows/desktop/dd370800(v=vs.85).aspx
/// <summary>
/// Capture thread procedure: starts the audio client, signals the supplied
/// wait handle once recording is running, then reads device data until the
/// recording state becomes Stopped. Any exception is captured and passed to
/// RaiseStopped so the consumer learns how the capture ended.
/// </summary>
/// <param name="param">Optional EventWaitHandle signalled once capture starts.</param>
private void CaptureProc(object param)
{
    var startedSignal = param as EventWaitHandle;
    Exception error = null;
    try
    {
        int bufferSize = _audioClient.BufferSize;
        int frameSize = WaveFormat.Channels * WaveFormat.BytesPerSample;
        // Duration of one full device buffer in REFERENCE_TIME (100-ns) units,
        // converted to milliseconds for the wait/sleep intervals below.
        long actualDuration = (long)((double)ReftimesPerSecond * bufferSize / WaveFormat.SampleRate);
        int actualLatency = (int)(actualDuration / ReftimesPerMillisecond);
        int sleepDuration = actualLatency / 8;
        var captureBuffer = new byte[bufferSize * frameSize];
        WaitHandle[] waitHandles = { _eventWaitHandle };

        _audioClient.Start();
        _recordingState = RecordingState.Recording;

        // Signal the starter that capture is running; clear the local so the
        // finally block does not signal a second time.
        if (startedSignal != null)
        {
            startedSignal.Set();
            startedSignal = null;
        }

        while (RecordingState != RecordingState.Stopped)
        {
            if (_eventSync)
            {
                // Event-driven: skip the read when the device event times out.
                if (WaitHandle.WaitAny(waitHandles, actualLatency, false) == WaitHandle.WaitTimeout)
                {
                    continue;
                }
            }
            else
            {
                // Timer-driven: poll at one eighth of the buffer latency.
                Thread.Sleep(sleepDuration);
            }

            if (RecordingState == RecordingState.Recording)
            {
                ReadData(captureBuffer, _audioCaptureClient, (uint)frameSize);
            }
        }

        // Let the final packets drain before shutting the client down.
        Thread.Sleep(actualLatency / 2);
        _audioClient.Stop();
        _audioClient.Reset();
    }
    catch (Exception ex)
    {
        error = ex;
    }
    finally
    {
        // Unblock the starter even when startup failed, then report the outcome.
        if (startedSignal != null)
        {
            startedSignal.Set();
        }
        RaiseStopped(error);
    }
}
/// <summary>
/// Playback worker: optionally wraps the source in a DMO resampler,
/// pre-fills the device buffer, then keeps it topped up until the state
/// machine leaves Playing. Always reports completion (or the failure) via
/// RaisePlaybackStopped and disposes the resampler if one was created.
/// </summary>
private void PlayThread()
{
    ResamplerDmoStream dmoStream = null;
    IWaveProvider provider = this.sourceProvider;
    Exception error = null;
    try
    {
        if (this.dmoResamplerNeeded)
        {
            // Insert a DMO resampler so the source matches the device format.
            dmoStream = new ResamplerDmoStream(sourceProvider, outputFormat);
            provider = dmoStream;
        }

        // Prime the device with one full buffer before starting.
        bufferFrameCount = audioClient.BufferSize;
        bytesPerFrame = outputFormat.Channels * outputFormat.BitsPerSample / 8;
        readBuffer = new byte[bufferFrameCount * bytesPerFrame];
        FillBuffer(provider, bufferFrameCount);

        // Create WaitHandle for sync
        var waitHandles = new WaitHandle[] { frameEventWaitHandle };

        audioClient.Start();
        while (playbackState != PlaybackState.Stopped)
        {
            // Event sync: wait for the device notification; otherwise sleep half the latency.
            int signalled = 0;
            if (isUsingEventSync)
            {
                signalled = WaitHandle.WaitAny(waitHandles, 3 * latencyMilliseconds, false);
            }
            else
            {
                Thread.Sleep(latencyMilliseconds / 2);
            }

            // Refill only while playing and when the wait did not time out.
            if (playbackState != PlaybackState.Playing || signalled == WaitHandle.WaitTimeout)
            {
                continue;
            }

            // See how much buffer space is available.
            int padding;
            if (isUsingEventSync)
            {
                // In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize
                padding = (shareMode == AudioClientShareMode.Shared) ? audioClient.CurrentPadding : 0;
            }
            else
            {
                padding = audioClient.CurrentPadding;
            }

            int framesFree = bufferFrameCount - padding;
            if (framesFree > 0)
            {
                FillBuffer(provider, framesFree);
            }
        }

        // Give queued audio a moment to drain before stopping.
        Thread.Sleep(latencyMilliseconds / 2);
        audioClient.Stop();
        if (playbackState == PlaybackState.Stopped)
        {
            audioClient.Reset();
        }
    }
    catch (Exception e)
    {
        error = e;
    }
    finally
    {
        if (dmoStream != null)
        {
            dmoStream.Dispose();
        }
        RaisePlaybackStopped(error);
    }
}
/// <summary>
/// Playback thread procedure: starts the audio client, signals the supplied
/// wait handle once playback is running, then feeds the render client until
/// the playback state becomes Stopped, waiting on the device event (event
/// sync) or sleeping an eighth of the latency (timer-driven).
/// </summary>
/// <param name="playbackStartedEventWaithandle">
/// Optional EventWaitHandle to signal once playback has actually started.
/// </param>
private void PlaybackProc(object playbackStartedEventWaithandle)
{
    try
    {
        int bufferSize;
        int frameSize;
        byte[] buffer;
        int eventWaitHandleIndex;
        WaitHandle[] eventWaitHandleArray;
        bufferSize = _audioClient.BufferSize;
        frameSize = _outputFormat.Channels * _outputFormat.BytesPerSample;
        buffer = new byte[bufferSize * frameSize];
        eventWaitHandleIndex = WaitHandle.WaitTimeout;
        eventWaitHandleArray = new WaitHandle[] { _eventWaitHandle };
        //001
        // Deliberately disabled pre-fill: see the deadlock TODO inside.
        /*if (!FeedBuffer(_renderClient, buffer, bufferSize, frameSize)) //todo: might cause a deadlock: play() is waiting on eventhandle but FeedBuffer got already called
         * {
         *     _playbackState = PlaybackState.Stopped;
         *     if (playbackStartedEventWaithandle is EventWaitHandle)
         *     {
         *         ((EventWaitHandle)playbackStartedEventWaithandle).Set();
         *         playbackStartedEventWaithandle = null;
         *     }
         * }
         * else
         * {*/
        _audioClient.Start();
        _playbackState = SoundOut.PlaybackState.Playing;
        // Signal the starter that playback is running; null the reference so
        // the finally block does not signal a second time.
        if (playbackStartedEventWaithandle is EventWaitHandle)
        {
            ((EventWaitHandle)playbackStartedEventWaithandle).Set();
            playbackStartedEventWaithandle = null;
        }
        while (PlaybackState != PlaybackState.Stopped)
        {
            if (_eventSync) //based on the "RenderSharedEventDriven"-Sample: http://msdn.microsoft.com/en-us/library/dd940520(v=vs.85).aspx
            {
                eventWaitHandleIndex = WaitHandle.WaitAny(eventWaitHandleArray, 3 * _latency, false); //3 * latency = see msdn: recommended timeout
                if (eventWaitHandleIndex == WaitHandle.WaitTimeout)
                {
                    continue;
                }
            }
            else //based on the "RenderSharedTimerDriven"-Sample: http://msdn.microsoft.com/en-us/library/dd940521(v=vs.85).aspx
            {
                Thread.Sleep(_latency / 8);
            }
            if (PlaybackState == PlaybackState.Playing)
            {
                // Padding = frames already queued; in exclusive event-sync
                // mode the whole buffer is always requested (padding 0).
                int padding;
                if (_eventSync && _shareMode == AudioClientShareMode.Exclusive)
                {
                    padding = 0;
                }
                else
                {
                    padding = _audioClient.GetCurrentPadding();
                }
                int framesReadyToFill = bufferSize - padding;
                // Skip tiny fills, and skip when the DMO resampler cannot map
                // the requested output back to at least one input frame.
                if (framesReadyToFill > 5 && !(_source is DmoResampler && ((DmoResampler)_source).OutputToInput(framesReadyToFill * frameSize) <= 0)) //avoid conversion errors
                {
                    if (!FeedBuffer(_renderClient, buffer, framesReadyToFill, frameSize))
                    {
                        // Source exhausted: leave the loop via the state flag.
                        _playbackState = PlaybackState.Stopped; //TODO: Fire Stopped-event here?
                    }
                }
            }
        }
        // Let the queued audio drain before shutting the client down.
        Thread.Sleep(_latency / 2);
        _audioClient.Stop();
        _audioClient.Reset();
        //}
    }
    finally
    {
        //CleanupResources();
        // Unblock the starter even when startup failed mid-way.
        if (playbackStartedEventWaithandle is EventWaitHandle)
        {
            ((EventWaitHandle)playbackStartedEventWaithandle).Set();
        }
        RaiseStopped();
    }
}