public void Dispose()
{
    if (this.audioClient != null)
    {
        this.Stop();

        this.audioClient.Dispose();
        this.audioClient = null;
        this.renderClient = null;
    }
}
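// Hedged sketch: Stop() is called from Dispose above but is not included in this excerpt.
// Assuming WasapiOut tracks playback via a playbackState field and runs playback on a
// playThread (both assumptions, not confirmed by this excerpt), a minimal version could be:
public void Stop()
{
    if (playbackState != PlaybackState.Stopped)
    {
        // signal the playback thread to exit, then wait for it to finish
        playbackState = PlaybackState.Stopped;
        playThread.Join();
        playThread = null;
    }
}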
public WasapiOut(MMDevice device, AudioClientShareMode shareMode, bool useEventSync, int latency)
{
    this.audioClient = device.AudioClient;
    this.mmDevice = device;
    this.shareMode = shareMode;
    this.isUsingEventSync = useEventSync;
    this.latencyMilliseconds = latency;
    this.syncContext = SynchronizationContext.Current;
    this.outputFormat = this.audioClient.MixFormat;
}
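// Hedged usage sketch (not part of this class): one way a caller might construct a WasapiOut
// for the default render device using NAudio's MMDeviceEnumerator. Init/Play on this class and
// the waveProvider variable are assumptions for illustration, not confirmed by this excerpt.
//
//   var device = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Render, Role.Console);
//   using (var wasapiOut = new WasapiOut(device, AudioClientShareMode.Shared, true, 200))
//   {
//       wasapiOut.Init(waveProvider); // any IWaveProvider supplying audio to play
//       wasapiOut.Play();
//       // ... keep the application alive while playback runs ...
//   }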
private void CaptureThread(AudioClient client)
{
    Exception exception = null;
    try
    {
        DoRecording(client);
    }
    catch (Exception e)
    {
        exception = e;
    }
    finally
    {
        client.Stop();
        // don't dispose - the AudioClient only gets disposed when WasapiCapture is disposed
    }
    captureThread = null;
    captureState = CaptureState.Stopped;
    RaiseRecordingStopped(exception);
}
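// Hedged sketch: RaiseRecordingStopped is invoked above but not shown in this excerpt.
// Assuming the capture class exposes a RecordingStopped event and captured a syncContext
// in its constructor (as the WasapiOut constructor above does), it would marshal the event
// back to the original synchronization context when one exists:
private void RaiseRecordingStopped(Exception e)
{
    var handler = RecordingStopped;
    if (handler == null) return;

    if (syncContext == null)
    {
        // no captured context (e.g. console app) - raise the event on the capture thread
        handler(this, new StoppedEventArgs(e));
    }
    else
    {
        // raise the event on the thread that created the capture object (e.g. the UI thread)
        syncContext.Post(state => handler(this, new StoppedEventArgs(e)), null);
    }
}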
private void DoRecording(AudioClient client)
{
    //Debug.WriteLine(String.Format("Client buffer frame count: {0}", client.BufferSize));
    int bufferFrameCount = client.BufferSize;

    // Calculate the actual duration of the allocated buffer.
    long actualDuration = (long)((double)ReftimesPerSec * bufferFrameCount / waveFormat.SampleRate);
    int sleepMilliseconds = (int)(actualDuration / ReftimesPerMillisec / 2);
    int waitMilliseconds = (int)(3 * actualDuration / ReftimesPerMillisec);

    var capture = client.AudioCaptureClient;
    client.Start();

    // avoid race condition where we stop immediately after starting
    if (captureState == CaptureState.Starting)
    {
        captureState = CaptureState.Capturing;
    }

    while (captureState == CaptureState.Capturing)
    {
        if (isUsingEventSync)
        {
            frameEventWaitHandle.WaitOne(waitMilliseconds, false);
        }
        else
        {
            Thread.Sleep(sleepMilliseconds);
        }

        if (captureState != CaptureState.Capturing)
        {
            break;
        }

        // If still recording
        ReadNextPacket(capture);
    }
}
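// Hedged sketch: ReadNextPacket is called from the capture loop above but its body is not in
// this excerpt. Assuming a pre-allocated recordBuffer byte array, a bytesPerFrame field, a
// DataAvailable event, and a using for System.Runtime.InteropServices (all assumptions here),
// it would drain every pending packet from the AudioCaptureClient and hand the bytes on:
private void ReadNextPacket(AudioCaptureClient capture)
{
    int packetSize = capture.GetNextPacketSize();
    int recordBufferOffset = 0;

    while (packetSize != 0)
    {
        IntPtr buffer = capture.GetBuffer(out int framesAvailable, out AudioClientBufferFlags flags);
        int bytesAvailable = framesAvailable * bytesPerFrame;

        // flush early if the next packet would overflow the record buffer
        if (recordBuffer.Length - recordBufferOffset < bytesAvailable && recordBufferOffset > 0)
        {
            DataAvailable?.Invoke(this, new WaveInEventArgs(recordBuffer, recordBufferOffset));
            recordBufferOffset = 0;
        }

        if ((flags & AudioClientBufferFlags.Silent) != AudioClientBufferFlags.Silent)
        {
            // copy real audio data out of the shared buffer
            Marshal.Copy(buffer, recordBuffer, recordBufferOffset, bytesAvailable);
        }
        else
        {
            // silent packet - write zeros rather than stale data
            Array.Clear(recordBuffer, recordBufferOffset, bytesAvailable);
        }
        recordBufferOffset += bytesAvailable;

        capture.ReleaseBuffer(framesAvailable);
        packetSize = capture.GetNextPacketSize();
    }

    // hand whatever was accumulated this pass to the consumer
    DataAvailable?.Invoke(this, new WaveInEventArgs(recordBuffer, recordBufferOffset));
}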