/// <summary>
/// Creates a new WASAPI Output
/// </summary>
/// <param name="device">Device to use</param>
/// <param name="shareMode">Share mode to use</param>
/// <param name="useEventSync">True to synchronize with an event; false to use sleep</param>
/// <param name="latency">Desired latency in milliseconds</param>
public WasapiOut(MMDevice device, AudioClientShareMode shareMode, bool useEventSync, int latency)
{
    audioClient = device.AudioClient;
    this.shareMode = shareMode;
    isUsingEventSync = useEventSync;
    latencyMilliseconds = latency;
}
/// <summary>
/// Creates a new WASAPI Output
/// </summary>
/// <param name="device">Device to use</param>
/// <param name="shareMode">Share mode to use</param>
/// <param name="useEventSync">True to synchronize with an event; false to use sleep</param>
/// <param name="latency">Desired latency in milliseconds</param>
public WasapiOut(MMDevice device, AudioClientShareMode shareMode, bool useEventSync, int latency)
{
    this.audioClient = device.AudioClient;
    this.shareMode = shareMode;
    this.isUsingEventSync = useEventSync;
    this.latencyMilliseconds = latency;
    this.syncContext = SynchronizationContext.Current;
}
public override void Exit()
{
    capClient.Dispose();
    capClient = null;
    audioClient.Stop();
    audioClient.Dispose();
    audioClient = null;
}
/// <summary>
/// Creates a new WASAPI Output
/// </summary>
/// <param name="device">Device to use</param>
/// <param name="shareMode">Share mode to use</param>
/// <param name="useEventSync">True to synchronize with an event; false to use sleep</param>
/// <param name="latency">Desired latency in milliseconds</param>
public WasapiOut(MMDevice device, AudioClientShareMode shareMode, bool useEventSync, int latency)
{
    audioClient = device.AudioClient;
    mmDevice = device;
    this.shareMode = shareMode;
    isUsingEventSync = useEventSync;
    latencyMilliseconds = latency;
    syncContext = SynchronizationContext.Current;
    // allow the user to query the default format for shared mode streams
    outputFormat = audioClient.MixFormat;
}
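A minimal usage sketch for the constructor above. It assumes the usual NAudio companion types (MMDeviceEnumerator, AudioFileReader, PlaybackState); the file name is purely illustrative.

using System.Threading;
using NAudio.CoreAudioApi;
using NAudio.Wave;

var device = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
using (var output = new WasapiOut(device, AudioClientShareMode.Shared, true, 200))
using (var reader = new AudioFileReader("example.wav")) // illustrative file name
{
    output.Init(reader);  // adopts the reader's wave format
    output.Play();        // playback runs on a background thread
    while (output.PlaybackState == PlaybackState.Playing)
    {
        Thread.Sleep(100);
    }
}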
/// <summary>
/// Creates a new WASAPI Output device
/// </summary>
/// <param name="device">Device to use</param>
/// <param name="shareMode">Share mode to use</param>
/// <param name="latency">Latency in milliseconds</param>
public WasapiOutGuiThread(MMDevice device, AudioClientShareMode shareMode, int latency)
{
    audioClient = device.AudioClient;
    outputFormat = audioClient.MixFormat;
    this.shareMode = shareMode;
    latencyMilliseconds = latency;
    timer = new Timer();
    timer.Tick += TimerOnTick;
    timer.Interval = latency / 2;
}
/// <summary>
/// Initializes a new instance of the <see cref="WasapiCapture"/> class.
/// </summary>
/// <param name="captureDevice">The capture device.</param>
/// <param name="useEventSync">True to synchronize with an event; false to use sleep.</param>
public WasapiCapture(MMDevice captureDevice, bool useEventSync)
{
    syncContext = SynchronizationContext.Current;
    audioClient = captureDevice.AudioClient;
    ShareMode = AudioClientShareMode.Shared;
    isUsingEventSync = useEventSync;
    waveFormat = audioClient.MixFormat;
}
/// <summary>
/// Initializes a new instance of the <see cref="WasapiCapture" /> class.
/// </summary>
/// <param name="captureDevice">The capture device.</param>
/// <param name="useEventSync">True to synchronize with an event; false to use sleep.</param>
/// <param name="audioBufferMillisecondsLength">Length of the audio buffer in milliseconds. A lower value means lower latency but increased CPU usage.</param>
public WasapiCapture(MMDevice captureDevice, bool useEventSync, int audioBufferMillisecondsLength)
{
    syncContext = SynchronizationContext.Current;
    audioClient = captureDevice.AudioClient;
    ShareMode = AudioClientShareMode.Shared;
    isUsingEventSync = useEventSync;
    this.audioBufferMillisecondsLength = audioBufferMillisecondsLength;
    waveFormat = audioClient.MixFormat;
}
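A sketch of how this capture constructor is typically driven, assuming NAudio's standard DataAvailable/RecordingStopped events and WaveInEventArgs; ProcessAudio is a hypothetical callback.

var device = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Capture, Role.Multimedia);
var capture = new WasapiCapture(device, useEventSync: true, audioBufferMillisecondsLength: 50);
capture.DataAvailable += (s, e) =>
{
    // e.Buffer holds e.BytesRecorded bytes in capture.WaveFormat
    ProcessAudio(e.Buffer, e.BytesRecorded); // ProcessAudio: hypothetical handler
};
capture.RecordingStopped += (s, e) => capture.Dispose();
capture.StartRecording();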
private async Task Activate()
{
    var icbh = new ActivateAudioInterfaceCompletionHandler(
        ac2 =>
        {
            /*var wfx = new WaveFormat(44100, 16, 2);
            int hr = ac2.Initialize(AudioClientShareMode.Shared,
                AudioClientStreamFlags.EventCallback | AudioClientStreamFlags.NoPersist,
                10000000, 0, wfx, IntPtr.Zero);*/
        });
    var IID_IAudioClient2 = new Guid("726778CD-F60A-4eda-82DE-E47610CD78AA");
    IActivateAudioInterfaceAsyncOperation activationOperation;
    NativeMethods.ActivateAudioInterfaceAsync(device, IID_IAudioClient2, IntPtr.Zero, icbh, out activationOperation);
    var audioClient2 = await icbh;
    this.audioClient = new AudioClient((IAudioClient)audioClient2);
}
/// <summary>
/// Creates a new WASAPI Output
/// </summary>
/// <param name="device">Device to use</param>
/// <param name="shareMode">Share mode to use</param>
/// <param name="useEventSync">True to synchronize with an event; false to use sleep</param>
/// <param name="latency">Desired latency in milliseconds</param>
/// <param name="pcm">PCM configuration describing the output format</param>
public WasapiOut(MMDevice device, AudioClientShareMode shareMode, bool useEventSync, int latency, AudioPCMConfig pcm)
{
    this.audioClient = device.AudioClient;
    this.shareMode = shareMode;
    this.isUsingEventSync = useEventSync;
    this.latencyMilliseconds = latency;
    this.pcm = pcm;
    this.outputFormat = new NAudio.Wave.WaveFormatExtensible(pcm.SampleRate, pcm.BitsPerSample, pcm.ChannelCount);
    NAudio.Wave.WaveFormatExtensible closestSampleRateFormat;
    if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
        throw new NotSupportedException("PCM format mismatch");
    Init();
    bufferFrameCount = audioClient.BufferSize;
    readBuffers = new AudioBuffer[2];
    readBuffers[0] = new AudioBuffer(pcm, bufferFrameCount);
    readBuffers[1] = new AudioBuffer(pcm, bufferFrameCount);
    //if (this.shareMode == AudioClientShareMode.Exclusive)
    //    this.latencyMilliseconds = (int)(this.audioClient.DefaultDevicePeriod / 10000);
}
/// <summary>
/// Initialises a new instance of the WASAPI capture class
/// </summary>
/// <param name="captureDevice">Capture device to use</param>
public WasapiCapture(MMDevice captureDevice)
{
    syncContext = SynchronizationContext.Current;
    audioClient = captureDevice.AudioClient;
    ShareMode = AudioClientShareMode.Shared;
    waveFormat = audioClient.MixFormat;
    var wfe = waveFormat as WaveFormatExtensible;
    if (wfe != null)
    {
        try
        {
            waveFormat = wfe.ToStandardWaveFormat();
        }
        catch (InvalidOperationException)
        {
            // couldn't convert to a standard format
        }
    }
}
private void CaptureThread(AudioClient client)
{
    Exception exception = null;
    try
    {
        DoRecording(client);
    }
    catch (Exception e)
    {
        exception = e;
    }
    finally
    {
        client.Stop();
        // don't dispose - the AudioClient only gets disposed when WasapiCapture is disposed
    }
    captureThread = null;
    captureState = CaptureState.Stopped;
    RaiseRecordingStopped(exception);
}
private void CaptureThread(AudioClient client)
{
    Exception exception = null;
    try
    {
        DoRecording(client);
    }
    catch (Exception e)
    {
        exception = e;
    }
    finally
    {
        client.Stop();
        // don't dispose - the AudioClient only gets disposed when WasapiCapture is disposed
    }
    RaiseRecordingStopped(exception);
    System.Diagnostics.Debug.WriteLine("stop wasapi");
}
private void DoRecording(AudioClient client)
{
    //Debug.WriteLine(String.Format("Client buffer frame count: {0}", client.BufferSize));
    int bufferFrameCount = client.BufferSize;

    // Calculate the actual duration of the allocated buffer.
    long actualDuration = (long)((double)ReftimesPerSec * bufferFrameCount / waveFormat.SampleRate);
    int sleepMilliseconds = (int)(actualDuration / ReftimesPerMillisec / 2);
    int waitMilliseconds = (int)(3 * actualDuration / ReftimesPerMillisec);

    var capture = client.AudioCaptureClient;
    client.Start();
    captureState = CaptureState.Capturing;
    while (captureState == CaptureState.Capturing)
    {
        bool readBuffer = true;
        if (isUsingEventSync)
        {
            readBuffer = frameEventWaitHandle.WaitOne(waitMilliseconds, false);
        }
        else
        {
            Thread.Sleep(sleepMilliseconds);
        }
        if (captureState != CaptureState.Capturing)
        {
            break;
        }

        // If still recording and notification is ok
        if (readBuffer)
        {
            ReadNextPacket(capture);
        }
    }
}
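To make the timing arithmetic above concrete: ReftimesPerSec and ReftimesPerMillisec are the usual REFERENCE_TIME constants (10,000,000 and 10,000, i.e. 100-ns units). With a 44100 Hz format and a 4410-frame buffer (illustrative numbers):

long actualDuration = (long)((double)10_000_000 * 4410 / 44100); // 1,000,000 ticks = 100 ms
int sleepMilliseconds = (int)(actualDuration / 10_000 / 2);      // 50 ms: poll twice per buffer
int waitMilliseconds = (int)(3 * actualDuration / 10_000);       // 300 ms: generous event timeout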
private void CaptureThread(AudioClient client)
{
    Debug.WriteLine(client.BufferSize);
    int bufferFrameCount = audioClient.BufferSize;

    // Calculate the actual duration of the allocated buffer.
    var actualDuration = (long)((double)REFTIMES_PER_SEC * bufferFrameCount / WaveFormat.SampleRate);
    var sleepMilliseconds = (int)(actualDuration / REFTIMES_PER_MILLISEC / 2);

    AudioCaptureClient capture = client.AudioCaptureClient;
    client.Start();
    try
    {
        Debug.WriteLine(string.Format("sleep: {0} ms", sleepMilliseconds));
        while (!stop)
        {
            Thread.Sleep(sleepMilliseconds);
            ReadNextPacket(capture);
        }
    }
    finally
    {
        client.Stop();
        if (RecordingStopped != null)
        {
            RecordingStopped(this, EventArgs.Empty);
        }
        // don't dispose - the AudioClient only gets disposed when WasapiCapture is disposed
    }
    Debug.WriteLine("stop wasapi");
}
/// <summary>
/// Dispose
/// </summary>
public void Dispose()
{
    StopRecording();
    if (captureThread != null)
    {
        captureThread.Join();
        captureThread = null;
    }
    if (audioClient != null)
    {
        audioClient.Dispose();
        audioClient = null;
    }
}
/// <summary>
/// Dispose
/// </summary>
public void Dispose()
{
    if (audioClient != null)
    {
        Stop();
        audioClient.Dispose();
        audioClient = null;
        renderClient = null;
    }
    if (resamplerDmoStream != null)
    {
        resamplerDmoStream.Dispose();
        resamplerDmoStream = null;
    }
}
private async void PlayThread()
{
    await Activate();
    var playbackProvider = Init();
    bool isClientRunning = false;
    try
    {
        if (this.resamplerNeeded)
        {
            var resampler = new WdlResamplingSampleProvider(playbackProvider.ToSampleProvider(), outputFormat.SampleRate);
            playbackProvider = new SampleToWaveProvider(resampler);
        }

        // fill a whole buffer
        bufferFrameCount = audioClient.BufferSize;
        bytesPerFrame = outputFormat.Channels * outputFormat.BitsPerSample / 8;
        readBuffer = new byte[bufferFrameCount * bytesPerFrame];
        FillBuffer(playbackProvider, bufferFrameCount);
        int timeout = 3 * latencyMilliseconds;

        while (playbackState != WasapiOutState.Disposed)
        {
            if (playbackState != WasapiOutState.Playing)
            {
                playThreadEvent.WaitOne(500);
            }

            // If still playing and notification is ok
            if (playbackState == WasapiOutState.Playing)
            {
                if (!isClientRunning)
                {
                    audioClient.Start();
                    isClientRunning = true;
                }
                // If using Event Sync, Wait for notification from AudioClient or Sleep half latency
                var r = NativeMethods.WaitForSingleObjectEx(frameEventWaitHandle, timeout, true);
                if (r != 0) throw new InvalidOperationException("Timed out waiting for event");
                // See how much buffer space is available.
                // In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize
                int numFramesPadding = (shareMode == AudioClientShareMode.Shared) ? audioClient.CurrentPadding : 0;
                int numFramesAvailable = bufferFrameCount - numFramesPadding;
                if (numFramesAvailable > 0)
                {
                    FillBuffer(playbackProvider, numFramesAvailable);
                }
            }

            if (playbackState == WasapiOutState.Stopping)
            {
                // play the buffer out
                while (audioClient.CurrentPadding > 0)
                {
                    await Task.Delay(latencyMilliseconds / 2);
                }
                audioClient.Stop();
                isClientRunning = false;
                audioClient.Reset();
                playbackState = WasapiOutState.Stopped;
                RaisePlaybackStopped(null);
            }
            if (playbackState == WasapiOutState.Disposing)
            {
                audioClient.Stop();
                isClientRunning = false;
                audioClient.Reset();
                playbackState = WasapiOutState.Disposed;
                var disposablePlaybackProvider = playbackProvider as IDisposable;
                if (disposablePlaybackProvider != null)
                    disposablePlaybackProvider.Dispose(); // do everything on this thread, even dispose in case it is Media Foundation
                RaisePlaybackStopped(null);
            }
        }
    }
    catch (Exception e)
    {
        RaisePlaybackStopped(e);
    }
    finally
    {
        audioClient.Dispose();
        audioClient = null;
        renderClient = null;
        NativeMethods.CloseHandle(frameEventWaitHandle);
    }
}
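The PlayThread above hands completion to RaisePlaybackStopped. A sketch of how such a helper commonly marshals the event back to the SynchronizationContext captured in the constructor; the exact body is an assumption, though StoppedEventArgs is NAudio's standard event-args type:

private void RaisePlaybackStopped(Exception e)
{
    var handler = PlaybackStopped;
    if (handler == null) return;
    if (syncContext == null)
    {
        // no UI context was captured: raise on the current (play) thread
        handler(this, new StoppedEventArgs(e));
    }
    else
    {
        // post back to the thread that created the player, typically the GUI thread
        syncContext.Post(state => handler(this, new StoppedEventArgs(e)), null);
    }
}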
/// <summary>
/// Initialises a new instance of the WASAPI capture class
/// </summary>
/// <param name="captureDevice">Capture device to use</param>
public WasapiLoopbackCapture(MMDevice captureDevice)
{
    this.audioClient = captureDevice.AudioClient;
}
/// <summary>
/// Initialises a new instance of the WASAPI capture class
/// </summary>
/// <param name="captureDevice">Capture device to use</param>
public WasapiCapture(MMDevice captureDevice)
{
    this.audioClient = captureDevice.AudioClient;
    this.waveFormat = audioClient.MixFormat;
}
/// <summary>
/// Dispose
/// </summary>
public void Dispose()
{
    if (audioClient != null)
    {
        Stop();
        audioClient.Dispose();
        audioClient = null;
        renderClient = null;
    }
}
private void GetAudioClientInterface()
{
    object result;
    Marshal.ThrowExceptionForHR(deviceInterface.Activate(ref IID_IAudioClient, ClsCtx.ALL, IntPtr.Zero, out result));
    audioClient = new AudioClient(result as IAudioClient);
}
private void DoRecording(AudioClient client)
{
    Debug.WriteLine(String.Format("Client buffer frame count: {0}", client.BufferSize));
    int bufferFrameCount = client.BufferSize;

    // Calculate the actual duration of the allocated buffer.
    long actualDuration = (long)((double)REFTIMES_PER_SEC * bufferFrameCount / waveFormat.SampleRate);
    int sleepMilliseconds = (int)(actualDuration / REFTIMES_PER_MILLISEC / 2);
    int waitMilliseconds = (int)(3 * actualDuration / REFTIMES_PER_MILLISEC);

    AudioCaptureClient capture = client.AudioCaptureClient;
    client.Start();
    if (isUsingEventSync)
    {
        Debug.WriteLine(string.Format("wait: {0} ms", waitMilliseconds));
    }
    else
    {
        Debug.WriteLine(string.Format("sleep: {0} ms", sleepMilliseconds));
    }
    while (!this.requestStop)
    {
        bool readBuffer = true;
        if (isUsingEventSync)
        {
            readBuffer = frameEventWaitHandle.WaitOne(waitMilliseconds, false);
        }
        else
        {
            Thread.Sleep(sleepMilliseconds);
        }

        // If still recording and notification is ok
        if (!this.requestStop && readBuffer)
        {
            ReadNextPacket(capture);
        }
    }
}
private void DoRecording(AudioClient client)
{
    //Debug.WriteLine(String.Format("Client buffer frame count: {0}", client.BufferSize));
    int bufferFrameCount = client.BufferSize;

    // Calculate the actual duration of the allocated buffer.
    long actualDuration = (long)((double)ReftimesPerSec * bufferFrameCount / waveFormat.SampleRate);
    int sleepMilliseconds = (int)(actualDuration / ReftimesPerMillisec / 2);
    int waitMilliseconds = (int)(3 * actualDuration / ReftimesPerMillisec);

    var capture = client.AudioCaptureClient;
    client.Start();
    captureState = CaptureState.Capturing;
    while (captureState == CaptureState.Capturing)
    {
        bool readBuffer = true;
        if (isUsingEventSync)
        {
            readBuffer = frameEventWaitHandle.WaitOne(waitMilliseconds, false);
        }
        else
        {
            Thread.Sleep(sleepMilliseconds);
        }
        if (captureState != CaptureState.Capturing)
            break;

        // If still recording and notification is ok
        if (readBuffer)
        {
            ReadNextPacket(capture);
        }
    }
}
private void InitializeAudioClient()
{
    var enumerator = new MMDeviceEnumerator();
    var captureDevice = enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
    audioClient = captureDevice.AudioClient;
    int recordBufferLength = samplingRate; // one second's worth of samples
    recordBuffer = new float[recordBufferLength * channelCount];
    long requestedDuration = 10000 * period * 2; // two periods, in 100-ns units
    audioClient.Initialize(AudioClientShareMode.Shared,
        AudioClientStreamFlags.Loopback,
        requestedDuration,
        0,
        WaveFormat.CreateIeeeFloatWaveFormat(samplingRate, channelCount),
        Guid.Empty);
    capClient = audioClient.AudioCaptureClient;
    audioClient.Start();
}
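Once initialized this way, capClient has to be drained periodically. A sketch using the AudioCaptureClient members seen elsewhere in this section (GetNextPacketSize, GetBuffer, ReleaseBuffer); the copy always lands at offset 0, so real code would track a write position:

int framesInNextPacket = capClient.GetNextPacketSize();
while (framesInNextPacket > 0)
{
    AudioClientBufferFlags flags;
    int numFramesToRead;
    IntPtr pData = capClient.GetBuffer(out numFramesToRead, out flags);
    // the mix format is IEEE float, so copy frames * channels floats
    Marshal.Copy(pData, recordBuffer, 0, numFramesToRead * channelCount);
    capClient.ReleaseBuffer(numFramesToRead);
    framesInNextPacket = capClient.GetNextPacketSize();
}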
private async Task Activate()
{
    var icbh = new ActivateAudioInterfaceCompletionHandler(
        ac2 =>
        {
            if (this.audioClientProperties != null)
            {
                IntPtr p = Marshal.AllocHGlobal(Marshal.SizeOf(this.audioClientProperties.Value));
                Marshal.StructureToPtr(this.audioClientProperties.Value, p, false);
                ac2.SetClientProperties(p);
                // TODO: consider whether we can marshal this without the need for AllocHGlobal
            }
            /*var wfx = new WaveFormat(44100, 16, 2);
            int hr = ac2.Initialize(AudioClientShareMode.Shared,
                AudioClientStreamFlags.EventCallback | AudioClientStreamFlags.NoPersist,
                10000000, 0, wfx, IntPtr.Zero);*/
        });
    var IID_IAudioClient2 = new Guid("726778CD-F60A-4eda-82DE-E47610CD78AA");
    IActivateAudioInterfaceAsyncOperation activationOperation;
    NativeMethods.ActivateAudioInterfaceAsync(device, IID_IAudioClient2, IntPtr.Zero, icbh, out activationOperation);
    var audioClient2 = await icbh;
    this.audioClient = new AudioClient((IAudioClient)audioClient2);
}
/// <summary>
/// Dispose
/// </summary>
public void Dispose()
{
    if (audioClient != null)
    {
        Stop();
        audioClient.Dispose();
        audioClient = null;
        renderClient = null;
        NativeMethods.CloseHandle(frameEventWaitHandle);
    }
}
private void DoRecording(AudioClient client)
{
    Debug.WriteLine(client.BufferSize);
    int bufferFrameCount = audioClient.BufferSize;

    // Calculate the actual duration of the allocated buffer.
    long actualDuration = (long)((double)REFTIMES_PER_SEC * bufferFrameCount / WaveFormat.SampleRate);
    int sleepMilliseconds = (int)(actualDuration / REFTIMES_PER_MILLISEC / 2);

    AudioCaptureClient capture = client.AudioCaptureClient;
    client.Start();
    Debug.WriteLine(string.Format("sleep: {0} ms", sleepMilliseconds));
    while (!this.stop)
    {
        Thread.Sleep(sleepMilliseconds);
        ReadNextPacket(capture);
    }
}
private void CaptureThread(AudioClient client)
{
    Debug.WriteLine(client.BufferSize);
    int bufferFrameCount = audioClient.BufferSize;

    // Calculate the actual duration of the allocated buffer.
    long actualDuration = (long)((double)REFTIMES_PER_SEC * bufferFrameCount / WaveFormat.SampleRate);
    int sleepMilliseconds = (int)(actualDuration / REFTIMES_PER_MILLISEC / 2);

    AudioCaptureClient capture = client.AudioCaptureClient;
    client.Start();
    try
    {
        Debug.WriteLine(string.Format("sleep: {0} ms", sleepMilliseconds));
        while (!this.stop)
        {
            Thread.Sleep(sleepMilliseconds);
            ReadNextPacket(capture);
        }
        client.Stop();
        if (RecordingStopped != null)
        {
            RecordingStopped(this, EventArgs.Empty);
        }
    }
    finally
    {
        if (capture != null)
        {
            capture.Dispose();
        }
        if (client != null)
        {
            client.Dispose();
        }
        client = null;
        capture = null;
    }
    System.Diagnostics.Debug.WriteLine("stop wasapi");
}
/// <summary>
/// Dispose
/// </summary>
public void Dispose()
{
    StopRecording();
    if (audioClient != null)
    {
        audioClient.Dispose();
        audioClient = null;
    }
}
/// <summary>
/// Initialises a new instance of the WASAPI capture class
/// </summary>
/// <param name="captureDevice">Capture device to use</param>
public WasapiCapture(MMDevice captureDevice)
{
    audioClient = captureDevice.AudioClient;
    WaveFormat = audioClient.MixFormat;
}
private void InitializeCaptureDevice(IAudioClient audioClientInterface)
{
    var audioClient = new AudioClient(audioClientInterface);
    if (waveFormat == null)
    {
        this.waveFormat = audioClient.MixFormat;
    }

    long requestedDuration = REFTIMES_PER_MILLISEC * 100;
    if (!audioClient.IsFormatSupported(AudioClientShareMode.Shared, WaveFormat))
    {
        throw new ArgumentException("Unsupported Wave Format");
    }

    var streamFlags = GetAudioClientStreamFlags();
    audioClient.Initialize(AudioClientShareMode.Shared,
        streamFlags,
        requestedDuration,
        0,
        this.waveFormat,
        Guid.Empty);

    int bufferFrameCount = audioClient.BufferSize;
    this.bytesPerFrame = this.waveFormat.Channels * this.waveFormat.BitsPerSample / 8;
    this.recordBuffer = new byte[bufferFrameCount * bytesPerFrame];
    Debug.WriteLine(string.Format("record buffer size = {0}", this.recordBuffer.Length));

    // Get back the effective latency from AudioClient
    latencyMilliseconds = (int)(audioClient.StreamLatency / 10000);
}
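Concrete numbers for the buffer sizing above, assuming a typical shared-mode mix format of 32-bit IEEE float, stereo, 44100 Hz (an illustration, not a guarantee):

int bytesPerFrame = 2 * 32 / 8;                          // 8 bytes per frame
int bufferFrameCount = 4410;                             // ~100 ms at 44100 Hz, per the requested duration
int recordBufferSize = bufferFrameCount * bytesPerFrame; // 35,280 bytes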
private void DoRecording(AudioClient client)
{
    Debug.WriteLine(client.BufferSize);
    var buf = new Byte[client.BufferSize * bytesPerFrame];
    int bufLength = 0;
    int minPacketSize = waveFormat.AverageBytesPerSecond / 100; // 10 ms worth of audio

    IntPtr hEvent = NativeMethods.CreateEventEx(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS);
    client.SetEventHandle(hEvent);

    Exception exception = null;
    try
    {
        AudioCaptureClient capture = client.AudioCaptureClient;
        client.Start();
        int packetSize = capture.GetNextPacketSize();

        while (!this.stop)
        {
            IntPtr pData = IntPtr.Zero;
            int numFramesToRead = 0;
            AudioClientBufferFlags dwFlags = 0;

            if (packetSize == 0)
            {
                if (NativeMethods.WaitForSingleObjectEx(hEvent, 100, true) != 0)
                {
                    throw new Exception("Capture event timeout");
                }
            }

            pData = capture.GetBuffer(out numFramesToRead, out dwFlags);
            if ((int)(dwFlags & AudioClientBufferFlags.Silent) > 0)
            {
                pData = IntPtr.Zero;
            }
            if (numFramesToRead == 0)
            {
                continue;
            }

            int capturedBytes = numFramesToRead * bytesPerFrame;
            if (pData == IntPtr.Zero)
            {
                Array.Clear(buf, bufLength, capturedBytes);
            }
            else
            {
                Marshal.Copy(pData, buf, bufLength, capturedBytes);
            }
            bufLength += capturedBytes;
            capture.ReleaseBuffer(numFramesToRead);

            if (bufLength >= minPacketSize)
            {
                if (DataAvailable != null)
                {
                    DataAvailable(this, new WaveInEventArgs(buf, bufLength));
                }
                bufLength = 0;
            }

            packetSize = capture.GetNextPacketSize();
        }
    }
    catch (Exception ex)
    {
        exception = ex;
        Debug.WriteLine("stop wasapi");
    }
    finally
    {
        // raise RecordingStopped exactly once, whether we stopped normally or with an error
        RaiseRecordingStopped(exception);
        NativeMethods.CloseHandle(hEvent);
        client.Stop();
        client.Dispose();
    }
}