/// <summary>
/// Initializes the capture device. Must be called on the UI (STA) thread.
/// If not called manually then StartRecording() will call it internally.
/// </summary>
/// <exception cref="ObjectDisposedException">This instance has already been disposed.</exception>
/// <exception cref="InvalidOperationException">InitAsync has already been called.</exception>
public async Task InitAsync()
{
    if (captureState == WasapiCaptureState.Disposed)
    {
        throw new ObjectDisposedException(nameof(WasapiCaptureRT));
    }
    if (captureState != WasapiCaptureState.Uninitialized)
    {
        throw new InvalidOperationException("Already initialized");
    }

    // The completion handler runs when the audio interface activation finishes;
    // all client configuration must happen inside it, before the first use.
    var icbh = new ActivateAudioInterfaceCompletionHandler(ac2 =>
    {
        if (this.audioClientProperties != null)
        {
            // Marshal the optional AudioClientProperties into unmanaged memory
            // for the SetClientProperties call. The try/finally guarantees the
            // unmanaged block is freed even if SetClientProperties throws.
            IntPtr p = Marshal.AllocHGlobal(Marshal.SizeOf(this.audioClientProperties.Value));
            try
            {
                Marshal.StructureToPtr(this.audioClientProperties.Value, p, false);
                ac2.SetClientProperties(p);
            }
            finally
            {
                Marshal.FreeHGlobal(p);
                // TODO: consider whether we can marshal this without the need for AllocHGlobal
            }
        }

        InitializeCaptureDevice((IAudioClient2)ac2);
        audioClient = new AudioClient((IAudioClient2)ac2);

        // Event-driven capture: the device signals this event when data is ready.
        hEvent = NativeMethods.CreateEventExW(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS);
        audioClient.SetEventHandle(hEvent);
    });

    IActivateAudioInterfaceAsyncOperation activationOperation;
    // must be called on UI thread
    NativeMethods.ActivateAudioInterfaceAsync(device, IID_IAudioClient2, IntPtr.Zero, icbh, out activationOperation);
    await icbh;

    captureState = WasapiCaptureState.Stopped;
}
/// <summary>
/// Stops recording and releases the capture event handle and the underlying
/// audio client. Safe to call multiple times; never throws.
/// </summary>
public void Dispose()
{
    if (captureState == WasapiCaptureState.Disposed)
    {
        return;
    }

    // Stop in its own try block so a failure here cannot prevent the
    // handle and the audio client from being released below.
    try
    {
        StopRecording();
    }
    catch (Exception ex)
    {
        Debug.WriteLine("Exception disposing WasapiCaptureRT: " + ex.ToString());
    }

    try
    {
        if (hEvent != IntPtr.Zero)
        {
            NativeMethods.CloseHandle(hEvent);
        }
        audioClient?.Dispose();
    }
    catch (Exception ex)
    {
        Debug.WriteLine("Exception disposing WasapiCaptureRT: " + ex.ToString());
    }

    hEvent = IntPtr.Zero;
    audioClient = null;
    captureState = WasapiCaptureState.Disposed;
}
/// <summary>
/// Capture loop: pulls packets from the capture client and raises
/// DataAvailable in roughly 100ms chunks until captureState leaves
/// the Recording state. Raises RecordingStopped exactly once on exit,
/// with the exception (if any) that ended the loop.
/// </summary>
private void DoRecording()
{
    Debug.WriteLine("Recording buffer size: " + audioClient.BufferSize);
    var buf = new Byte[audioClient.BufferSize * bytesPerFrame];
    int bufLength = 0;
    int minPacketSize = waveFormat.AverageBytesPerSecond / 100; //100ms

    Exception recordingException = null;
    try
    {
        AudioCaptureClient capture = audioClient.AudioCaptureClient;
        audioClient.Start();
        int packetSize = capture.GetNextPacketSize();

        while (captureState == WasapiCaptureState.Recording)
        {
            if (packetSize == 0)
            {
                // No data queued; block until the device signals the event.
                if (NativeMethods.WaitForSingleObjectEx(hEvent, 100, true) != 0)
                {
                    throw new Exception("Capture event timeout");
                }
            }

            int numFramesToRead;
            AudioClientBufferFlags dwFlags;
            IntPtr pData = capture.GetBuffer(out numFramesToRead, out dwFlags);

            if ((dwFlags & AudioClientBufferFlags.Silent) != 0)
            {
                // Device reports silence: synthesize zeros instead of copying.
                pData = IntPtr.Zero;
            }

            if (numFramesToRead == 0)
            {
                // Release the empty buffer and refresh packetSize; the original
                // skipped both, which could busy-spin on a stale non-zero
                // packetSize without ever waiting on the event.
                capture.ReleaseBuffer(numFramesToRead);
                packetSize = capture.GetNextPacketSize();
                continue;
            }

            int capturedBytes = numFramesToRead * bytesPerFrame;
            if (pData == IntPtr.Zero)
            {
                Array.Clear(buf, bufLength, capturedBytes);
            }
            else
            {
                Marshal.Copy(pData, buf, bufLength, capturedBytes);
            }

            bufLength += capturedBytes;
            capture.ReleaseBuffer(numFramesToRead);

            // Deliver once we have accumulated at least ~100ms of audio.
            if (bufLength >= minPacketSize)
            {
                DataAvailable?.Invoke(this, new WaveInEventArgs(buf, bufLength));
                bufLength = 0;
            }

            packetSize = capture.GetNextPacketSize();
        }
    }
    catch (Exception ex)
    {
        // Remember the failure; RecordingStopped is raised exactly once in
        // the finally block (the original raised it twice on error: once
        // here with ex, and again in finally with null).
        recordingException = ex;
    }
    finally
    {
        audioClient.Stop();
        RaiseRecordingStopped(recordingException);
    }
    Debug.WriteLine("stop wasapi");
}