/// <summary>
/// Initializes the WASAPI audio client for capture and allocates the record buffer.
/// Safe to call repeatedly; subsequent calls after a successful init are no-ops.
/// </summary>
/// <exception cref="ArgumentException">Thrown when <c>waveFormat</c> is not supported by the device in the requested share mode.</exception>
private void InitializeCaptureDevice()
{
    if (initialized)
    {
        return;
    }

    // Requested buffer duration in 100-ns units (REFERENCE_TIME).
    long bufferDuration = ReftimesPerMillisec * audioBufferMillisecondsLength;

    if (!audioClient.IsFormatSupported(ShareMode, waveFormat))
    {
        throw new ArgumentException("Unsupported Wave Format");
    }

    var flags = GetAudioClientStreamFlags();

    if (isUsingEventSync)
    {
        // Event-driven capture: shared mode passes 0 as the periodicity,
        // exclusive mode must pass a periodicity equal to the buffer duration.
        long periodicity = ShareMode == AudioClientShareMode.Shared ? 0 : bufferDuration;
        audioClient.Initialize(ShareMode,
            AudioClientStreamFlags.EventCallback | flags,
            bufferDuration,
            periodicity,
            waveFormat,
            Guid.Empty);

        // The event handle must be registered after Initialize succeeds.
        frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
        audioClient.SetEventHandle(frameEventWaitHandle.SafeWaitHandle.DangerousGetHandle());
    }
    else
    {
        // Polling mode: the same call shape works for shared and exclusive.
        audioClient.Initialize(ShareMode,
            flags,
            bufferDuration,
            0,
            waveFormat,
            Guid.Empty);
    }

    // Size the managed record buffer to hold one full device buffer of frames.
    int frameCount = audioClient.BufferSize;
    bytesPerFrame = waveFormat.Channels * waveFormat.BitsPerSample / 8;
    recordBuffer = new byte[frameCount * bytesPerFrame];

    initialized = true;
}
/// <summary>
/// Capture loop: pulls packets from the device's capture client, accumulates
/// them into a staging buffer, and raises <c>DataAvailable</c> in roughly
/// 10 ms chunks until <c>stop</c> is set or an error occurs.
/// Raises <c>RecordingStopped</c> exactly once (with the exception, if any),
/// then releases the event handle and the audio client.
/// </summary>
/// <param name="client">The initialized audio client to capture from; disposed when the loop exits.</param>
private void DoRecording(AudioClient client)
{
    Debug.WriteLine(client.BufferSize);
    var buf = new byte[client.BufferSize * bytesPerFrame];
    int bufLength = 0;
    // AverageBytesPerSecond / 100 is 10 ms of audio (NOT 100 ms):
    // flush to DataAvailable once at least this much has accumulated.
    int minPacketSize = waveFormat.AverageBytesPerSecond / 100;

    IntPtr hEvent = NativeMethods.CreateEventEx(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS);
    // Track the failure so RecordingStopped is raised exactly ONCE in finally
    // (the original raised it twice on error: once in catch, once in finally).
    Exception recordingException = null;
    try
    {
        // Inside try so a SetEventHandle failure still reaches finally's CloseHandle.
        client.SetEventHandle(hEvent);
        AudioCaptureClient capture = client.AudioCaptureClient;
        client.Start();
        int packetSize = capture.GetNextPacketSize();
        while (!this.stop)
        {
            if (packetSize == 0)
            {
                // Alertable wait for the device to signal data; 100 ms timeout.
                if (NativeMethods.WaitForSingleObjectEx(hEvent, 100, true) != 0)
                {
                    throw new TimeoutException("Capture event timeout");
                }
            }
            int numFramesToRead;
            AudioClientBufferFlags dwFlags;
            IntPtr pData = capture.GetBuffer(out numFramesToRead, out dwFlags);
            if ((int)(dwFlags & AudioClientBufferFlags.Silent) > 0)
            {
                // Silent packet: treat as zeros rather than copying device memory.
                pData = IntPtr.Zero;
            }
            if (numFramesToRead == 0)
            {
                // Refresh packetSize before looping; the original kept a stale
                // non-zero value here and could spin without ever waiting.
                packetSize = capture.GetNextPacketSize();
                continue;
            }

            int capturedBytes = numFramesToRead * bytesPerFrame;
            // Flush early if this packet would overflow the staging buffer
            // (guards the Array.Clear/Marshal.Copy below against overrun).
            if (bufLength + capturedBytes > buf.Length && bufLength > 0)
            {
                if (DataAvailable != null)
                {
                    DataAvailable(this, new WaveInEventArgs(buf, bufLength));
                }
                bufLength = 0;
            }
            if (pData == IntPtr.Zero)
            {
                Array.Clear(buf, bufLength, capturedBytes);
            }
            else
            {
                Marshal.Copy(pData, buf, bufLength, capturedBytes);
            }
            bufLength += capturedBytes;
            capture.ReleaseBuffer(numFramesToRead);

            if (bufLength >= minPacketSize)
            {
                if (DataAvailable != null)
                {
                    DataAvailable(this, new WaveInEventArgs(buf, bufLength));
                }
                bufLength = 0;
            }
            packetSize = capture.GetNextPacketSize();
        }
    }
    catch (Exception ex)
    {
        recordingException = ex;
        Debug.WriteLine("stop wasapi");
    }
    finally
    {
        // Single notification: null on clean stop, the exception otherwise.
        RaiseRecordingStopped(recordingException);
        NativeMethods.CloseHandle(hEvent);
        client.Stop();
        client.Dispose();
    }
}