/// <summary>
/// Stops capture and releases the audio client, the render client reference, and the
/// frame-event wait handle. Safe to call more than once: after the first call the
/// audio client is null and the method is a no-op.
/// </summary>
public void Dispose()
{
    if (audioClient == null)
    {
        return;
    }

    Stop();
    this.audioClient.Dispose();
    this.audioClient = null;
    this.audioRenderClient = null;
    NativeMethods.CloseHandle(frameEventWaitHandle);
}
/// <summary>
/// Background-task entry point for audio capture. Wraps the raw COM audio client in an
/// <c>AudioClient2</c> and runs the blocking recording loop, logging (rather than
/// propagating) any exception so the fire-and-forget task never faults unobserved.
/// </summary>
/// <param name="audioClientIn">Activated COM audio client interface to capture from.</param>
private void CaptureThread(IAudioClient2 audioClientIn)
{
    // Renamed from 'audioClient' to avoid shadowing the 'this.audioClient' field.
    // DoRecording takes ownership of this wrapper and disposes it when the loop exits.
    AudioClient2 captureClient = new AudioClient2(audioClientIn);
    try
    {
        DoRecording(captureClient);
    }
    catch (Exception e)
    {
        // The old 'Exception exception' local was assigned here but never read (dead
        // store) — logging is the only handling this thread performs.
        this.EventWriterDLL.WriteLine(EventWriterDLL.SeverityTypes.Error, 0x01,
            "WasapiCapture::CaptureThread => Exception: " + e.ToString());
    }
}
/// <summary>
/// Starts recording: acquires the default capture device if none is attached, activates
/// and initializes the audio client, then launches the capture loop on a thread-pool task.
/// Declared <c>async void</c> deliberately — this is a top-level fire-and-forget entry
/// point, and every exception is caught and logged here rather than propagated.
/// </summary>
public async void StartRecording()
{
    try
    {
        if (this.device == null)
        {
            // No audio device is attached... so go get one if you can.
            await this.GetDefaultCaptureDevice();
            if (this.device == null)
            {
                // OK, we tried and failed, so exit as gracefully as possible.
                return;
            }
        }

        IAudioClient2 audioClient = await Activate();
        if (!this.initialized)
        {
            return; // couldn't initialize.
        }

        if (audioClient == null)
        {
            this.EventWriterDLL.WriteLine(EventWriterDLL.SeverityTypes.Error, 0x01,
                "WasapiCapture::StartRecording => Could not activate audio client");
            return;
        }

        // Fire-and-forget: CaptureThread catches and logs its own exceptions, so the
        // task is intentionally discarded (the previous 'var t = ...' was a dead store).
        _ = Task.Run(() => CaptureThread(audioClient));

        this.EventWriterDLL.BuildLine("+4 => WasapiCapture::StartRecording => StartRecording finished");
    }
    catch (Exception e)
    {
        this.EventWriterDLL.WriteLine(EventWriterDLL.SeverityTypes.Error, 0x01,
            "WasapiCapture::StartRecording => Exception: " + e.ToString());
    }

    this.EventWriterDLL.FlushBuildString(EventWriterDLL.SeverityTypes.Information, 0x01);
}
/// <summary>
/// Wraps the COM audio client and initializes it for event-callback streaming using the
/// engine's mix format. On failure the exception is logged and swallowed — callers must
/// check the client's initialization state before relying on it.
/// </summary>
/// <param name="audioClient2">Activated COM audio client interface.</param>
private void InitializeAudio(IAudioClient2 audioClient2)
{
    // REFERENCE_TIME is in 100-ns units: 1 ms == 10,000 ref-times.
    long latencyRefTimes = latencyMilliseconds * 10000;
    try
    {
        this.audioClient = new AudioClient2(audioClient2);
        this.renderWaveFormat = this.audioClient.MixFormat; // use the mix format by default
        this.audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback,
            latencyRefTimes, 0, ref this.renderWaveFormat, Guid.Empty);
        // Log tag fixed: previously said "WasapiOutRT::Init2", a copy-paste from another
        // class that made the event log attribute this activity to the wrong component.
        this.EventWriterDLL.BuildLine("+4 start => WasapiCapture::InitializeAudio => Initialized OK");
    }
    catch (Exception e)
    {
        this.EventWriterDLL.WriteLine(EventWriterDLL.SeverityTypes.Error, 0x01,
            "Exception in WasapiCapture::InitializeAudio trying to initialize audioClient: " + e.ToString());
    }
}
/// <summary>
/// This is the loop that does all of the hard work; each packet is recorded here and then
/// dispatched to <c>DataAvailableEventHandler</c>. Runs until <c>pendingStopRequest</c> is
/// set, then stops and disposes the audio client. This method owns <paramref name="audioClient"/>.
/// </summary>
/// <param name="audioClient">Wrapped audio client to capture from; stopped and disposed on exit.</param>
private void DoRecording(AudioClient2 audioClient)
{
    this.EventWriterDLL.WriteLine(EventWriterDLL.SeverityTypes.Information, 0x01, "WasapiCapture::DoRecording => has a buffer size of " + audioClient.BufferSize);
    // BufferSize is in frames; convert to bytes for the managed staging buffer.
    int sampleBufferByteSize = audioClient.BufferSize * bytesPerFrame;
    byte[] sampleBuffer = new byte[sampleBufferByteSize];
    // Create the event WASAPI signals when a capture buffer is ready, and register it.
    IntPtr audioSamplesReadyEventHandle = NativeMethods.CreateEventExW(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS);
    audioClient.SetEventHandle(audioSamplesReadyEventHandle);
    try
    {
        AudioCaptureClient captureClient = audioClient.AudioCaptureClient;
        audioClient.Start();
        while (!this.pendingStopRequest)
        {
            // Wait for the samples-ready event; 1000 ms timeout so the stop flag is
            // re-checked even if the device stops signaling.
            NativeMethods.WaitForSingleObjectEx(audioSamplesReadyEventHandle, 1000, true);
            int packetSize = captureClient.GetNextPacketSize();
            if (packetSize > 0)
            {
                int numFramesToRead = 0;
                int numBytesInSampleBuffer = 0;
                AudioClientBufferFlags dwFlags = 0;
                IntPtr micDataIn;
                micDataIn = captureClient.GetBuffer(out numFramesToRead, out dwFlags);
                int capturedBytes = numFramesToRead * bytesPerFrame;
                if ((int)(dwFlags & AudioClientBufferFlags.Silent) > 0)
                {
                    // Silent packet: emit zeros instead of reading the (unspecified)
                    // buffer contents; clamped to the staging buffer size.
                    int maxBytes = Math.Min(capturedBytes, sampleBufferByteSize);
                    while (maxBytes-- > 0)
                    {
                        sampleBuffer[numBytesInSampleBuffer++] = 0;
                    }
                }
                else
                {
                    // NOTE(review): unlike the silent branch, this copy is not clamped to
                    // sampleBufferByteSize — presumably a packet never exceeds BufferSize
                    // frames, but confirm; an oversized packet would throw here.
                    System.Runtime.InteropServices.Marshal.Copy(micDataIn, sampleBuffer, 0, capturedBytes);
                    numBytesInSampleBuffer = capturedBytes;
                }
                captureClient.ReleaseBuffer(numFramesToRead);
                if (DataAvailableEventHandler != null)
                {
                    if (this.waveFormat.Channels == 2)
                    {
                        // convert stereo to mono inline! (in-place; byte count halves)
                        ConvertStereoToMono(sampleBuffer, numBytesInSampleBuffer);
                        numBytesInSampleBuffer /= 2;
                    }
                    else if (this.waveFormat.Channels == 6)
                    {
                        // convert 6 to mono inline! (in-place, mixing channels 2 and 3;
                        // byte count drops to one sixth)
                        Convert6ToMono(sampleBuffer, numBytesInSampleBuffer, Channel2 | Channel3, 2);
                        numBytesInSampleBuffer /= 6;
                    }
                    DataAvailableEventHandler(this, new WaveInEventArgs(sampleBuffer, numBytesInSampleBuffer));
                }
            }
        }
    }
    catch (Exception ex)
    {
        this.EventWriterDLL.WriteLine(EventWriterDLL.SeverityTypes.Error, 0x01, "WasapiCapture::DoRecording => Exception: " + ex.ToString());
    }
    finally
    {
        // Teardown runs on every exit path: release the native event handle, stop the
        // stream, dispose the client, and clear the stop request for the next session.
        NativeMethods.CloseHandle(audioSamplesReadyEventHandle);
        audioClient.Stop();
        audioClient.Dispose();
        this.pendingStopRequest = false;
    }
}
/// <summary>
/// Wraps the activated COM audio client, applies communications-category client
/// properties, and initializes a shared-mode, event-driven capture stream using the
/// engine's mix format. Sets <c>initialized</c> to true only when Initialize returns S_OK;
/// otherwise logs the failing HRESULT.
/// </summary>
/// <param name="audioClientInterface">Activated COM audio client interface.</param>
private void InitializeCaptureDevice(IAudioClient2 audioClientInterface)
{
    this.audioClient = new AudioClient2(audioClientInterface);
    // Activation complete. Set the client properties.
    AudioClientProperties props = new AudioClientProperties();
    // NOTE(review): cbSize is hard-coded to 16 rather than computed via Marshal.SizeOf —
    // confirm it still matches the struct layout if AudioClientProperties ever changes.
    props.cbSize = 16; // (uint)System.Runtime.InteropServices.Marshal.SizeOf<AudioClientProperties>();
    props.bIsOffload = false; // FALSE
    props.eCategory = AudioStreamCategory.Communications; // AUDIO_STREAM_CATEGORY::AudioCategory_Communications in C++
    props.Options = AudioClientStreamOptions.None; // AUDCLNT_STREAMOPTIONS_NONE in C++
    int hresult = audioClientInterface.SetClientProperties(props);
    if (hresult != 0)
    {
        Marshal.ThrowExceptionForHR(hresult);
    }
    // Capture in the engine's mix format by default.
    this.waveFormat = this.audioClient.MixFormat;
    //if (this.isXMOS)
    //{
    //    // the mix format for the XMOS is likely a 6 channel interleaved audio stream that we don't need.
    //    // in theory, we should be able to just request a single channel 48K stream and it will just work
    //    // and it will be good!!! Of course, this doesn't actually work... Set the veil audio mic to be 1 channel
    //    // before doing anything else...
    //    this.waveFormat = WaveFormatExtensible.CreateIeeeFloatWaveFormat(48000, 1);
    //}
    // Requested buffer duration: 100 ms expressed in REFERENCE_TIME (100-ns) units.
    long requestedDuration = REFTIMES_PER_MILLISEC * 100;
    // Derive sample rate: bytes/sec * 8 bits / (channels * bits-per-sample) = frames/sec.
    this.frequency = 8 * waveFormat.AverageBytesPerSecond / (waveFormat.Channels * waveFormat.BitsPerSample);
    this.EventWriterDLL.BuildLine(
        "+2 start => WasapiCapture::InitializeCaptureDevice => Wave Format =\n" +
        " => average bytes per second = " + waveFormat.AverageBytesPerSecond + "\n" +
        " => bits per sample = " + waveFormat.BitsPerSample + "\n" +
        " => channels = " + waveFormat.Channels + "\n" +
        " => encoding = " + waveFormat.WaveFormatTag + "\n" +
        " => extra size = " + waveFormat.ExtraSize + "\n" +
        " => frequency = " + frequency);
    hresult = this.audioClient.Initialize(AudioClientShareMode.Shared, AudioClientStreamFlags.EventCallback, requestedDuration, 0, ref this.waveFormat, Guid.Empty);
    if (hresult == 0)
    {
        // Size the record buffer from the actual buffer the client allocated.
        int bufferFrameCount = this.audioClient.BufferSize;
        this.bytesPerFrame = this.waveFormat.Channels * this.waveFormat.BitsPerSample / 8;
        this.recordBuffer = new byte[bufferFrameCount * bytesPerFrame];
        this.EventWriterDLL.BuildLine(
            "+3 => WasapiCapture::InitializeCaptureDevice => " +
            string.Format("record buffer size = {0}", this.recordBuffer.Length));
        // Get back the effective latency from AudioClient (REFERENCE_TIME -> ms).
        this.latencyMilliseconds = (int)(this.audioClient.StreamLatency / 10000);
        this.initialized = true;
    }
    else
    {
        // Initialize failed; log the HRESULT in hex. 'initialized' stays false.
        this.EventWriterDLL.BuildLine("-3 => WasapiCapture::InitializeCaptureDevice => Error:" + string.Format("{0:X}", hresult));
    }
}