/// <summary>
/// Wraps an already-activated <see cref="IAudioClient2"/> COM interface and initializes logging.
/// </summary>
/// <param name="audioClient2InterfaceIn">The activated IAudioClient2 COM interface to wrap.</param>
internal AudioClient2(IAudioClient2 audioClient2InterfaceIn)
{
    this.EventWriterDLL.Initialize("AudioClient2");
    this.audioClient2Interface = audioClient2InterfaceIn;
    // Keep an IAudioClient view of the same COM object for calls that only need the v1 interface.
    this.audioClientInterface = (IAudioClient)audioClient2InterfaceIn;
}
/// <summary>
/// Releases the per-service client wrappers and the underlying COM audio client.
/// Safe to call repeatedly; subsequent calls are no-ops once the interface is released.
/// </summary>
public void Dispose()
{
    if (audioClientInterface == null)
    {
        return;
    }

    // Tear down the service wrappers before releasing the COM object they were obtained from.
    audioClockClient?.Dispose();
    audioClockClient = null;

    audioRenderClient?.Dispose();
    audioRenderClient = null;

    audioCaptureClient?.Dispose();
    audioCaptureClient = null;

    audioStreamVolume?.Dispose();
    audioStreamVolume = null;

    Marshal.ReleaseComObject(audioClientInterface);
    audioClientInterface = null;
    GC.SuppressFinalize(this);
}
/// <summary>
/// Entry point for the background capture task: wraps the activated COM client in an
/// <see cref="AudioClient2"/> and runs the recording loop, logging any failure.
/// </summary>
/// <param name="audioClientIn">The activated IAudioClient2 COM interface to record from.</param>
private void CaptureThread(IAudioClient2 audioClientIn)
{
    AudioClient2 audioClient = new AudioClient2(audioClientIn);
    try
    {
        DoRecording(audioClient);
    }
    catch (Exception e)
    {
        // Log and swallow: an unhandled exception on this worker would otherwise be
        // unobservable (or fatal, if elevated), since nothing awaits this thread.
        // (Removed the dead local that captured the exception but was never read.)
        this.EventWriterDLL.WriteLine(EventWriterDLL.SeverityTypes.Error, 0x01,
            "WasapiCapture::CaptureThread => Exception: " + e.ToString());
    }

    // NOTE(review): the AudioClient2 wrapper is never disposed here — confirm whether
    // DoRecording or a stop path owns disposal, otherwise the COM object leaks.
}
/// <summary>
/// Start Recording. Acquires a default capture device if none is attached, activates and
/// initializes the audio client, then launches the capture loop on a worker task.
/// </summary>
/// <remarks>
/// NOTE(review): this is <c>async void</c>, so callers cannot await completion or observe
/// exceptions; acceptable only as a fire-and-forget UI/event-handler entry point — confirm usage.
/// </remarks>
public async void StartRecording()
{
    try
    {
        if (this.device == null)
        {
            // No audio device is attached... so go get one if you can.
            await this.GetDefaultCaptureDevice();
            if (this.device == null)
            {
                // OK, we tried and failed, so exit as gracefully as possible.
                return;
            }
        }

        IAudioClient2 audioClient = await Activate();
        if (!this.initialized)
        {
            return; // couldn't initialize.
        }

        if (audioClient == null)
        {
            this.EventWriterDLL.WriteLine(EventWriterDLL.SeverityTypes.Error, 0x01,
                "WasapiCapture::StartRecording => Could not activate audio client");
            return;
        }

        // Fire-and-forget: the capture loop logs its own errors (see CaptureThread).
        // Discard replaces the previous unused local `t`, which was a dead store.
        _ = Task.Run(() => CaptureThread(audioClient));

        this.EventWriterDLL.BuildLine("+4 => WasapiCapture::StartRecording => StartRecording finished");
    }
    catch (Exception e)
    {
        this.EventWriterDLL.WriteLine(EventWriterDLL.SeverityTypes.Error, 0x01,
            "WasapiCapture::StartRecording => Exception: " + e.ToString());
    }

    this.EventWriterDLL.FlushBuildString(EventWriterDLL.SeverityTypes.Information, 0x01);
}
/// <summary>
/// Wraps the activated client, adopts the endpoint mix format, and initializes the
/// audio client for event-driven rendering at the configured latency.
/// </summary>
/// <param name="audioClient2">The activated IAudioClient2 COM interface.</param>
private void InitializeAudio(IAudioClient2 audioClient2)
{
    // WASAPI expresses latency in 100-nanosecond reference-time units.
    long latencyRefTimes = latencyMilliseconds * 10000;

    try
    {
        this.audioClient = new AudioClient2(audioClient2);

        // Use the mix format by default.
        this.renderWaveFormat = audioClient.MixFormat;

        this.audioClient.Initialize(
            shareMode,
            AudioClientStreamFlags.EventCallback,
            latencyRefTimes,
            0,
            ref this.renderWaveFormat,
            Guid.Empty);

        this.EventWriterDLL.BuildLine("+4 start => WasapiOutRT::Init2 => Initialized OK");
    }
    catch (Exception e)
    {
        this.EventWriterDLL.WriteLine(EventWriterDLL.SeverityTypes.Error, 0x01,
            "Exception in WasapiOutRT::Init2 trying to initialize audioClient: " + e.ToString());
    }
}
/// <summary>
/// Initializes the shared-mode capture client, allocates a record buffer sized to the
/// endpoint buffer, and reads back the effective stream latency.
/// </summary>
/// <param name="audioClientInterface">The activated IAudioClient2 COM interface.</param>
/// <exception cref="ArgumentException">Thrown when the requested wave format is not supported.</exception>
private void InitializeCaptureDevice(IAudioClient2 audioClientInterface)
{
    // FIX: removed the redundant (IAudioClient2) cast — the parameter already has that type.
    var audioClient = new AudioClient(audioClientInterface);

    if (waveFormat == null)
    {
        // Fall back to the endpoint's mix format when the caller did not choose one.
        waveFormat = audioClient.MixFormat;
    }

    // 100 ms requested buffer duration, in 100-ns reference-time units.
    long requestedDuration = REFTIMES_PER_MILLISEC * 100;

    if (!audioClient.IsFormatSupported(AudioClientShareMode.Shared, waveFormat))
    {
        throw new ArgumentException("Unsupported Wave Format");
    }

    var streamFlags = GetAudioClientStreamFlags();
    audioClient.Initialize(AudioClientShareMode.Shared, streamFlags, requestedDuration, 0, waveFormat, Guid.Empty);

    int bufferFrameCount = audioClient.BufferSize;
    this.bytesPerFrame = this.waveFormat.Channels * this.waveFormat.BitsPerSample / 8;
    this.recordBuffer = new byte[bufferFrameCount * bytesPerFrame];
    Debug.WriteLine(string.Format("record buffer size = {0}", this.recordBuffer.Length));

    // Get back the effective latency from AudioClient (100-ns units -> milliseconds).
    latencyMilliseconds = (int)(audioClient.StreamLatency / 10000);
}
/// <summary>
/// Wraps an already-activated <see cref="IAudioClient2"/> COM interface.
/// </summary>
/// <param name="audioClientInterface">The activated audio client COM interface to wrap.</param>
internal AudioClient(IAudioClient2 audioClientInterface)
{
    this.audioClientInterface = audioClientInterface;
}
/// <summary>
/// Initialize the capturer.
/// </summary>
/// <param name="engineLatency">
/// Number of milliseconds of acceptable lag between live sound being produced and recording operation.
/// </param>
/// <param name="gain">
/// The gain to be applied to the audio after capture.
/// </param>
/// <param name="outFormat">
/// The format of the audio to be captured. If this is NULL, the default audio format of the
/// capture device will be used.
/// </param>
/// <param name="callback">
/// Callback function delegate which will handle the captured data.
/// </param>
/// <param name="speech">
/// If true, sets the audio category to speech to optimize audio pipeline for speech recognition.
/// </param>
public void Initialize(int engineLatency, float gain, WaveFormat outFormat, AudioDataAvailableCallback callback, bool speech)
{
    // Create our shutdown event - we want a manual reset event that starts in the not-signaled state.
    this.shutdownEvent = new ManualResetEvent(false);

    // Now activate an IAudioClient object on our preferred endpoint and retrieve the mix format for that endpoint.
    object obj = this.endpoint.Activate(ref audioClientIID, ClsCtx.INPROC_SERVER, IntPtr.Zero);
    this.audioClient = (IAudioClient)obj;

    // The following block enables advanced mic array APO pipeline on Windows 10 RS2 builds >= 15004.
    // This must be called before the call to GetMixFormat() in LoadFormat().
    if (speech)
    {
        // BUGFIX: the original hard cast `(IAudioClient2)this.audioClient` would throw
        // InvalidCastException on failure, so the subsequent null check — and the
        // "Unable to get IAudioClient2 interface" branch — were unreachable. The `is`
        // pattern makes the fallback path actually reachable.
        if (this.audioClient is IAudioClient2 audioClient2)
        {
            AudioClientProperties properties = new AudioClientProperties
            {
                Size = Marshal.SizeOf<AudioClientProperties>(),
                Category = AudioStreamCategory.Speech
            };

            int hr = audioClient2.SetClientProperties(ref properties);
            if (hr != 0)
            {
                Console.WriteLine("Failed to set audio stream category to AudioCategory_Speech: {0}", hr);
            }
        }
        else
        {
            Console.WriteLine("Unable to get IAudioClient2 interface");
        }
    }

    // Load the MixFormat. This may differ depending on the shared mode used.
    this.LoadFormat();

    // Remember our configured latency
    this.engineLatencyInMs = engineLatency;

    // Set the gain
    this.gain = gain;

    // Determine whether or not we need a resampler
    this.resampler = null;

    if (outFormat != null)
    {
        // Check if the desired format is supported
        IntPtr closestMatchPtr;
        IntPtr outFormatPtr = WaveFormat.MarshalToPtr(outFormat);
        int hr = this.audioClient.IsFormatSupported(AudioClientShareMode.Shared, outFormatPtr, out closestMatchPtr);

        // Free outFormatPtr to prevent leaking memory
        Marshal.FreeHGlobal(outFormatPtr);

        if (hr == 0)
        {
            // S_OK: the requested format is supported exactly. Replace _MixFormat with
            // outFormat and capture without resampling.
            this.mixFormat = outFormat;
            this.mixFrameSize = (this.mixFormat.BitsPerSample / 8) * this.mixFormat.Channels;
        }
        else
        {
            // In all other cases, we need to resample to OutFormat
            if ((hr == 1) && (closestMatchPtr != IntPtr.Zero))
            {
                // S_FALSE: use closest match suggested by IsFormatSupported() and resample
                this.mixFormat = WaveFormat.MarshalFromPtr(closestMatchPtr);
                this.mixFrameSize = (this.mixFormat.BitsPerSample / 8) * this.mixFormat.Channels;

                // Free closestMatchPtr to prevent leaking memory
                Marshal.FreeCoTaskMem(closestMatchPtr);
            }

            this.inputBufferSize = (int)(this.engineLatencyInMs * this.mixFormat.AvgBytesPerSec / 1000);
            this.outputBufferSize = (int)(this.engineLatencyInMs * outFormat.AvgBytesPerSec / 1000);

            DeviceUtil.CreateResamplerBuffer(this.inputBufferSize, out this.inputSample, out this.inputBuffer);
            DeviceUtil.CreateResamplerBuffer(this.outputBufferSize, out this.outputSample, out this.outputBuffer);

            // Create resampler object
            this.resampler = DeviceUtil.CreateResampler(this.mixFormat, outFormat);
        }
    }

    this.InitializeAudioEngine();

    // Set the callback function
    this.dataAvailableCallback = callback;
}
/// <summary>
/// Initializes the capture device: sets communications-category client properties on the
/// raw interface, adopts the endpoint mix format, initializes the client for event-driven
/// shared-mode capture, and allocates the record buffer on success (setting this.initialized).
/// </summary>
/// <param name="audioClientInterface">The activated IAudioClient2 COM interface.</param>
private void InitializeCaptureDevice(IAudioClient2 audioClientInterface)
{
    this.audioClient = new AudioClient2(audioClientInterface);

    // Activation complete. Set the client properties.
    // NOTE: properties are set before MixFormat is read below, matching the
    // SetClientProperties-before-GetMixFormat ordering noted elsewhere in this file.
    AudioClientProperties props = new AudioClientProperties();
    props.cbSize = 16; // hard-coded native struct size; was (uint)Marshal.SizeOf<AudioClientProperties>() — TODO confirm 16 matches the marshaled layout
    props.bIsOffload = false; // FALSE — not using hardware-offloaded audio
    props.eCategory = AudioStreamCategory.Communications; // AUDIO_STREAM_CATEGORY::AudioCategory_Communications in C++
    props.Options = AudioClientStreamOptions.None; // AUDCLNT_STREAMOPTIONS_NONE in C++

    int hresult = audioClientInterface.SetClientProperties(props);
    if (hresult != 0)
    {
        // Fail fast: translate the COM error code into a .NET exception.
        Marshal.ThrowExceptionForHR(hresult);
    }

    this.waveFormat = this.audioClient.MixFormat;

    //if (this.isXMOS)
    //{
    //    // the mix format for the XMOS is likely a 6 channel interleaved audio stream that we don't need.
    //    // in theory, we should be able to just request a single channel 48K stream and it will just work
    //    // and it will be good!!! Of course, this doesn't actually work... Set the veil audio mic to be 1 channel
    //    // before doing anything else...
    //    this.waveFormat = WaveFormatExtensible.CreateIeeeFloatWaveFormat(48000, 1);
    //}

    // 100 ms requested buffer duration, expressed in 100-ns reference-time units.
    long requestedDuration = REFTIMES_PER_MILLISEC * 100;

    // Derived sample rate in Hz: (bytes/sec * 8 bits) / (channels * bits-per-sample).
    this.frequency = 8 * waveFormat.AverageBytesPerSecond / (waveFormat.Channels * waveFormat.BitsPerSample);

    this.EventWriterDLL.BuildLine(
        "+2 start => WasapiCapture::InitializeCaptureDevice => Wave Format =\n" +
        " => average bytes per second = " + waveFormat.AverageBytesPerSecond + "\n" +
        " => bits per sample = " + waveFormat.BitsPerSample + "\n" +
        " => channels = " + waveFormat.Channels + "\n" +
        " => encoding = " + waveFormat.WaveFormatTag + "\n" +
        " => extra size = " + waveFormat.ExtraSize + "\n" +
        " => frequency = " + frequency);

    hresult = this.audioClient.Initialize(AudioClientShareMode.Shared, AudioClientStreamFlags.EventCallback, requestedDuration, 0, ref this.waveFormat, Guid.Empty);

    if (hresult == 0)
    {
        // Size the record buffer to hold one full endpoint buffer worth of frames.
        int bufferFrameCount = this.audioClient.BufferSize;
        this.bytesPerFrame = this.waveFormat.Channels * this.waveFormat.BitsPerSample / 8;
        this.recordBuffer = new byte[bufferFrameCount * bytesPerFrame];
        this.EventWriterDLL.BuildLine(
            "+3 => WasapiCapture::InitializeCaptureDevice => " +
            string.Format("record buffer size = {0}", this.recordBuffer.Length));

        // Get back the effective latency from AudioClient (100-ns units -> milliseconds).
        this.latencyMilliseconds = (int)(this.audioClient.StreamLatency / 10000);
        this.initialized = true;
    }
    else
    {
        // Initialization failed; log the HRESULT in hex and leave this.initialized false.
        this.EventWriterDLL.BuildLine("-3 => WasapiCapture::InitializeCaptureDevice => Error:" + string.Format("{0:X}", hresult));
    }
}