/// <summary>
/// Configures the capture pipeline: stops any running capture, re-initializes the
/// audio client, validates the negotiated sample format, and acquires the capture service.
/// </summary>
/// <param name="receiveCallback">Callback invoked with captured sample data; must not be null.</param>
/// <param name="bufferSize">Requested buffer size; 0 selects the driver default.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="receiveCallback"/> is null.</exception>
/// <exception cref="NotSupportedException">Thrown when the sample format is not 32/64-bit float or 16/32-bit integer.</exception>
public void Setup(Action<IAudioInDriver, float[][]> receiveCallback, int bufferSize = 0)
{
    if (receiveCallback == null)
    {
        // BUG FIX: the original reported the wrong parameter name ("renderCallback").
        throw new ArgumentNullException(nameof(receiveCallback));
    }

    Stop();
    try
    {
        Cleanup();
        InternalSetup(DataFlow, StreamFlags, bufferSize);

        // Only 32/64-bit float or 16/32-bit integer sample formats are supported.
        if ((outputIsFloat && bitsPerSample != 32 && bitsPerSample != 64) ||
            (!outputIsFloat && bitsPerSample != 16 && bitsPerSample != 32))
        {
            throw new NotSupportedException();
        }

        audioCaptureClient = GetService<IAudioCaptureClient>(IID_IAudioCaptureClient);
        this.receiveCallback = receiveCallback;
    }
    catch
    {
        // Leave the driver fully released before propagating the failure.
        Dispose();
        throw;
    }
}
// Sets up WASAPI capture on the default multimedia endpoint for the given flow:
// endpoint lookup, notification registration, client/meter activation, client
// initialization, capture-service acquisition, and stream start.
// NOTE(review): every COMResult except the GetService call is assigned but never
// checked — an early failure would surface later as a cast error or NRE; confirm
// whether each result should go through Marshal.ThrowExceptionForHR.
private void InitializeAudio(AudioDataFlow audioFlow, IMMDeviceEnumerator deviceEnumerator)
{
    //Get Audio Device
    COMResult result = deviceEnumerator.GetDefaultAudioEndpoint(audioFlow, EndPointRole.eMultimedia, out _audioDevice);
    //Register End point notification
    _notifyClient = new MMNotificationClient();
    result = deviceEnumerator.RegisterEndpointNotificationCallback(_notifyClient);
    //Get Audio Client from device
    result = _audioDevice.Activate(typeof(IAudioClient).GUID, 0, IntPtr.Zero, out object obj);
    _audioClient = (IAudioClient)obj;
    //Get Audio Meter from device
    result = _audioDevice.Activate(typeof(IAudioMeterInformation).GUID, 0, IntPtr.Zero, out obj);
    _audioMeter = (IAudioMeterInformation)obj;
    //Initialize Audio Client.
    // NOTE(review): new Guid() is Guid.Empty (the "null" session id) — confirm this
    // is intentional and not a missed Guid.NewGuid().
    _sessionGuid = new Guid();
    result = _audioClient.GetMixFormat(out waveFormat);
    AudioClientStreamFlags streamFlag = AudioClientStreamFlags.None;
    if (audioFlow == AudioDataFlow.eRender)
    {
        // Render endpoints are captured via WASAPI loopback.
        streamFlag = AudioClientStreamFlags.Loopback;
    }
    // 10,000,000 x 100ns = 1 second requested buffer duration.
    result = _audioClient.Initialize(AudioClientMode.Shared, streamFlag, 10000000, 0, waveFormat, ref _sessionGuid);
    //Get Capture Client.
    result = _audioClient.GetService(typeof(IAudioCaptureClient).GUID, out obj);
    Marshal.ThrowExceptionForHR((int)result);
    _audioCaptureClient = (IAudioCaptureClient)obj;
    result = _audioClient.Start();
    //Change wave format here
    SetupWaveFormat(waveFormat);
}
/// <summary>Releases the COM capture client, if one was acquired.</summary>
protected override void Cleanup()
{
    var client = audioCaptureClient;
    if (client == null)
    {
        return;
    }

    audioCaptureClient = null;
    Marshal.ReleaseComObject(client);
}
/// <summary>
/// Disposes the <see cref="WasapiCaptureClient"/> object: stops the capture
/// thread, closes the shutdown event, and releases every COM reference held
/// by this instance.
/// </summary>
public void Dispose()
{
    // Ask the capture thread to exit and wait for it before tearing down COM state.
    if (this.captureThread != null)
    {
        this.shutdownEvent.Set();
        this.captureThread.Join();
        this.captureThread = null;
    }

    if (this.shutdownEvent != null)
    {
        this.shutdownEvent.Close();
        this.shutdownEvent = null;
    }

    // Release each COM reference exactly once and clear the field.
    void ReleaseCom<T>(ref T field)
        where T : class
    {
        if (field != null)
        {
            Marshal.ReleaseComObject(field);
            field = null;
        }
    }

    ReleaseCom(ref this.audioClient);
    ReleaseCom(ref this.captureClient);
    ReleaseCom(ref this.resampler);
    ReleaseCom(ref this.inputBuffer);
    ReleaseCom(ref this.inputSample);
    ReleaseCom(ref this.outputBuffer);
    ReleaseCom(ref this.outputSample);
}
/// <summary>
/// Releases the underlying <c>IAudioCaptureClient</c> COM reference.
/// Safe to call more than once.
/// </summary>
public void Dispose()
{
    if (this.audioCaptureClientInterface != null)
    {
        Marshal.ReleaseComObject(this.audioCaptureClientInterface);
        this.audioCaptureClientInterface = null;
    }

    // BUG FIX: previously only suppressed inside the null check, so a second
    // Dispose (or one after the reference was already cleared) left the
    // finalizer registered. Dispose has done all cleanup either way.
    GC.SuppressFinalize(this);
}
/// <summary>
/// Initialize WASAPI in timer driven mode, and retrieve a capture client for the transport.
/// </summary>
private void InitializeAudioEngine()
{
    // Marshal the mix format into unmanaged memory for IAudioClient::Initialize.
    IntPtr mixFormatPtr = WaveFormat.MarshalToPtr(this.mixFormat);
    try
    {
        // engineLatencyInMs * 10000 converts milliseconds to 100-ns REFERENCE_TIME units.
        this.audioClient.Initialize(
            AudioClientShareMode.Shared,
            AudioClientStreamFlags.NoPersist,
            this.engineLatencyInMs * 10000,
            0,
            mixFormatPtr,
            Guid.Empty);
    }
    finally
    {
        // BUG FIX: the unmanaged format buffer leaked if Initialize threw.
        Marshal.FreeHGlobal(mixFormatPtr);
    }

    object obj = this.audioClient.GetService(new Guid(Guids.IAudioCaptureClientIIDString));
    this.captureClient = (IAudioCaptureClient)obj;
}
/// <summary>
/// Release the COM object
/// </summary>
public void Dispose()
{
    if (audioCaptureClientInterface != null)
    {
        // although GC would do this for us, we want it done now
        // to let us reopen WASAPI
        Marshal.ReleaseComObject(audioCaptureClientInterface);
        audioCaptureClientInterface = null;
    }

    // BUG FIX: suppress finalization unconditionally so repeated Dispose calls
    // (or a Dispose after the reference was already cleared) still unregister
    // the finalizer.
    GC.SuppressFinalize(this);
}
/// <summary>
/// Release the COM object
/// </summary>
public void Dispose()
{
    if (audioCaptureClientInterface != null)
    {
        // although GC would do this for us, we want it done now
        // to let us reopen WASAPI
        Marshal.ReleaseComObject(audioCaptureClientInterface);
        audioCaptureClientInterface = null;
    }

    // BUG FIX: suppress finalization unconditionally so repeated Dispose calls
    // (or a Dispose after the reference was already cleared) still unregister
    // the finalizer.
    GC.SuppressFinalize(this);
}
/// <summary>
/// Wraps an initialized WASAPI audio client for recording: acquires the capture
/// service, queries the buffer size, and prepares the stream and worker thread.
/// </summary>
/// <param name="Client">Already-initialized audio client to record from.</param>
/// <param name="NumChannel">Channel count of the capture format.</param>
/// <param name="FrameSize">Size in bytes of one audio frame.</param>
/// <param name="SamplesRate">Sample rate in Hz.</param>
/// <param name="DataFormat">Managed type representing one sample.</param>
public RecordingClient_WindowsCoreApi(Factory_WindowsCoreApi.IAudioClient Client, int NumChannel, int FrameSize, uint SamplesRate, Type DataFormat)
{
    object opaqueService;
    // BUG FIX: _IAudioClient was redundantly assigned twice; the duplicate is removed.
    _IAudioClient = Client;
    _IAudioClient.GetService(ref IID_IAudioCaptureClient, out opaqueService);
    _IAudioCaptureClient = (IAudioCaptureClient)opaqueService;
    _IAudioClient.GetBufferSize(out _BufferFrameCount);
    _SampleRate = SamplesRate;
    _Format = DataFormat;
    _ChannelCount = NumChannel;
    _FrameSize = FrameSize;
    _Stream = new RecordingStream(this);
    _Thread = new System.Threading.Thread(Loop);
}
/// <summary>
/// Signals the capture loop to stop, waits briefly for it to observe the flag,
/// then releases every COM reference held by this instance.
/// </summary>
public void Dispose()
{
    // Flag the worker loop to exit, then give it a short window to notice.
    keepgoing = false;
    Thread.Sleep(20);

    // Release each COM reference exactly once and clear the field.
    void Release<T>(ref T field)
        where T : class
    {
        if (field != null)
        {
            System.Runtime.InteropServices.Marshal.ReleaseComObject(field);
            field = null;
        }
    }

    Release(ref iAudioCaptureClient);
    Release(ref iAudioClient);
    Release(ref iAudioEndpoint);
    Release(ref imd);
    Release(ref iMde);
    Release(ref oEnumerator);
}
/// <summary>
/// Drops the audio endpoint/client references and disposes the thread and
/// event-handle resources owned by this instance.
/// </summary>
public void Dispose()
{
    // These are plain managed references; clearing them is sufficient here.
    ChatEndpoint = null;
    RenderClient = null;
    CaptureClient = null;
    AudioClient = null;

    ChatThread?.Dispose();
    ChatThread = default;

    ShutdownEvent?.Dispose();
    ShutdownEvent = null;

    AudioSamplesReadyEvent?.Dispose();
    AudioSamplesReadyEvent = null;
}
/// <summary>
/// Initialize WASAPI in timer driven mode, and retrieve a capture client for the transport.
/// </summary>
private void InitializeAudioEngine()
{
    var streamFlags = AudioClientStreamFlags.NoPersist;
    if (this.isEventDriven)
    {
        streamFlags |= AudioClientStreamFlags.EventCallback;
        this.audioAvailableEvent = new AutoResetEvent(false);
    }
    else
    {
        // ensure buffer is at least twice the latency (only in pull mode)
        if (this.engineBufferInMs < 2 * this.engineLatencyInMs)
        {
            this.engineBufferInMs = 2 * this.engineLatencyInMs;
        }
    }

    // Marshal the mix format into unmanaged memory for IAudioClient::Initialize.
    IntPtr mixFormatPtr = WaveFormat.MarshalToPtr(this.mixFormat);
    try
    {
        // engineBufferInMs * 10000 converts milliseconds to 100-ns REFERENCE_TIME units.
        this.audioClient.Initialize(AudioClientShareMode.Shared, streamFlags, this.engineBufferInMs * 10000, 0, mixFormatPtr, Guid.Empty);
    }
    finally
    {
        // BUG FIX: the unmanaged format buffer leaked if Initialize threw.
        Marshal.FreeHGlobal(mixFormatPtr);
    }

    if (this.isEventDriven)
    {
        this.audioClient.SetEventHandle(this.audioAvailableEvent.SafeWaitHandle.DangerousGetHandle());
    }

    // Recompute the buffer duration from the size the engine actually allocated.
    int bufferFrames = this.audioClient.GetBufferSize();
    this.engineBufferInMs = (int)(bufferFrames * 1000L / this.mixFormat.SamplesPerSec);

    object obj = this.audioClient.GetService(new Guid(Guids.IAudioCaptureClientIIDString));
    this.captureClient = (IAudioCaptureClient)obj;
}
/// <summary>Wraps the raw <c>IAudioCaptureClient</c> COM interface.</summary>
/// <param name="audioCaptureClientInterface">The COM interface instance to wrap.</param>
internal AudioCaptureClient(IAudioCaptureClient audioCaptureClientInterface)
{
    this.audioCaptureClientInterface = audioCaptureClientInterface;
}
/// <summary>Wraps the raw <c>IAudioCaptureClient</c> COM interface.</summary>
/// <param name="audioCaptureClientInterface">The COM interface instance to wrap.</param>
internal AudioCaptureClient(IAudioCaptureClient audioCaptureClientInterface)
{
    this.audioCaptureClientInterface = audioCaptureClientInterface;
}
/// <summary>Wraps a raw <c>IAudioCaptureClient</c> obtained from its parent client.</summary>
/// <param name="parent">The owning <c>AudioClient</c> that produced the capture interface.</param>
/// <param name="realClient">The underlying COM capture-client interface.</param>
internal AudioCaptureClient(AudioClient parent, IAudioCaptureClient realClient)
{
    _Parent = parent;
    _RealClient = realClient;
}
/// <summary>
/// Opens the default capture (microphone) or render (loopback) endpoint,
/// negotiates 16-bit / 44.1 kHz / stereo PCM, initializes WASAPI, acquires the
/// capture client, and starts the background recording thread.
/// </summary>
/// <param name="microphone">
/// true to capture the default microphone (exclusive mode); false to
/// loopback-capture the default render device (shared mode).
/// </param>
/// <exception cref="Exception">Thrown when any underlying COM call fails.</exception>
public CoreAudio(bool microphone = false)
{
    const uint REFTIMES_PER_SEC = 10000000;      // 100-ns units per second
    const uint CLSCTX_INPROC_SERVER = 1;
    Guid clsid = new Guid("BCDE0395-E52F-467C-8E3D-C4579291692E");           // CLSID_MMDeviceEnumerator
    Guid IID_IUnknown = new Guid("00000000-0000-0000-C000-000000000046");
    oEnumerator = null;
    uint hResult = CoCreateInstance(ref clsid, null, CLSCTX_INPROC_SERVER, ref IID_IUnknown, out oEnumerator);
    if (hResult != 0 || oEnumerator == null)
    {
        throw new Exception("CoCreateInstance() pInvoke failed");
    }
    iMde = oEnumerator as IMMDeviceEnumerator;
    if (iMde == null)
    {
        throw new Exception("COM cast failed to IMMDeviceEnumerator");
    }

    // Resolve the default endpoint for the requested data-flow direction.
    IntPtr pDevice = IntPtr.Zero;
    int retVal;
    if (microphone)
    {
        retVal = iMde.GetDefaultAudioEndpoint(EDataFlow.eCapture, ERole.eConsole, ref pDevice);
    }
    else
    {
        retVal = iMde.GetDefaultAudioEndpoint(EDataFlow.eRender, ERole.eConsole, ref pDevice);
    }
    if (retVal != 0)
    {
        throw new Exception("IMMDeviceEnumerator.GetDefaultAudioEndpoint()");
    }
    imd = (IMMDevice)System.Runtime.InteropServices.Marshal.GetObjectForIUnknown(pDevice);
    if (imd == null)
    {
        throw new Exception("COM cast failed to IMMDevice");
    }

    // Activate IAudioEndpointVolume on the endpoint.
    Guid iid = new Guid("5CDF2C82-841E-4546-9722-0CF74078229A");             // IID_IAudioEndpointVolume
    uint dwClsCtx = (uint)CLSCTX.CLSCTX_ALL;
    IntPtr pActivationParams = IntPtr.Zero;
    IntPtr pEndPoint = IntPtr.Zero;
    retVal = imd.Activate(iid, dwClsCtx, pActivationParams, out pEndPoint);
    if (retVal != 0)
    {
        throw new Exception("IMMDevice.Activate()");
    }
    iAudioEndpoint = (IAudioEndpointVolume)System.Runtime.InteropServices.Marshal.GetObjectForIUnknown(pEndPoint);
    if (iAudioEndpoint == null)
    {
        throw new Exception("COM cast failed to IAudioEndpointVolume");
    }

    // Activate IAudioClient on the endpoint.
    iid = new Guid("1CB9AD4C-DBFA-4c32-B178-C2F568A703B2");                  // IID_IAudioClient
    pEndPoint = IntPtr.Zero;
    retVal = imd.Activate(iid, dwClsCtx, pActivationParams, out pEndPoint);
    if (retVal != 0)
    {
        throw new Exception("IAudioClient.Activate() " + Convert.ToString(retVal, 2));
    }
    iAudioClient = (IAudioClient)System.Runtime.InteropServices.Marshal.GetObjectForIUnknown(pEndPoint);
    if (iAudioClient == null)
    {
        throw new Exception("COM cast failed to iAudioClient");
    }

    ulong processInterval;
    ulong minimumInterval;
    retVal = iAudioClient.GetDevicePeriod(out processInterval, out minimumInterval);
    if (retVal != 0)
    {
        throw new Exception("iAudioClient.GetDevicePeriod()");
    }

    // Request 16-bit stereo PCM at 44.1 kHz (4-byte frames, 176400 B/s).
    waveformat = new WAVEFORMATEX();
    waveformat.wFormatTag = (ushort)WaveFormatEncoding.Pcm;
    waveformat.nChannels = 2;
    waveformat.nBlockAlign = 4;
    waveformat.wBitsPerSample = 16;
    waveformat.nSamplesPerSec = 44100;
    waveformat.cbSize = 0;
    waveformat.nAvgBytesPerSec = 176400;

    IntPtr reqForm = Marshal.AllocHGlobal(Marshal.SizeOf(waveformat));
    // BUG FIX: propForm was pre-allocated with AllocHGlobal and immediately
    // leaked when the out parameter overwrote it. WASAPI allocates the
    // closest-match buffer itself (CoTaskMem) — assume/verify the interop
    // signature returns that pointer — so we only need to free what comes back.
    IntPtr propForm = IntPtr.Zero;
    try
    {
        Marshal.StructureToPtr(waveformat, reqForm, false);
        retVal = iAudioClient.IsFormatSupported(AUDCLNT_SHAREMODE.AUDCLNT_SHAREMODE_SHARED, reqForm, out propForm);
        if (retVal != 0)
        {
            throw new Exception("IAudioClient.IsFormatSupported()");
        }
        // 2,000,000 x 100ns = 200 ms requested buffer duration.
        if (microphone)
        {
            retVal = iAudioClient.Initialize(AUDCLNT_SHAREMODE.AUDCLNT_SHAREMODE_EXCLUSIVE, 0, 2000000, 0, reqForm, Guid.Empty);
        }
        else
        {
            retVal = iAudioClient.Initialize(AUDCLNT_SHAREMODE.AUDCLNT_SHAREMODE_SHARED, AUDCLNT_STREAMFLAGS_LOOPBACK, 2000000, 0, reqForm, Guid.Empty);
        }
        if (retVal != 0)
        {
            throw new Exception("IAudioClient.Initialize() " + retVal);
        }
    }
    finally
    {
        // BUG FIX: reqForm was never freed on any path.
        Marshal.FreeHGlobal(reqForm);
        if (propForm != IntPtr.Zero)
        {
            Marshal.FreeCoTaskMem(propForm);
        }
    }

    uint buffersize = 0;
    retVal = iAudioClient.GetBufferSize(out buffersize);
    if (retVal != 0)
    {
        throw new Exception("IAudioClient.GetBufferSize()");
    }

    // Acquire IAudioCaptureClient from the initialized client.
    iid = new Guid("C8ADBD64-E71E-48a0-A4DE-185C395CD317");                  // IID_IAudioCaptureClient
    IntPtr capclient = IntPtr.Zero;
    retVal = iAudioClient.GetService(iid, out capclient);
    if (retVal != 0)
    {
        throw new Exception("IAudioClient.GetService()");
    }
    iAudioCaptureClient = (IAudioCaptureClient)System.Runtime.InteropServices.Marshal.GetObjectForIUnknown(capclient);
    if (iAudioCaptureClient == null)
    {
        throw new Exception("COM cast failed to iAudioCaptureClient");
    }

    hnsActualDuration = (double)(REFTIMES_PER_SEC * buffersize / waveformat.nSamplesPerSec); // 8391 smallest possible value
    recordingthread = new Thread(recordingloop);
    recordingthread.IsBackground = false;
    recordingthread.Start();
}
/// <summary>
/// Builds an input (capture) client on top of an initialized audio client and
/// immediately resolves the capture service from it.
/// </summary>
internal AudioInputClient(IAudioClient client, WaveFormat format)
    : base(client, format)
{
    _capture = GetService<IAudioCaptureClient>();
}
/// <summary>
/// Wraps the raw <c>IAudioCaptureClient</c> COM interface and initializes the
/// event-writer log under the name "AudioCaptureClient".
/// </summary>
/// <param name="audioCaptureClientInterface">The COM interface instance to wrap.</param>
internal AudioCaptureClient(IAudioCaptureClient audioCaptureClientInterface)
{
    this.EventWriterDLL.Initialize("AudioCaptureClient");
    this.audioCaptureClientInterface = audioCaptureClientInterface;
}
/// <summary>Wraps the raw <c>IAudioCaptureClient</c> COM interface.</summary>
/// <param name="audioCaptureClientInterface">The COM interface instance to wrap.</param>
internal AudioCaptureClient(IAudioCaptureClient audioCaptureClientInterface)
{
    // FIX: parameter name was misspelled "audipCaptureClientInterface".
    this._realAudioCaptureClient = audioCaptureClientInterface;
}
/// <summary>
/// Builds an input (capture) client on top of an initialized audio client and
/// immediately resolves the capture service from it.
/// </summary>
internal AudioInputClient(IAudioClient client, WaveFormat format)
    : base(client, format)
{
    var captureService = this.GetService<IAudioCaptureClient>();
    _capture = captureService;
}
/// <summary>Wraps a raw <c>IAudioCaptureClient</c> obtained from its parent client.</summary>
/// <param name="parent">The owning <c>AudioClient</c> that produced the capture interface.</param>
/// <param name="realClient">The underlying COM capture-client interface.</param>
internal AudioCaptureClient(AudioClient parent, IAudioCaptureClient realClient)
{
    _Parent = parent;
    _RealClient = realClient;
}
/// <summary>
/// Opens the default desktop (render, loopback) audio endpoint and starts an
/// event-driven WASAPI capture session.
/// </summary>
/// <returns>The endpoint's mix format, copied out of the unmanaged mix-format buffer.</returns>
public WaveFormatEx Open()
{
    StopSilenceThread();
    Log.Information("Opening DesktopAudio");
    IMMDeviceEnumerator deviceEnumerator = null;
    IntPtr mixFormatPtr = IntPtr.Zero;
    try
    {
        bool render = true;
        deviceEnumerator = Activator.CreateInstance(typeof(MMDeviceEnumerator)) as IMMDeviceEnumerator;
        // BUG FIX: the HRESULT of GetDefaultAudioEndpoint was assigned to an
        // unused local and ignored, unlike every other COM call here; a failure
        // would surface later as a null reference on _endpoint.
        Checked(deviceEnumerator.GetDefaultAudioEndpoint(
            render ? DataFlowEnum.Render : DataFlowEnum.Capture,
            render ? RoleEnum.Console : RoleEnum.Communications,
            out _endpoint), "GetDefaultAudioEndpoint");
        if (render)
        {
            StartSilenceGeneration();
        }
        Checked(_endpoint.Activate(ref IAudioClientId, ClsCtxEnum.All, IntPtr.Zero, out var obj), "Activate");
        _audioClient = (IAudioClient)obj;
        Checked(_audioClient.GetMixFormat(out mixFormatPtr), "GetMixFormat");
        WaveFormatEx outputFormat = (WaveFormatEx)Marshal.PtrToStructure(mixFormatPtr, typeof(WaveFormatEx));
        if (!render) // for render it is checked in the StartSilenceGeneration();
        {
            CheckFormat(outputFormat);
        }
        _bytesPerFrame = outputFormat.BlockAlign;
        var flags = AudioClientStreamFlagsEnum.StreamFlagsEventCallback |
                    (render ? AudioClientStreamFlagsEnum.StreamFlagsLoopback : AudioClientStreamFlagsEnum.None);
        // 10_000_000 * 5 = 5-second buffer in 100-ns REFERENCE_TIME units.
        Checked(_audioClient.Initialize(AudioClientShareModeEnum.Shared, flags, 10_000_000 * 5, 0, mixFormatPtr, Guid.Empty), "Initialize");
        Checked(_audioClient.GetService(IAudioCaptureClientId, out var captureObj), "GetService");
        _audioCaptureClient = (IAudioCaptureClient)captureObj;
#pragma warning disable CS0618 // Type or member is obsolete
        Checked(_audioClient.SetEventHandle(_dataAvailable.Handle), "SetEventHandle");
#pragma warning restore CS0618 // Type or member is obsolete
        Checked(_audioClient.Start(), "Start");
        return outputFormat;
    }
    catch (Exception e)
    {
        Core.LogError(e, "Open desktop audio failed");
        // Tear down partial state before propagating.
        StopSilenceThread();
        ReleaseComFields();
        throw;
    }
    finally
    {
        // GetMixFormat allocates the format with CoTaskMem; free it on all paths.
        if (mixFormatPtr != IntPtr.Zero)
        {
            Marshal.FreeCoTaskMem(mixFormatPtr);
        }
        ReleaseComObject(ref deviceEnumerator);
    }
}