/// <summary>
/// Initializes the audio client, audio meter, and endpoint volume for the current audio device.
/// </summary>
private void InitializeAudioClient()
{
    //Get Audio Client from device
    COMResult result = _audioDevice.Activate(typeof(IAudioClient).GUID, 0, IntPtr.Zero, out object obj);
    _audioClient = (IAudioClient)obj;

    //Get Audio Meter from device
    result = _audioDevice.Activate(typeof(IAudioMeterInformation).GUID, 0, IntPtr.Zero, out obj);
    _audioMeter = (IAudioMeterInformation)obj;

    //Get Audio End Point
    result = _audioDevice.Activate(typeof(IAudioEndpointVolume).GUID, 0, IntPtr.Zero, out obj);
    _audioEndpointVolume = (IAudioEndpointVolume)obj;
    _audioEndpointVolume.RegisterControlChangeNotify(classCallBack);

    //Initialize Audio Client.
    _sessionGuid = new Guid();
    result = _audioClient.GetMixFormat(out waveFormat);

    AudioClientStreamFlags streamFlag = AudioClientStreamFlags.None;
    if (_audioDataFlow == AudioDataFlow.eRender)
    {
        streamFlag = AudioClientStreamFlags.Loopback;
    }

    result = _audioClient.Initialize(AudioClientMode.Shared, streamFlag, 10000000, 0, waveFormat, ref _sessionGuid);
    result = _audioClient.Start();

    //Change wave format here
    SetupWaveFormat(waveFormat);

    result = _audioEndpointVolume.GetChannelCount(out _channelCount);
}
private void InitializeAudio(AudioDataFlow audioFlow, IMMDeviceEnumerator deviceEnumerator)
{
    //Get Audio Device
    COMResult result = deviceEnumerator.GetDefaultAudioEndpoint(audioFlow, EndPointRole.eMultimedia, out _audioDevice);

    //Register End point notification
    _notifyClient = new MMNotificationClient();
    result = deviceEnumerator.RegisterEndpointNotificationCallback(_notifyClient);

    //Get Audio Client from device
    result = _audioDevice.Activate(typeof(IAudioClient).GUID, 0, IntPtr.Zero, out object obj);
    _audioClient = (IAudioClient)obj;

    //Get Audio Meter from device
    result = _audioDevice.Activate(typeof(IAudioMeterInformation).GUID, 0, IntPtr.Zero, out obj);
    _audioMeter = (IAudioMeterInformation)obj;

    //Initialize Audio Client.
    _sessionGuid = new Guid();
    result = _audioClient.GetMixFormat(out waveFormat);

    AudioClientStreamFlags streamFlag = AudioClientStreamFlags.None;
    if (audioFlow == AudioDataFlow.eRender)
    {
        streamFlag = AudioClientStreamFlags.Loopback;
    }

    result = _audioClient.Initialize(AudioClientMode.Shared, streamFlag, 10000000, 0, waveFormat, ref _sessionGuid);

    //Get Capture Client.
    result = _audioClient.GetService(typeof(IAudioCaptureClient).GUID, out obj);
    Marshal.ThrowExceptionForHR((int)result);
    _audioCaptureClient = (IAudioCaptureClient)obj;

    result = _audioClient.Start();

    //Change wave format here
    SetupWaveFormat(waveFormat);
}
/// <summary>
/// Initializes the Audio Client
/// </summary>
/// <param name="shareMode">Share Mode</param>
/// <param name="streamFlags">Stream Flags</param>
/// <param name="bufferDuration">Buffer Duration</param>
/// <param name="periodicity">Periodicity</param>
/// <param name="waveFormat">Wave Format</param>
/// <param name="audioSessionGuid">Audio Session GUID (use Guid.Empty if no specific session is needed)</param>
public void Initialize(AudioClientShareMode shareMode, AudioClientStreamFlags streamFlags, long bufferDuration, long periodicity, WaveFormat waveFormat, Guid audioSessionGuid)
{
    audioClientInterface.Initialize(shareMode, streamFlags, bufferDuration, periodicity, waveFormat, ref audioSessionGuid);
    // may have changed the mix format so reset it
    mixFormat = null;
}
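// Minimal usage sketch for a wrapper like the one above, assuming NAudio-style types
// (MMDeviceEnumerator, MMDevice.AudioClient, AudioClient.MixFormat). This is an
// illustration only, not the original project's code; adjust the names to the local wrapper.
using System;
using NAudio.CoreAudioApi;

class LoopbackInitExample
{
    static void Main()
    {
        var enumerator = new MMDeviceEnumerator();
        // Default render endpoint; loopback capture taps whatever is being played.
        MMDevice device = enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
        AudioClient client = device.AudioClient;

        // 10,000,000 REFERENCE_TIME units = 1 second of buffer; periodicity 0 lets the
        // engine pick its own period in shared mode; Guid.Empty means no explicit session.
        client.Initialize(AudioClientShareMode.Shared,
                          AudioClientStreamFlags.Loopback,
                          10_000_000, 0,
                          client.MixFormat,
                          Guid.Empty);
        client.Start();
        Console.WriteLine($"Buffer size: {client.BufferSize} frames");
        client.Stop();
        client.Dispose();
    }
}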
/// <summary>
/// Initializes the Audio Client
/// </summary>
/// <param name="shareMode">Share Mode</param>
/// <param name="streamFlags">Stream Flags</param>
/// <param name="bufferDuration">Buffer Duration</param>
/// <param name="periodicity">Periodicity</param>
/// <param name="waveFormat">Wave Format</param>
/// <param name="audioSessionGuid">Audio Session GUID (use Guid.Empty if no specific session is needed)</param>
public void Initialize(AudioClientShareMode shareMode, AudioClientStreamFlags streamFlags, long bufferDuration, long periodicity, WaveFormatProvider waveFormat, Guid audioSessionGuid)
{
    int hresult = audioClientInterface.Initialize(shareMode, streamFlags, bufferDuration, periodicity, waveFormat, ref audioSessionGuid);
    Marshal.ThrowExceptionForHR(hresult);
    // may have changed the mix format so reset it
    mixFormat = null;
}
/// <summary>
/// Initializes the Audio Client
/// </summary>
/// <param name="shareMode">Share Mode</param>
/// <param name="streamFlags">Stream Flags</param>
/// <param name="bufferDuration">Buffer Duration</param>
/// <param name="periodicity">Periodicity</param>
/// <param name="waveFormat">Wave Format</param>
/// <param name="audioSessionGuid">Audio Session GUID (use Guid.Empty if no specific session is needed)</param>
/// <returns>The HRESULT returned by IAudioClient::Initialize (0 on success)</returns>
public int Initialize(ShareMode shareMode, StreamFlags streamFlags, long bufferDuration, long periodicity, WaveFormat waveFormat, Guid audioSessionGuid)
{
    this.shareMode = shareMode;
    int hret = audioClientInterface.Initialize(shareMode, streamFlags, bufferDuration, periodicity, waveFormat, ref audioSessionGuid);
    // may have changed the mix format so reset it
    mixFormat = null;
    return hret;
}
public void Initialize(EAudioClientShareMode shareMode, EAudioClientStreamFlags streamFlags, long bufferDuration, long periodicity, WaveFormat format, Guid audioSessionGuid)
{
    var hresult = _realAudioClient.Initialize(shareMode, streamFlags, bufferDuration, periodicity, format, ref audioSessionGuid);
    Marshal.ThrowExceptionForHR(hresult);
    // may have changed the mix format so reset it
    mixFormat = null;
}
/// <summary>
/// Initializes the Audio Client
/// </summary>
/// <param name="shareMode">Share Mode</param>
/// <param name="streamFlags">Stream Flags</param>
/// <param name="bufferDuration">Buffer Duration</param>
/// <param name="periodicity">Periodicity</param>
/// <param name="waveFormat">Wave Format</param>
/// <param name="audioSessionGuid">Audio Session GUID (use Guid.Empty if no specific session is needed)</param>
/// <returns>The HRESULT returned by IAudioClient::Initialize (0 on success)</returns>
public int Initialize(AudioClientShareMode shareMode, AudioClientStreamFlags streamFlags, long bufferDuration, long periodicity, ref WaveFormatExtensible waveFormat, Guid audioSessionGuid)
{
    int hresult = audioClientInterface.Initialize(shareMode, streamFlags, bufferDuration, periodicity, ref waveFormat, ref audioSessionGuid);
    if (hresult != 0)
    {
        this.EventWriterDLL.WriteLine(EventWriterDLL.SeverityTypes.Error, 0x01, "Error Code in AudioClient::Initialize: " + hresult);
    }
    // may have changed the mix format so reset it
    mixFormat = new WaveFormatExtensible();
    return hresult;
}
private void StartSilenceGeneration()
{
    IntPtr mixFormatPtr = IntPtr.Zero;
    try
    {
        Checked(_endpoint.Activate(ref IAudioClientId, ClsCtxEnum.All, IntPtr.Zero, out var obj), "Silence.Activate");
        _audioClientForRendering = (IAudioClient)obj;

        Checked(_audioClientForRendering.GetMixFormat(out mixFormatPtr), "Silence.GetMixFormat");
        WaveFormatEx format = (WaveFormatEx)Marshal.PtrToStructure(mixFormatPtr, typeof(WaveFormatEx));
        CheckFormat(format);

        Checked(_audioClientForRendering.Initialize(AudioClientShareModeEnum.Shared, AudioClientStreamFlagsEnum.None, 10_000_000 * 5, 0, mixFormatPtr, Guid.Empty), "Silence.Initialize");
        Checked(_audioClientForRendering.GetBufferSize(out var bufferSize), "Silence.GetBufferSize");
        Checked(_audioClientForRendering.GetService(IAudioRenderClientId, out var renderObj), "Silence.GetService");
        _audioRenderClient = (IAudioRenderClient)renderObj;

        Checked(_audioClientForRendering.Start(), "Silence.Start");

        _silenceThread = new Thread(() => SilenceGenerationRoutine(bufferSize, format));
        _silenceThread.Name = "Silence generator";
        _silenceThread.Start();
    }
    catch (Exception e)
    {
        ReleaseComObject(ref _audioClientForRendering);
        ReleaseComObject(ref _audioRenderClient);
        Core.LogError(e, "Failed to StartSilenceGeneration");
    }
    finally
    {
        if (mixFormatPtr != IntPtr.Zero)
        {
            Marshal.FreeCoTaskMem(mixFormatPtr);
        }
    }
}
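// The SilenceGenerationRoutine started above is not shown in this excerpt. Below is a
// hypothetical sketch of what such a loop typically does with WASAPI: keep the render
// buffer topped up with silent frames so a loopback capture never starves. The interop
// signatures (GetCurrentPadding, GetBuffer, ReleaseBuffer), the field name SamplesPerSec,
// and the _silenceStopRequested flag are assumptions for illustration, not the project's
// actual declarations.
private void SilenceGenerationRoutine(uint bufferSize, WaveFormatEx format)
{
    const uint AUDCLNT_BUFFERFLAGS_SILENT = 0x2; // engine treats the released buffer as silence

    // Refill roughly twice per buffer duration (bufferSize frames at format.SamplesPerSec Hz).
    int sleepMs = (int)(bufferSize * 500 / format.SamplesPerSec);

    while (!_silenceStopRequested) // assumed flag set by StopSilenceThread()
    {
        // How many frames are still queued? Top the rest of the buffer up with silence.
        Checked(_audioClientForRendering.GetCurrentPadding(out uint padding), "Silence.GetCurrentPadding");
        uint framesToWrite = bufferSize - padding;
        if (framesToWrite > 0)
        {
            Checked(_audioRenderClient.GetBuffer(framesToWrite, out IntPtr _), "Silence.GetBuffer");
            // No need to zero the memory ourselves: the SILENT flag asks the engine to do it.
            Checked(_audioRenderClient.ReleaseBuffer(framesToWrite, AUDCLNT_BUFFERFLAGS_SILENT), "Silence.ReleaseBuffer");
        }
        Thread.Sleep(sleepMs);
    }
}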
public void Initialize(
    AudioClientShareMode shareMode,
    AudioClientStreamFlags streamFlags,
    long bufferDuration,
    long periodicity,
    WAVEFORMATEXTENSIBLE format,
    Guid audioSessionGuid)
{
    int hr = _RealClient.Initialize(shareMode, streamFlags, bufferDuration, periodicity, format, ref audioSessionGuid);
    Marshal.ThrowExceptionForHR(hr);

    if ((streamFlags & AudioClientStreamFlags.EventCallback) != 0)
    {
        _audioSampleReady = new AutoResetEvent(false);
        IntPtr eventHandle = CreateEventEx(IntPtr.Zero, "audioSampleReady", CreateEventFlags.None, AccessRight.Synchronize | AccessRight.EventModifyState);
        _audioSampleReady.SafeWaitHandle = new Microsoft.Win32.SafeHandles.SafeWaitHandle(eventHandle, true);

        hr = _RealClient.SetEventHandle(eventHandle);
        Marshal.ThrowExceptionForHR(hr);

        _audioSampleReadyRegistered = ThreadPool.RegisterWaitForSingleObject(
            _audioSampleReady, new WaitOrTimerCallback(sampleReady), null, -1, false);
    }

    _isInitialized = true;
}
public CoreAudio(bool microphone = false)
{
    const uint REFTIMES_PER_SEC = 10000000;
    const uint CLSCTX_INPROC_SERVER = 1;
    Guid clsid = new Guid("BCDE0395-E52F-467C-8E3D-C4579291692E");
    Guid IID_IUnknown = new Guid("00000000-0000-0000-C000-000000000046");

    oEnumerator = null;
    uint hResult = CoCreateInstance(ref clsid, null, CLSCTX_INPROC_SERVER, ref IID_IUnknown, out oEnumerator);
    if (hResult != 0 || oEnumerator == null)
    {
        throw new Exception("CoCreateInstance() pInvoke failed");
    }

    iMde = oEnumerator as IMMDeviceEnumerator;
    if (iMde == null)
    {
        throw new Exception("COM cast failed to IMMDeviceEnumerator");
    }

    IntPtr pDevice = IntPtr.Zero;
    //iMde.EnumAudioEndpoints(EDataFlow.eCapture, DEVICE_STATE_ACTIVE, ref pDevice);
    int retVal;
    if (microphone)
    {
        retVal = iMde.GetDefaultAudioEndpoint(EDataFlow.eCapture, ERole.eConsole, ref pDevice);
    }
    else
    {
        retVal = iMde.GetDefaultAudioEndpoint(EDataFlow.eRender, ERole.eConsole, ref pDevice);
    }
    if (retVal != 0)
    {
        throw new Exception("IMMDeviceEnumerator.GetDefaultAudioEndpoint()");
    }

    //int dwStateMask = DEVICE_STATE_ACTIVE | DEVICE_STATE_NOTPRESENT | DEVICE_STATE_UNPLUGGED;
    //IntPtr pCollection = IntPtr.Zero;
    //retVal = iMde.EnumAudioEndpoints(EDataFlow.eRender, dwStateMask, ref pCollection);
    //if (retVal != 0)
    //{
    //    throw new Exception("IMMDeviceEnumerator.EnumAudioEndpoints()");
    //}

    imd = (IMMDevice)Marshal.GetObjectForIUnknown(pDevice);
    if (imd == null)
    {
        throw new Exception("COM cast failed to IMMDevice");
    }

    //Activate IAudioEndpointVolume on the endpoint
    Guid iid = new Guid("5CDF2C82-841E-4546-9722-0CF74078229A");
    uint dwClsCtx = (uint)CLSCTX.CLSCTX_ALL;
    IntPtr pActivationParams = IntPtr.Zero;
    IntPtr pEndPoint = IntPtr.Zero;
    retVal = imd.Activate(iid, dwClsCtx, pActivationParams, out pEndPoint);
    if (retVal != 0)
    {
        throw new Exception("IMMDevice.Activate()");
    }
    iAudioEndpoint = (IAudioEndpointVolume)Marshal.GetObjectForIUnknown(pEndPoint);
    if (iAudioEndpoint == null)
    {
        throw new Exception("COM cast failed to IAudioEndpointVolume");
    }

    //Activate IAudioClient on the endpoint
    iid = new Guid("1CB9AD4C-DBFA-4c32-B178-C2F568A703B2");
    dwClsCtx = (uint)CLSCTX.CLSCTX_ALL;
    pActivationParams = IntPtr.Zero;
    pEndPoint = IntPtr.Zero;
    retVal = imd.Activate(iid, dwClsCtx, pActivationParams, out pEndPoint);
    if (retVal != 0)
    {
        throw new Exception("IAudioClient.Activate() " + Convert.ToString(retVal, 2));
    }
    iAudioClient = (IAudioClient)Marshal.GetObjectForIUnknown(pEndPoint);
    if (iAudioClient == null)
    {
        throw new Exception("COM cast failed to iAudioClient");
    }

    ulong processInterval;
    ulong minimumInterval;
    retVal = iAudioClient.GetDevicePeriod(out processInterval, out minimumInterval);
    if (retVal != 0)
    {
        throw new Exception("iAudioClient.GetDevicePeriod()");
    }

    //Request 16-bit stereo PCM at 44.1 kHz
    waveformat = new WAVEFORMATEX();
    waveformat.wFormatTag = (ushort)WaveFormatEncoding.Pcm;
    waveformat.nChannels = 2;
    waveformat.nBlockAlign = 4;
    waveformat.wBitsPerSample = 16;
    waveformat.nSamplesPerSec = 44100;
    waveformat.cbSize = 0;
    waveformat.nAvgBytesPerSec = 176400;

    IntPtr reqForm = Marshal.AllocHGlobal(Marshal.SizeOf(waveformat));
    Marshal.StructureToPtr(waveformat, reqForm, false);
    IntPtr propForm = Marshal.AllocHGlobal(Marshal.SizeOf(waveformat));
    retVal = iAudioClient.IsFormatSupported(AUDCLNT_SHAREMODE.AUDCLNT_SHAREMODE_SHARED, reqForm, out propForm);
    if (retVal != 0)
    {
        throw new Exception("IAudioClient.IsFormatSupported()");
    }

    if (microphone)
    {
        retVal = iAudioClient.Initialize(AUDCLNT_SHAREMODE.AUDCLNT_SHAREMODE_EXCLUSIVE, 0, 2000000, 0, reqForm, Guid.Empty);
    }
    else
    {
        retVal = iAudioClient.Initialize(AUDCLNT_SHAREMODE.AUDCLNT_SHAREMODE_SHARED, AUDCLNT_STREAMFLAGS_LOOPBACK, 2000000, 0, reqForm, Guid.Empty);
    }
    if (retVal != 0)
    {
        throw new Exception("IAudioClient.Initialize() " + retVal);
    }

    uint buffersize = 0;
    retVal = iAudioClient.GetBufferSize(out buffersize);
    if (retVal != 0)
    {
        throw new Exception("IAudioClient.GetBufferSize()");
    }

    //Get IAudioCaptureClient from the initialized client
    iid = new Guid("C8ADBD64-E71E-48a0-A4DE-185C395CD317");
    IntPtr capclient = IntPtr.Zero;
    retVal = iAudioClient.GetService(iid, out capclient);
    if (retVal != 0)
    {
        throw new Exception("IAudioClient.GetService()");
    }
    iAudioCaptureClient = (IAudioCaptureClient)Marshal.GetObjectForIUnknown(capclient);
    if (iAudioCaptureClient == null)
    {
        throw new Exception("COM cast failed to iAudioCaptureClient");
    }

    hnsActualDuration = (double)(REFTIMES_PER_SEC * buffersize / waveformat.nSamplesPerSec); // 8391 smallest possible value

    recordingthread = new Thread(recordingloop);
    recordingthread.IsBackground = false;
    recordingthread.Start();
}
protected void InternalSetup(int dataFlow, int flags, int bufferSize = 0)
{
    lock (mutex)
    {
        int hr;
        object obj;
        Guid guid;

        if (endpoint == null)
        {
            if (enumerator == null)
            {
                enumerator = Activator.CreateInstance(typeof(MMDeviceEnumerator)) as IMMDeviceEnumerator;
                if (enumerator == null)
                {
                    throw new NotSupportedException();
                }
            }
            hr = enumerator.GetDefaultAudioEndpoint((DataFlowEnum)dataFlow, RoleEnum.Multimedia, out endpoint);
            if (hr != 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
            if (endpoint == null)
            {
                throw new NotSupportedException();
            }
        }

        if (audioClient != null)
        {
            Marshal.ReleaseComObject(audioClient);
            audioClient = null;
        }

        guid = IID_IAudioClient;
        hr = endpoint.Activate(ref guid, ClsCtxEnum.ALL, IntPtr.Zero, out obj);
        if (hr != 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        audioClient = obj as IAudioClient;
        if (audioClient == null)
        {
            throw new NotSupportedException();
        }

        IntPtr mixFormatPtr = IntPtr.Zero;
        try
        {
            hr = audioClient.GetMixFormat(out mixFormatPtr);
            if (hr != 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            WaveFormat outputFormat = (WaveFormat)Marshal.PtrToStructure(mixFormatPtr, typeof(WaveFormat));
            outputIsFloat = false;
            if (outputFormat.ExtraSize >= WaveFormatEx.WaveFormatExExtraSize)
            {
                WaveFormatEx ex = (WaveFormatEx)Marshal.PtrToStructure(mixFormatPtr, typeof(WaveFormatEx));
                if (ex.SubFormat == IeeeFloat)
                {
                    outputIsFloat = true;
                }
            }

            sampleRate = outputFormat.SampleRate;
            channelCount = outputFormat.Channels;
            bitsPerSample = outputFormat.BitsPerSample;

            if (bufferSize <= 0)
            {
                bufferSize = 8192;
            }
            latencyMs = (int)(bufferSize * 500 / sampleRate);
            if (latencyMs <= 0)
            {
                latencyMs = 1;
            }

            hr = audioClient.Initialize(AudioClientShareModeEnum.Shared, (AudioClientStreamFlagsEnum)flags, latencyMs * 40000, 0, mixFormatPtr, Guid.Empty);
            if (hr != 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
        }
        finally
        {
            if (mixFormatPtr != IntPtr.Zero)
            {
                Marshal.FreeCoTaskMem(mixFormatPtr);
            }
        }

        hr = audioClient.GetBufferSize(out outputBufferSize);
        if (hr != 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        this.bufferSize = bufferSize;
        buffers = new float[channelCount][];
        for (int i = 0; i < buffers.Length; i++)
        {
            buffers[i] = new float[bufferSize];
        }
        isInited = true;
    }
}
internal static unsafe Factory_WindowsCoreApi.IAudioClient CreateClient(IMMDevice IDevice, out WAVEFORMATEX format, out Type dataFormat)
{
    Factory_WindowsCoreApi.IAudioClient IAudioClient;
    IDevice.Activate(Factory_WindowsCoreApi.IID_IAudioClient, (uint)PlaybackClient_WindowsCoreApi.CLSCTX.CLSCTX_ALL, new IntPtr(0), out IAudioClient);

    IntPtr rawFormatPtr = new IntPtr();
    IAudioClient.GetMixFormat(out rawFormatPtr);
    WAVEFORMATEX* pFormat = (WAVEFORMATEX*)rawFormatPtr.ToPointer();

    if (pFormat->wBitsPerSample % 8 != 0)
    {
        throw new Exception("Unsupported bits per sample value");
    }

    dataFormat = typeof(byte);
    if (pFormat->wFormatTag == 0xFFFE)
    {
        WAVEFORMATEXTENSIBLE* pFormatEx = (WAVEFORMATEXTENSIBLE*)pFormat;
        if (pFormatEx->SubFormat == FormatEx_IEEE)
        {
            switch (pFormat->wBitsPerSample)
            {
                case 0:
                case 32: dataFormat = typeof(float); break;
                case 64: dataFormat = typeof(double); break;
                default: throw new Exception("Unsupported underlying data format");
            }
        }
        else if (pFormatEx->SubFormat == FormatEx_PCM)
        {
            switch (pFormat->wBitsPerSample)
            {
                case 8: dataFormat = typeof(byte); break;
                case 16: dataFormat = typeof(Int16); break;
                case 32: dataFormat = typeof(Int32); break;
                case 64: dataFormat = typeof(Int64); break;
                default: throw new Exception("Unsupported underlying data format");
            }
        }
    }
    else
    {
        switch ((WAVE_FORMAT)pFormat->wFormatTag)
        {
            case WAVE_FORMAT.PCM:
                switch (pFormat->wBitsPerSample)
                {
                    case 8: dataFormat = typeof(byte); break;
                    case 16: dataFormat = typeof(Int16); break;
                    case 32: dataFormat = typeof(Int32); break;
                    case 64: dataFormat = typeof(Int64); break;
                    default: throw new Exception("Unsupported underlying data format");
                }
                break;
            case WAVE_FORMAT.IEEE:
                switch (pFormat->wBitsPerSample)
                {
                    case 0:
                    case 32: dataFormat = typeof(float); break;
                    case 64: dataFormat = typeof(double); break;
                    default: throw new Exception("Unsupported underlying data format");
                }
                break;
        }
    }

    try
    {
        IAudioClient.Initialize(Factory_WindowsCoreApi.AUDCLNT_SHAREMODE.AUDCLNT_SHAREMODE_SHARED, 0, 10000000, 0, new IntPtr(pFormat), Guid.Empty);
    }
    catch
    {
        throw new Exception("Unexpected error when creating the client");
    }

    format = *pFormat;
    return IAudioClient;
}
public WaveFormatEx Open()
{
    StopSilenceThread();
    Log.Information("Opening DesktopAudio");

    IMMDeviceEnumerator deviceEnumerator = null;
    IntPtr mixFormatPtr = IntPtr.Zero;
    try
    {
        bool render = true;
        deviceEnumerator = Activator.CreateInstance(typeof(MMDeviceEnumerator)) as IMMDeviceEnumerator;
        var res = deviceEnumerator.GetDefaultAudioEndpoint(
            render ? DataFlowEnum.Render : DataFlowEnum.Capture,
            render ? RoleEnum.Console : RoleEnum.Communications,
            out _endpoint);

        if (render)
        {
            StartSilenceGeneration();
        }

        Checked(_endpoint.Activate(ref IAudioClientId, ClsCtxEnum.All, IntPtr.Zero, out var obj), "Activate");
        _audioClient = (IAudioClient)obj;

        Checked(_audioClient.GetMixFormat(out mixFormatPtr), "GetMixFormat");
        WaveFormatEx outputFormat = (WaveFormatEx)Marshal.PtrToStructure(mixFormatPtr, typeof(WaveFormatEx));

        if (!render) // for render it is checked in the StartSilenceGeneration();
        {
            CheckFormat(outputFormat);
        }
        _bytesPerFrame = outputFormat.BlockAlign;

        var flags = AudioClientStreamFlagsEnum.StreamFlagsEventCallback |
                    (render ? AudioClientStreamFlagsEnum.StreamFlagsLoopback : AudioClientStreamFlagsEnum.None);

        Checked(_audioClient.Initialize(AudioClientShareModeEnum.Shared, flags, 10_000_000 * 5, 0, mixFormatPtr, Guid.Empty), "Initialize");

        Checked(_audioClient.GetService(IAudioCaptureClientId, out var captureObj), "GetService");
        _audioCaptureClient = (IAudioCaptureClient)captureObj;

#pragma warning disable CS0618 // Type or member is obsolete
        Checked(_audioClient.SetEventHandle(_dataAvailable.Handle), "SetEventHandle");
#pragma warning restore CS0618 // Type or member is obsolete

        Checked(_audioClient.Start(), "Start");

        return outputFormat;
    }
    catch (Exception e)
    {
        Core.LogError(e, "Open desktop audio failed");
        StopSilenceThread();
        ReleaseComFields();
        throw;
    }
    finally
    {
        if (mixFormatPtr != IntPtr.Zero)
        {
            Marshal.FreeCoTaskMem(mixFormatPtr);
        }
        ReleaseComObject(ref deviceEnumerator);
    }
}
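// Hypothetical sketch of the capture side that pairs with Open(): a loop that waits on
// _dataAvailable (the event handle registered via SetEventHandle) and drains
// _audioCaptureClient packet by packet. The interop signatures for GetNextPacketSize,
// GetBuffer and ReleaseBuffer are assumed to match the project's own declarations, and
// _stopRequested and the onSamples callback are illustrative names, not part of the source.
private void CaptureRoutine(Action<IntPtr, int> onSamples)
{
    const uint AUDCLNT_BUFFERFLAGS_SILENT = 0x2;

    while (!_stopRequested) // assumed flag toggled by the caller when closing
    {
        _dataAvailable.WaitOne(); // signalled by the audio engine each period

        Checked(_audioCaptureClient.GetNextPacketSize(out uint packetFrames), "GetNextPacketSize");
        while (packetFrames > 0)
        {
            Checked(_audioCaptureClient.GetBuffer(out IntPtr data, out uint frames, out uint flags,
                                                  out ulong devicePosition, out ulong qpcPosition), "GetBuffer");

            int bytes = (int)frames * _bytesPerFrame;
            if ((flags & AUDCLNT_BUFFERFLAGS_SILENT) != 0)
            {
                // The packet represents silence; consumers should treat its contents as zeros.
            }
            onSamples(data, bytes);

            Checked(_audioCaptureClient.ReleaseBuffer(frames), "ReleaseBuffer");
            Checked(_audioCaptureClient.GetNextPacketSize(out packetFrames), "GetNextPacketSize");
        }
    }
}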