/// <summary>
/// Configures the driver for rendering: tears down any previous session, initializes the
/// underlying audio client, validates the negotiated sample format, and acquires the
/// <c>IAudioRenderClient</c> service.
/// </summary>
/// <param name="renderCallback">Callback invoked to produce audio frames; must not be null.</param>
/// <param name="bufferSize">Requested buffer size; 0 lets the driver choose a default.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="renderCallback"/> is null.</exception>
/// <exception cref="NotSupportedException">Thrown when the negotiated bit depth is unsupported.</exception>
public void Setup(Func<IAudioOutDriver, float[][], int> renderCallback, int bufferSize = 0)
{
    // Validate before tearing down any existing state.
    if (renderCallback == null)
    {
        throw new ArgumentNullException(nameof(renderCallback)); // was a string literal; nameof is refactor-safe
    }

    Stop();
    try
    {
        Cleanup();
        InternalSetup((int)DataFlowEnum.Render, (int)AudioClientStreamFlagsEnum.None, bufferSize);

        // Only 32/64-bit float or 16/32-bit integer sample formats are supported.
        if ((outputIsFloat && bitsPerSample != 32 && bitsPerSample != 64) ||
            (!outputIsFloat && bitsPerSample != 16 && bitsPerSample != 32))
        {
            throw new NotSupportedException();
        }

        audioRenderClient = GetService<IAudioRenderClient>(IID_IAudioRenderClient);
        this.renderCallback = renderCallback;
    }
    catch
    {
        // Leave the object fully released before propagating the failure.
        Dispose();
        throw;
    }
}
/// <summary>
/// Releases the render-client COM interface, if one has been acquired.
/// Safe to call repeatedly; the field is cleared before release.
/// </summary>
protected override void Cleanup()
{
    var client = audioRenderClient;
    if (client == null)
    {
        return;
    }

    audioRenderClient = null;
    Marshal.ReleaseComObject(client);
}
/// <summary>
/// Disposes the <see cref="WasapiRenderClient"/> object: stops the render thread,
/// closes the wait event, and releases every COM interface the client holds.
/// </summary>
public void Dispose()
{
    // Stop the render thread first so nothing below is still in use while we release it.
    if (this.renderThread != null)
    {
        this.shutdownEvent.Set();
        this.renderThread.Join();
        this.renderThread = null;
    }

    if (this.shutdownEvent != null)
    {
        this.shutdownEvent.Close();
        this.shutdownEvent = null;
    }

    // Release every COM interface and clear the owning field so Dispose is idempotent.
    ReleaseAndClear(ref this.audioClient);
    ReleaseAndClear(ref this.renderClient);
    ReleaseAndClear(ref this.resampler);
    ReleaseAndClear(ref this.inputBuffer);
    ReleaseAndClear(ref this.inputSample);
    ReleaseAndClear(ref this.outputBuffer);
    ReleaseAndClear(ref this.outputSample);
}

/// <summary>
/// Releases a COM object (when non-null) and nulls the owning field.
/// </summary>
private static void ReleaseAndClear<T>(ref T comObject)
    where T : class
{
    if (comObject != null)
    {
        Marshal.ReleaseComObject(comObject);
        comObject = null;
    }
}
/// <summary>
/// Releases the underlying <c>IAudioRenderClient</c> COM interface.
/// </summary>
public void Dispose()
{
    if (this.audioRenderClientInterface != null)
    {
        Marshal.ReleaseComObject(this.audioRenderClientInterface);
        this.audioRenderClientInterface = null;
    }

    // Suppress finalization unconditionally: the original only suppressed on the
    // first call, so a redundant Dispose left the finalizer registered.
    GC.SuppressFinalize(this);
}
/// <summary>
/// Activates the default render endpoint, initializes a shared-mode, event-driven
/// <c>IAudioClient</c> at the device mix format, acquires the render service, and
/// starts the stream.
/// </summary>
private void InitializeAudioDevice()
{
    // Get default audio device
    MMDeviceEnumerator deviceEnumeratorClass = new MMDeviceEnumerator();
    IMMDeviceEnumerator deviceEnumerator = (IMMDeviceEnumerator)deviceEnumeratorClass;
    IMMDevice defaultDevice;
    deviceEnumerator.GetDefaultAudioEndpoint(EDataFlow.eRender, ERole.eMultimedia, out defaultDevice);

    // Log device name (read-only property store access is sufficient for the friendly name).
    IPropertyStore store;
    defaultDevice.OpenPropertyStore(STGM.STGM_READ, out store);
    PROPVARIANT pv = new PROPVARIANT();
    PropertyKey pkey = PKEY.PKEY_DeviceInterface_FriendlyName;
    store.GetValue(ref pkey, out pv);
    Debug.WriteLine("Using audio device '{0}'", pv.Value);

    // Retrieve IAudioClient (third Activate argument is an optional activation-params
    // pointer; zero here).
    Guid iid = new Guid(Constants.IID_IAudioClient);
    IntPtr propVar = IntPtr.Zero;
    object obj;
    defaultDevice.Activate(ref iid, CLSCTX.CLSCTX_ALL, ref propVar, out obj);
    this.audioClient = (IAudioClient)obj;

    // Get default format. NOTE(review): GetMixFormat allocates this pointer with
    // CoTaskMem and it is never freed here — confirm whether UpdateWaveFormatInfo
    // takes ownership; otherwise this leaks per call.
    IntPtr defaultFormat;
    this.audioClient.GetMixFormat(out defaultFormat);
    this.UpdateWaveFormatInfo(defaultFormat);

    // Initialize IAudioClient in shared, event-callback mode; durations of 0 let
    // the engine pick its own buffer size.
    Guid g = Guid.Empty;
    this.audioClient.Initialize(AUDCLNT_SHAREMODE.AUDCLNT_SHAREMODE_SHARED, (uint)AUDCLNT_STREAMFLAGS.AUDCLNT_STREAMFLAGS_EVENTCALLBACK, 0, 0, defaultFormat, ref g);
    this.audioClient.GetBufferSize(out this.wasapiBufferSize);
    this.audioClient.SetEventHandle(this.bufferReadyEvent.SafeWaitHandle.DangerousGetHandle());

    // Retrieve IAudioRenderClient
    iid = new Guid(Constants.IID_IAudioRenderClient);
    object ppv;
    this.audioClient.GetService(ref iid, out ppv);
    this.audioRenderClient = (IAudioRenderClient)ppv;

    // Start processing samples
    this.audioClient.Start();
}
/// <summary>
/// Release the COM object.
/// </summary>
public void Dispose()
{
    if (audioRenderClientInterface != null)
    {
        // Although GC would do this for us eventually, we want it done now
        // to let us reopen WASAPI.
        Marshal.ReleaseComObject(audioRenderClientInterface);
        audioRenderClientInterface = null;
    }

    // Suppress finalization even on a redundant Dispose; the original only
    // suppressed on the first call.
    GC.SuppressFinalize(this);
}
/// <summary>
/// Initialize WASAPI in timer driven mode, and retrieve a render client for the transport.
/// </summary>
private void InitializeAudioEngine()
{
    IntPtr mixFormatPtr = WaveFormat.MarshalToPtr(this.mixFormat);
    try
    {
        // Latency is in ms; REFERENCE_TIME is in 100ns units, hence * 10000.
        this.audioClient.Initialize(
            AudioClientShareMode.Shared,
            AudioClientStreamFlags.NoPersist,
            this.engineLatencyInMs * 10000,
            0,
            mixFormatPtr,
            Guid.Empty);
    }
    finally
    {
        // Free the marshalled format even when Initialize throws
        // (the original leaked the HGlobal block on failure).
        Marshal.FreeHGlobal(mixFormatPtr);
    }

    this.bufferFrameCount = this.audioClient.GetBufferSize();

    object obj = this.audioClient.GetService(new Guid(Guids.IAudioRenderClientIIDString));
    this.renderClient = (IAudioRenderClient)obj;
}
// Binds an already-initialized WASAPI audio client to a stream-backed playback loop:
// acquires the render service, sizes the staging buffer, and creates (but does not
// start) the playback thread.
internal PlaybackClient_WindowsCoreApi(Factory_WindowsCoreApi.IAudioClient IAudioClient, System.IO.Stream Stream, int NumChannel, int FrameSize, uint SamplesRate, Type DataFormat)
{
    object opaqueService;
    _IAudioClient = IAudioClient;
    // Acquire the render-side service interface from the shared client.
    _IAudioClient.GetService(ref IID_IAudioRenderClient, out opaqueService);
    _IAudioRenderClient = (IAudioRenderClient)opaqueService;
    _IAudioClient.GetBufferSize(out _BufferFrameCount);
    _Stream = Stream;
    _FrameSize = FrameSize;
    // Buffer duration in ms. NOTE(review): integer division — a SamplesRate below
    // 1000 divides by zero, and rates that are not multiples of 1000 lose
    // precision; confirm expected input range.
    _BufferDuration = _BufferFrameCount / (SamplesRate / 1000);
    _FullBuffer = new byte[_FrameSize * _BufferFrameCount];
    // Thread body is Loop; it is created here and started elsewhere.
    _Thread = new System.Threading.Thread(Loop);
    _ChannelCount = NumChannel;
    _SampleRate = SamplesRate;
    _Format = DataFormat;
}
/// <summary>
/// Activates a render client on the endpoint at the device mix format and starts a
/// background thread that feeds it silence. Failure is non-fatal: acquired COM
/// objects are released and the error is logged.
/// </summary>
private void StartSilenceGeneration()
{
    IntPtr mixFormatPtr = IntPtr.Zero;
    try
    {
        Checked(_endpoint.Activate(ref IAudioClientId, ClsCtxEnum.All, IntPtr.Zero, out var obj), "Silence.Activate");
        _audioClientForRendering = (IAudioClient)obj;

        Checked(_audioClientForRendering.GetMixFormat(out mixFormatPtr), "Silence.GetMixFormat");
        WaveFormatEx format = (WaveFormatEx)Marshal.PtrToStructure(mixFormatPtr, typeof(WaveFormatEx));
        CheckFormat(format);

        // 5-second buffer: REFERENCE_TIME is in 100ns units (10_000_000 per second).
        Checked(_audioClientForRendering.Initialize(AudioClientShareModeEnum.Shared, AudioClientStreamFlagsEnum.None, 10_000_000 * 5, 0, mixFormatPtr, Guid.Empty), "Silence.Initialize");
        Checked(_audioClientForRendering.GetBufferSize(out var bufferSize), "Silence.GetBufferSize");
        Checked(_audioClientForRendering.GetService(IAudioRenderClientId, out var renderObj), "Silence.GetService");
        _audioRenderClient = (IAudioRenderClient)renderObj;

        Checked(_audioClientForRendering.Start(), "Silence.Start");

        _silenceThread = new Thread(() => SilenceGenerationRoutine(bufferSize, format));
        _silenceThread.Name = "Silence generator";
        _silenceThread.Start();
    }
    catch (Exception e)
    {
        // Best effort: release whatever was acquired and log; silence generation is optional.
        ReleaseComObject(ref _audioClientForRendering);
        ReleaseComObject(ref _audioRenderClient);
        Core.LogError(e, "Failed to StartSilenceGeneration"); // fixed log typo: "Faied"
    }
    finally
    {
        // GetMixFormat allocated this with CoTaskMem; free it on every path.
        if (mixFormatPtr != IntPtr.Zero)
        {
            Marshal.FreeCoTaskMem(mixFormatPtr);
        }
    }
}
/// <summary>
/// Releases everything this object holds: drops the endpoint/client references,
/// then disposes the worker thread handle and both wait events.
/// </summary>
public void Dispose()
{
    // Drop references first; the runtime reclaims the underlying objects.
    ChatEndpoint = null;
    RenderClient = null;
    CaptureClient = null;
    AudioClient = null;

    if (ChatThread is not null)
    {
        ChatThread.Dispose();
        ChatThread = default;
    }

    if (ShutdownEvent is not null)
    {
        ShutdownEvent.Dispose();
        ShutdownEvent = null;
    }

    if (AudioSamplesReadyEvent is not null)
    {
        AudioSamplesReadyEvent.Dispose();
        AudioSamplesReadyEvent = null;
    }
}
/// <summary>
/// Fill a render client buffer.
/// </summary>
/// <param name="client">The client to fill.</param>
/// <param name="bufferFrameCount">The number of samples to fill with.</param>
/// <returns>Whether the buffer has been read to the end.</returns>
private unsafe bool FillBuffer(IAudioRenderClient client, int bufferFrameCount)
{
    if (bufferFrameCount == 0) return false;

    int error = client.GetBuffer(bufferFrameCount, out IntPtr bufferPtr);
    if (error != 0)
    {
        Engine.Log.Warning($"Couldn't get device buffer, error {error}.", MessageSource.WasApi);
        // bufferPtr is undefined on failure — the original fell through and
        // dereferenced it. Bail out and let the caller retry.
        return false;
    }

    var buffer = new Span<byte>((void*)bufferPtr, bufferFrameCount * _layerContext.AudioClientFormat.FrameSize);
    int frames = GetDataForCurrentTrack(_layerContext.AudioClientFormat, bufferFrameCount, buffer);

    // Zero frames produced means end of data; tell WASAPI to render silence.
    error = client.ReleaseBuffer(frames, frames == 0 ? AudioClientBufferFlags.Silent : AudioClientBufferFlags.None);
    if (error != 0)
    {
        Engine.Log.Warning($"Couldn't release device buffer, error {error}.", MessageSource.WasApi);
    }

    return frames == 0;
}
/// <summary>
/// Fill a render client buffer.
/// </summary>
/// <param name="client">The client to fill.</param>
/// <param name="getFrames">The number of samples to fill with.</param>
/// <returns>Whether the buffer has been read to the end.</returns>
private unsafe bool FillBuffer(IAudioRenderClient client, int getFrames)
{
    if (getFrames == 0) return false;

    int error = client.GetBuffer(getFrames, out IntPtr bufferPtr);
    if (error != 0)
    {
        Engine.Log.Warning($"Couldn't get device buffer, error {error}.", MessageSource.WasApi);
        // bufferPtr is undefined on failure — the original fell through and
        // dereferenced it. Bail out and let the caller retry.
        return false;
    }

    var buffer = new Span<byte>((void*)bufferPtr, getFrames * _layerContext.AudioClientFormat.FrameSize);
    int framesGotten = BackendGetData(_layerContext.AudioClientFormat, getFrames, buffer);

    error = client.ReleaseBuffer(framesGotten, framesGotten == 0 ? AudioClientBufferFlags.Silent : AudioClientBufferFlags.None);
    if (error != 0)
    {
        Engine.Log.Warning($"Couldn't release device buffer, error {error}.", MessageSource.WasApi);
    }

    return framesGotten == 0; // This should only be true if the buffer was exactly exhausted.
}
/// <summary>
/// Opens the endpoint for rendering in shared, event-driven mode.
/// </summary>
/// <param name="bufferDuration">Requested buffer duration (REFERENCE_TIME, 100ns units).</param>
/// <param name="nChannels">Channel count.</param>
/// <param name="nSamplesPerSec">Sample rate in Hz.</param>
/// <param name="format">Sample format of the stream to play.</param>
/// <returns>True on success; false with <c>ErrorMessage</c> set on failure.</returns>
public bool Open(REFERENCE_TIME bufferDuration, int nChannels, int nSamplesPerSec, FormatType format)
{
    try
    {
        if (IsOpen)
        {
            throw new Exception("AudioRenderClient is already open!");
        }

        var iid = typeof(IAudioClient).GUID;
        const uint CLSCTX_ALL = 0x17;
        Wasapi.AudioEndpoint.Activate(ref iid, CLSCTX_ALL, (IntPtr)0, out IntPtr pAudioClient);
        audioClient = (IAudioClient)Marshal.GetObjectForIUnknown(pAudioClient);
        // GetObjectForIUnknown AddRefs the interface; release the raw pointer so the
        // RCW holds the only reference (the original leaked one COM reference here).
        Marshal.Release(pAudioClient);

        audioClient.GetDevicePeriod(out REFERENCE_TIME defaultPeriod, out REFERENCE_TIME minimumPeriod);

        // Map the requested sample format to a WAVEFORMATEX tag/bit depth.
        int formatTag = 1; // WAVE_FORMAT_PCM
        int smpBits = 8;
        switch (format)
        {
            case FormatType.UInt8: smpBits = 8; break;
            case FormatType.Int16: smpBits = 16; break;
            case FormatType.Float32: formatTag = 3; smpBits = 32; break; // WAVE_FORMAT_IEEE_FLOAT
        }

        var wfmt = new WAVEFORMATEX
        {
            wFormatTag = (short)formatTag,
            nChannels = (short)nChannels,
            nSamplesPerSec = nSamplesPerSec,
            nBlockAlign = (short)((nChannels * smpBits + 7) / 8),
            wBitsPerSample = (short)smpBits
        };
        wfmt.nAvgBytesPerSec = wfmt.nSamplesPerSec * wfmt.nBlockAlign;

        DWORD streamFlags = 0x00040000; // AUDCLNT_STREAMFLAGS_EVENTCALLBACK
        audioClient.Initialize(AUDCLNT_SHAREMODE.SHARED, streamFlags, bufferDuration, 0, ref wfmt, (IntPtr)0);

        WaitEvent = new EventWaitHandle(false, EventResetMode.AutoReset);
        audioClient.SetEventHandle(WaitEvent.Handle);
        audioClient.GetBufferSize(out numBufferFrames);

        iid = typeof(IAudioRenderClient).GUID;
        audioClient.GetService(iid, out IntPtr pAudioRenderClient);
        audioRenderClient = (IAudioRenderClient)Marshal.GetObjectForIUnknown(pAudioRenderClient);
        Marshal.Release(pAudioRenderClient); // same: drop the extra raw reference

        IsOpen = true;
        blockAlign = wfmt.nBlockAlign;
        SampleRate = wfmt.nSamplesPerSec;
        return true;
    }
    catch (Exception e)
    {
        // Deliberate best-effort contract: report failure via ErrorMessage, not a throw.
        ErrorMessage = e.Message;
        return false;
    }
}
/// <summary>
/// Wraps the raw <c>IAudioRenderClient</c> COM interface.
/// </summary>
/// <param name="audioRenderClientInterface">The COM interface to wrap.</param>
internal AudioRenderClient(IAudioRenderClient audioRenderClientInterface) =>
    this.audioRenderClientInterface = audioRenderClientInterface;
/// <summary>
/// Creates a wrapper around an existing render-client COM interface.
/// </summary>
/// <param name="realClient">The underlying interface all calls delegate to.</param>
internal AudioRenderClient(IAudioRenderClient realClient) => _RealClient = realClient;
/// <summary>
/// Creates an output client over an audio client and acquires its render service.
/// </summary>
/// <param name="client">The underlying audio client.</param>
/// <param name="format">The wave format passed through to the base client.</param>
internal AudioOutputClient(IAudioClient client, WaveFormat format)
    : base(client, format)
{
    // Resolve the render-side service from the wrapped audio client.
    _render = GetService<IAudioRenderClient>();
}
/// <summary>
/// Wraps the raw render-client interface and initializes this wrapper's event logging.
/// </summary>
/// <param name="audioRenderClientInterface">The COM interface to wrap.</param>
internal AudioRenderClient(IAudioRenderClient audioRenderClientInterface)
{
    // Set up the event writer first so any later member call can log.
    this.EventWriterDLL.Initialize("AudioRenderClient");
    this.audioRenderClientInterface = audioRenderClientInterface;
}
/// <summary>
/// Builds an output client on top of a base audio client, fetching the render service.
/// </summary>
/// <param name="client">The underlying audio client.</param>
/// <param name="format">Wave format forwarded to the base constructor.</param>
internal AudioOutputClient(IAudioClient client, WaveFormat format)
    : base(client, format)
{
    // The render interface is obtained via the base class's service lookup.
    _render = GetService<IAudioRenderClient>();
}