// Render-thread loop: sleeps roughly half of the currently queued audio
// duration, then tops the WASAPI render buffer up with bytes read from
// _Stream. Runs forever — no exit condition is visible in this block.
void Loop()
{
    int padding = 0;
    _IAudioClient.GetCurrentPadding(out padding);
    // Initial wait in ms: queued frames / frames-per-ms, halved.
    // NOTE(review): assumes _SampleRate is in Hz and >= 1000 — confirm.
    int waitTime = (int)((ulong)padding / (_SampleRate / 1000) / 2);
    while (true)
    {
        IntPtr pBuffer;
        System.Threading.Thread.Sleep((int)(waitTime));
        // NOTE(review): lock (this) — presumably other members of this class
        // lock the same instance; a private lock object would be preferable
        // if no external caller relies on locking this object.
        lock (this)
        {
            _IAudioClient.GetCurrentPadding(out padding);
            // Read only as much as fits in the free part of the device buffer.
            int count = _Stream.Read(_FullBuffer, 0, (((int)_BufferFrameCount - padding) * _FrameSize));
            if (count > ((int)_BufferFrameCount - padding) * _FrameSize)
            {
                throw new Exception("More data provided by than asked for");
            }
            if (count > 0)
            {
                // Hand the freshly read bytes to the render client,
                // converting the byte count to whole frames.
                _IAudioRenderClient.GetBuffer(count / _FrameSize, out pBuffer);
                System.Runtime.InteropServices.Marshal.Copy(_FullBuffer, 0, pBuffer, count);
                _IAudioRenderClient.ReleaseBuffer(count / _FrameSize, 0);
                // Next sleep: half the duration now queued (old padding plus
                // the frames just written), in ms.
                waitTime = (int)((ulong)(padding + count / _FrameSize) / (_SampleRate / 1000)) / 2;
            }
            // Stream produced nothing and nothing is queued: poll again in 20 ms
            // instead of spinning.
            if (waitTime == 0 && count == 0)
            {
                waitTime = 20;
            }
        }
    }
}
/// <summary>
/// Thread routine that keeps the render endpoint fed with silence (zeroed
/// float samples) for as long as <c>_continueSilenceThread</c> stays set.
/// Any failure is logged and terminates the routine.
/// </summary>
/// <param name="bufferSize">Endpoint buffer size, in frames.</param>
/// <param name="format">Stream format supplying channel count and sample rate.</param>
private void SilenceGenerationRoutine(int bufferSize, WaveFormatEx format)
{
    try
    {
        // One full endpoint buffer of interleaved zeroed samples.
        float[] silence = new float[bufferSize * format.Channels];
        int bufferPeriodMs = bufferSize * 1000 / format.SampleRate;

        while (_continueSilenceThread)
        {
            _audioClientForRendering.GetCurrentPadding(out var queuedFrames);
            int freeFrames = bufferSize - queuedFrames;

            _audioRenderClient.GetBuffer(freeFrames, out var devicePtr);
            if (devicePtr != IntPtr.Zero)
            {
                // Fill the free region with zeros and submit it.
                Marshal.Copy(silence, 0, devicePtr, freeFrames * format.Channels);
                _audioRenderClient.ReleaseBuffer(freeFrames, AudioClientBufferFlags.None);
            }

            // Sleep half a buffer period, waking early if a stop is signalled.
            _stopReading.WaitOne(bufferPeriodMs / 2);
        }
    }
    catch (Exception e)
    {
        Log.Warning(e, "Error in Silence generator. Exiting");
    }
}
/// <summary>
/// Gets a pointer to the buffer.
/// </summary>
/// <param name="numFramesRequested">Number of frames requested</param>
/// <returns>Pointer to the buffer</returns>
/// <remarks>A failure HRESULT from the COM call is translated into the
/// corresponding .NET exception.</remarks>
public IntPtr GetBuffer(int numFramesRequested)
{
    int hr = audioRenderClientInterface.GetBuffer(numFramesRequested, out IntPtr bufferPointer);
    Marshal.ThrowExceptionForHR(hr);
    return bufferPointer;
}
/// <summary>
/// Converts the pending per-channel float buffers to interleaved 16-bit PCM
/// and submits them to the WASAPI render client.
/// </summary>
/// <returns>The number of frames rendered, or 0 when nothing fit or nothing
/// was pending.</returns>
private int RenderI16()
{
    int i, c, sample, samplesToRender, totalSamples, writeIndex;
    int sampleStart = samplesRead;    // first frame not yet rendered
    int sampleEnd = samplesWritten;   // one past the last frame produced
    // Cap the end of the range by the free space in the device buffer
    // (buffer size minus frames still queued).
    samplesToRender = sampleStart + (int)outputBufferSize - GetCurrentPadding();
    if (sampleEnd > samplesToRender)
    {
        sampleEnd = samplesToRender;
    }
    samplesToRender = sampleEnd - sampleStart;
    if (samplesToRender <= 0)
    {
        return(0);
    }
    totalSamples = samplesToRender * channelCount;
    // Grow the interleave staging buffer only when the current one is too small.
    if (i16Buf == null || i16Buf.Length < totalSamples)
    {
        i16Buf = new short[totalSamples];
    }
    // Interleave channel c into indices c, c + channelCount, c + 2*channelCount, ...
    for (c = 0; c < channelCount; c++)
    {
        float[] chanBuffer = buffers[c];
        for (sample = sampleStart, writeIndex = c; sample < sampleEnd; sample++, writeIndex += channelCount)
        {
            // Scale to 16-bit and clamp to avoid integer wraparound on
            // samples outside [-1, 1].
            i = (int)(chanBuffer[sample] * 32767);
            if (i < -32768)
            {
                i = -32768;
            }
            else if (i > 32767)
            {
                i = 32767;
            }
            i16Buf[writeIndex] = (short)i;
        }
    }
    // Advance the read cursor past the frames we are about to submit.
    samplesRead += samplesToRender;
    IntPtr ptr;
    audioRenderClient.GetBuffer(samplesToRender, out ptr);
    Marshal.Copy(i16Buf, 0, ptr, totalSamples);
    audioRenderClient.ReleaseBuffer(samplesToRender, AudioClientBufferFlags.None);
    return(samplesToRender);
}
/// <summary>
/// Gets a pointer to the buffer.
/// </summary>
/// <param name="numFramesRequested">Number of frames requested</param>
/// <returns>Pointer to the buffer; the value is still returned even when an
/// error was logged, matching this class's log-don't-throw policy.</returns>
public IntPtr GetBuffer(int numFramesRequested)
{
    int hresult = audioRenderClientInterface.GetBuffer(numFramesRequested, out IntPtr pBuffer);
    if (hresult != 0)
    {
        // Errors are reported through the event writer rather than thrown.
        this.EventWriterDLL.WriteLine(
            EventWriterDLL.SeverityTypes.Error,
            0x01,
            "Error Code in AudioRenderClient::GetBuffer: " + hresult);
    }
    return pBuffer;
}
/// <summary>
/// Requests <paramref name="numFramesRequested"/> frames from the wrapped
/// render client and returns a managed copy of the native buffer contents.
/// </summary>
/// <param name="numFramesRequested">Number of frames requested.</param>
/// <returns>A managed byte array copied from the native buffer.</returns>
/// <remarks>
/// NOTE(review): the managed array is sized and copied using
/// <paramref name="numFramesRequested"/> as a BYTE count, which is only
/// correct when one frame is one byte wide — confirm the stream's frame size.
/// NOTE(review): no matching ReleaseBuffer call appears here — verify the
/// caller releases the buffer, since WASAPI pairs GetBuffer/ReleaseBuffer.
/// </remarks>
public byte[] GetBuffer(uint numFramesRequested)
{
    IntPtr p;
    int hr = _RealClient.GetBuffer(numFramesRequested, out p);
    // Translate any failure HRESULT into the corresponding exception.
    Marshal.ThrowExceptionForHR(hr);
    byte[] data = new byte[numFramesRequested];
    Marshal.Copy(p, data, 0, (int)numFramesRequested);
    return(data);
}
/// <summary>
/// Writes PCM bytes to the render client in whole frames.
/// </summary>
/// <param name="data">Source byte buffer.</param>
/// <param name="offset">Offset into <paramref name="data"/> at which to start.</param>
/// <param name="num">Number of bytes to write; a trailing partial frame is dropped.</param>
/// <exception cref="Exception">Thrown when the client is not open.</exception>
public void Write(byte[] data, int offset, int num)
{
    if (!IsOpen)
    {
        throw new Exception("AudioRenderClient is not open!");
    }
    if (num > 0)
    {
        int numFrames = num / blockAlign;
        // Skip entirely when fewer than one whole frame was supplied.
        if (numFrames > 0)
        {
            // BUGFIX: copy only whole frames. GetBuffer grants space for
            // numFrames * blockAlign bytes, so copying `num` bytes would
            // overrun the granted region whenever `num` is not an exact
            // multiple of blockAlign.
            int wholeFrameBytes = numFrames * blockAlign;
            audioRenderClient.GetBuffer((DWORD)numFrames, out IntPtr pData);
            Marshal.Copy(data, offset, pData, wholeFrameBytes);
            audioRenderClient.ReleaseBuffer((DWORD)numFrames, 0);
        }
    }
}
/// <summary>
/// Reads up to <paramref name="count"/> frames' worth of bytes from
/// <paramref name="stream"/> into the staging buffer and submits them to the
/// render client.
/// </summary>
/// <param name="stream">Source PCM byte stream.</param>
/// <param name="count">Number of frames to request from the device buffer.</param>
/// <returns><c>false</c> when the stream yielded no data at all; otherwise <c>true</c>.</returns>
private bool ReadBuffer(Stream stream, int count)
{
    int total = 0, remainder = 0, bytes;
    do
    {
        // First pass asks for the full byte count; subsequent passes only
        // top up the partial frame left over by a short read.
        bytes = stream.Read(this.Buffer, total, remainder > 0 ? remainder : count * this.FrameSize);
        total += bytes;
        remainder = remainder > 0 ? (remainder -= bytes) : total % this.FrameSize;
    }while (remainder != 0 && bytes > 0);
    if (total == 0)
    {
        return(false);
    }
    IntPtr p;
    _render.GetBuffer(count, out p);
    // BUGFIX: copy and release everything accumulated across ALL reads
    // (`total`), not just the final chunk (`bytes`). The previous code used
    // `bytes`, which holds only the LAST Read's return value, silently
    // dropping earlier chunks whenever the stream needed several reads.
    Marshal.Copy(this.Buffer, 0, p, total);
    _render.ReleaseBuffer(total / this.FrameSize, AudioClientBufferFlags.None);
    return(true);
}
/// <summary>
/// Fill a render client buffer.
/// </summary>
/// <param name="client">The client to fill.</param>
/// <param name="getFrames">The number of samples to fill with.</param>
/// <returns>Whether the buffer has been read to the end.</returns>
private unsafe bool FillBuffer(IAudioRenderClient client, int getFrames)
{
    // Nothing requested means nothing exhausted.
    if (getFrames == 0)
    {
        return(false);
    }
    int error = client.GetBuffer(getFrames, out IntPtr bufferPtr);
    if (error != 0)
    {
        Engine.Log.Warning($"Couldn't get device buffer, error {error}.", MessageSource.WasApi);
    }
    // View the native buffer as bytes: frames * bytes-per-frame.
    // NOTE(review): bufferPtr is used even when GetBuffer reported an error
    // above — confirm the pointer is guaranteed valid in that case.
    var buffer = new Span <byte>((void *)bufferPtr, getFrames * _layerContext.AudioClientFormat.FrameSize);
    int framesGotten = BackendGetData(_layerContext.AudioClientFormat, getFrames, buffer);
    // An empty submission is flagged as silence for the endpoint.
    error = client.ReleaseBuffer(framesGotten, framesGotten == 0 ? AudioClientBufferFlags.Silent : AudioClientBufferFlags.None);
    if (error != 0)
    {
        Engine.Log.Warning($"Couldn't release device buffer, error {error}.", MessageSource.WasApi);
    }
    return(framesGotten == 0); // This should only be true if the buffer was exactly exhausted.
}
/// <summary>
/// Fill a render client buffer.
/// </summary>
/// <param name="client">The client to fill.</param>
/// <param name="bufferFrameCount">The number of samples to fill with.</param>
/// <returns>Whether the buffer has been read to the end.</returns>
private unsafe bool FillBuffer(IAudioRenderClient client, int bufferFrameCount)
{
    // Nothing to request — report "not exhausted".
    if (bufferFrameCount == 0) return false;

    int error = client.GetBuffer(bufferFrameCount, out IntPtr nativeBuffer);
    if (error != 0) Engine.Log.Warning($"Couldn't get device buffer, error {error}.", MessageSource.WasApi);

    // View the native memory as bytes: frames * bytes per frame.
    int byteLength = bufferFrameCount * _layerContext.AudioClientFormat.FrameSize;
    var destination = new Span<byte>((void*)nativeBuffer, byteLength);
    int framesWritten = GetDataForCurrentTrack(_layerContext.AudioClientFormat, bufferFrameCount, destination);

    // An empty submission is marked as silence for the endpoint.
    AudioClientBufferFlags flags = framesWritten == 0 ? AudioClientBufferFlags.Silent : AudioClientBufferFlags.None;
    error = client.ReleaseBuffer(framesWritten, flags);
    if (error != 0) Engine.Log.Warning($"Couldn't release device buffer, error {error}.", MessageSource.WasApi);

    // True only when the track produced no frames at all.
    return framesWritten == 0;
}
/// <summary>
/// Gets a pointer to the buffer by delegating to the wrapped interface.
/// </summary>
/// <param name="numFramesRequested">Number of frames requested</param>
/// <returns>Pointer to the buffer</returns>
public IntPtr GetBuffer(int numFramesRequested) =>
    audioRenderClientInterface.GetBuffer(numFramesRequested);