/// <summary>
/// Pulls one period of audio from the source provider and hands it to the
/// WASAPI render client.
/// </summary>
/// <param name="playbackProvider">Source of PCM data; a read of 0 bytes signals end of stream.</param>
/// <param name="frameCount">Number of audio frames the device requested.</param>
private void FillBuffer(IWaveProvider playbackProvider, int frameCount)
{
    var buffer = renderClient.GetBuffer(frameCount);
    var readLength = frameCount * bytesPerFrame;
    int read = playbackProvider.Read(readBuffer, 0, readLength);
    if (read == 0)
    {
        // Source exhausted: tell the playback loop to wind down.
        playbackState = PlaybackState.Stopped;
    }
    if (this.isUsingEventSync && this.shareMode == AudioClientShareMode.Exclusive)
    {
        // Exclusive event-driven mode must release exactly the requested packet
        // size (see IAudioClient::Initialize remarks). Zero-pad the unread tail
        // and copy the FULL period; previously only 'read' bytes were copied
        // while frameCount frames were released, so stale buffer contents
        // beyond 'read' were played.
        if (read < readLength)
        {
            Array.Clear(readBuffer, read, readLength - read);
        }
        Marshal.Copy(readBuffer, 0, buffer, readLength);
        renderClient.ReleaseBuffer(frameCount, AudioClientBufferFlags.None);
    }
    else
    {
        Marshal.Copy(readBuffer, 0, buffer, read);
        // Shared mode may release fewer frames than were requested.
        int actualFrameCount = read / bytesPerFrame;
        /*if (actualFrameCount != frameCount)
         * {
         *     Debug.WriteLine(String.Format("WASAPI wanted {0} frames, supplied {1}", frameCount, actualFrameCount ));
         * }*/
        renderClient.ReleaseBuffer(actualFrameCount, AudioClientBufferFlags.None);
    }
}
/// <summary>
/// Supplies one period of audio to the render client. While paused the device
/// is fed silence; while playing, data is pulled from the source provider.
/// </summary>
/// <param name="playbackProvider">Source of PCM data; a read of 0 bytes ends playback.</param>
/// <param name="frameCount">Number of audio frames the device requested.</param>
private void FillBuffer(IWaveProvider playbackProvider, int frameCount)
{
    var buffer = renderClient.GetBuffer(frameCount);
    var readLength = frameCount * bytesPerFrame;
    if (playbackState == PlaybackState.Paused)
    {
        // Let WASAPI render silence via the Silent buffer flag instead of
        // allocating and copying a zeroed array on every period.
        renderClient.ReleaseBuffer(frameCount, AudioClientBufferFlags.Silent);
    }
    else if (playbackState == PlaybackState.Playing)
    {
        int read = playbackProvider.Read(readBuffer, 0, readLength);
        if (read == 0)
        {
            // Source exhausted: stop and notify listeners.
            // NOTE(review): this fires on the audio thread — confirm subscribers expect that.
            playbackState = PlaybackState.Stopped;
            PlaybackStopped?.Invoke(this, new StoppedEventArgs());
        }
        Marshal.Copy(readBuffer, 0, buffer, read);
        /*if (actualFrameCount != frameCount)
         * {
         *     Debug.WriteLine(String.Format("WASAPI wanted {0} frames, supplied {1}", frameCount, actualFrameCount ));
         * }*/
        renderClient.ReleaseBuffer(read / bytesPerFrame, AudioClientBufferFlags.None);
    }
    else
    {
        // Stopped (or transitional) state: release the packet with no data,
        // matching the original zero-frame release.
        renderClient.ReleaseBuffer(0, AudioClientBufferFlags.None);
    }
}
public void CanPopulateABuffer()
{
    // Acquire a shared-mode client and its render service.
    var audioClient = InitializeClient(AudioClientShareMode.Shared);
    var renderClient = audioClient.AudioRenderClient;

    // Request the whole device buffer for writing.
    int frames = audioClient.BufferSize;
    IntPtr data = renderClient.GetBuffer(frames);
    // TODO put some stuff in

    // Releasing with the Silent flag tells WASAPI the buffer holds silence.
    renderClient.ReleaseBuffer(frames, AudioClientBufferFlags.Silent);
}
/// <summary>
/// Feeds one packet of audio from <c>_source</c> into the render client.
/// </summary>
/// <param name="renderClient">The WASAPI render client to write into.</param>
/// <param name="buffer">Managed scratch buffer the source is read into.</param>
/// <param name="numFramesCount">Number of frames the driver requested.</param>
/// <param name="frameSize">Size of one frame in bytes.</param>
/// <returns>false to abort playback (source exhausted); true to continue.</returns>
private bool FeedBuffer(AudioRenderClient renderClient, byte[] buffer, int numFramesCount, int frameSize)
{
    //calculate the number of bytes to "feed", rounded down to a whole block
    int count = numFramesCount * frameSize;
    count -= (count % _source.WaveFormat.BlockAlign);
    //if the driver did not request enough data, return true to continue playback
    if (count <= 0)
    {
        return true;
    }

    //get the requested data
    int read = _source.Read(buffer, 0, count);
    //if the source did not provide enough data, we abort the playback by returning false
    if (read <= 0)
    {
        return false;
    }

    //calculate the number of FRAMES to request
    int actualNumFramesCount = read / frameSize;
    int bytesToCopy = read;

    //again there are some special requirements for exclusive mode AND eventsync
    if (_shareMode == AudioClientShareMode.Exclusive && _eventSync && read < count)
    {
        /* The caller can request a packet size that is less than or equal to the amount
         * of available space in the buffer (except in the case of an exclusive-mode stream
         * that uses event-driven buffering; for more information, see IAudioClient::Initialize).
         * see https://msdn.microsoft.com/en-us/library/windows/desktop/dd368243%28v=vs.85%29.aspx - remarks*/
        //since we have to provide exactly the requested number of frames, we clear the rest of the array
        Array.Clear(buffer, read, count - read);
        //set the number of frames to request memory for, to the number of requested frames
        actualNumFramesCount = numFramesCount;
        //BUGFIX: copy the zero padding into the device buffer too; previously
        //only 'read' bytes were copied while the full packet was released, so
        //the cleared tail never reached the device and stale data was played
        bytesToCopy = count;
    }

    IntPtr ptr = renderClient.GetBuffer(actualNumFramesCount);
    //we may should introduce a try-finally statement here, but the Marshal.Copy method should not
    //throw any relevant exceptions ... so we should be able to always release the packet
    Marshal.Copy(buffer, 0, ptr, bytesToCopy);
    renderClient.ReleaseBuffer(actualNumFramesCount, AudioClientBufferFlags.None);

    return true;
}
/// <summary>
/// Hands one period of audio from the provider to the WASAPI render client.
/// A zero-byte read marks the end of the stream and requests shutdown.
/// </summary>
private void FillBuffer(IWaveProvider playbackProvider, int frameCount)
{
    IntPtr destination = renderClient.GetBuffer(frameCount);
    int bytesRequested = frameCount * bytesPerFrame;

    int bytesRead = playbackProvider.Read(readBuffer, 0, bytesRequested);
    if (bytesRead == 0)
    {
        // Nothing left in the source: ask the playback loop to wind down.
        playbackState = WasapiOutState.Stopping;
    }

    Marshal.Copy(readBuffer, 0, destination, bytesRead);

    /*if (actualFrameCount != frameCount)
     * {
     *     Debug.WriteLine(String.Format("WASAPI wanted {0} frames, supplied {1}", frameCount, actualFrameCount ));
     * }*/
    // Release only the frames actually written (allowed in shared mode).
    renderClient.ReleaseBuffer(bytesRead / bytesPerFrame, AudioClientBufferFlags.None);
}
/// <summary>
/// Feeds one packet of audio from <c>_source</c> into the render client.
/// </summary>
/// <param name="renderClient">The WASAPI render client to write into.</param>
/// <param name="buffer">Managed scratch buffer the source is read into.</param>
/// <param name="numFramesCount">Number of frames the driver requested.</param>
/// <param name="frameSize">Size of one frame in bytes.</param>
/// <returns>false to abort playback (source exhausted); true to continue.</returns>
private bool FeedBuffer(AudioRenderClient renderClient, byte[] buffer, int numFramesCount, int frameSize)
{
    // Number of bytes to request, rounded down to a whole block.
    int count = numFramesCount * frameSize;
    count -= (count % _source.WaveFormat.BlockAlign);
    if (count <= 0)
    {
        // Driver asked for less than one block; keep playing.
        return true;
    }

    int read = _source.Read(buffer, 0, count);
    if (read <= 0)
    {
        // Source exhausted: return early instead of acquiring a packet,
        // copying zero bytes and releasing zero frames (and guarding
        // Marshal.Copy against a negative length). Mirrors the sibling
        // FeedBuffer implementation.
        return false;
    }

    // Copy the data across and release only the frames actually written.
    IntPtr ptr = renderClient.GetBuffer(numFramesCount);
    Marshal.Copy(buffer, 0, ptr, read);
    renderClient.ReleaseBuffer(read / frameSize, AudioClientBufferFlags.None);
    return true;
}
/// <summary>
/// Fills one WASAPI packet, remapping between the channel count the provider
/// delivers and the channel count the device expects.
/// NOTE(review): the remapping loops advance one BYTE per channel sample, so
/// this only remaps correctly for 8-bit samples — confirm against callers.
/// </summary>
private void FillBuffer(IWaveProvider playbackProvider, int frameCount, int requestedChannelCount, int providedChannelCount, int providedBitsPerSample)
{
    IntPtr buffer = renderClient.GetBuffer(frameCount);
    //bytesPerFrame = outputFormat.Channels*outputFormat.BitsPerSample/8;
    int desiredReadLength = frameCount * bytesPerFrame;
    int providableReadLength = frameCount * providedChannelCount * providedBitsPerSample / 8;
    //readBuffer = new byte[providableReadLength];
    int read = playbackProvider.Read(readBuffer, 0, providableReadLength);
    int actualFrameCount;
    if (read == 0)
    {
        // Source exhausted: release nothing and request shutdown.
        playbackState = WasapiOutState.Stopping;
        actualFrameCount = 0;
    }
    else if (requestedChannelCount != providedChannelCount)
    {
        if (requestedChannelCount < providedChannelCount) //There are more channels provided than requested
        {
            //byte[] newReadBuffer = new byte[providableReadLength];
            int currentWrittenChannels = 0;
            int unusedChannelsOffset = 0;
            int i;
            for (i = 0; i < read; i++)
            {
                if (currentWrittenChannels >= requestedChannelCount)
                {
                    // Skip the surplus channels of this frame.
                    unusedChannelsOffset += providedChannelCount - currentWrittenChannels;
                    currentWrittenChannels = 0;
                }
                newReadBuffer[i] = readBuffer[i + unusedChannelsOffset];
                //BUGFIX: the counter was never advanced, so the skip condition
                //above never fired and surplus channels were never dropped —
                //the loop degenerated into a straight byte copy.
                currentWrittenChannels++;
            }
            Marshal.Copy(newReadBuffer, 0, buffer, i);
            actualFrameCount = i / bytesPerFrame;
        }
        else // if (providedChannelCount < requestedChannelCount) (There are less channels provided than are requested, space out the data accordingly in the buffer)
        {
            //byte[] newReadBuffer = new byte[desiredReadLength];
            int unusedChannelsOffset = 0;
            int currentWrittenChannels = 0;
            int i;
            for (i = 0; i < read; i++)
            {
                if (currentWrittenChannels >= providedChannelCount)
                {
                    // Leave a gap for the channels the source did not supply.
                    unusedChannelsOffset += requestedChannelCount - providedChannelCount;
                    currentWrittenChannels = 0;
                }
                newReadBuffer[i + unusedChannelsOffset] = readBuffer[i];
                currentWrittenChannels++;
            }
            Marshal.Copy(newReadBuffer, 0, buffer, i + unusedChannelsOffset + requestedChannelCount - providedChannelCount); //We have provided
            actualFrameCount = (i + unusedChannelsOffset + requestedChannelCount - providedChannelCount) / bytesPerFrame;
        }
    }
    else
    {
        // Channel counts match: straight copy, no remapping needed.
        Marshal.Copy(readBuffer, 0, buffer, read);
        actualFrameCount = read / bytesPerFrame;
    }
    renderClient.ReleaseBuffer(actualFrameCount, AudioClientBufferFlags.None);
    // Clear the scratch buffers so stale samples never leak into the next packet.
    Array.Clear(readBuffer, 0, read);
    Array.Clear(newReadBuffer, 0, newReadBuffer.Length);
}
// Dedicated playback thread: pre-fills the device buffer, starts the audio
// client, then loops feeding packets until playbackState leaves Playing/Paused.
// Uses event-driven waits when isUsingEventSync, otherwise polls every half
// latency period. On any exception, stops the client, releases both
// double-buffered read buffers and raises the playback exception.
private void PlayThread()
{
    try
    {
        AudioBuffer buff = GetBuffer(false);
        if (buff == null)
        {
            // No initial data available at all — nothing to play.
            RaisePlaybackStopped();
            return;
        }
        audioClient.Reset();
        // fill a whole buffer
        IntPtr buffer = renderClient.GetBuffer(buff.Length);
        Marshal.Copy(buff.Bytes, 0, buffer, buff.ByteLength);
        renderClient.ReleaseBuffer(buff.Length, AudioClientBufferFlags.None);
        ReleaseBuffer(buff, false, 0);
        // Create WaitHandle for sync
        if (frameEventWaitHandle != null)
        {
            frameEventWaitHandle.Reset();
        }
        audioClient.Start();
        if (isUsingEventSync && shareMode == AudioClientShareMode.Exclusive)
        {
            // Exclusive + event-driven: each device event demands a full
            // buffer's worth of frames.
            while (playbackState != PlaybackState.Stopped)
            {
                int indexHandle = WaitHandle.WaitAny(waitHandles, 10 * latencyMilliseconds, false);
                if (playbackState == PlaybackState.Playing && indexHandle != WaitHandle.WaitTimeout)
                {
                    // In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize
                    buff = GetBuffer(false);
                    if (buff == null)
                    {
                        // Source exhausted — leave the loop and stop.
                        break;
                    }
                    buffer = renderClient.GetBuffer(buff.Length);
                    Marshal.Copy(buff.Bytes, 0, buffer, buff.ByteLength);
                    renderClient.ReleaseBuffer(buff.Length, AudioClientBufferFlags.None);
                    ReleaseBuffer(buff, false, 0);
                }
            }
        }
        else
        {
            // Shared (or non-event) mode: feed whatever space the device
            // reports, carving slices out of the current AudioBuffer.
            buff = null;
            int offs = 0; // frame offset already consumed from 'buff'
            while (playbackState != PlaybackState.Stopped)
            {
                // If using Event Sync, Wait for notification from AudioClient or Sleep half latency
                int indexHandle = 0;
                if (isUsingEventSync)
                {
                    indexHandle = WaitHandle.WaitAny(waitHandles, 3 * latencyMilliseconds, false);
                }
                else
                {
                    Thread.Sleep(latencyMilliseconds / 2);
                }
                // If still playing and notification is ok
                if (playbackState == PlaybackState.Playing && indexHandle != WaitHandle.WaitTimeout)
                {
                    // See how much buffer space is available.
                    int numFramesAvailable = bufferFrameCount - audioClient.CurrentPadding;
                    if (numFramesAvailable > 0)
                    {
                        if (buff == null)
                        {
                            // Current slice exhausted — fetch the next buffer.
                            buff = GetBuffer(false);
                            offs = 0;
                        }
                        if (buff == null)
                        {
                            // Source exhausted — leave the loop and stop.
                            break;
                        }
                        // Never write past the end of the current buffer.
                        numFramesAvailable = Math.Min(numFramesAvailable, buff.Length - offs);
                        buffer = renderClient.GetBuffer(numFramesAvailable);
                        Marshal.Copy(buff.Bytes, offs * Settings.PCM.BlockAlign, buffer, numFramesAvailable * Settings.PCM.BlockAlign);
                        renderClient.ReleaseBuffer(numFramesAvailable, AudioClientBufferFlags.None);
                        offs += numFramesAvailable;
                        if (offs == buff.Length)
                        {
                            // Whole buffer consumed — hand it back for reuse.
                            ReleaseBuffer(buff, false, 0);
                            buff = null;
                        }
                    }
                }
            }
        }
        //Thread.Sleep(isUsingEventSync ? latencyMilliseconds : latencyMilliseconds / 2);
        audioClient.Stop();
        if (playbackState == PlaybackState.Stopped)
        {
            audioClient.Reset();
        }
    }
    catch (Exception ex)
    {
        // On failure: mark stopped, return both read buffers, best-effort
        // stop the client, then surface the exception to the owner.
        playbackState = PlaybackState.Stopped;
        ReleaseBuffer(readBuffers[0], false, 0);
        ReleaseBuffer(readBuffers[1], false, 0);
        playThread = null;
        try
        {
            audioClient.Stop();
        }
        catch
        {
        }
        RaisePlaybackException(ex);
        return;
    }
    // Normal shutdown: return both read buffers and notify listeners.
    ReleaseBuffer(readBuffers[0], false, 0);
    ReleaseBuffer(readBuffers[1], false, 0);
    RaisePlaybackStopped();
}