/// <summary>
/// Stops rendering audio data.
/// </summary>
public void StopRendering()
{
    if (this.wasapiRenderClient != null)
    {
        this.wasapiRenderClient.Dispose();
        this.wasapiRenderClient = null;
    }

    if (this.audioBufferStream != null)
    {
        this.audioBufferStream.Dispose();
        this.audioBufferStream = null;
    }
}
/// <summary>
/// Starts rendering audio data.
/// </summary>
/// <param name="maxBufferSeconds">
/// The maximum duration of audio that can be buffered for playback.
/// </param>
/// <param name="targetLatencyInMs">
/// The target maximum number of milliseconds of acceptable lag between
/// playback of samples and live sound being produced.
/// </param>
/// <param name="gain">
/// The gain to be applied prior to rendering the audio.
/// </param>
/// <param name="inFormat">
/// The input audio format.
/// </param>
public void StartRendering(double maxBufferSeconds, int targetLatencyInMs, float gain, WaveFormat inFormat)
{
    if (this.wasapiRenderClient != null)
    {
        this.StopRendering();
    }

    // Create an audio buffer to buffer audio awaiting playback.
    this.audioBufferStream = new CircularBufferStream((long)Math.Ceiling(maxBufferSeconds * inFormat.AvgBytesPerSec), false);

    this.wasapiRenderClient = new WasapiRenderClient(this.audioDevice);

    // Create a callback delegate and marshal it to a function pointer. Keep a
    // reference to the delegate as a class field to prevent it from being GC'd.
    this.callbackDelegate = new AudioDataRequestedCallback(this.AudioDataRequestedCallback);

    // Initialize the renderer with the desired parameters.
    this.wasapiRenderClient.Initialize(targetLatencyInMs, gain, inFormat, this.callbackDelegate);

    // Tell WASAPI to start rendering.
    this.wasapiRenderClient.Start();
}
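// Minimal usage sketch (illustrative only, not part of this class). It assumes the
// component exposing StartRendering/StopRendering above is held in a variable named
// `renderer`, and that the WaveFormat type is the Microsoft.Psi.Audio one so that
// WaveFormat.Create16kHz1Channel16BitPcm() is available; both are assumptions made
// purely for illustration and should be adapted to the actual types in use.
//
//     WaveFormat format = WaveFormat.Create16kHz1Channel16BitPcm();
//     renderer.StartRendering(
//         maxBufferSeconds: 5.0,    // buffer at most 5 seconds of audio awaiting playback
//         targetLatencyInMs: 20,    // tolerate roughly 20 ms of lag before playback
//         gain: 1.0f,               // render at unit gain
//         inFormat: format);
//
//     // ... audio written to the internal buffer is pulled by WASAPI through the
//     // registered AudioDataRequestedCallback while rendering is active ...
//
//     renderer.StopRendering();     // dispose the WASAPI render client and the buffer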