/// <summary>
/// Loads an entire audio file into memory as 44.1kHz stereo IEEE-float samples.
/// Files not already in 44.1kHz/2ch are converted through a ResamplerDmoStream.
/// </summary>
/// <param name="audioFileName">Path of the audio file to load.</param>
public CachedSound(string audioFileName)
{
    using (var audioFileReader = new AudioFileReader(audioFileName))
    {
        WaveFormat = audioFileReader.WaveFormat;
        if (WaveFormat.SampleRate != 44100 || WaveFormat.Channels != 2)
        {
            using (var resampled = new ResamplerDmoStream(audioFileReader, WaveFormat.CreateIeeeFloatWaveFormat(44100, 2)))
            {
                var resampledSampleProvider = resampled.ToSampleProvider();
                WaveFormat = resampledSampleProvider.WaveFormat;
                // FIX: Length is in bytes; divide by 4 (sizeof(float)) so the list
                // capacity matches the sample count instead of over-allocating 4x.
                // This mirrors the "/ 4" already used in the non-resampled branch.
                var wholeFile = new List<float>((int)(resampled.Length / 4));
                // read buffer sized to hold one second of samples
                var readBuffer = new float[resampled.WaveFormat.SampleRate * resampled.WaveFormat.Channels];
                int samplesRead;
                while ((samplesRead = resampledSampleProvider.Read(readBuffer, 0, readBuffer.Length)) > 0)
                {
                    wholeFile.AddRange(readBuffer.Take(samplesRead));
                }
                AudioData = wholeFile.ToArray();
            }
        }
        else
        {
            // Length is in bytes; 4 bytes per IEEE-float sample
            var wholeFile = new List<float>((int)(audioFileReader.Length / 4));
            // read buffer sized to hold one second of samples
            var readBuffer = new float[audioFileReader.WaveFormat.SampleRate * audioFileReader.WaveFormat.Channels];
            int samplesRead;
            while ((samplesRead = audioFileReader.Read(readBuffer, 0, readBuffer.Length)) > 0)
            {
                wholeFile.AddRange(readBuffer.Take(samplesRead));
            }
            AudioData = wholeFile.ToArray();
        }
    }
}
/// <summary>
/// Verifies a ResamplerDmoStream can wrap a source stream, that upsampling
/// produces a longer output stream, and that both positions start at zero.
/// </summary>
public void CanCreateResamplerStream()
{
    using (WaveStream source = new NullWaveStream(new WaveFormat(44100, 16, 1), 1000))
    using (var resampler = new ResamplerDmoStream(source, WaveFormat.CreateIeeeFloatWaveFormat(48000, 2)))
    {
        Assert.Greater(resampler.Length, source.Length, "Length");
        Assert.AreEqual(0, source.Position, "Position");
        Assert.AreEqual(0, resampler.Position, "Position");
    }
}
/// <summary>
/// Verifies that a single 10ms block of converted audio can be read
/// from a ResamplerDmoStream wrapping a 20-second silent source.
/// </summary>
public void CanReadABlockFromResamplerStream()
{
    var inputFormat = new WaveFormat(44100, 16, 1);
    using (WaveStream source = new NullWaveStream(inputFormat, inputFormat.AverageBytesPerSecond * 20))
    using (var resampler = new ResamplerDmoStream(source, WaveFormat.CreateIeeeFloatWaveFormat(48000, 2)))
    {
        // request 10 ms of output audio
        int bytesRequested = resampler.WaveFormat.AverageBytesPerSecond / 100;
        var buffer = new byte[bytesRequested];
        int bytesRead = resampler.Read(buffer, 0, bytesRequested);
        Assert.That(bytesRead > 0, "Bytes Read");
    }
}
/// <summary>
/// Drains an entire 20-second silent source through a ResamplerDmoStream in
/// 10ms chunks, confirming the conversion runs to completion without error.
/// </summary>
private void ResampleAWholeStream(WaveFormat inputFormat, WaveFormat outputFormat)
{
    using (WaveStream source = new NullWaveStream(inputFormat, inputFormat.AverageBytesPerSecond * 20))
    using (var resampler = new ResamplerDmoStream(source, outputFormat))
    {
        // read 10 ms at a time until the stream is exhausted
        int chunkSize = resampler.WaveFormat.AverageBytesPerSecond / 100;
        var buffer = new byte[chunkSize];
        int total = 0;
        for (int read = resampler.Read(buffer, 0, chunkSize); read > 0; read = resampler.Read(buffer, 0, chunkSize))
        {
            total += read;
        }
    }
}
/// <summary>
/// Background playback thread. Optionally wraps the source in a DMO resampler,
/// pre-fills the WASAPI render buffer, then keeps it topped up until playback
/// is stopped. Any exception is captured and reported via RaisePlaybackStopped
/// instead of escaping the thread.
/// </summary>
private void PlayThread()
{
    ResamplerDmoStream resamplerDmoStream = null;
    IWaveProvider playbackProvider = this.sourceProvider;
    Exception exception = null;
    try
    {
        // Init decided whether the device needs a different format than the source
        if (this.dmoResamplerNeeded)
        {
            resamplerDmoStream = new ResamplerDmoStream(sourceProvider, outputFormat);
            playbackProvider = resamplerDmoStream;
        }
        // fill a whole buffer before starting the audio client
        bufferFrameCount = audioClient.BufferSize;
        bytesPerFrame = outputFormat.Channels * outputFormat.BitsPerSample / 8;
        readBuffer = new byte[bufferFrameCount * bytesPerFrame];
        FillBuffer(playbackProvider, bufferFrameCount);
        // Create WaitHandle for sync
        WaitHandle[] waitHandles = new WaitHandle[] { frameEventWaitHandle };
        audioClient.Start();
        while (playbackState != PlaybackState.Stopped)
        {
            // If using Event Sync, Wait for notification from AudioClient or Sleep half latency
            int indexHandle = 0;
            if (isUsingEventSync)
            {
                // wait up to 3x latency for the buffer-ready event
                indexHandle = WaitHandle.WaitAny(waitHandles, 3 * latencyMilliseconds, false);
            }
            else
            {
                Thread.Sleep(latencyMilliseconds / 2);
            }
            // If still playing and notification is ok
            if (playbackState == PlaybackState.Playing && indexHandle != WaitHandle.WaitTimeout)
            {
                // See how much buffer space is available.
                int numFramesPadding = 0;
                if (isUsingEventSync)
                {
                    // In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize
                    numFramesPadding = (shareMode == AudioClientShareMode.Shared) ? audioClient.CurrentPadding : 0;
                }
                else
                {
                    numFramesPadding = audioClient.CurrentPadding;
                }
                int numFramesAvailable = bufferFrameCount - numFramesPadding;
                if (numFramesAvailable > 0)
                {
                    FillBuffer(playbackProvider, numFramesAvailable);
                }
            }
        }
        // let the last queued audio play out before stopping the client
        Thread.Sleep(latencyMilliseconds / 2);
        audioClient.Stop();
        if (playbackState == PlaybackState.Stopped)
        {
            audioClient.Reset();
        }
    }
    catch (Exception e)
    {
        // captured and reported below rather than crashing the thread
        exception = e;
    }
    finally
    {
        if (resamplerDmoStream != null)
        {
            resamplerDmoStream.Dispose();
        }
        RaisePlaybackStopped(exception);
    }
}
/// <summary>
/// Releases the audio client (stopping playback first) and any
/// resampler stream created during initialization.
/// </summary>
public void Dispose()
{
    var client = audioClient;
    if (client != null)
    {
        Stop();
        client.Dispose();
        audioClient = null;
        renderClient = null;
    }
    var resampler = resamplerDmoStream;
    if (resampler != null)
    {
        resampler.Dispose();
        resamplerDmoStream = null;
    }
}
/// <summary>
/// Initialize for playing the specified wave stream. Negotiates an output
/// format the device supports (inserting a DMO resampler if the source format
/// is not accepted directly), then initializes the audio client and allocates
/// the read buffer.
/// </summary>
/// <param name="waveProvider">IWaveProvider to play</param>
/// <exception cref="NotSupportedException">No supported output format could be found.</exception>
public void Init(IWaveProvider waveProvider)
{
    // latency is expressed to WASAPI in 100-nanosecond reference-time units
    long latencyRefTimes = latencyMilliseconds * 10000;
    outputFormat = waveProvider.WaveFormat;
    // first attempt uses the WaveFormat from the WaveStream
    WaveFormatExtensible closestSampleRateFormat;
    if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
    {
        // Use closestSampleRateFormat (in shared mode, it usually equals audioClient.MixFormat)
        // See documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
        // They say: "In shared mode, the audio engine always supports the mix format"
        // The MixFormat is more likely to be a WaveFormatExtensible.
        if (closestSampleRateFormat == null)
        {
            // the device suggested nothing; try the engine's mix format first
            WaveFormat correctSampleRateFormat = audioClient.MixFormat;
            if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
            {
                // Candidate fallbacks at the source sample rate, best quality first
                WaveFormatExtensible[] bestToWorstFormats = {
                    new WaveFormatExtensible(
                        outputFormat.SampleRate, 32,
                        outputFormat.Channels),
                    new WaveFormatExtensible(
                        outputFormat.SampleRate, 24,
                        outputFormat.Channels),
                    new WaveFormatExtensible(
                        outputFormat.SampleRate, 16,
                        outputFormat.Channels),
                };
                // Check from best format to worst format (Float32, Int24, Int16)
                for (int i = 0; i < bestToWorstFormats.Length; i++)
                {
                    correctSampleRateFormat = bestToWorstFormats[i];
                    if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                    {
                        break;
                    }
                    // reset so a failed last iteration leaves null, not the 16-bit candidate
                    correctSampleRateFormat = null;
                }
                // If still null, then test on the PCM16, 2 channels
                if (correctSampleRateFormat == null)
                {
                    // Last Last Last Chance (Thanks WASAPI)
                    correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
                    if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                    {
                        throw new NotSupportedException("Can't find a supported format to use");
                    }
                }
            }
            outputFormat = correctSampleRateFormat;
        }
        else
        {
            outputFormat = closestSampleRateFormat;
        }
        // just check that we can make it.
        // source format differs from device format, so route playback through the resampler
        resamplerDmoStream = new ResamplerDmoStream(waveProvider, outputFormat);
        sourceProvider = resamplerDmoStream;
    }
    else
    {
        sourceProvider = waveProvider;
    }
    // Normal setup for both sharedMode
    audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0, outputFormat, Guid.Empty);
    // Get the RenderClient
    renderClient = audioClient.AudioRenderClient;
    // set up the read buffer
    bufferFrameCount = audioClient.BufferSize;
    bytesPerFrame = outputFormat.Channels * outputFormat.BitsPerSample / 8;
    readBuffer = new byte[bufferFrameCount * bytesPerFrame];
}
/// <summary>
/// Background playback thread. Optionally wraps the source in a DMO resampler,
/// pre-fills the WASAPI render buffer, then keeps it topped up until playback
/// is stopped. Any exception is captured and reported via RaisePlaybackStopped
/// instead of escaping the thread.
/// </summary>
private void PlayThread()
{
    ResamplerDmoStream resamplerDmoStream = null;
    IWaveProvider playbackProvider = this.sourceProvider;
    Exception exception = null;
    try
    {
        if (this.dmoResamplerNeeded)
        {
            resamplerDmoStream = new ResamplerDmoStream(this.sourceProvider, this.outputFormat);
            playbackProvider = resamplerDmoStream;
        }
        // fill a whole buffer before starting the audio client
        this.bufferFrameCount = this.audioClient.BufferSize;
        this.bytesPerFrame = this.outputFormat.Channels * this.outputFormat.BitsPerSample / 8;
        this.readBuffer = new byte[this.bufferFrameCount * this.bytesPerFrame];
        this.FillBuffer(playbackProvider, this.bufferFrameCount);
        WaitHandle[] waitHandles = new WaitHandle[] { this.frameEventWaitHandle };
        this.audioClient.Start();
        while (this.playbackState != PlaybackState.Stopped)
        {
            // event sync: wait for the buffer-ready event; otherwise sleep half latency
            int waitResult = 0;
            if (this.isUsingEventSync)
            {
                waitResult = WaitHandle.WaitAny(waitHandles, 3 * this.latencyMilliseconds, false);
            }
            else
            {
                Thread.Sleep(this.latencyMilliseconds / 2);
            }
            // FIX: use WaitHandle.WaitTimeout instead of the decompiled magic number 258
            if (this.playbackState == PlaybackState.Playing && waitResult != WaitHandle.WaitTimeout)
            {
                // See how much buffer space is available.
                int numFramesPadding;
                if (this.isUsingEventSync)
                {
                    // in exclusive event-driven mode the whole buffer is always refilled
                    numFramesPadding = (this.shareMode == AudioClientShareMode.Shared) ? this.audioClient.CurrentPadding : 0;
                }
                else
                {
                    numFramesPadding = this.audioClient.CurrentPadding;
                }
                int numFramesAvailable = this.bufferFrameCount - numFramesPadding;
                // skip tiny refills (fewer than ~10 frames)
                if (numFramesAvailable > 10)
                {
                    this.FillBuffer(playbackProvider, numFramesAvailable);
                }
            }
        }
        // let the last queued audio play out before stopping the client
        Thread.Sleep(this.latencyMilliseconds / 2);
        this.audioClient.Stop();
        if (this.playbackState == PlaybackState.Stopped)
        {
            this.audioClient.Reset();
        }
    }
    catch (Exception ex)
    {
        // captured and reported below rather than crashing the thread
        exception = ex;
    }
    finally
    {
        if (resamplerDmoStream != null)
        {
            resamplerDmoStream.Dispose();
        }
        this.RaisePlaybackStopped(exception);
    }
}
/// <summary>
/// Background playback thread with timing instrumentation. Optionally wraps
/// the source in a DMO resampler, pre-fills the WASAPI render buffer, then
/// keeps it topped up until playback is stopped, logging fill timings.
/// Any exception is captured and reported via RaisePlaybackStopped.
/// </summary>
private void PlayThread()
{
    ResamplerDmoStream resamplerDmoStream = null;
    IWaveProvider playbackProvider = sourceProvider;
    Exception exception = null;
    Stopwatch sw = new Stopwatch();
    sw.Start();
    // raise priority to reduce the chance of render-buffer under-runs
    Thread.CurrentThread.Priority = ThreadPriority.Highest;
    var lastOutput = sw.ElapsedMilliseconds;
    try
    {
        if (dmoResamplerNeeded)
        {
            resamplerDmoStream = new ResamplerDmoStream(sourceProvider, outputFormat);
            playbackProvider = resamplerDmoStream;
        }
        // fill a whole buffer
        bufferFrameCount = audioClient.BufferSize;
        bytesPerFrame = outputFormat.Channels * outputFormat.BitsPerSample / 8;
        readBuffer = new byte[bufferFrameCount * bytesPerFrame];
        FillBuffer(playbackProvider, bufferFrameCount);
        // Create WaitHandle for sync
        var waitHandles = new WaitHandle[] { frameEventWaitHandle };
        audioClient.Start();
        started = true;
        while (playbackState != PlaybackState.Stopped)
        {
            // If using Event Sync, Wait for notification from AudioClient or Sleep half latency
            int indexHandle = 0;
            if (isUsingEventSync)
            {
                indexHandle = WaitHandle.WaitAny(waitHandles, 3 * latencyMilliseconds, false);
            }
            else
            {
                Thread.Sleep(latencyMilliseconds / 2);
            }
            var now = sw.ElapsedMilliseconds;
            // If still playing and notification is ok
            if (playbackState == PlaybackState.Playing && indexHandle != WaitHandle.WaitTimeout)
            {
                // See how much buffer space is available.
                int numFramesPadding;
                if (isUsingEventSync)
                {
                    // In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize
                    numFramesPadding = (shareMode == AudioClientShareMode.Shared) ?
                        audioClient.CurrentPadding : 0;
                }
                else
                {
                    numFramesPadding = audioClient.CurrentPadding;
                }
                int numFramesAvailable = bufferFrameCount - numFramesPadding;
                if (numFramesAvailable > 10) // see https://naudio.codeplex.com/workitem/16363
                {
                    // FIX: Debug.WriteLine instead of Console.WriteLine — console I/O
                    // can block the real-time render thread and cause audible glitches,
                    // and Debug calls are stripped from release builds entirely.
                    Debug.WriteLine("time:" + now + ", count:" + numFramesAvailable);
                    FillBuffer(playbackProvider, numFramesAvailable);
                    Debug.WriteLine("fill buffer time:" + now + ", count:" + numFramesAvailable);
                    lastOutput = now;
                }
                else
                {
                    if (now - lastOutput > 15)
                    {
                        Debug.WriteLine("not finished:" + (now - lastOutput) + ", CurrentPadding:" + audioClient.CurrentPadding);
                    }
                }
            }
            if (now - lastOutput > 15)
            {
                Debug.WriteLine("");
            }
        }
        // let the last queued audio play out before stopping the client
        Thread.Sleep(latencyMilliseconds / 2);
        audioClient.Stop();
        if (playbackState == PlaybackState.Stopped)
        {
            audioClient.Reset();
        }
    }
    catch (Exception e)
    {
        // captured and reported below rather than crashing the thread
        exception = e;
    }
    finally
    {
        if (resamplerDmoStream != null)
        {
            resamplerDmoStream.Dispose();
        }
        RaisePlaybackStopped(exception);
    }
}