/// <summary>
/// Adds a wave stream to the mixer, resampling it first when its format does not
/// match the mixer format, and wrapping it in a <see cref="WaveChannel32"/> for
/// per-stream volume control.
/// </summary>
/// <param name="waveStream">The source stream to mix in.</param>
/// <param name="volume">Initial channel volume (1.0 = unity gain).</param>
/// <returns>The <see cref="WaveChannel32"/> wrapper that was added to the mixer.</returns>
public WaveChannel32 AddInputStream(WaveStream waveStream, float volume = 1f)
{
    lock (_syncObject)
    {
        var rateConvertedStream = waveStream;
        if (NeedSampleRateConversion(waveStream.WaveFormat))
        {
            // Convert to the mixer's own format before wrapping.
            rateConvertedStream = new ResamplerDmoStream(waveStream, _waveStream.WaveFormat);
        }
        var addedStream = new WaveChannel32(rateConvertedStream, volume, 0f);
        if (_channels.Count == 0 && _musicChannel == null)
        {
            // The first stream is always the music stream.
            _musicChannel = addedStream;
        }
        else
        {
            if (!_channels.ContainsKey(waveStream))
            {
                _channels.Add(waveStream, addedStream);
            }
            // NOTE(review): if the key already exists, the new addedStream is still
            // mixed in below but never tracked in _channels — confirm intentional,
            // otherwise it cannot be removed or disposed later.
        }
        _waveStream.AddInputStream(addedStream);
        return(addedStream);
    }
}
/// <summary>
/// Converts a cached AAC file to a WAV file with the same sample rate, channel
/// count and bit depth, for easier playback through NAudio.
/// Technique from: https://stackoverflow.com/questions/13486747/convert-aac-to-wav
/// </summary>
/// <param name="songTitle">File name (without extension) inside the cache directory.</param>
/// <param name="cacheDir">Cache directory path (expected to end with a separator).</param>
/// <returns>The path of the generated WAV file, or null on failure.</returns>
public string ConvertAACToWAV(string songTitle, string cacheDir)
{
    try
    {
        string aacPath = cacheDir + songTitle + ".aac";
        string wavPath = cacheDir + songTitle + ".wav";

        // Decode the AAC-encoded file via Media Foundation.
        using (MediaFoundationReader aacReader = new MediaFoundationReader(aacPath))
        {
            // Resample to PCM, keeping the source rate / bits / channels.
            WaveFormat pcmFormat = new WaveFormat(
                aacReader.WaveFormat.SampleRate,
                aacReader.WaveFormat.BitsPerSample,
                aacReader.WaveFormat.Channels);

            using (ResamplerDmoStream pcmStream = new ResamplerDmoStream(aacReader, pcmFormat))
            using (WaveFileWriter wavWriter = new WaveFileWriter(wavPath, pcmStream.WaveFormat))
            {
                pcmStream.CopyTo(wavWriter);
            }
        }

        return wavPath;
    }
    catch (Exception error)
    {
        _logs.logMessage("Error", "downloader.ConvertAACToWAV", error.ToString(), "system");
        return null;
    }
}
/// <summary>
/// Resamples the given audio file to 48 kHz 16-bit PCM (channel count taken from
/// the Discord audio service config), writes it to a temporary WAV file and then
/// streams it to the given voice channel.
/// </summary>
/// <param name="mp4Conv">Path of the source audio file to convert.</param>
/// <param name="voice">Voice channel the converted audio is sent to.</param>
public static async Task wavConvert(string mp4Conv, Channel voice)
{
    var channelCount = discord.GetService <AudioService>().Config.Channels;
    var targetFormat = new WaveFormat(48000, 16, channelCount);

    // Decode the source via Media Foundation, resample to the target PCM
    // format, and write the result out as a temporary WAV file.
    using (MediaFoundationReader sourceReader = new MediaFoundationReader(mp4Conv))
    using (ResamplerDmoStream pcmStream = new ResamplerDmoStream(sourceReader, targetFormat))
    using (WaveFileWriter wavWriter = new WaveFileWriter(@"C:\music\tempaud.wav", pcmStream.WaveFormat))
    {
        pcmStream.CopyTo(wavWriter);
    }

    await SendAudio(@"C:\music\tempaud.wav", voice);
}
/// <summary>
/// Loads an entire audio file into memory as 32-bit float samples.
/// Files that are not already 44.1 kHz stereo are converted to
/// 44.1 kHz IEEE-float stereo via the DMO resampler.
/// </summary>
/// <param name="audioFileName">Path of the audio file to load.</param>
public CachedSound(string audioFileName)
{
    using (var audioFileReader = new AudioFileReader(audioFileName))
    {
        this.WaveFormat = audioFileReader.WaveFormat;
        if (this.WaveFormat.SampleRate != 44100 || this.WaveFormat.Channels != 2)
        {
            using (var resampled = new ResamplerDmoStream(audioFileReader, WaveFormat.CreateIeeeFloatWaveFormat(44100, 2)))
            {
                ISampleProvider resampledSampleProvider = resampled.ToSampleProvider();
                this.WaveFormat = resampledSampleProvider.WaveFormat;
                // Length is in bytes; divide by 4 (sizeof(float)) for the sample
                // count, matching the capacity hint used in the non-resampled path.
                this.AudioData = ReadAllSamples(resampledSampleProvider, (int)(resampled.Length / 4));
            }
        }
        else
        {
            // Length is in bytes; divide by 4 (sizeof(float)) for the sample count.
            this.AudioData = ReadAllSamples(audioFileReader, (int)(audioFileReader.Length / 4));
        }
    }
}

// Drains every sample from the provider into a single array, reading roughly
// one second of audio per iteration.
private static float[] ReadAllSamples(ISampleProvider provider, int capacityHint)
{
    var wholeFile = new List<float>(Math.Max(0, capacityHint));
    var readBuffer = new float[provider.WaveFormat.SampleRate * provider.WaveFormat.Channels];
    int samplesRead;
    while ((samplesRead = provider.Read(readBuffer, 0, readBuffer.Length)) > 0)
    {
        wholeFile.AddRange(readBuffer.Take(samplesRead));
    }
    return wholeFile.ToArray();
}
/// <summary>
/// Plays a sound effect once, reusing a previously-finished stream for the same
/// file when one is available, otherwise building a fresh playback chain.
/// </summary>
/// <param name="fileName">Path of the SFX file; null is silently ignored.</param>
/// <param name="format">Audio format used when preloading the file.</param>
public void Play([CanBeNull] string fileName, [NotNull] IAudioFormat format)
{
    if (fileName == null)
    {
        return;
    }
    fileName = Path.GetFullPath(fileName);
    PreloadSfx(fileName, format);
    // Windows paths are case-insensitive, so normalize the cache key there.
    var key = Environment.OSVersion.Platform == PlatformID.Win32NT ? fileName.ToLowerInvariant() : fileName;
    var currentTime = _audioManager.MixerTime;
    var free = GetFreeStream(key);
    if (free.OffsetStream != null)
    {
        // Reuse an idle stream: rewind it to "now" and re-add it to the mixer.
        free.OffsetStream.StartTime = currentTime;
        free.OffsetStream.CurrentTime = currentTime;
        _playingStates[free.Index] = true;
        _audioManager.AddInputStream(free.OffsetStream, Volume);
        return;
    }
    // No reusable stream: build a new chain from the preloaded raw data.
    var(data, waveFormat) = _preloaded[key];
    var source = new RawSourceWaveStream(data, 0, data.Length, waveFormat);
    // Offset requires 16-bit integer input.
    WaveStream toOffset;
    if (AudioHelper.NeedsFormatConversionFrom(waveFormat, RequiredFormat))
    {
        toOffset = new ResamplerDmoStream(source, RequiredFormat);
    }
    else
    {
        toOffset = source;
    }
    var offset = new WaveOffsetStream(toOffset, currentTime, TimeSpan.Zero, toOffset.TotalTime);
    _audioManager.AddInputStream(offset, Volume);
    lock (_queueLock)
    {
        _playingWaveStreams.Add((key, offset, toOffset, source));
    }
    _playingStates.Add(true);
}
/// <summary>
/// Dispose
/// </summary>
/// <param name="disposing">True if disposing (not from finalizer)</param>
public void Dispose(bool disposing)
{
    // Drop any buffered audio in both code paths.
    buf.ClearBuffer();
    if (!windowsN)
    {
        // Only the DMO resampler holds unmanaged resources; the Windows N
        // (WDL) path is fully managed and needs nothing beyond the clear above.
        // (The original code cleared the buffer a second time here — redundant.)
        dmoResampler?.Dispose();
        dmoResampler = null;
    }
    // NOTE(review): 'disposing' is currently unused; every call path behaves
    // as a managed dispose. Confirm no finalizer relies on this parameter.
}
// Verifies a ResamplerDmoStream can be constructed over a silent source and
// reports a longer length (higher rate, more channels, wider samples) while
// both streams start at position zero.
public void CanCreateResamplerStream()
{
    using (WaveStream source = new NullWaveStream(new WaveFormat(44100, 16, 1), 1000))
    using (ResamplerDmoStream converted = new ResamplerDmoStream(source, WaveFormat.CreateIeeeFloatWaveFormat(48000, 2)))
    {
        Assert.Greater(converted.Length, source.Length, "Length");
        Assert.AreEqual(0, source.Position, "Position");
        Assert.AreEqual(0, converted.Position, "Position");
    }
}
/// <summary>
/// Starts looped playback of a sound effect, keyed by <paramref name="state"/>
/// so it can be stopped later; a second call with the same state is a no-op.
/// </summary>
/// <param name="fileName">Path of the SFX file; null is silently ignored.</param>
/// <param name="format">Audio format used when preloading the file.</param>
/// <param name="state">Caller-supplied identity object for this looped sound.</param>
public void PlayLooped([CanBeNull] string fileName, [NotNull] IAudioFormat format, [NotNull] object state)
{
    if (fileName == null)
    {
        return;
    }
    if (state == null)
    {
        throw new ArgumentNullException(nameof(state));
    }
    if (_loopedStreams.ContainsKey(state))
    {
        // Already looping for this state object.
        return;
    }
    fileName = Path.GetFullPath(fileName);
    PreloadSfx(fileName, format);
    // Windows paths are case-insensitive, so normalize the cache key there.
    var key = Environment.OSVersion.Platform == PlatformID.Win32NT ? fileName.ToLowerInvariant() : fileName;
    var currentTime = _audioManager.MixerTime;
    var(data, waveFormat) = _preloaded[key];
    var source = new RawSourceWaveStream(data, 0, data.Length, waveFormat);
    var looped = new LoopedWaveStream(source, LoopedWaveStream.DefaultMaxLoops);
    // Offset requires 16-bit integer input.
    WaveStream toOffset;
    if (AudioHelper.NeedsFormatConversionFrom(waveFormat, RequiredFormat))
    {
        toOffset = new ResamplerDmoStream(looped, RequiredFormat);
    }
    else
    {
        toOffset = looped;
    }
    var offset = new WaveOffsetStream(toOffset, currentTime, TimeSpan.Zero, toOffset.TotalTime);
    _audioManager.AddInputStream(offset, Volume);
    lock (_queueLock)
    {
        // Track the whole chain so every piece can be disposed on stop.
        _loopedStreams[state] = (offset, toOffset, looped, source);
    }
}
/// <summary>
/// Releases the audio client (stopping playback first), the render client
/// reference, and the DMO resampler stream, if any.
/// </summary>
public void Dispose()
{
    if (audioClient != null)
    {
        Stop();
        audioClient.Dispose();
        audioClient = null;
        renderClient = null;
    }

    resamplerDmoStream?.Dispose();
    resamplerDmoStream = null;
}
/// <summary>
/// Concatenates several audio files into one WAV file. All inputs must share
/// the wave format of the first file, otherwise an exception is thrown.
/// </summary>
/// <param name="outputFile">Path of the WAV file to create.</param>
/// <param name="sourceFiles">Input files, decoded via Media Foundation.</param>
/// <exception cref="InvalidOperationException">A later file's format differs from the first.</exception>
public static void Concatenate(string outputFile, IEnumerable <string> sourceFiles)
{
    byte[] buffer = new byte[1024];
    WaveFileWriter waveFileWriter = null;
    try
    {
        foreach (string sourceFile in sourceFiles)
        {
            using (var reader = new MediaFoundationReader(sourceFile))
            {
                // Resample to PCM keeping the source rate / bits / channels.
                using (var resampledReader = new ResamplerDmoStream(reader,
                                                                    new WaveFormat(
                                                                        reader.WaveFormat.SampleRate,
                                                                        reader.WaveFormat.BitsPerSample,
                                                                        reader.WaveFormat.Channels)))
                {
                    if (waveFileWriter == null)
                    {
                        // First file fixes the output format.
                        // NOTE(review): the writer is created with reader.WaveFormat
                        // but data is written from resampledReader — confirm the two
                        // formats always match here.
                        waveFileWriter = new WaveFileWriter(outputFile, reader.WaveFormat);
                    }
                    else
                    {
                        if (!reader.WaveFormat.Equals(waveFileWriter.WaveFormat))
                        {
                            throw new InvalidOperationException(
                                      "Can't concatenate WAV Files that don't share the same format");
                        }
                    }

                    // Stream the resampled bytes into the shared writer.
                    // NOTE(review): WriteData is the legacy name for Write in NAudio.
                    int read;
                    while ((read = resampledReader.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        waveFileWriter.WriteData(buffer, 0, read);
                    }
                }
            }
        }
    }
    finally
    {
        if (waveFileWriter != null)
        {
            waveFileWriter.Dispose();
        }
    }
}
// Verifies that a short block (~10 ms) can be read from a ResamplerDmoStream
// wrapped around a silent 20-second source.
public void CanReadABlockFromResamplerStream()
{
    WaveFormat sourceFormat = new WaveFormat(44100, 16, 1);
    using (WaveStream source = new NullWaveStream(sourceFormat, sourceFormat.AverageBytesPerSecond * 20))
    using (ResamplerDmoStream converted = new ResamplerDmoStream(source, WaveFormat.CreateIeeeFloatWaveFormat(48000, 2)))
    {
        // Ask for roughly 10 ms of converted audio.
        int blockSize = converted.WaveFormat.AverageBytesPerSecond / 100;
        byte[] block = new byte[blockSize];
        int bytesRead = converted.Read(block, 0, blockSize);
        Assert.That(bytesRead > 0, "Bytes Read");
    }
}
/// <summary>
/// Creates a resampler from <paramref name="input"/> to <paramref name="output"/>.
/// On Windows N editions (detected at runtime) the managed WDL resampler is
/// used; otherwise the DMO resampler is used.
/// </summary>
public EventDrivenResampler(WaveFormat input, WaveFormat output)
{
    windowsN = DetectWindowsN();
    this.input = input;
    this.output = output;

    buf = new BufferedWaveProvider(input) { ReadFully = false };

    if (windowsN)
    {
        // Managed fallback: WDL resampler, mixed down to 16-bit mono.
        mediaFoundationResampler = new WdlResamplingSampleProvider(buf.ToSampleProvider(), output.SampleRate);
        waveOut = mediaFoundationResampler.ToMono().ToWaveProvider16();
    }
    else
    {
        dmoResampler = new ResamplerDmoStream(buf, output);
    }
}
// Drains an entire 20-second silent stream through the DMO resampler in
// ~10 ms chunks, verifying the conversion runs to completion without error.
private void ResampleAWholeStream(WaveFormat inputFormat, WaveFormat outputFormat)
{
    using (WaveStream source = new NullWaveStream(inputFormat, inputFormat.AverageBytesPerSecond * 20))
    using (ResamplerDmoStream converted = new ResamplerDmoStream(source, outputFormat))
    {
        // Read roughly 10 ms of converted audio per iteration.
        int chunkSize = converted.WaveFormat.AverageBytesPerSecond / 100;
        byte[] chunk = new byte[chunkSize];
        int total = 0;
        int bytesRead;
        do
        {
            bytesRead = converted.Read(chunk, 0, chunkSize);
            total += bytesRead;
        } while (bytesRead > 0);
    }
}
/// <summary>
/// Wraps a wave stream as a music track: converts it to the required format
/// when necessary and prepares an offset stream for scheduled playback.
/// </summary>
internal Music(AudioManager audioManager, [NotNull] WaveStream waveStream, float volume, bool externalWaveStream)
{
    _audioManager = audioManager;
    _baseWaveStream = waveStream;
    _isExternalWaveStream = externalWaveStream;

    // Resample only when the source format differs from the required one.
    var needsConversion = AudioHelper.NeedsFormatConversionFrom(waveStream.WaveFormat, RequiredFormat);
    _formatConvertedStream = needsConversion
        ? (WaveStream)new ResamplerDmoStream(waveStream, RequiredFormat)
        : waveStream;

    OffsetStream = new WaveOffsetStream(_formatConvertedStream);
    IsStopped = true;
    CachedVolume = volume;
}
// Demonstrates a round trip: encode an MP3 to AAC via Media Foundation,
// then decode the AAC back to a PCM WAV file.
static void Main(string[] args)
{
    // Step 1: encode the source audio (any supported format) to AAC.
    using (MediaFoundationReader sourceReader = new MediaFoundationReader(@"d:\source.mp3"))
    {
        MediaFoundationEncoder.EncodeToAac(sourceReader, @"D:\test.mp4");
    }

    // Step 2: decode the AAC file back to a PCM WAV with the same
    // sample rate, bit depth and channel count.
    using (MediaFoundationReader aacReader = new MediaFoundationReader(@"D:\test.mp4"))
    {
        WaveFormat pcmFormat = new WaveFormat(
            aacReader.WaveFormat.SampleRate,
            aacReader.WaveFormat.BitsPerSample,
            aacReader.WaveFormat.Channels);

        using (ResamplerDmoStream pcmStream = new ResamplerDmoStream(aacReader, pcmFormat))
        using (WaveFileWriter wavWriter = new WaveFileWriter(@"d:\test.wav", pcmStream.WaveFormat))
        {
            pcmStream.CopyTo(wavWriter);
        }
    }
}
/// <summary>
/// Converts an MP3 or M4A file to a WAV file next to the original (other
/// extensions are passed through untouched) and reports the resulting file
/// path together with its total duration.
/// </summary>
/// <param name="file">Path of the input audio file.</param>
/// <param name="token">Cancellation token, checked before each expensive step.</param>
/// <returns>Tuple of (output file path, total duration).</returns>
private async Task <Tuple <string, TimeSpan> > convertToWav(string file, CancellationToken token)
{
    // Build "<directory + separator><name-without-extension>" for the output path.
    string fileWithoutExtension = Path.Combine(Path.GetFullPath(file).Replace(Path.GetFileName(file), ""), Path.GetFileNameWithoutExtension(file));
    string outFile = file;

    // Ordinal case-insensitive comparison instead of ToLower().EndsWith():
    // ToLower() is culture-sensitive and can misbehave (e.g. Turkish dotless I).
    if (file.EndsWith(".mp3", StringComparison.OrdinalIgnoreCase))
    {
        outFile = fileWithoutExtension + ".wav";
        token.ThrowIfCancellationRequested();
        using (Mp3FileReader reader = new Mp3FileReader(file))
        {
            WaveFileWriter.CreateWaveFile(outFile, reader);
        }
    }
    else if (file.EndsWith(".m4a", StringComparison.OrdinalIgnoreCase))
    {
        outFile = fileWithoutExtension + ".wav";
        token.ThrowIfCancellationRequested();
        using (MediaFoundationReader reader = new MediaFoundationReader(file))
        {
            // Resample to PCM keeping the source rate / bits / channels.
            using (ResamplerDmoStream resampledReader = new ResamplerDmoStream(reader, new WaveFormat(reader.WaveFormat.SampleRate, reader.WaveFormat.BitsPerSample, reader.WaveFormat.Channels)))
            using (WaveFileWriter waveWriter = new WaveFileWriter(outFile, resampledReader.WaveFormat))
            {
                resampledReader.CopyTo(waveWriter);
            }
        }
    }

    token.ThrowIfCancellationRequested();

    // Measure the duration of the (possibly converted) output file.
    TimeSpan totalTime;
    using (WaveFileReader outputReader = new WaveFileReader(outFile))
    {
        totalTime = outputReader.TotalTime;
    }

    // No awaited work remains; the original 'await Task.FromResult(...)' was redundant.
    return new Tuple <string, TimeSpan>(outFile, totalTime);
}
/// <summary>
/// Creates a resampler from <paramref name="input"/> to <paramref name="output"/>.
/// When <paramref name="windowsN"/> is true (no Media Feature Pack) the managed
/// WDL resampler is used; otherwise the DMO resampler is used.
/// </summary>
public EventDrivenResampler(bool windowsN, WaveFormat input, WaveFormat output)
{
    this.windowsN = windowsN;
    this.input = input;
    this.output = output;

    buf = new BufferedWaveProvider(input) { ReadFully = false };

    // Widening the bit depth roughly doubles the output byte count.
    if (output.BitsPerSample > input.BitsPerSample)
    {
        bufferMultiplier = 2;
    }

    if (windowsN)
    {
        // Managed fallback: WDL resampler, mixed down to 16-bit mono.
        mediaFoundationResampler = new WdlResamplingSampleProvider(buf.ToSampleProvider(), output.SampleRate);
        waveOut = mediaFoundationResampler.ToMono().ToWaveProvider16();
    }
    else
    {
        dmoResampler = new ResamplerDmoStream(buf, output);
    }
}
/// <summary>
/// Streams an audio file to the guild's voice output, resampled to
/// 48 kHz 16-bit stereo (the Discord voice format).
/// </summary>
/// <param name="guild">Guild whose audio container is used.</param>
/// <param name="path">Path of the audio file to play.</param>
private async Task SendAsync(IGuild guild, string path)
{
    _container.TryGetValue(guild.Id, out AudioContainer container);
    var audioOutStream = container.AudioOutStream;
    var token = container.CancellationTokenSource.Token;
    var format = new WaveFormat(48000, 16, 2);
    using var reader = new MediaFoundationReader(path);
    using var resamplerDmo = new ResamplerDmoStream(reader, format);
    try
    {
        // Expose the stream so other code can observe/stop the current playback.
        container.ResamplerDmoStream = resamplerDmo;
        // NOTE(review): awaiting the ContinueWith continuation (not the copy
        // itself) means cancellation and copy errors complete silently rather
        // than throwing here — confirm this swallow-everything behavior is
        // intentional.
        await resamplerDmo.CopyToAsync(audioOutStream, token)
        .ContinueWith(t => { return; });
    }
    finally
    {
        // Always flush what was written and arm a fresh token for the next play.
        await audioOutStream.FlushAsync();
        container.CancellationTokenSource = new CancellationTokenSource();
    }
}
/// <summary>
/// Converts an M4A/AAC file to a WAV file with the same name in the same
/// directory, keeping the source sample rate, bit depth and channel count.
/// </summary>
/// <param name="fullFilePath">Full path of the source file.</param>
/// <returns>The path of the generated WAV file.</returns>
private static string Convertm4aWav(string fullFilePath)
{
    var wavFileName = String.Format(@"{0}\{1}{2}", Path.GetDirectoryName(fullFilePath), Path.GetFileNameWithoutExtension(fullFilePath), ".wav");

    // Decode the AAC-encoded file via Media Foundation.
    using (MediaFoundationReader reader = new MediaFoundationReader(fullFilePath))
    {
        // Resample to PCM with the same sample rate, channels and bits per sample.
        using (ResamplerDmoStream resampledReader = new ResamplerDmoStream(reader, new WaveFormat(reader.WaveFormat.SampleRate, reader.WaveFormat.BitsPerSample, reader.WaveFormat.Channels)))
        {
            using (WaveFileWriter waveWriter = new WaveFileWriter(wavFileName, resampledReader.WaveFormat))
            {
                // Copy all samples, then flush before disposal closes the file.
                resampledReader.CopyTo(waveWriter);
                waveWriter.Flush(); // stray empty statement (';;') removed
            }
        }
    }

    return wavFileName;
}
/// <summary>
/// WASAPI playback worker: optionally wraps the source in a DMO resampler,
/// pre-fills the device buffer, then keeps it topped up (event-driven or by
/// polling) until playback stops, always raising PlaybackStopped at the end.
/// </summary>
private void PlayThread()
{
    ResamplerDmoStream resamplerDmoStream = null;
    IWaveProvider playbackProvider = this.sourceProvider;
    Exception exception = null;
    try
    {
        if (this.dmoResamplerNeeded)
        {
            // Device rejected the source format; convert on the fly.
            resamplerDmoStream = new ResamplerDmoStream(sourceProvider, outputFormat);
            playbackProvider = resamplerDmoStream;
        }

        // fill a whole buffer
        bufferFrameCount = audioClient.BufferSize;
        bytesPerFrame = outputFormat.Channels * outputFormat.BitsPerSample / 8;
        readBuffer = new byte[bufferFrameCount * bytesPerFrame];
        FillBuffer(playbackProvider, bufferFrameCount);

        // Create WaitHandle for sync
        var waitHandles = new WaitHandle[] { frameEventWaitHandle };

        audioClient.Start();

        while (playbackState != PlaybackState.Stopped)
        {
            // If using Event Sync, Wait for notification from AudioClient or Sleep half latency
            int indexHandle = 0;
            if (isUsingEventSync)
            {
                indexHandle = WaitHandle.WaitAny(waitHandles, 3 * latencyMilliseconds, false);
            }
            else
            {
                Thread.Sleep(latencyMilliseconds / 2);
            }

            // If still playing and notification is ok
            if (playbackState == PlaybackState.Playing && indexHandle != WaitHandle.WaitTimeout)
            {
                // See how much buffer space is available.
                int numFramesPadding = 0;
                if (isUsingEventSync)
                {
                    // In exclusive mode, always ask the max = bufferFrameCount = audioClient.BufferSize
                    numFramesPadding = (shareMode == AudioClientShareMode.Shared) ? audioClient.CurrentPadding : 0;
                }
                else
                {
                    numFramesPadding = audioClient.CurrentPadding;
                }
                int numFramesAvailable = bufferFrameCount - numFramesPadding;
                if (numFramesAvailable > 10) // see https://naudio.codeplex.com/workitem/16363
                {
                    FillBuffer(playbackProvider, numFramesAvailable);
                }
            }
        }

        // Let the final buffered audio drain before stopping the client.
        Thread.Sleep(latencyMilliseconds / 2);
        audioClient.Stop();
        if (playbackState == PlaybackState.Stopped)
        {
            audioClient.Reset();
        }
    }
    catch (Exception e)
    {
        exception = e;
    }
    finally
    {
        // Dispose the resampler (if created) and report completion/failure.
        if (resamplerDmoStream != null)
        {
            resamplerDmoStream.Dispose();
        }
        RaisePlaybackStopped(exception);
    }
}
/// <summary>
/// Decodes an MP3 or AAC byte buffer to raw PCM by round-tripping it through
/// temporary files and a Media Foundation + DMO resampler pipeline.
/// </summary>
/// <param name="audioType">MIME type of the input ("audio/mpeg", "audio/aac", ...).</param>
/// <param name="input">Encoded audio bytes.</param>
/// <returns>Raw PCM bytes (WAV payload without header), or null on failure.</returns>
public byte[] Decode(string audioType, byte[] input)
{
    byte[] output = null;
    _calls++;
    lock (_decoderLock)
    {
        // Map the MIME type to a file extension Media Foundation can sniff.
        string fileType = "";
        switch (audioType)
        {
        case "audio/mpg":
        case "audio/mpeg":
            fileType = "mp3";
            break;

        case "audio/aac":
        case "audio/aacp":
            fileType = "aac";
            break;
        }
        try
        {
            // Rotate among 10 input temp files (_calls % 10); single shared output file.
            string _inFile = PathUtils.GetTempFilePath(string.Format("opm-dec-in-{1}.{0}", fileType, _calls % 10));
            string _outFile = PathUtils.GetTempFilePath(string.Format("opm-dec-out.wav"));
            DeleteFile(_outFile);
            DeleteFile(_inFile);
            IO.File.WriteAllBytes(_inFile, input);

            // Target: CD sample rate, stereo PCM.
            WaveFormat wf = new WaveFormat(WaveFormatEx.Cdda.nSamplesPerSec, 2);
            using (MediaFoundationReader mfr = new MediaFoundationReader(_inFile))
                using (ResamplerDmoStream res = new ResamplerDmoStream(mfr, wf))
                    using (WaveFileWriter wav = new WaveFileWriter(_outFile, wf))
                    {
                        res.CopyTo(wav);
                        wav.Close();
                        res.Close();
                        mfr.Close();
                    }

            if (IO.File.Exists(_outFile))
            {
                byte[] outWav = IO.File.ReadAllBytes(_outFile);
                if (outWav.Length > 44)
                {
                    // Strip the WAV header.
                    // NOTE(review): assumes a canonical 44-byte header; writers can
                    // emit extra chunks — confirm this holds for WaveFileWriter output.
                    output = new byte[outWav.Length];
                    Array.Copy(outWav, 44, output, 0, outWav.Length - 44);
                }
            }
        }
        catch (Exception ex)
        {
            Logger.LogException(ex);
        }
    }
    return(output);
}
/// <summary>
/// Initialize for playing the specified wave stream
/// </summary>
/// <param name="waveProvider">IWaveProvider to play</param>
public void Init(IWaveProvider waveProvider)
{
    // Latency is specified to WASAPI in 100-ns reference-time units.
    long latencyRefTimes = latencyMilliseconds * 10000;

    outputFormat = waveProvider.WaveFormat;
    // first attempt uses the WaveFormat from the WaveStream
    WaveFormatExtensible closestSampleRateFormat;
    if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
    {
        // Use closesSampleRateFormat (in sharedMode, it equals usualy to the audioClient.MixFormat)
        // See documentation : http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
        // They say : "In shared mode, the audio engine always supports the mix format"
        // The MixFormat is more likely to be a WaveFormatExtensible.
        if (closestSampleRateFormat == null)
        {
            WaveFormat correctSampleRateFormat = audioClient.MixFormat;

            if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
            {
                // Iterate from Worst to Best Format
                WaveFormatExtensible[] bestToWorstFormats =
                {
                    new WaveFormatExtensible(
                        outputFormat.SampleRate, 32,
                        outputFormat.Channels),
                    new WaveFormatExtensible(
                        outputFormat.SampleRate, 24,
                        outputFormat.Channels),
                    new WaveFormatExtensible(
                        outputFormat.SampleRate, 16,
                        outputFormat.Channels),
                };

                // Check from best Format to worst format ( Float32, Int24, Int16 )
                for (int i = 0; i < bestToWorstFormats.Length; i++)
                {
                    correctSampleRateFormat = bestToWorstFormats[i];
                    if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                    {
                        break;
                    }
                    correctSampleRateFormat = null;
                }

                // If still null, then test on the PCM16, 2 channels
                if (correctSampleRateFormat == null)
                {
                    // Last Last Last Chance (Thanks WASAPI)
                    correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
                    if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                    {
                        throw new NotSupportedException("Can't find a supported format to use");
                    }
                }
            }
            outputFormat = correctSampleRateFormat;
        }
        else
        {
            outputFormat = closestSampleRateFormat;
        }

        // just check that we can make it.
        // The source must be resampled into whatever format the device accepted.
        resamplerDmoStream = new ResamplerDmoStream(waveProvider, outputFormat);
        sourceProvider = resamplerDmoStream;
    }
    else
    {
        // Device accepts the source format directly; no resampling needed.
        sourceProvider = waveProvider;
    }

    // Normal setup for both sharedMode
    audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0,
                           outputFormat, Guid.Empty);

    // Get the RenderClient
    renderClient = audioClient.AudioRenderClient;

    // set up the read buffer
    bufferFrameCount = audioClient.BufferSize;
    bytesPerFrame = outputFormat.Channels * outputFormat.BitsPerSample / 8;
    readBuffer = new byte[bufferFrameCount * bytesPerFrame];
}