// Builds an IWaveProvider that converts 'stream' to the target 'format'.
// First tries the ACM-based WaveFormatConversionStream; when that fails
// (e.g. for IEEE-float sources) it falls back to a managed sample-provider
// chain: downmix to mono, WDL-resample, then emit 16-bit PCM.
private static IWaveProvider CreateWaveConverter(WaveFormat format, WaveStream stream)
{
    if (stream.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
    {
        try
        {
            return(new WaveFormatConversionStream(format, stream));
        }
        catch (Exception exc)
        {
            // This happens for example with floating point wave files.
            logger.Warn(exc, "'WaveFormatConversionStream' failed. Attempting to use sample provider conversion now.");
        }
    }

    // Try going through the sample provider and then remix/resample manually if necessary...
    ISampleProvider sampleProvider = stream.ToSampleProvider();

    if (sampleProvider.WaveFormat.Channels > 1)
    {
        // NOTE(review): StereoToMonoSampleProvider presumably expects exactly
        // two input channels; sources with more channels may fail here — confirm.
        sampleProvider = new StereoToMonoSampleProvider(sampleProvider);
    }

    if (sampleProvider.WaveFormat.SampleRate != format.SampleRate)
    {
        sampleProvider = new WdlResamplingSampleProvider(sampleProvider, format.SampleRate);
    }

    return(sampleProvider.ToWaveProvider16());
}
/// <summary>
/// Returns a stream of samples from the media at <c>_path</c>, resampled to
/// <paramref name="sampleRate"/> and converted to the requested channel count.
/// </summary>
/// <param name="sampleRate">Target sample rate in Hz.</param>
/// <param name="channels">Target channel count; must be 1 or 2.</param>
/// <exception cref="ArgumentOutOfRangeException">When channels is not 1 or 2.</exception>
public Stream GetSampleStream(int sampleRate, int channels)
{
    if (channels != 1 && channels != 2)
    {
        throw new ArgumentOutOfRangeException(nameof(channels), "Only 1 or 2 channels are allowed.");
    }

    // BUG FIX: the reader must NOT be wrapped in a 'using' block here. The
    // returned stream reads lazily through the sample chain, which pulls from
    // the reader; disposing the reader before the caller consumes the stream
    // made every subsequent Read hit a disposed source. Ownership of the
    // reader transfers to the returned wrapper.
    // NOTE(review): confirm SamplerWrapper (or its consumer) disposes the
    // underlying reader to avoid leaking the Media Foundation handle.
    var reader = new MediaFoundationReader(_path);

    // Resample first, then adjust the channel layout.
    ISampleProvider sampler = new WdlResamplingSampleProvider(reader.ToSampleProvider(), sampleRate);

    switch (channels)
    {
    case 1:
        sampler = sampler.ToMono();
        break;

    case 2:
        sampler = sampler.ToStereo();
        break;
    }

    return(new SamplerWrapper(sampler));
}
/// <summary>
/// Wires each provider into a buffered, stereo, rate-matched sample chain,
/// combines the chains into a single 16-bit wave provider (skipping the mixer
/// when there is only one source), and starts the background read loop.
/// </summary>
/// <param name="AudioProviders">Source providers to mix together.</param>
public MixedAudioProvider(params NAudioProvider[] AudioProviders)
{
    foreach (var provider in AudioProviders)
    {
        // Buffer incoming capture data; drop samples rather than grow unbounded.
        var bufferedProvider = new BufferedWaveProvider(provider.NAudioWaveFormat)
        {
            DiscardOnBufferOverflow = true
        };

        provider.DataAvailable += (S, E) =>
        {
            bufferedProvider.AddSamples(E.Buffer, 0, E.Length);
        };

        var sampleProvider = bufferedProvider.ToSampleProvider();

        var providerWf = provider.WaveFormat;

        // Mono to Stereo
        if (providerWf.Channels == 1)
        {
            sampleProvider = sampleProvider.ToStereo();
        }

        // Resample
        if (providerWf.SampleRate != WaveFormat.SampleRate)
        {
            sampleProvider = new WdlResamplingSampleProvider(sampleProvider, WaveFormat.SampleRate);
        }

        _audioProviders.Add(provider, sampleProvider);
    }

    if (_audioProviders.Count == 1)
    {
        // Single source: no mixing needed, convert straight to 16-bit PCM.
        _mixingWaveProvider = _audioProviders
                              .Values
                              .First()
                              .ToWaveProvider16();
    }
    else
    {
        var mixingSampleProvider = new MixingSampleProvider(_audioProviders.Values);

        // Screna expects 44.1 kHz 16-bit Stereo
        _mixingWaveProvider = mixingSampleProvider.ToWaveProvider16();
    }

    // Size the read buffer to hold ReadInterval milliseconds of output audio.
    var bufferSize = (int)
                     (
        (ReadInterval / 1000.0)
        * WaveFormat.SampleRate
        * WaveFormat.Channels
        * (WaveFormat.BitsPerSample / 8.0)
                     );

    _buffer = new byte[bufferSize];

    Task.Factory.StartNew(Loop, TaskCreationOptions.LongRunning);
}
// Wraps an SWAR wave as a playable mono stream, priming the ADPCM decoder
// state (and capturing the state at the loop point for looped waves).
public SwavStream(SWAR.Wave wave)
{
    m_Wave = wave;
    // PCM8 -> 8 bits per sample, everything else (PCM16/ADPCM) -> 16; mono.
    m_WaveFormat = new WaveFormat(wave.m_SampleRate, wave.m_WaveType == SWAR.WaveType.PCM8 ? 8 : 16, 1);
    m_Position = 0;
    if (m_Wave.m_WaveType == SWAR.WaveType.ADPCM)
    {
        // ADPCM header: bytes 0-1 = initial predictor, bytes 2-3 = step index.
        m_CurrPcm16Val = (short)m_Wave.m_TheWave.Read16(0);
        m_CurrIndex = m_Wave.m_TheWave.Read16(2);
        m_Position = 4;
        if (m_Wave.m_Loop)
        {
            // Decode up to the loop start purely to advance the decoder state,
            // then snapshot the predictor/index so looping can resume correctly.
            // NOTE(review): the buffer size assumes a fixed expansion factor of
            // 4 output bytes per input byte past the header — confirm against
            // this class's Read() implementation.
            byte[] buffer = new byte[4 * (m_Wave.m_LoopStart - 4)];
            Read(buffer, 0, (int)(4 * (m_Wave.m_LoopStart - 4)));
            m_LoopStartPcm16Val = m_CurrPcm16Val;
            m_LoopStartIndex = m_CurrIndex;
            //restore the starting state
            m_CurrPcm16Val = (short)m_Wave.m_TheWave.Read16(0);
            m_CurrIndex = m_Wave.m_TheWave.Read16(2);
            m_Position = 4;
        }
    }
    // Expose a 44.1 kHz resampled view of this stream.
    m_Resampled = new WdlResamplingSampleProvider(this, 44100);
}
// Integration test: downsamples a local MP3 to 22.05 kHz and writes it out as
// a 16-bit WAV. Skipped (Assert.Ignore) when the hard-coded source file is
// not present on this machine.
public void CanDownsampleAnMp3File()
{
    string testFile = @"D:\Audio\Music\Coldplay\Mylo Xyloto\03 - Paradise.mp3";
    if (!File.Exists(testFile))
    {
        Assert.Ignore(testFile);
    }
    string outFile = @"d:\test22.wav";
    using (var reader = new AudioFileReader(testFile))
    {
        // downsample to 22kHz
        var resampler = new WdlResamplingSampleProvider(reader, 22050);
        var wp = new SampleToWaveProvider(resampler);
        using (var writer = new WaveFileWriter(outFile, wp.WaveFormat))
        {
            // Copy roughly one second of audio per iteration until drained.
            byte[] b = new byte[wp.WaveFormat.AverageBytesPerSecond];
            while (true)
            {
                int read = wp.Read(b, 0, b.Length);
                if (read > 0)
                {
                    writer.Write(b, 0, read);
                }
                else
                {
                    break;
                }
            }
        }
        //WaveFileWriter.CreateWaveFile(outFile, );
    }
}
/// <summary>
/// Handles output-device selection: tears down the previous WASAPI output,
/// creates a new shared-mode output for the selected device, and rebuilds the
/// PFL buffer/resampler chain to match the device's mix format.
/// </summary>
private void AudioOutput_SelectedIndexChanged(object sender, EventArgs e)
{
    if (output != null)
    {
        if (output.PlaybackState != PlaybackState.Stopped)
        {
            output.Stop();
        }

        // BUG FIX: the previous WasapiOut was only paused and then leaked when
        // the field was overwritten below; release the device handle instead.
        output.Dispose();
    }

    output = new WasapiOut(outputs[audioOutputSelector.SelectedIndex], AudioClientShareMode.Shared, true, outputLatency);

    bitsPrSample = output.OutputWaveFormat.BitsPerSample;
    sampleRate = output.OutputWaveFormat.SampleRate;
    channels = output.OutputWaveFormat.Channels;

    // Set the WaveFormat to match the device's shared-mode mix format.
    outputFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);

    // Buffer in the internal stereo format; resample to the device rate below.
    pflBuffer = new BufferedWaveProvider(internalFormatStereo);
    pflBuffer.ReadFully = true;
    pflBuffer.DiscardOnBufferOverflow = true;

    WdlResamplingSampleProvider resampler = new WdlResamplingSampleProvider(pflBuffer.ToSampleProvider(), outputFormat.SampleRate);

    output.Init(resampler);
    output.Play();

    Logger.WriteLine("SET OUTPUT FORMAT: " + "Sample Rate: " + sampleRate + ", BitsPrSasmple: " + bitsPrSample + ", Channels: " + channels);
}
/// <summary>
/// Resamples WAV source data into 16-bit mono WAV output at the target
/// sampling rate. The output stream includes a rewritten RIFF header and is
/// positioned at 0, ready for reading.
/// </summary>
/// <param name="sourceStream">Stream containing a complete RIFF/WAV file (mono or stereo).</param>
/// <param name="targetSampleRate">Desired output sample rate in Hz.</param>
/// <returns>A new MemoryStream containing the resampled 16-bit mono WAV.</returns>
public static MemoryStream Resample(MemoryStream sourceStream, int targetSampleRate)
{
    /*Read from the wav file's contents using stream */
    using (var inputReader = new WaveFileReader(sourceStream))
    {
        int sourceChannels = inputReader.WaveFormat.Channels;
        WdlResamplingSampleProvider resampler;

        /*Stereo source. Must convert to mono with StereoToMonoSampleProvider */
        if (sourceChannels == 2)
        {
            var monoSampleProvider = new StereoToMonoSampleProvider(inputReader.ToSampleProvider());
            resampler = new WdlResamplingSampleProvider(monoSampleProvider, targetSampleRate);
        }
        else
        {
            resampler = new WdlResamplingSampleProvider(inputReader.ToSampleProvider(), targetSampleRate);
        }

        MemoryStream outStream = new MemoryStream();

        /*Ensure that header has correct RIFF format with data appended after header.*/
        WaveFileWriter.WriteWavFileToStream(outStream, resampler.ToWaveProvider16());
        outStream.Position = 0;
        return(outStream);
    }
}
// Converts an audio file to 16-bit PCM mono WAV ("temp.wav") at the given
// sampling rate. (The original header comment claimed 8-bit output, but
// CreateWaveFile16 writes 16-bit samples.)
// For stereo sources only the right channel is kept, scaled by audioAmplify;
// mono sources are resampled as-is with no amplification.
private void formatAudio(String filePath, float audioAmplify, int samplingRate)
{
    // BUG FIX: the reader was only closed on the two happy paths; an
    // unexpected exception from CreateWaveFile16 leaked the file handle.
    // 'using' guarantees disposal on every path.
    using (var ir = new AudioFileReader(filePath))
    {
        try
        {
            // Stereo source: downmix keeping only the (amplified) right channel.
            var mono = new StereoToMonoSampleProvider(ir);
            mono.LeftVolume = 0.0f;
            mono.RightVolume = audioAmplify;

            var resampler = new WdlResamplingSampleProvider(mono, samplingRate);
            WaveFileWriter.CreateWaveFile16("temp.wav", resampler);
        }
        catch (System.ArgumentException)
        {
            // Mono source: StereoToMonoSampleProvider rejects it, so resample
            // the reader directly. Rewind in case anything was already read.
            ir.Position = 0;
            var resampler = new WdlResamplingSampleProvider(ir, samplingRate);
            WaveFileWriter.CreateWaveFile16("temp.wav", resampler);
        }
    }

    //File.Delete("temp.wav");
}
/// <summary>
/// Converts WAV data to MP3. 24-bit sources and sources with sample rates not
/// supported by MPEG are first rewritten to a temporary 16-bit WAV at
/// <c>SampleRate</c>; everything else is encoded directly.
/// </summary>
/// <param name="wavStream">Stream containing a complete RIFF/WAV file.</param>
/// <returns>Stream containing the MP3 bytes.</returns>
public static Stream ConvertWavToMp3(Stream wavStream)
{
    _uniqueTempFileCounter += 1;
    var tempFile = Path.GetTempFileName();
    using (var rdr = new WaveFileReader(wavStream))
    {
        if (rdr.WaveFormat.BitsPerSample == 24) //Can't go from 24 bits wav to mp3 directly, create temporary 16 bit wav
        {
            ISampleProvider sampleprovider = new Pcm24BitToSampleProvider(rdr);                  //24 bit to sample
            var resampler = new WdlResamplingSampleProvider(sampleprovider, SampleRate);         //sample to new sample rate
            WaveFileWriter.CreateWaveFile16(tempFile, resampler);                                //sample to actual wave file
            return(ConvertWavFileToMp3MemoryStream(tempFile, true));                             //file to mp3 bytes
        }
        else if (!SupportedMPEGSampleRates.Contains(rdr.WaveFormat.SampleRate)) //Can't go from unsupported Sample Rate wav to mp3 directly
        {
            var resampler = new WdlResamplingSampleProvider(rdr.ToSampleProvider(), SampleRate); //sample to new sample rate
            WaveFileWriter.CreateWaveFile16(tempFile, resampler);                                //sample to actual wave file
            return(ConvertWavFileToMp3MemoryStream(tempFile, true));                             //file to mp3 bytes
        }
        else
        {
            // Direct encode: no bit-depth or sample-rate conversion required.
            var retMs = FilesystemUtils.recyclableMemoryStreamManager.GetStream();
            using (var wtr = new LameMP3FileWriter(retMs, rdr.WaveFormat, BitRate))
            {
                rdr.CopyTo(wtr);
                // NOTE(review): the returned stream's Position is left at the
                // end of the written data — confirm callers rewind it.
                return(retMs);
            }
        }
    }
}
/// <summary>
/// Converts AIFF data to MP3. 24-bit sources are first rewritten to a
/// temporary 16-bit WAV at <c>SampleRate</c>; everything else is encoded
/// directly.
/// </summary>
/// <param name="aiffStream">Stream containing a complete AIFF file.</param>
/// <param name="directory">Unused here; kept for interface compatibility.</param>
/// <returns>Stream containing the MP3 bytes.</returns>
public static Stream ConvertAiffToMp3(Stream aiffStream, string directory)
{
    _uniqueTempFileCounter += 1;
    var tempFile = Path.GetTempFileName();
    using (var rdr = new AiffFileReader(aiffStream))
    {
        //can't go from 24 bits aif to mp3 directly, create temporary 16 bit wav
        if (rdr.WaveFormat.BitsPerSample == 24)
        {
            ISampleProvider sampleprovider = new Pcm24BitToSampleProvider(rdr);          //24 bit to sample
            var resampler = new WdlResamplingSampleProvider(sampleprovider, SampleRate); //sample to new sample rate
            WaveFileWriter.CreateWaveFile16(tempFile, resampler);                        //sample to actual wave file
            return(ConvertWavFileToMp3MemoryStream(tempFile, true));                    //file to mp3 bytes
        }
        else
        {
            // Direct encode: no bit-depth conversion required.
            var retMs = FilesystemUtils.recyclableMemoryStreamManager.GetStream();
            using (var wtr = new LameMP3FileWriter(retMs, rdr.WaveFormat, BitRate))
            {
                rdr.CopyTo(wtr);
                // NOTE(review): the returned stream's Position is left at the
                // end of the written data — confirm callers rewind it.
                return(retMs);
            }
        }
    }
}
/// <summary>
/// Reads all samples from a wave stream, resampled to 44.1 kHz and reduced to
/// a single channel (left channel only — right volume is zeroed).
/// </summary>
/// <param name="waveStream">Source stream to drain.</param>
/// <returns>All samples as a flat float array.</returns>
public static float[] GetSamples(WaveStream waveStream)
{
    ISampleProvider source = waveStream.ToSampleProvider();

    // Normalise the rate first, then the channel count.
    if (source.WaveFormat.SampleRate != 44100)
    {
        source = new WdlResamplingSampleProvider(source, 44100);
    }

    if (source.WaveFormat.Channels > 1)
    {
        source = new StereoToMonoSampleProvider(source)
        {
            LeftVolume = 1,
            RightVolume = 0,
        };
    }

    var collected = new List <float>();
    var chunk = new float[128 * 1024];

    // Drain the provider in 128K-sample chunks until it reports end of data.
    for (int count = source.Read(chunk, 0, chunk.Length); count > 0; count = source.Read(chunk, 0, chunk.Length))
    {
        collected.AddRange(chunk.Take(count));
    }

    return(collected.ToArray());
}
/// <summary>
/// Test endpoint: reads the uploaded WAV file, resamples it to 16 kHz mono
/// 16-bit PCM and submits it for speaker verification. Always returns 200.
/// </summary>
public async Task <IActionResult> Test()
{
    try
    {
        var file = Request.Form.Files.First();
        int outRate = 44000;

        // NOTE(review): wrapping the upload in a RawSourceWaveStream before
        // WaveFileReader looks redundant (WaveFileReader parses the RIFF
        // header itself) and the 44000 Hz format here is never used for
        // decoding — confirm and simplify.
        var source = new RawSourceWaveStream(file.OpenReadStream(), new WaveFormat(outRate, 2));

        using (var wavFileReader = new WaveFileReader(source))
        {
            // Resample to 16 kHz, downmix to mono, convert to 16-bit PCM.
            var resampler = new WdlResamplingSampleProvider(wavFileReader.ToSampleProvider(), 16000);
            var monoSource = resampler.ToMono().ToWaveProvider16();

            using (var outputStream = new MemoryStream())
            {
                WaveFileWriter.WriteWavFileToStream(outputStream, monoSource);
                outputStream.Seek(0, SeekOrigin.Begin);

                var result = await client.VerifyAsync(outputStream, Guid.Parse("fb786241-9f01-41cc-a585-50b65bd52c38"));

                if (result.Result == Result.Accept)
                {
                    // verification successful
                }
            }
        }
    }
    catch (Exception e)
    {
        // BUG FIX: the exception was silently swallowed into an unused local
        // ('int x = 1;'). At minimum record it; ideally this endpoint should
        // surface a 5xx instead of 200 on failure.
        System.Diagnostics.Debug.WriteLine(e);
    }

    return(Ok());
}
/// <summary>
/// Resamples <paramref name="srcFile"/> to 44.1 kHz and encodes the result as
/// an MP3 at <paramref name="destFile"/>, going through an intermediate
/// 16-bit WAV file (same directory/name as the destination) that is deleted
/// afterwards.
/// </summary>
/// <param name="srcFile">Path of the source audio file.</param>
/// <param name="destFile">Path of the MP3 file to create (overwritten if present).</param>
public async Task Resample(string srcFile, string destFile)
{
    var wavFile = System.IO.Path.Combine(System.IO.Path.GetDirectoryName(destFile), $"{System.IO.Path.GetFileNameWithoutExtension(destFile)}.wav");

    if (System.IO.File.Exists(wavFile))
    {
        System.IO.File.Delete(wavFile);
    }

    // Step 1: resample the source to a temporary 16-bit 44.1 kHz WAV.
    using (var reader = new AudioFileReader(srcFile))
    {
        var resampler = new WdlResamplingSampleProvider(reader, 44100);
        WaveFileWriter.CreateWaveFile16(wavFile, resampler);
    }

    // Step 2: encode the temporary WAV to MP3 at the EXTREME preset.
    using (var reader = new WaveFileReader(wavFile))
    {
        if (System.IO.File.Exists(destFile))
        {
            System.IO.File.Delete(destFile);
        }

        using (var writer = new LameMP3FileWriter(destFile, reader.WaveFormat, LAMEPreset.EXTREME))
        {
            //reader.CopyTo(writer);
            await reader.CopyToAsync(writer);
            await writer.FlushAsync();
        }
    }

    // Clean up the intermediate WAV.
    System.IO.File.Delete(wavFile);
}
/// <summary>
/// Resamples <paramref name="inputPath"/> to the requested sample rate,
/// writing an intermediate 16-bit WAV into <paramref name="outputDirectory"/>,
/// or passes the input path through unchanged when no conversion is needed.
/// </summary>
/// <param name="inputPath">Source audio file.</param>
/// <param name="intermediateSampleRatePath">Receives either the intermediate WAV path or the original input path.</param>
/// <param name="sampleRate">Target sample rate, or null to skip conversion entirely.</param>
/// <param name="outputDirectory">Directory for the intermediate file.</param>
/// <param name="outputFileName">Base name for the intermediate file.</param>
/// <param name="isAlreadyConvertedToBitRateHigh">Set to true when a conversion was performed.</param>
static void Convert(
    string inputPath,
    out string intermediateSampleRatePath,
    SampleRate?sampleRate,
    string outputDirectory,
    string outputFileName,
    ref bool isAlreadyConvertedToBitRateHigh)
{
    // No target rate requested: nothing to do.
    if (!sampleRate.HasValue)
    {
        intermediateSampleRatePath = inputPath;
        return;
    }

    var actualSampleRate = SampleRateHelper.GetSampleRate(sampleRate.Value);

    using (var reader = new AudioFileReader(inputPath))
    {
        if (reader.WaveFormat.SampleRate != actualSampleRate)
        {
            var intermediateSampleRateFileName = $"{outputFileName}-intermediate-samplerate.wav";

            intermediateSampleRatePath = Path.Combine(outputDirectory, intermediateSampleRateFileName);

            var resampler = new WdlResamplingSampleProvider(reader, actualSampleRate);

            WaveFileWriter.CreateWaveFile16(intermediateSampleRatePath, resampler);

            // NOTE(review): the flag name suggests bit-RATE conversion but it
            // is set after a sample-rate conversion — confirm the intended
            // meaning with the callers.
            isAlreadyConvertedToBitRateHigh = true;
        }
        else
        {
            // Already at the target rate: reuse the original file.
            intermediateSampleRatePath = inputPath;
        }
    }
}
/// <summary>
/// Adapts a channel to the mixer by downmixing it to mono and resampling it
/// to the mixing format's sample rate.
/// </summary>
/// <param name="channel">Channel to adapt; must not be null.</param>
public ChannelConverter([NotNull] IChannel channel)
{
    _channel = channel;

    // Mono-downmix first, then rate-match the mixer in one step.
    _resampled = new WdlResamplingSampleProvider(channel.ToMono(), MixingFormat.SampleRate);
}
/// <summary>
/// Event handler to capture waspi device and convert to pcm16.
/// Raises ResampledDataAvailable with the converted bytes and
/// ResampledMaxValueAvailable with the peak absolute 16-bit sample value.
/// </summary>
/// <remarks>
/// see also: https://qiita.com/zufall/items/2e027a2bc996864fe4af
/// </remarks>
/// <param name="sender"></param>
/// <param name="eventArgs"></param>
private void WaspiDataAvailable(object sender, WaveInEventArgs eventArgs)
{
    // Nothing captured: publish empty data / zero peak and bail out early.
    if (eventArgs.BytesRecorded == 0)
    {
        ResampledDataAvailable?.Invoke(this, new byte[0]);
        ResampledMaxValueAvailable?.Invoke(this, 0);
        return;
    }

    using (var memStream = new MemoryStream(eventArgs.Buffer, 0, eventArgs.BytesRecorded))
    {
        using (var inputStream = new RawSourceWaveStream(memStream, capture.WaveFormat))
        {
            // Chain: raw capture -> float samples -> target rate -> 16-bit PCM.
            var sampleStream = new WaveToSampleProvider(inputStream);
            var resamplingProvider = new WdlResamplingSampleProvider(sampleStream, TargetWaveFormat.SampleRate);
            var pcmProvider = new SampleToWaveProvider16(resamplingProvider);

            IWaveProvider targetProvider = pcmProvider;

            // Stereo capture: average both channels down to mono.
            if (capture.WaveFormat.Channels == 2)
            {
                var stereoToMonoProvider = new StereoToMonoProvider16(pcmProvider);
                stereoToMonoProvider.RightVolume = 0.5f;
                stereoToMonoProvider.LeftVolume = 0.5f;
                targetProvider = stereoToMonoProvider;
            }

            // Drain the provider chain into an in-memory buffer.
            byte[] buffer = new byte[eventArgs.BytesRecorded];
            var outputStream = new MemoryStream();
            int readBytes;
            int writeBytes = 0;
            while ((readBytes = targetProvider.Read(buffer, 0, eventArgs.BytesRecorded)) > 0)
            {
                outputStream.Write(buffer, 0, readBytes);
                writeBytes += readBytes;
            }

            var aryOutputStream = outputStream.ToArray();
            ResampledDataAvailable?.Invoke(this, aryOutputStream);

            // Scan the 16-bit samples for the peak absolute value.
            float max = 0;
            var tempBuffer = new WaveBuffer(aryOutputStream);
            for (int index = 0; index < aryOutputStream.Length / 2; index++)
            {
                var sample = (double)tempBuffer.ShortBuffer[index];

                // absolute value
                if (sample < 0.0)
                {
                    sample = -sample;
                }

                // is this the max value?
                if (sample > max)
                {
                    max = (float)sample;
                }
            }
            ResampledMaxValueAvailable?.Invoke(this, max);
        }
    }
}
/// <summary>
/// Converts a sound file into a list of float arrays — one array per second
/// of (44.1 kHz-resampled) audio, keeping every other sample.
/// </summary>
/// <param name="inFile">Path of the audio file to read.</param>
/// <returns>Per-second sample arrays, or null when the file does not exist.</returns>
internal static List <float[]> GetArraysFromFile(string inFile)
{
    var res = new List <float[]>();

    if (!File.Exists(inFile))
    {
        Program.LogError("No file " + inFile);
        return(null);
    }

    using (var reader = new AudioFileReader(inFile))
    {
        Program.LogInfo("TotalTime:" + reader.TotalTime);
        Program.LogInfo("Length:" + reader.Length);
        Program.LogInfo("Volume:" + reader.Volume);
        var waveFormat = reader.WaveFormat;
        Program.LogInfo("SampleRate:" + waveFormat.SampleRate);
        Program.LogInfo("AverageBytesPerSecond:" + waveFormat.AverageBytesPerSecond);
        Program.LogInfo("BitsPerSample:" + waveFormat.BitsPerSample);
        Program.LogInfo("BlockAlign:" + waveFormat.BlockAlign);
        Program.LogInfo("Channels:" + waveFormat.Channels);
        Program.LogInfo("Encoding:" + waveFormat.Encoding);

        int valuesPerSecond = (int)(reader.Length / reader.TotalTime.TotalSeconds);
        Program.LogInfo("Values per second:" + valuesPerSecond);

        var resampler = new WdlResamplingSampleProvider(reader, 44100);

        // BUG FIX: the resampler was created but never used — the provider
        // chain previously wrapped 'reader' directly, so no resampling
        // actually happened.
        var wp = new SampleToWaveProvider(resampler);
        var rdr = wp.ToSampleProvider();

        var wf2 = rdr.WaveFormat;
        Program.LogInfo("SampleRate:" + wf2.SampleRate);
        Program.LogInfo("AverageBytesPerSecond:" + wf2.AverageBytesPerSecond);
        Program.LogInfo("BitsPerSample:" + wf2.BitsPerSample);
        Program.LogInfo("BlockAlign:" + wf2.BlockAlign);
        Program.LogInfo("Channels:" + wf2.Channels);
        Program.LogInfo("Encoding:" + wf2.Encoding);

        // 1 sec length buffer
        var buf = new float[rdr.WaveFormat.SampleRate * rdr.WaveFormat.Channels];
        while (true)
        {
            int read = rdr.Read(buf, 0, buf.Length);
            if (read > 0)
            {
                // only 1 channel
                // NOTE(review): the i % 2 filter assumes interleaved stereo;
                // for a mono source it silently discards every other sample —
                // confirm the expected channel count.
                res.Add(buf.Where((x, i) => i % 2 == 0).ToArray());
            }
            else
            {
                break;
            }
        }

        return(res);
    }
}
/// <summary>
/// BOTH FORMATS HAVE TO BE IEEE-FLOAT AND 2 CHANNELS
/// </summary>
/// <param name="input">Format of the incoming audio.</param>
/// <param name="output">Format whose sample rate the output should match.</param>
public Resampler(WaveFormat input, WaveFormat output)
{
    inputFormat = input;
    outputFormat = output;

    // Buffer incoming data; keep reading (silence-padding) and drop overflow.
    provider = new BufferedWaveProvider(input)
    {
        ReadFully = true,
        DiscardOnBufferOverflow = true
    };

    resampler = new WdlResamplingSampleProvider(provider.ToSampleProvider(), outputFormat.SampleRate);
}
/// <summary>
/// Sample-rate conversion button: asks for a destination .wav file, resamples
/// the current file to the rate selected in the list box and saves it.
/// (Original summary was in Russian: "Кнопка изменения частоты дискретизации".)
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void button1_Click(object sender, EventArgs e)
{
    var save = new SaveFileDialog
    {
        InitialDirectory = "C:\\",
        Filter = "wav files (*.wav)|*.wav",
        FilterIndex = 1,
        Title = "Сохранить файл"
    };

    if (save.ShowDialog() == DialogResult.Cancel)
    {
        return;
    }

    string filename = save.FileName;

    if (filename == null)
    {
        System.Windows.Forms.MessageBox.Show("Не выбран путь сохранения!", "Ошибка", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }

    // Map the list-box selection to an output rate; any other index keeps the
    // previously selected rate (same as the original switch's implicit default).
    switch (listBox1.SelectedIndex)
    {
    case 0: outRate = 16000; break;

    case 1: outRate = 32000; break;

    case 2: outRate = 44100; break;

    case 3: outRate = 48000; break;

    case 4: outRate = 96000; break;

    case 5: outRate = 192000; break;
    }

    var inFile = rfn;
    var outFile = filename;

    using (var reader = new AudioFileReader(inFile))
    {
        var resampler = new WdlResamplingSampleProvider(reader, outRate);
        WaveFileWriter.CreateWaveFile16(outFile, resampler);
    }

    MessageBox.Show("Файл сохранен!");
    this.Close();
}
/// <summary>
/// Demo function for reading and saving sound files: resamples the input to
/// 44.1 kHz and writes it out as an IEEE-float WAV, logging format details
/// along the way.
/// </summary>
/// <param name="testWavFile">Source audio file path.</param>
/// <param name="testOutWavFile">Destination WAV file path.</param>
public static void ReadAndWriteSound(string testWavFile, string testOutWavFile)
{
    string testFile = testWavFile;

    if (!File.Exists(testFile))
    {
        Program.LogError("No file " + testFile);
        return;
    }

    using (var reader = new AudioFileReader(testFile))
    {
        Program.LogInfo("TotalTime:" + reader.TotalTime);
        Program.LogInfo("Length:" + reader.Length);
        Program.LogInfo("Volume:" + reader.Volume);
        var waveFormat = reader.WaveFormat;
        Program.LogInfo("SampleRate:" + waveFormat.SampleRate);
        Program.LogInfo("AverageBytesPerSecond:" + waveFormat.AverageBytesPerSecond);
        Program.LogInfo("BitsPerSample:" + waveFormat.BitsPerSample);
        Program.LogInfo("BlockAlign:" + waveFormat.BlockAlign);
        Program.LogInfo("Channels:" + waveFormat.Channels);
        Program.LogInfo("Encoding:" + waveFormat.Encoding);
        Program.LogInfo("Values per second:" + reader.Length / reader.TotalTime.TotalSeconds);

        var resampler = new WdlResamplingSampleProvider(reader, 44100);

        // BUG FIX: the resampler was created but never used — the provider
        // chain previously wrapped 'reader' directly, so no resampling
        // actually happened.
        var wp = new SampleToWaveProvider(resampler);
        var rdr = wp.ToSampleProvider();

        var wf2 = rdr.WaveFormat;
        Program.LogInfo("SampleRate:" + wf2.SampleRate);
        Program.LogInfo("AverageBytesPerSecond:" + wf2.AverageBytesPerSecond);
        Program.LogInfo("BitsPerSample:" + wf2.BitsPerSample);
        Program.LogInfo("BlockAlign:" + wf2.BlockAlign);
        Program.LogInfo("Channels:" + wf2.Channels);
        Program.LogInfo("Encoding:" + wf2.Encoding);

        var wf = WaveFormat.CreateIeeeFloatWaveFormat(rdr.WaveFormat.SampleRate, rdr.WaveFormat.Channels);

        using (WaveFileWriter writer = new WaveFileWriter(testOutWavFile, wf))
        {
            // Copy one sample-rate's worth of floats per iteration until drained.
            var buf = new float[rdr.WaveFormat.SampleRate];
            while (true)
            {
                int read = rdr.Read(buf, 0, buf.Length);
                if (read > 0)
                {
                    writer.WriteSamples(buf, 0, read);
                }
                else
                {
                    break;
                }
            }
        }
    }
}
// Upsampling (wav -> wav): resamples the input file 'x' to the configured
// 'outRate' and writes it as a 16-bit WAV named 'y' under 'path'.
public void upsampler(String x, String y)
{
    Console.WriteLine("UPSAMPLER START");

    using (var source = new AudioFileReader(x))
    {
        WaveFileWriter.CreateWaveFile16(path + y + ".wav", new WdlResamplingSampleProvider(source, outRate));
    }

    Console.WriteLine("UPSAMPLER END");
}
// Converts an MP3 file to a 16-bit, 32 kHz WAV file.
private static void ConvertMp3ToWav(string inPath, string outPath)
{
    //mp3 file is already mono
    using (var source = new AudioFileReader(inPath))
    {
        // Resample to 32 kHz, then store as 16-bit PCM.
        var toTargetRate = new WdlResamplingSampleProvider(source, 32000);
        WaveFileWriter.CreateWaveFile16(outPath, toTargetRate);
    }
}
// Wraps a WaveStream in a WDL resampler and exposes the result as 16-bit PCM.
internal WdlResampling16Stream([NotNull] WaveStream sourceStream, int sampleRate)
{
    _sourceStream = sourceStream;
    _sourceSampleProvider = sourceStream.ToSampleProvider();
    _sampleProvider = new WdlResamplingSampleProvider(_sourceSampleProvider, sampleRate);

    // WdlResamplingSampleProvider always outputs IEEE float samples, so a 16-bit PCM wrapper is required.
    // See the source code of WdlResamplingSampleProvider in NAudio.
    // The wrapping technique is adapted from WaveFileWriter.CreateWaveFile16(), inspired from
    // Mark Heath's (creator of NAudio) article: http://markheath.net/post/fully-managed-input-driven-resampling-wdl.
    _to16Provider = new SampleToWaveProvider16(_sampleProvider);
}
/// <summary>
/// Wires each provider's WaveIn capture into a buffered, stereo, rate-matched
/// sample chain, then combines the chains into a single 16-bit wave provider
/// (skipping the mixer when there is only one source).
/// </summary>
/// <param name="AudioProviders">Source providers to mix together.</param>
public MixedAudioProvider(params NAudioProvider[] AudioProviders)
{
    foreach (var provider in AudioProviders)
    {
        // Buffer incoming capture data; drop overflow rather than grow
        // unbounded, and do not silence-pad short reads (ReadFully = false).
        var bufferedProvider = new BufferedWaveProvider(provider.NAudioWaveFormat)
        {
            DiscardOnBufferOverflow = true,
            ReadFully = false
        };

        provider.WaveIn.DataAvailable += (S, E) =>
        {
            bufferedProvider.AddSamples(E.Buffer, 0, E.BytesRecorded);
        };

        var sampleProvider = bufferedProvider.ToSampleProvider();

        var providerWf = provider.WaveFormat;

        // Mono to Stereo
        if (providerWf.Channels == 1)
        {
            sampleProvider = sampleProvider.ToStereo();
        }

        // Resample
        if (providerWf.SampleRate != WaveFormat.SampleRate)
        {
            sampleProvider = new WdlResamplingSampleProvider(sampleProvider, WaveFormat.SampleRate);
        }

        _audioProviders.Add(provider, sampleProvider);
    }

    if (_audioProviders.Count == 1)
    {
        // Single source: no mixing needed, convert straight to 16-bit PCM.
        _mixingWaveProvider = _audioProviders
                              .Values
                              .First()
                              .ToWaveProvider16();
    }
    else
    {
        var waveProviders = _audioProviders.Values.Select(M => M.ToWaveProvider());

        // MixingSampleProvider cannot be used here due to it removing inputs that don't return as many bytes as requested.
        // Screna expects 44.1 kHz 16-bit Stereo
        _mixingWaveProvider = new MixingWaveProvider32(waveProviders)
                              .ToSampleProvider()
                              .ToWaveProvider16();
    }
}
/// <summary>
/// Handles output-device selection: creates a new shared-mode WASAPI output
/// for the selected device, persists the choice, and rebuilds the output
/// buffer, resampler, meter and watchdog timer to match the device format.
/// </summary>
private void AudioOutput_SelectedIndexChanged(object sender, EventArgs e)
{
    // NOTE(review): the previous WasapiOut is only paused here and then
    // leaked when the field is reassigned below — confirm whether it should
    // be stopped and disposed instead.
    if (output != null && output.PlaybackState != PlaybackState.Stopped)
    {
        output.Pause();
    }

    output = new WasapiOut(outputs[audioOutputSelector.SelectedIndex], AudioClientShareMode.Shared, true, outputLatency);

    // Persist the selection so it survives restarts.
    StoreSetting("output_device", audioOutputSelector.SelectedIndex.ToString());
    Logger.WriteLine("Set outputDevice to: " + audioOutputSelector.SelectedIndex.ToString());

    bitsPrSample = output.OutputWaveFormat.BitsPerSample;
    sampleRate = output.OutputWaveFormat.SampleRate;
    channels = output.OutputWaveFormat.Channels;

    // Set the WaveFormat to match the device's shared-mode mix format.
    outputFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);

    // (Re)Setup the mixer and buffers
    if (outputBufferTimer != null)
    {
        outputBufferTimer.Stop();
    }

    // Buffer in the internal stereo format; resample to the device rate below.
    outputBuffer = new BufferedWaveProvider(internalFormatStereo);
    outputBuffer.ReadFully = true;
    outputBuffer.DiscardOnBufferOverflow = true;

    WdlResamplingSampleProvider resampler = new WdlResamplingSampleProvider(outputBuffer.ToSampleProvider(), outputFormat.SampleRate);

    // Level metering feeds the on-screen output meter.
    outputMeter = new MeteringSampleProvider(resampler, samplesPrIntegration);
    outputMeter.StreamVolume += (a, b) => RunOutputMeter(a, b, meter);

    SetOutputMode(outputMode);

    output.Init(outputMeter);
    output.Play();

    // Periodically check the output buffer at the main server interval.
    outputBufferTimer = new System.Windows.Forms.Timer();
    outputBufferTimer.Interval = mainServerInterval;
    outputBufferTimer.Tick += outputBufferTimerCheck;
    outputBufferTimer.Start();

    Logger.WriteLine("SET OUTPUT FORMAT: " + "Sample Rate: " + sampleRate + ", BitsPrSasmple: " + bitsPrSample + ", Channels: " + channels);
}
// Changes the audio sample rate to 44.1 kHz.
// NOTE(review): the output file is given a .mp3 extension but
// CreateWaveFile16 writes WAV data — confirm whether that is intentional.
public static string change(string inFile)
{
    const int outRate = 44100;  //44.1kHz
    string outFile = Path.ChangeExtension(Path.GetTempFileName(), ".mp3");

    using (var reader = new AudioFileReader(inFile))
    {
        WaveFileWriter.CreateWaveFile16(outFile, new WdlResamplingSampleProvider(reader, outRate));
    }

    return(outFile);
}
/// <summary>
/// Adapts a sample provider to the destination format: resamples when the
/// rates differ, then adjusts the channel count when the layouts differ.
/// </summary>
/// <param name="source">Provider to adapt.</param>
/// <param name="dFormat">Destination format (rate and channel count).</param>
/// <returns>The original provider, or a wrapped provider matching dFormat.</returns>
public static ISampleProvider ConvertSampleFormat(ISampleProvider source, WaveFormat dFormat)
{
    var current = source;

    // Rate conversion first.
    if (current.WaveFormat.SampleRate != dFormat.SampleRate)
    {
        current = new WdlResamplingSampleProvider(current, dFormat.SampleRate);
    }

    // Then channel-count conversion.
    if (current.WaveFormat.Channels != dFormat.Channels)
    {
        current = SFXUtilities.AdjustChannelCount(current, (uint)dFormat.Channels);
    }

    return(current);
}
// Smoke test: pulls ten one-second buffers through the WDL resampler for a
// given rate conversion and logs how many samples each read produced.
public void CanResampleUpAndDown(int from, int to)
{
    var channels = 1;
    var offset = CreateSignalGenerator(@from, channels);
    var resampler = new WdlResamplingSampleProvider(offset, to);

    // One second of output audio per read.
    var samples = new float[to * channels];

    Debug.WriteLine(String.Format("From {0} to {1}", from, to));

    for (int n = 0; n < 10; n++)
    {
        var read = resampler.Read(samples, 0, samples.Length);
        Debug.WriteLine(String.Format("read {0}", read));
    }
}
// Converts a captured 32-bit-float 48 kHz stereo buffer into 16-bit mono PCM
// at the requested sample rate. Returns the converted bytes, or null when
// conversion fails (after which the application is shut down).
private byte[] convert32bitFloat48000HzStereoPCMTo16bitMonoPCM_Alpha(WaveInEventArgs e, int sampleRate)
{
    byte[] recorded_buf = e.Buffer;
    int recorded_length = e.BytesRecorded;

    byte[] result_buf = null;
    int result_len = -1;

    try
    {
        //// 生データを再生可能なデータに変換
        // (Convert the raw capture data into a playable form.)
        var waveBufferResample = new BufferedWaveProvider(this._WaveIn.WaveFormat);
        waveBufferResample.DiscardOnBufferOverflow = true;
        waveBufferResample.ReadFully = false;             // leave a buffer?
        waveBufferResample.BufferLength = recorded_length;

        var sampleStream = new WaveToSampleProvider(waveBufferResample);

        // Downsample
        var resamplingProvider = new WdlResamplingSampleProvider(sampleStream, sampleRate);

        // Stereo to mono
        var monoProvider = new StereoToMonoSampleProvider(resamplingProvider)
        {
            LeftVolume = 1f,
            RightVolume = 1f
        };

        // Convert to 32bit float to 16bit PCM
        var ieeeToPcm = new SampleToWaveProvider16(monoProvider);
        // NOTE(review): the 8-bit intermediate conversion followed by a
        // conversion back to 16-bit looks redundant and lossy — confirm
        // whether both ACM steps are intentional.
        var depthConvertProvider = new WaveFormatConversionProvider(new WaveFormat(sampleRate, 8, 1), ieeeToPcm);
        var depthConvertProviderRev = new WaveFormatConversionProvider(new WaveFormat(sampleRate, 16, 1), depthConvertProvider);

        waveBufferResample.AddSamples(recorded_buf, 0, recorded_length);

        // Output size: input bytes / (float->short factor * rate ratio * stereo->mono factor).
        // NOTE(review): integer division of 48000/sampleRate truncates for
        // rates that do not divide 48000 evenly — confirm the allowed rates.
        result_len = recorded_length / (2 * (48000 / sampleRate) * 2); // depth conv and sampling and ch conv
        result_buf = new byte[result_len];

        depthConvertProviderRev.Read(result_buf, 0, result_len);
    }
    catch (Exception ex)
    {
        // Conversion failure is treated as fatal: log and shut down the app.
        Console.WriteLine(ex);
        Console.WriteLine("exit...");
        System.Windows.Forms.Application.Exit();
    }

    return(result_buf);
}
/// <summary>
/// Wires each provider into a buffered, stereo, rate-matched sample chain,
/// mixes all chains into a single 16-bit wave provider, and starts the
/// background read loop.
/// </summary>
/// <param name="audioProviders">Source providers to mix together.</param>
public MixedAudioProvider(IEnumerable <NAudioProvider> audioProviders)
{
    foreach (var provider in audioProviders)
    {
        // Buffer incoming capture data as it arrives.
        // NOTE(review): unlike the sibling overloads, DiscardOnBufferOverflow
        // is not set here, so a stalled consumer can overflow the buffer —
        // confirm whether that is intentional.
        var bufferedProvider = new BufferedWaveProvider(provider.NAudioWaveFormat);

        provider.DataAvailable += (sender, e) =>
        {
            bufferedProvider.AddSamples(e.Buffer, 0, e.Length);
        };

        var sampleProvider = bufferedProvider.ToSampleProvider();

        var providerWf = provider.WaveFormat;

        // Mono to Stereo
        if (providerWf.Channels == 1)
        {
            sampleProvider = sampleProvider.ToStereo();
        }

        // Resample
        if (providerWf.SampleRate != WaveFormat.SampleRate)
        {
            sampleProvider = new WdlResamplingSampleProvider(sampleProvider, WaveFormat.SampleRate);
        }

        _audioProviders.Add(provider, sampleProvider);
    }

    var mixingSampleProvider = new MixingSampleProvider(_audioProviders.Values);

    // Screna expects 44.1 kHz 16-bit Stereo
    _mixingWaveProvider = mixingSampleProvider.ToWaveProvider16();

    // Size the read buffer to hold ReadInterval milliseconds of output audio.
    var bufferSize = (int)
                     (
        (ReadInterval / 1000.0)
        * WaveFormat.SampleRate
        * WaveFormat.Channels
        * (WaveFormat.BitsPerSample / 8.0)
                     );

    _buffer = new byte[bufferSize];

    Task.Factory.StartNew(Loop, TaskCreationOptions.LongRunning);
}