/// <summary>
/// Builds the playback sample-provider chain:
/// looping -> fade in/out -> optional panning (downmixed to mono) -> volume.
/// Each stage is also stored in its provider field so other members can adjust it later.
/// </summary>
private ISampleProvider CreateSampleChain(WaveStream sourceStream)
{
    // Looping is implemented at the wave-stream level.
    _loopingProvider = new LoopingWaveStream(sourceStream) { EnableLooping = _loop };

    ISampleProvider chain = _loopingProvider.ToSampleProvider();

    // Fade-in / fade-out stage.
    _fadingProvider = new FadeInOutStopSampleProvider(chain);
    chain = _fadingProvider;

    // Panning stage (PanningSampleProvider needs a mono input, so downmix first).
    if (PanningEnabled)
    {
        _panningProvider = new PanningSampleProvider(new StereoToMonoSampleProvider(chain))
        {
            PanStrategy = new StereoBalanceStrategy(),
            Pan = Pan
        };
        chain = _panningProvider;
    }

    // Final volume stage is always the tail of the chain.
    _volumeProvider = new VolumeSampleProvider(chain) { Volume = _volume };
    return _volumeProvider;
}
// Converts an audio file to 16-bit PCM "temp.wav" at the requested sampling rate.
// Stereo input is reduced to mono keeping only the right channel scaled by audioAmplify;
// non-stereo input is resampled unchanged.
// NOTE(review): audioAmplify is ignored for non-stereo input — confirm that is intended.
private void formatAudio(String filePath, float audioAmplify, int samplingRate)
{
    // 'using' guarantees the reader is released even if writing fails
    // (the original leaked it on unexpected exceptions).
    using (var ir = new AudioFileReader(filePath))
    {
        ISampleProvider source = ir;
        // Branch on the channel count instead of relying on the ArgumentException
        // that StereoToMonoSampleProvider throws for non-stereo input
        // (the original used that exception as control flow).
        if (ir.WaveFormat.Channels == 2)
        {
            source = new StereoToMonoSampleProvider(ir)
            {
                LeftVolume = 0.0f,          // discard the left channel
                RightVolume = audioAmplify  // keep (and scale) the right channel
            };
        }
        var resampler = new WdlResamplingSampleProvider(source, samplingRate);
        WaveFileWriter.CreateWaveFile16("temp.wav", resampler);
    }
    //File.Delete("temp.wav");
}
/// <summary>
/// Convert a stereo WAV file to mono.
/// </summary>
/// <param name="stereoFilePath">The path to the stereo wav file.</param>
/// <param name="channels">The channels of the input file to use in the mono output (default: Both).</param>
/// <returns>The path to the created mono file.</returns>
/// <exception cref="AudioException">Thrown when the input file is not a .wav file.</exception>
public static string StereoToMono(string stereoFilePath, MonoOutChannels channels = MonoOutChannels.Both)
{
    // Ordinal case-insensitive comparison instead of ToLower() (avoids culture surprises).
    if (!string.Equals(Path.GetExtension(stereoFilePath), ".wav", System.StringComparison.OrdinalIgnoreCase))
    {
        throw new AudioException("The input file must be a WAV audio file.");
    }

    using (var inputReader = new AudioFileReader(stereoFilePath))
    {
        var mono = new StereoToMonoSampleProvider(inputReader);
        // Choose how the two source channels contribute to the mono output.
        switch (channels)
        {
            case MonoOutChannels.Both:
                mono.LeftVolume = 0.5f;
                mono.RightVolume = 0.5f;
                break;
            case MonoOutChannels.Right:
                mono.LeftVolume = 0f;
                mono.RightVolume = 1f;
                break;
            case MonoOutChannels.Left:
                mono.LeftVolume = 1f;
                mono.RightVolume = 0f;
                break;
        }

        // Write output into a "mono" subdirectory next to the source file.
        string monoOutDir = Path.Combine(Path.GetDirectoryName(stereoFilePath), "mono");
        if (!Directory.Exists(monoOutDir))
        {
            Directory.CreateDirectory(monoOutDir);
        }

        // Path.Combine gives the correct separator on every platform
        // (the original format string mixed '/' into a platform path).
        string monoFile = Path.Combine(monoOutDir, Path.GetFileNameWithoutExtension(stereoFilePath) + ".wav");
        WaveFileWriter.CreateWaveFile16(monoFile, mono);
        return monoFile;
    }
}
/// <summary>
/// "Convert to mono" menu item handler: stops playback, asks for a target
/// .wav path, and writes the current file (rfname) downmixed to mono.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void конвертацияВМоноToolStripMenuItem_Click(object sender, EventArgs e)
{
    StopAud();
    // SaveFileDialog is IDisposable — dispose it deterministically
    // (the original never disposed it).
    using (SaveFileDialog save = new SaveFileDialog())
    {
        save.InitialDirectory = "C:\\";
        save.Filter = "wav files (*.wav)|*.wav";
        save.FilterIndex = 1;
        save.Title = "Сохранить файл";
        if (save.ShowDialog() == DialogResult.Cancel)
        {
            return;
        }
        string filename = save.FileName;
        if (filename != null)
        {
            using (var inputReader = new AudioFileReader(rfname))
            {
                var mono = new StereoToMonoSampleProvider(inputReader);
                WaveFileWriter.CreateWaveFile16(filename, mono);
            }
            MessageBox.Show("Файл сохранен!");
        }
        else
        {
            System.Windows.Forms.MessageBox.Show("Не выбран путь сохранения!", "Ошибка", MessageBoxButtons.OK, MessageBoxIcon.Error);
        }
    }
}
/// <summary>
/// Creates an IWaveProvider that converts <paramref name="stream"/> toward
/// <paramref name="format"/>. Non-float sources first try ACM conversion via
/// WaveFormatConversionStream; on failure (or for float sources) a manual
/// sample-provider downmix/resample path is used.
/// NOTE(review): the fallback always downmixes multi-channel input to mono and
/// returns 16-bit PCM regardless of format.Channels/BitsPerSample — confirm callers expect that.
/// </summary>
private static IWaveProvider CreateWaveConverter(WaveFormat format, WaveStream stream)
{
    if (stream.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
    {
        try
        {
            return new WaveFormatConversionStream(format, stream);
        }
        catch (Exception exc)
        {
            // Happens for example with floating point wave files — fall through.
            logger.Warn(exc, "'WaveFormatConversionStream' failed. Attempting to use sample provider conversion now.");
        }
    }

    // Manual path: downmix and resample via sample providers as needed.
    var samples = stream.ToSampleProvider();
    if (samples.WaveFormat.Channels > 1)
    {
        samples = new StereoToMonoSampleProvider(samples);
    }
    if (samples.WaveFormat.SampleRate != format.SampleRate)
    {
        samples = new WdlResamplingSampleProvider(samples, format.SampleRate);
    }
    return samples.ToWaveProvider16();
}
/// <summary>
/// Lazily builds the output chain (mono downmix where the source is stereo ->
/// volume -> panning), applies the current slider values, and starts playback.
/// </summary>
private void PlaySound()
{
    StartRecordingButton.IsEnabled = false;

    if (outputDevice == null)
    {
        outputDevice = new WaveOutEvent();

        // StereoToMonoSampleProvider throws ArgumentException for non-stereo
        // input; in that case feed the reader to the volume stage directly.
        try
        {
            var toMonoSampleProvider = new StereoToMonoSampleProvider(audioFileReader);
            volumeProvider = new VolumeSampleProvider(toMonoSampleProvider);
        }
        catch (ArgumentException)
        {
            volumeProvider = new VolumeSampleProvider(audioFileReader);
        }

        panningProvider = new PanningSampleProvider(volumeProvider)
        {
            Pan = (float)(PanningSlider.Value / 100.0f)
        };
        volumeProvider.Volume = (float)(VolumeSlider.Value / 100.0f);
        //TestLabel.Content = panningProvider.Pan;
        outputDevice.Init(panningProvider);
    }

    outputDevice?.Play();
}
/// <summary>
/// Resample source into 16 bit WAV mono output with the target sampling rate.
/// Output stream includes modified RIFF header.
/// NOTE(review): only exactly-2-channel input is downmixed; input with more
/// than two channels keeps its channel count after resampling — confirm callers
/// only pass mono/stereo data.
/// </summary>
/// <param name="sourceStream">Memory stream holding a complete WAV file (RIFF header + data).</param>
/// <param name="targetSampleRate">Desired output sample rate in Hz.</param>
/// <returns>A new MemoryStream, positioned at 0, containing a 16-bit PCM WAV file.</returns>
public static MemoryStream Resample(MemoryStream sourceStream, int targetSampleRate)
{
    /*Read from the wav file's contents using stream */
    using (var inputReader = new WaveFileReader(sourceStream))
    {
        int sourceChannels = inputReader.WaveFormat.Channels;
        WdlResamplingSampleProvider resampler;
        /*Stereo source. Must convert to mono with StereoToMonoSampleProvider */
        if (sourceChannels == 2)
        {
            var monoSampleProvider = new StereoToMonoSampleProvider(inputReader.ToSampleProvider());
            resampler = new WdlResamplingSampleProvider(monoSampleProvider, targetSampleRate);
        }
        else
        {
            resampler = new WdlResamplingSampleProvider(inputReader.ToSampleProvider(), targetSampleRate);
        }

        MemoryStream outStream = new MemoryStream();
        /*Ensure that header has correct RIFF format with data appended after header.*/
        WaveFileWriter.WriteWavFileToStream(outStream, resampler.ToWaveProvider16());
        // Rewind so the caller can read the finished file from the start.
        outStream.Position = 0;
        return (outStream);
    }
}
/// <summary>
/// Reads the entire stream as 32-bit float samples, resampled to 44.1 kHz when
/// needed and reduced to a single channel (left channel only) when the source
/// is multi-channel.
/// NOTE(review): StereoToMonoSampleProvider requires exactly 2 channels — a
/// source with more will throw; confirm inputs are mono/stereo only.
/// </summary>
public static float[] GetSamples(WaveStream waveStream)
{
    ISampleProvider provider = waveStream.ToSampleProvider();

    // Normalize the sample rate first.
    if (provider.WaveFormat.SampleRate != 44100)
    {
        provider = new WdlResamplingSampleProvider(provider, 44100);
    }

    // Downmix keeping only the left channel.
    if (provider.WaveFormat.Channels > 1)
    {
        provider = new StereoToMonoSampleProvider(provider)
        {
            LeftVolume = 1,
            RightVolume = 0,
        };
    }

    // Pull everything through the chain in 128K-sample chunks.
    var collected = new List<float>();
    var chunk = new float[128 * 1024];
    int read;
    while ((read = provider.Read(chunk, 0, chunk.Length)) > 0)
    {
        collected.AddRange(chunk.Take(read));
    }
    return collected.ToArray();
}
/// <summary>
/// Computes an acoustic fingerprint for an audio file: downmixes stereo to
/// mono, runs an FFT over successive ChunkSize-sample windows, and reduces
/// each spectrum to one key-point value.
/// </summary>
/// <param name="filename">Path of the audio file to fingerprint.</param>
/// <returns>One key-point hash per FFT window.</returns>
public static ulong[] GetFingerprint(string filename)
{
    using var data = new AudioFileReader(filename);

    ISampleProvider mono;
    if (data.WaveFormat.Channels == 2)
    {
        // StereoToMonoSampleProvider already outputs mono, so the original's
        // extra .ToMono() call on it was a redundant no-op and has been removed.
        // NOTE(review): LeftVolume + RightVolume = 2.0 sums the channels at
        // full gain, which can clip — confirm that is intended.
        mono = new StereoToMonoSampleProvider(data)
        {
            LeftVolume = 1.0f,
            RightVolume = 1.0f
        };
    }
    else
    {
        mono = data;
    }

    var buffer = new float[ChunkSize];
    var transformed = new List<Complex[]>();
    var m = (int)Math.Log(ChunkSize, 2.0); // FFT order: log2 of the window size

    while (true)
    {
        var read = mono.Read(buffer, 0, ChunkSize);
        if (read == 0)
        {
            break;
        }

        // Copy the window into complex form; a short final read leaves the
        // tail zero-padded.
        var complex = new Complex[ChunkSize];
        for (var i = 0; i < read; i++)
        {
            complex[i].X = buffer[i];
            complex[i].Y = 0;
        }
        FastFourierTransform.FFT(false, m, complex);
        transformed.Add(complex);
    }

    var keyPoints = new List<ulong>();
    foreach (var item in transformed)
    {
        keyPoints.Add(GetKeyPoints(item));
    }
    return keyPoints.ToArray();
}
/// <summary>
/// Downmixes an audio file to mono, resamples it to OutputRate via
/// MediaFoundationResampler, and writes the result as a 16-bit WAV file.
/// NOTE(review): StereoToMonoSampleProvider requires stereo input — a mono
/// source will throw; confirm callers only pass stereo files.
/// </summary>
private void ProcessAudioFile(string inputFilePath, string outputFilePath)
{
    using var reader = new AudioFileReader(inputFilePath);
    var monoSource = new StereoToMonoSampleProvider(reader);
    // Target format keeps the (mono) channel count, changes only the rate.
    var targetFormat = new WaveFormat(OutputRate, monoSource.WaveFormat.Channels);
    using var resampler = new MediaFoundationResampler(monoSource.ToWaveProvider(), targetFormat);
    WaveFileWriter.CreateWaveFile16(outputFilePath, resampler.ToSampleProvider());
}
/// <summary>
/// Writes a mono 16-bit WAV file containing only the right channel of the
/// given stereo file, then deletes the stereo original.
/// </summary>
private void convertToMonoChannelAudio(string dualChanelAudio, string monoChannelAudio)
{
    using (var reader = new AudioFileReader(dualChanelAudio))
    {
        // Keep the right channel, drop the left.
        var downmix = new StereoToMonoSampleProvider(reader)
        {
            LeftVolume = 0.0f,
            RightVolume = 1.0f
        };
        WaveFileWriter.CreateWaveFile16(monoChannelAudio, downmix);
    }
    // The stereo source is no longer needed once the mono copy exists.
    File.Delete(dualChanelAudio);
}
/// <summary>
/// Converts the current stereo file to mono (left channel only) and writes it
/// under the same name into the Raw directory.
/// </summary>
public void StereoToMono()
{
    var inPath = $"{Stereo}/{CurrentFile}";
    var outPath = $"{Raw}/{CurrentFile}";
    using (var inputReader = new AudioFileReader(inPath))
    {
        // convert our stereo ISampleProvider to mono
        var mono = new StereoToMonoSampleProvider(inputReader);
        // (original comments had these two swapped: LeftVolume=1 KEEPS the left
        // channel and RightVolume=0 DISCARDS the right channel)
        mono.LeftVolume = 1.0f;  // keep the left channel
        mono.RightVolume = 0.0f; // discard the right channel
        // write the mono audio out to a WAV file
        WaveFileWriter.CreateWaveFile16(outPath, mono);
    }
}
/// <summary>
/// "Right volume" menu item: restarts playback with only the right channel
/// audible (stereo downmixed to mono with the left channel muted).
/// </summary>
private void rightVolumeToolStripMenuItem_Click(object sender, EventArgs e)
{
    try
    {
        if (foundationReader != null)
        {
            StereoToMonoSampleProvider toMono = new StereoToMonoSampleProvider(foundationReader.ToSampleProvider());
            toMono.LeftVolume = 0;     // mute the left channel
            toMono.RightVolume = 1.0f; // pass the right channel through
            outputDevice.Stop();
            outputDevice.Init(toMono);
            outputDevice.Play();
        }
    }
    catch
    {
        // NOTE(review): empty catch swallows every error (e.g. non-stereo
        // sources make StereoToMonoSampleProvider throw ArgumentException, and
        // outputDevice may be null) — consider narrowing the type and logging.
    }
}
/// <summary>
/// Converts a captured 32-bit float 48 kHz stereo buffer into 16-bit mono PCM
/// at <paramref name="sampleRate"/>. On failure logs the exception and exits
/// the application, returning null.
/// NOTE(review): the result_len math assumes a 4-bytes-per-sample stereo
/// 48 kHz source — confirm _WaveIn.WaveFormat always matches.
/// </summary>
private byte[] convert32bitFloat48000HzStereoPCMTo16bitMonoPCM_Alpha(WaveInEventArgs e, int sampleRate)
{
    byte[] recorded_buf = e.Buffer;
    int recorded_length = e.BytesRecorded;
    byte[] result_buf = null;
    int result_len = -1;
    try
    {
        //// Convert the raw captured data into playable data.
        var waveBufferResample = new BufferedWaveProvider(this._WaveIn.WaveFormat);
        waveBufferResample.DiscardOnBufferOverflow = true;
        waveBufferResample.ReadFully = false; // leave a buffer?
        waveBufferResample.BufferLength = recorded_length;
        var sampleStream = new WaveToSampleProvider(waveBufferResample);
        // Downsample
        var resamplingProvider = new WdlResamplingSampleProvider(sampleStream, sampleRate);
        // Stereo to mono
        var monoProvider = new StereoToMonoSampleProvider(resamplingProvider)
        {
            LeftVolume = 1f,
            RightVolume = 1f
        };
        // Convert to 32bit float to 16bit PCM
        var ieeeToPcm = new SampleToWaveProvider16(monoProvider);
        // NOTE(review): the 16 -> 8 -> 16 bit round trip below discards
        // precision — presumably deliberate (codec simulation?); confirm.
        var depthConvertProvider = new WaveFormatConversionProvider(new WaveFormat(sampleRate, 8, 1), ieeeToPcm);
        var depthConvertProviderRev = new WaveFormatConversionProvider(new WaveFormat(sampleRate, 16, 1), depthConvertProvider);
        waveBufferResample.AddSamples(recorded_buf, 0, recorded_length);
        // Bytes shrink by: float->short (/2), 48000->sampleRate, stereo->mono (/2).
        result_len = recorded_length / (2 * (48000 / sampleRate) * 2); // depth conv and sampling and ch conv
        result_buf = new byte[result_len];
        depthConvertProviderRev.Read(result_buf, 0, result_len);
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex);
        Console.WriteLine("exit...");
        System.Windows.Forms.Application.Exit();
    }
    return (result_buf);
}
/// <summary>
/// Converts raw audio bytes described by <paramref name="waveFormat"/> into
/// <paramref name="outputFormat"/>, which must be mono IEEE float. Multi-channel
/// input is downmixed before resampling; identical formats are passed through.
/// </summary>
/// <exception cref="NotSupportedException">Output format is not mono IEEE float.</exception>
public static byte[] Resample(this WaveFormat waveFormat, byte[] data, WaveFormat outputFormat)
{
    if (waveFormat.Equals(outputFormat))
    {
        return data; // already in the requested format
    }
    if (outputFormat.Encoding != WaveFormatEncoding.IeeeFloat)
    {
        throw new NotSupportedException("Only float supported.");
    }
    if (outputFormat.Channels > 1)
    {
        throw new NotSupportedException("Only mono supported.");
    }

    var provider = waveFormat.GetSampleProvider(data);
    if (waveFormat.Channels > 1)
    {
        provider = new StereoToMonoSampleProvider(provider);
    }

    // Pull the whole stream through, one second (SampleRate samples) at a time.
    var chunk = new float[waveFormat.SampleRate];
    var collected = new List<float>();
    int count;
    while ((count = provider.Read(chunk, 0, chunk.Length)) > 0)
    {
        // Avoid the copy when the chunk was filled completely.
        collected.AddRange(count == chunk.Length ? chunk : chunk.Take(count).ToArray());
    }

    var resampled = collected.Resampled(waveFormat.SampleRate, outputFormat.SampleRate);

    // Reinterpret the float samples as raw bytes via WaveBuffer.
    var output = new byte[resampled.Length * sizeof(float)];
    var waveBuffer = new WaveBuffer(output);
    for (var i = 0; i < resampled.Length; i++)
    {
        waveBuffer.FloatBuffer[i] = resampled[i];
    }
    return output;
}
/// <summary>
/// Decodes an MP3 byte array and re-encodes it as a 16-bit WAV byte array,
/// downmixed to mono with both channels contributing at half volume.
/// </summary>
/// <param name="file">Raw MP3 file contents.</param>
/// <returns>Complete WAV file contents (header + data).</returns>
private byte[] ConvertToWav(byte[] file)
{
    // Dispose the memory streams deterministically (the original leaked both).
    using (var originalFileStream = new MemoryStream(file))
    using (var outputStream = new MemoryStream())
    using (var waveStream = WaveFormatConversionStream.CreatePcmStream(new Mp3FileReader(originalFileStream)))
    {
        var sample = waveStream.ToSampleProvider();
        // Average the two channels into the mono output.
        var mono = new StereoToMonoSampleProvider(sample)
        {
            LeftVolume = 0.5f,
            RightVolume = 0.5f
        };
        var bitSample = new SampleToWaveProvider16(mono);
        WaveFileWriter.WriteWavFileToStream(outputStream, bitSample);
        return outputStream.ToArray();
    }
}
/// <summary>
/// Converts between Stereo and Mono SampleProviders for 32 bit sampled audio
/// </summary>
/// <param name="input">The input 32 bit SampleProvider</param>
/// <param name="toMono">True for mono audio, false for stereo</param>
/// <returns>The converted provider, or <paramref name="input"/> unchanged when it already has the requested channel count.</returns>
public ISampleProvider MonoStereoConvert32(ISampleProvider input, bool toMono)
{
    int channels = input.WaveFormat.Channels;

    if (toMono)
    {
        // Already mono? Pass through untouched.
        return channels == 1 ? input : new StereoToMonoSampleProvider(input);
    }

    if (channels == 2)
    {
        return input; // already stereo
    }

    var upmix = new MonoToStereoSampleProvider(input);
    upmix.LeftVolume = 0.7f;
    upmix.RightVolume = 0.7f; //0.7 on each to avoid double loud
    return upmix;
}
/// <summary>
/// Converts from 32-bit Ieee Floating-point format to MuLaw 8khz 8-bit 1 channel.
/// Used for WasapiCapture and WasapiLoopbackCapture.
/// </summary>
/// <param name="stream">The raw audio bytes to buffer and convert.</param>
/// <param name="inputFormat">The input format.</param>
public MuLawResamplerProvider(byte[] stream, WaveFormat inputFormat)
{
    // Root buffer provider.
    waveBuffer = new BufferedWaveProvider(inputFormat);
    waveBuffer.DiscardOnBufferOverflow = false;
    waveBuffer.ReadFully = false;
    waveBuffer.AddSamples(stream, 0, stream.Length);
    var sampleStream = new WaveToSampleProvider(waveBuffer);
    // Stereo to mono filter.
    // NOTE(review): 2.0 gain on BOTH channels sums to 4x amplification, which
    // can clip before the 16-bit conversion — confirm intended.
    var monoStream = new StereoToMonoSampleProvider(sampleStream)
    {
        LeftVolume = 2.0f,
        RightVolume = 2.0f
    };
    // Downsample to 8000 filter.
    var resamplingProvider = new WdlResamplingSampleProvider(monoStream, 8000);
    // Convert to 16-bit in order to use ACM or MuLaw tools.
    ieeeToPcm = new SampleToWaveProvider16(resamplingProvider);
    // One second's worth of output bytes for the read scratch buffer.
    sourceBuffer = new byte[ieeeToPcm.WaveFormat.AverageBytesPerSecond];
}
/// <summary>
/// Produces an intermediate WAV whose channel layout matches the requested
/// <paramref name="channelFormat"/>: stereo->mono keeps only the right channel,
/// mono->stereo puts half volume on each side. When no conversion is requested
/// or needed, the input path is handed through or the file copied verbatim.
/// </summary>
static void Convert(
    string inputPath,
    out string intermediateChannelFormatPath,
    ChannelFormat?channelFormat,
    string outputDirectory,
    string outputFileName)
{
    if (!channelFormat.HasValue)
    {
        // No target layout requested — pass the input straight through.
        intermediateChannelFormatPath = inputPath;
        return;
    }

    using (var reader = new AudioFileReader(inputPath))
    {
        intermediateChannelFormatPath = Path.Combine(
            outputDirectory,
            $"{outputFileName}-intermediate-channelformat.wav");

        bool needsDownmix = reader.WaveFormat.Channels == 2 && channelFormat.Value == ChannelFormat.Mono;
        bool needsUpmix = reader.WaveFormat.Channels == 1 && channelFormat.Value == ChannelFormat.Stereo;

        if (needsDownmix)
        {
            // Keep only the right channel.
            var downmix = new StereoToMonoSampleProvider(reader)
            {
                LeftVolume = 0,
                RightVolume = 1
            };
            WaveFileWriter.CreateWaveFile16(intermediateChannelFormatPath, downmix);
        }
        else if (needsUpmix)
        {
            var upmix = new MonoToStereoSampleProvider(reader)
            {
                LeftVolume = 0.5f,
                RightVolume = 0.5f
            };
            WaveFileWriter.CreateWaveFile16(intermediateChannelFormatPath, upmix);
        }
        else
        {
            // Do nothing, already same channel format
            File.Copy(inputPath, intermediateChannelFormatPath, true);
        }
    }
}
/// <summary>
/// Converts data in Wav file into the specified format and reads data section of file (removes header) into AudioData buffer.
/// NOTE(review): the <paramref name="originalFile"/> parameter is unused — the
/// method always operates on AudioFile.FullName; confirm that is intended.
/// </summary>
/// <param name="originalFile"></param>
private void ProcessWavFile(FileInfo originalFile)
{
    byte[] temp = File.ReadAllBytes(AudioFile.FullName);
    using (MemoryStream stream = new MemoryStream(temp))
    {
        int channels;
        /*Read from the wav file's contents using stream */
        using (var inputReader = new WaveFileReader(stream))
        {
            channels = inputReader.WaveFormat.Channels;
            WdlResamplingSampleProvider resampler;
            /*Stereo source. Must convert to mono with StereoToMonoSampleProvider */
            if (channels == 2)
            {
                var monoSampleProvider = new StereoToMonoSampleProvider(inputReader.ToSampleProvider());
                resampler = new WdlResamplingSampleProvider(monoSampleProvider, REQ_SAMPLE_RATE);
            }
            else
            {
                resampler = new WdlResamplingSampleProvider(inputReader.ToSampleProvider(), REQ_SAMPLE_RATE);
            }
            /*Write converted audio to overwrite the original wav file */
            WaveFileWriter.CreateWaveFile16(AudioFile.FullName, resampler);
        }

        using (WaveFileReader reader = new WaveFileReader(AudioFile.FullName))
        {
            AudioData = new byte[reader.Length];
            // Stream.Read may return fewer bytes than requested — loop until the
            // whole data chunk is in memory (the original issued a single Read
            // and ignored its return value).
            int total = 0;
            while (total < AudioData.Length)
            {
                int read = reader.Read(AudioData, total, AudioData.Length - total);
                if (read == 0)
                {
                    break; // unexpected EOF; keep whatever was read
                }
                total += read;
            }
        }
    }
}
/// <summary>
/// This method initializes audio capture and socket connection: selects a
/// WASAPI capture device (by DeviceId when resolvable, otherwise the default),
/// optionally opens debug WAV writers, builds the float->mono->16 kHz->16-bit
/// PCM conversion chain, connects the web socket, and wires the capture event
/// handlers that stream resampled audio to the socket.
/// </summary>
/// <param name="debugMode">enable or disable the debug mode.</param>
public async Task InitializeAsync(bool debugMode = true)
{
    // Prefer the explicitly configured device; fall back to the system default.
    if ((!string.IsNullOrEmpty(DeviceId)) && ((device = DeviceManagement.GetDevice(DeviceId)) != null))
    {
        if (DeviceType == DeviceType.Loopback)
        {
            capture = new WasapiLoopbackCapture(device);
        }
        else if (DeviceType == DeviceType.Microphone)
        {
            capture = new WasapiCapture(device);
        }
    }
    else
    {
        if (DeviceType == DeviceType.Loopback)
        {
            capture = new WasapiLoopbackCapture();
        }
        else if (DeviceType == DeviceType.Microphone)
        {
            capture = new WasapiCapture();
        }
    }

    // Debug mode: tee raw and converted audio into WAV files next to the exe.
    if (debugMode)
    {
        var culture = CultureInfo.InvariantCulture;
        var inFilePath = Path.Combine(Directory.GetCurrentDirectory(), $"{ConversationId}-{culture.TextInfo.ToLower(DeviceType.ToString())}-raw.wav");
        inFileWriter = new WaveFileWriter(inFilePath, capture.WaveFormat);
        var outFilePath = Path.Combine(Directory.GetCurrentDirectory(), $"{ConversationId}-{culture.TextInfo.ToLower(DeviceType.ToString())}-out.wav");
        outFileWriter = new WaveFileWriter(outFilePath, outFormat);
        DisplayWaveFormat(capture.WaveFormat);
        Console.WriteLine();
    }

    // Conversion chain: capture format -> samples -> mono -> 16 kHz -> 16-bit PCM.
    audioIeee = new StreamSampleProvider(capture.WaveFormat);
    audioMono = new StereoToMonoSampleProvider(audioIeee);
    audioResampling = new WdlResamplingSampleProvider(audioMono, 16000);
    audioPcm = new SampleToWaveProvider16(audioResampling);

    try
    {
        socket = new ClientWebSocket();
        socket.Options.SetRequestHeader("ConversationId", ConversationId);
        socket.Options.SetRequestHeader("SpeakerType", DeviceToSpeakerConverter.Convert(DeviceType).ToString());
        await socket.ConnectAsync(SocketUri, CancellationToken.None).ConfigureAwait(false);
        if (socket.State == WebSocketState.Open)
        {
            Console.WriteLine($"Successfully connected to {SocketUri}.");
        }
    }
    // NOTE(review): 'await' unwraps AggregateException, so this first handler
    // will rarely fire — the generic handler below catches most failures.
    catch (AggregateException e)
    {
        Console.WriteLine($"Failed to connect to {SocketUri}.");
        Console.WriteLine(e.Message);
        throw;
    }
    catch (Exception ex)
    {
        Console.WriteLine($"Exception : {ex.Message}");
        throw;
    }

    // NOTE(review): async lambdas on events are async void — exceptions that
    // escape them are unobservable; the inner try/catch blocks are essential.
    capture.DataAvailable += async(s, a) =>
    {
        Console.WriteLine($"Captured {a.BytesRecorded} bytes on {DeviceType}.");
        if (socket.State == WebSocketState.Open)
        {
            // Resample the captured block and push it over the socket.
            var data = ResampleAudioInput(a.Buffer, a.BytesRecorded, debugMode);
            try
            {
                await socket.SendAsync(data, WebSocketMessageType.Binary, false, CancellationToken.None).ConfigureAwait(false);
            }
            catch (AggregateException ex)
            {
                Console.WriteLine($"Exception on SendAsync: {ex.Message}");
            }
        }
    };

    capture.RecordingStopped += async(s, a) =>
    {
        Console.WriteLine($"Recording stopped on {DeviceType}.");
        if (socket.State == WebSocketState.Open)
        {
            try
            {
                await socket.CloseAsync(WebSocketCloseStatus.NormalClosure, "Recording Stopped", CancellationToken.None).ConfigureAwait(false);
            }
            catch (AggregateException ex)
            {
                Console.WriteLine($"Exception on CloseAsync: {ex.Message}");
            }
            Console.WriteLine($"Connection closed.");
            // Signal completion so awaiting callers can shut down.
            taskCompletionSource.SetResult(0);
        }
    };
}