/// <summary>
/// Decodes the audio stream of the given file to memory, seeks back to the
/// start and decodes again (the second pass should produce the same output),
/// then writes the buffered samples of the first pass to "&lt;filename&gt;.ffmpeg.wav".
/// </summary>
/// <param name="filename">path of the media file whose audio stream gets decoded</param>
private static void DecodeAudio(string filename) {
    FFmpegReader reader = new FFmpegReader(filename, Type.Audio);
    Console.WriteLine("length {0}, frame_size {1}, sample_rate {2}, sample_size {3}, channels {4}",
        reader.AudioOutputConfig.length,
        reader.AudioOutputConfig.frame_size,
        reader.AudioOutputConfig.format.sample_rate,
        reader.AudioOutputConfig.format.sample_size,
        reader.AudioOutputConfig.format.channels);

    // bytes per sample block (one sample for every channel)
    int sampleBlockSize = reader.AudioOutputConfig.format.channels * reader.AudioOutputConfig.format.sample_size;
    // buffer large enough for one full decoded frame
    int output_buffer_size = reader.AudioOutputConfig.frame_size * sampleBlockSize;
    byte[] output_buffer = new byte[output_buffer_size];

    int samplesRead;
    long timestamp;
    Type type;
    MemoryStream ms = new MemoryStream();

    // read full stream
    while ((samplesRead = reader.ReadFrame(out timestamp, output_buffer, output_buffer_size, out type)) > 0) {
        Console.WriteLine("read " + samplesRead + " @ " + timestamp);
        // read samples into memory
        int bytesRead = samplesRead * sampleBlockSize;
        ms.Write(output_buffer, 0, bytesRead);
    }

    // seek back to start
    reader.Seek(0, Type.Audio);

    // read again (output should be the same as above)
    while ((samplesRead = reader.ReadFrame(out timestamp, output_buffer, output_buffer_size, out type)) > 0) {
        Console.WriteLine("read " + samplesRead + " @ " + timestamp);
    }

    // BUGFIX: capture the audio properties BEFORE disposing the reader;
    // the original code accessed reader.AudioOutputConfig after Dispose(),
    // i.e. a use-after-dispose of the native decoder state.
    // sample_size == 4 bytes is treated as 32-bit IEEE float, everything else as LPCM.
    AudioProperties properties = new AudioProperties(
        reader.AudioOutputConfig.format.channels,
        reader.AudioOutputConfig.format.sample_rate,
        reader.AudioOutputConfig.format.sample_size * 8,
        reader.AudioOutputConfig.format.sample_size == 4 ? AudioFormat.IEEE : AudioFormat.LPCM);
    reader.Dispose();

    // write memory to wav file
    ms.Position = 0;
    MemorySourceStream mss = new MemorySourceStream(ms, properties);
    IeeeStream ieee = new IeeeStream(mss);
    NAudioSinkStream nAudioSink = new NAudioSinkStream(ieee);
    WaveFileWriter.CreateWaveFile(filename + ".ffmpeg.wav", nAudioSink);
}
/// <summary>
/// Wraps the given stream with the conversions needed for further processing:
/// 32-bit IEEE float samples, downmixed to mono, resampled to the requested rate.
/// Each wrapper is only inserted when the stream does not already satisfy it.
/// </summary>
/// <param name="stream">the source audio stream</param>
/// <param name="sampleRate">the target sample rate</param>
/// <returns>the (possibly wrapped) stream in IEEE/mono/target-rate form</returns>
public static IAudioStream PrepareStream(IAudioStream stream, int sampleRate) {
    if (stream.Properties.Format != AudioFormat.IEEE) {
        stream = new IeeeStream(stream);
    }
    if (stream.Properties.Channels > 1) {
        stream = new MonoStream(stream);
    }
    // BUGFIX: compare against the requested target rate instead of the
    // hard-coded 11050 (itself an apparent typo for 11025). The original
    // skipped resampling only for streams at exactly 11050 Hz, regardless
    // of the sampleRate actually asked for.
    if (stream.Properties.SampleRate != sampleRate) {
        stream = new ResamplingStream(stream, ResamplingQuality.Medium, sampleRate);
    }
    return stream;
}
/// <summary>
/// Lets the user pick a wave file, loads it as the mixer's single input
/// stream and reflects its sample rate in the UI controls.
/// </summary>
private void button1_Click(object sender, RoutedEventArgs e) {
    var fileDialog = new Microsoft.Win32.OpenFileDialog {
        DefaultExt = ".wav",
        Filter = "Wave files|*.wav"
    };

    // bail out if the user cancelled the dialog
    if (fileDialog.ShowDialog() != true) {
        return;
    }

    var reader = new WaveFileReader(fileDialog.FileName);
    var stream = new IeeeStream(new NAudioSourceStream(reader));

    // replace whatever was loaded before with the newly selected file
    mixer.Clear();
    mixer.Add(stream);

    lblFile.Content = fileDialog.FileName;
    fileSampleRateLabel.Content = stream.Properties.SampleRate;
    sliderSampleRate.Value = stream.Properties.SampleRate;
}
/// <summary>
/// Lets the user pick a wave file, converts it (IEEE float -> mono ->
/// 22050 Hz) and writes the result to "&lt;file&gt;.processed.wav".
/// </summary>
private void button1_Click(object sender, RoutedEventArgs e) {
    Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
    dlg.DefaultExt = ".wav";
    dlg.Filter = "Wave files|*.wav";
    if (dlg.ShowDialog() == true) {
        // BUGFIX: WaveFileReader holds an open file handle and was never
        // disposed; the using block releases it once the conversion is done.
        using (WaveFileReader reader = new WaveFileReader(dlg.FileName)) {
            NAudioSourceStream nAudioSource = new NAudioSourceStream(reader);
            // conversion chain: source -> IEEE float -> mono -> 22050 Hz
            IeeeStream ieee = new IeeeStream(nAudioSource);
            MonoStream mono = new MonoStream(ieee);
            ResamplingStream res = new ResamplingStream(mono, ResamplingQuality.Medium, 22050);
            NAudioSinkStream nAudioSink = new NAudioSinkStream(res);
            WaveFileWriter.CreateWaveFile(dlg.FileName + ".processed.wav", nAudioSink);
        }
    }
}
/// <summary>
/// (Re)builds the whole playback graph from the tracks in trackListBox and
/// starts playback: per track a chain of
/// WaveFileReader -> IEEE float -> time warp -> per-track volume control,
/// all feeding a 2-channel/44100 Hz mixer, followed by master volume,
/// volume metering, clipping and the NAudio output device. Every stage is
/// wrapped in a DebugStream registered with debugStreamController.
/// Mute/Solo/Volume changes on the tracks and the master volume slider are
/// wired to the corresponding streams via event handlers.
/// </summary>
private void btnPlay_Click(object sender, RoutedEventArgs e) {
    // tear down a previous player before building a new graph
    if (wavePlayer != null) {
        wavePlayer.Dispose();
    }
    debugStreamController = new DebugStreamController();
    MixerStream mixer = new MixerStream(2, 44100);
    foreach (AudioTrack audioTrack in trackListBox.Items) {
        WaveFileReader reader = new WaveFileReader(audioTrack.FileInfo.FullName);
        IeeeStream channel = new IeeeStream(new DebugStream(new NAudioSourceStream(reader), debugStreamController));
        //ResamplingStream res = new ResamplingStream(new DebugStream(channel, debugStreamController), ResamplingQuality.SincBest, 22050);
        TimeWarpStream warp = new TimeWarpStream(new DebugStream(channel, debugStreamController));
        //warp.Mappings.Add(new TimeWarp { From = new TimeSpan(audioTrack.Length.Ticks / 10 * 4), To = new TimeSpan(audioTrack.Length.Ticks / 9) });
        //warp.Mappings.Add(new TimeWarp { From = new TimeSpan(audioTrack.Length.Ticks / 10 * 5), To = new TimeSpan(audioTrack.Length.Ticks / 9 * 2) });
        //warp.Mappings.Add(new TimeWarp { From = new TimeSpan(audioTrack.Length.Ticks / 10 * 10), To = new TimeSpan(audioTrack.Length.Ticks / 9 * 3) });

        // necessary to control each track individually
        VolumeControlStream volumeControl = new VolumeControlStream(new DebugStream(warp, debugStreamController)) {
            Mute = audioTrack.Mute,
            Volume = audioTrack.Volume
        };

        // when the AudioTrack.Mute property changes, just set it accordingly on the audio stream
        // (the delegate captures this iteration's volumeControl, so each handler targets its own track's stream)
        audioTrack.MuteChanged += new EventHandler<ValueEventArgs<bool>>(
            delegate(object vsender, ValueEventArgs<bool> ve) {
                volumeControl.Mute = ve.Value;
            });

        // when the AudioTrack.Solo property changes, we have to react in different ways:
        audioTrack.SoloChanged += new EventHandler<ValueEventArgs<bool>>(
            delegate(object vsender, ValueEventArgs<bool> ve) {
                AudioTrack senderTrack = (AudioTrack)vsender;
                bool isOtherTrackSoloed = false;
                foreach (AudioTrack vaudioTrack in trackListBox.Items) {
                    if (vaudioTrack != senderTrack && vaudioTrack.Solo) {
                        isOtherTrackSoloed = true;
                        break;
                    }
                }
                /* if there's at least one other track that is soloed, we set the mute property of
                 * the current track to the opposite of the solo property:
                 * - if the track is soloed, we unmute it
                 * - if the track is unsoloed, we mute it */
                if (isOtherTrackSoloed) {
                    senderTrack.Mute = !ve.Value;
                }
                /* if this is the only soloed track, we mute all other tracks
                 * if this track just got unsoloed, we unmute all other tracks */
                else {
                    foreach (AudioTrack vaudioTrack in trackListBox.Items) {
                        if (vaudioTrack != senderTrack && !vaudioTrack.Solo) {
                            vaudioTrack.Mute = ve.Value;
                        }
                    }
                }
            });

        // when the AudioTrack.Volume property changes, just set it accordingly on the audio stream
        audioTrack.VolumeChanged += new EventHandler<ValueEventArgs<float>>(
            delegate(object vsender, ValueEventArgs<float> ve) {
                volumeControl.Volume = ve.Value;
            });

        // NOTE(review): unlike every other DebugStream construction in this method,
        // this one omits debugStreamController — confirm whether that is intentional
        // or whether the controller argument was forgotten here.
        mixer.Add(new DebugStream(volumeControl));
    }

    // master chain: mixer -> master volume -> metering -> clipping
    VolumeControlStream volumeControlStream = new VolumeControlStream(new DebugStream(mixer, debugStreamController)) {
        Volume = (float)volumeSlider.Value
    };
    // 5000 presumably is the metering interval/window passed to VolumeMeteringStream — verify against its constructor
    VolumeMeteringStream volumeMeteringStream = new VolumeMeteringStream(new DebugStream(volumeControlStream, debugStreamController), 5000);
    volumeMeteringStream.StreamVolume += new EventHandler<StreamVolumeEventArgs>(meteringStream_StreamVolume);
    VolumeClipStream volumeClipStream = new VolumeClipStream(new DebugStream(volumeMeteringStream, debugStreamController));

    playbackStream = volumeClipStream;

    wavePlayer = new WaveOut();
    wavePlayer.DesiredLatency = 250;
    wavePlayer.Init(new NAudioSinkStream(new DebugStream(playbackStream, debugStreamController)));

    // master volume setting
    volumeSlider.ValueChanged += new RoutedPropertyChangedEventHandler<double>(
        delegate(object vsender, RoutedPropertyChangedEventArgs<double> ve) {
            volumeControlStream.Volume = (float)ve.NewValue;
        });

    // show total playback time and size the seek slider in seconds
    lblTotalPlaybackTime.Content = TimeUtil.BytesToTimeSpan(playbackStream.Length, playbackStream.Properties);
    playbackSeeker.Maximum = TimeUtil.BytesToTimeSpan(playbackStream.Length, playbackStream.Properties).TotalSeconds;

    wavePlayer.Play();
}