Example #1
        private void button1_Click(object sender, RoutedEventArgs e)
        {
            Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
            dlg.DefaultExt = ".wav";
            dlg.Filter     = "Wave files|*.wav";

            if (dlg.ShowDialog() == true)
            {
                var stream = new IeeeStream(new NAudioSourceStream(new WaveFileReader(dlg.FileName)));
                mixer.Clear();
                mixer.Add(stream);
                lblFile.Content = dlg.FileName;

                fileSampleRateLabel.Content = stream.Properties.SampleRate;
                sliderSampleRate.Value      = stream.Properties.SampleRate;
            }
        }
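A minimal sketch of the surrounding setup this handler assumes, not part of the original example: the mixer field it writes to and the NAudio playback wiring. The field names and the InitPlayback method are illustrative.

        private MixerStream mixer;
        private WaveOut wavePlayer;

        private void InitPlayback()
        {
            // stereo mix bus at 44.1 kHz; the stream opened in button1_Click is mixed into it
            mixer = new MixerStream(2, 44100);

            // bridge the Aurio stream graph to an NAudio output device
            wavePlayer = new WaveOut();
            wavePlayer.Init(new NAudioSinkStream(mixer));
            wavePlayer.Play(); // outputs silence until a stream is added to the mixer
        }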
Example #2
        static void Process(Dictionary<string, double> mapping, DirectoryInfo indir, DirectoryInfo outdir)
        {
            Dictionary<FileInfo, double> fileMapping = new Dictionary<FileInfo, double>();

            foreach (string fileNamePattern in mapping.Keys)
            {
                double factor = mapping[fileNamePattern];
                foreach (FileInfo fileInfo in indir.EnumerateFiles(fileNamePattern))
                {
                    fileMapping.Add(fileInfo, factor);
                }
            }

            Parallel.ForEach<FileInfo>(fileMapping.Keys, (fileInfo) => {
                double factor           = fileMapping[fileInfo];
                FileInfo outputFileInfo = new FileInfo(Path.Combine(outdir.FullName, fileInfo.Name));

                if (outputFileInfo.Exists)
                {
                    Console.WriteLine(fileInfo.Name + " SKIP (file already existing)");
                    return;
                }

                Console.WriteLine(fileInfo.Name);
                try
                {
                    IAudioStream inputStream          = AudioStreamFactory.FromFileInfoIeee32(fileInfo);
                    IAudioStream resamplingStream     = new ResamplingStream(inputStream, ResamplingQuality.VeryHigh, factor);
                    MixerStream sampleRateResetStream = new MixerStream(resamplingStream.Properties.Channels, inputStream.Properties.SampleRate);
                    sampleRateResetStream.Add(resamplingStream);

                    IAudioStream outputStream = sampleRateResetStream;

                    AudioStreamFactory.WriteToFile(outputStream, outputFileInfo.FullName);
                }
                catch (Exception e)
                {
                    Console.WriteLine("Error processing " + fileInfo.Name + ": " + e.Message);
                }
            });
        }
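A hypothetical call site for Process; the directory names, file name patterns, and resampling factors below are made up for illustration.

        static void Main(string[] args)
        {
            // map file name patterns to resampling factors (illustrative values)
            var mapping = new Dictionary<string, double>
            {
                { "music_*.wav", 1.02 },
                { "speech_*.wav", 0.98 }
            };

            Process(mapping, new DirectoryInfo("in"), new DirectoryInfo("out"));
        }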
Example #3
        private void btnPlay_Click(object sender, RoutedEventArgs e)
        {
            if (wavePlayer != null)
            {
                wavePlayer.Dispose();
            }
            debugStreamController = new DebugStreamController();

            MixerStream mixer = new MixerStream(2, 44100);

            foreach (AudioTrack audioTrack in trackListBox.Items)
            {
                WaveFileReader reader  = new WaveFileReader(audioTrack.FileInfo.FullName);
                IeeeStream     channel = new IeeeStream(new DebugStream(new NAudioSourceStream(reader), debugStreamController));
                //ResamplingStream res = new ResamplingStream(new DebugStream(channel, debugStreamController), ResamplingQuality.SincBest, 22050);

                TimeWarpStream warp = new TimeWarpStream(new DebugStream(channel, debugStreamController));
                //warp.Mappings.Add(new TimeWarp { From = new TimeSpan(audioTrack.Length.Ticks / 10 * 4), To = new TimeSpan(audioTrack.Length.Ticks / 9) });
                //warp.Mappings.Add(new TimeWarp { From = new TimeSpan(audioTrack.Length.Ticks / 10 * 5), To = new TimeSpan(audioTrack.Length.Ticks / 9 * 2) });
                //warp.Mappings.Add(new TimeWarp { From = new TimeSpan(audioTrack.Length.Ticks / 10 * 10), To = new TimeSpan(audioTrack.Length.Ticks / 9 * 3) });

                // necessary to control each track individually
                VolumeControlStream volumeControl = new VolumeControlStream(new DebugStream(warp, debugStreamController))
                {
                    Mute   = audioTrack.Mute,
                    Volume = audioTrack.Volume
                };

                // when the AudioTrack.Mute property changes, just set it accordingly on the audio stream
                audioTrack.MuteChanged += new EventHandler<ValueEventArgs<bool>>(
                    delegate(object vsender, ValueEventArgs<bool> ve) {
                    volumeControl.Mute = ve.Value;
                });

                // when the AudioTrack.Solo property changes, we have to react in different ways:
                audioTrack.SoloChanged += new EventHandler<ValueEventArgs<bool>>(
                    delegate(object vsender, ValueEventArgs<bool> ve) {
                    AudioTrack senderTrack  = (AudioTrack)vsender;
                    bool isOtherTrackSoloed = false;

                    foreach (AudioTrack vaudioTrack in trackListBox.Items)
                    {
                        if (vaudioTrack != senderTrack && vaudioTrack.Solo)
                        {
                            isOtherTrackSoloed = true;
                            break;
                        }
                    }

                    /* if there's at least one other track that is soloed, we set the mute property of
                     * the current track to the opposite of the solo property:
                     * - if the track is soloed, we unmute it
                     * - if the track is unsoloed, we mute it
                     */
                    if (isOtherTrackSoloed)
                    {
                        senderTrack.Mute = !ve.Value;
                    }

                    /* if this is the only soloed track, we mute all other tracks
                     * if this track just got unsoloed, we unmute all other tracks
                     */
                    else
                    {
                        foreach (AudioTrack vaudioTrack in trackListBox.Items)
                        {
                            if (vaudioTrack != senderTrack && !vaudioTrack.Solo)
                            {
                                vaudioTrack.Mute = ve.Value;
                            }
                        }
                    }
                });

                // when the AudioTrack.Volume property changes, just set it accordingly on the audio stream
                audioTrack.VolumeChanged += new EventHandler<ValueEventArgs<float>>(
                    delegate(object vsender, ValueEventArgs<float> ve) {
                    volumeControl.Volume = ve.Value;
                });

                mixer.Add(new DebugStream(volumeControl));
            }

            VolumeControlStream volumeControlStream = new VolumeControlStream(new DebugStream(mixer, debugStreamController))
            {
                Volume = (float)volumeSlider.Value
            };
            VolumeMeteringStream volumeMeteringStream = new VolumeMeteringStream(new DebugStream(volumeControlStream, debugStreamController), 5000);

            volumeMeteringStream.StreamVolume += new EventHandler<StreamVolumeEventArgs>(meteringStream_StreamVolume);
            VolumeClipStream volumeClipStream = new VolumeClipStream(new DebugStream(volumeMeteringStream, debugStreamController));

            playbackStream = volumeClipStream;

            wavePlayer = new WaveOut();
            wavePlayer.DesiredLatency = 250;
            wavePlayer.Init(new NAudioSinkStream(new DebugStream(playbackStream, debugStreamController)));

            // master volume setting
            volumeSlider.ValueChanged += new RoutedPropertyChangedEventHandler<double>(
                delegate(object vsender, RoutedPropertyChangedEventArgs<double> ve) {
                volumeControlStream.Volume = (float)ve.NewValue;
            });

            lblTotalPlaybackTime.Content = TimeUtil.BytesToTimeSpan(playbackStream.Length, playbackStream.Properties);
            playbackSeeker.Maximum       = TimeUtil.BytesToTimeSpan(playbackStream.Length, playbackStream.Properties).TotalSeconds;

            wavePlayer.Play();
        }
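A hypothetical counterpart to the play handler, not taken from the original sample: a stop button that halts playback and releases the output device.

        private void btnStop_Click(object sender, RoutedEventArgs e)
        {
            if (wavePlayer != null)
            {
                wavePlayer.Stop();    // halt playback
                wavePlayer.Dispose(); // release the WaveOut device
                wavePlayer = null;
            }
        }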
Example #4
        private void AddTrack(AudioTrack audioTrack)
        {
            if (audioTrack.SourceProperties.SampleRate > audioMixer.SampleRate)
            {
                // The newly added track has a higher sample rate than the current tracks, so we adjust
                // the processing sample rate to the highest rate
                ChangeMixingSampleRate(audioTrack.SourceProperties.SampleRate);
            }

            IAudioStream input        = audioTrack.CreateAudioStream();
            IAudioStream baseStream   = new TolerantStream(new BufferedStream(input, 1024 * 256 * input.SampleBlockSize, true));
            OffsetStream offsetStream = new OffsetStream(baseStream)
            {
                Offset = TimeUtil.TimeSpanToBytes(audioTrack.Offset, baseStream.Properties)
            };

            audioTrack.OffsetChanged += new EventHandler<ValueEventArgs<TimeSpan>>(
                delegate(object sender, ValueEventArgs<TimeSpan> e) {
                offsetStream.Offset = TimeUtil.TimeSpanToBytes(e.Value, offsetStream.Properties);
                audioMixer.UpdateLength();
            });

            // Upmix mono inputs to dual channel stereo or downmix surround to allow channel balancing
            // TODO add better multichannel stream support and allow balancing of surround
            IAudioStream mixToStereoStream = offsetStream;

            if (mixToStereoStream.Properties.Channels == 1)
            {
                mixToStereoStream = new MonoStream(mixToStereoStream, 2);
            }
            else if (mixToStereoStream.Properties.Channels > 2)
            {
                mixToStereoStream = new SurroundDownmixStream(mixToStereoStream);
            }

            // control the track phase
            PhaseInversionStream phaseInversion = new PhaseInversionStream(mixToStereoStream)
            {
                Invert = audioTrack.InvertedPhase
            };

            MonoStream monoStream = new MonoStream(phaseInversion, phaseInversion.Properties.Channels)
            {
                Downmix = audioTrack.MonoDownmix
            };

            // necessary to control each track individually
            VolumeControlStream volumeControl = new VolumeControlStream(monoStream)
            {
                Mute    = audioTrack.Mute,
                Volume  = audioTrack.Volume,
                Balance = audioTrack.Balance
            };

            // when the AudioTrack.Mute property changes, just set it accordingly on the audio stream
            audioTrack.MuteChanged += new EventHandler<ValueEventArgs<bool>>(
                delegate(object vsender, ValueEventArgs<bool> ve) {
                volumeControl.Mute = ve.Value;
            });

            // when the AudioTrack.Solo property changes, we have to react in different ways:
            audioTrack.SoloChanged += new EventHandler<ValueEventArgs<bool>>(
                delegate(object vsender, ValueEventArgs<bool> ve) {
                AudioTrack senderTrack  = (AudioTrack)vsender;
                bool isOtherTrackSoloed = false;

                foreach (AudioTrack vaudioTrack in trackList)
                {
                    if (vaudioTrack != senderTrack && vaudioTrack.Solo)
                    {
                        isOtherTrackSoloed = true;
                        break;
                    }
                }

                /* if there's at least one other track that is soloed, we set the mute property of
                 * the current track to the opposite of the solo property:
                 * - if the track is soloed, we unmute it
                 * - if the track is unsoloed, we mute it
                 */
                if (isOtherTrackSoloed)
                {
                    senderTrack.Mute = !ve.Value;
                }

                /* if this is the only soloed track, we mute all other tracks
                 * if this track just got unsoloed, we unmute all other tracks
                 */
                else
                {
                    foreach (AudioTrack vaudioTrack in trackList)
                    {
                        if (vaudioTrack != senderTrack && !vaudioTrack.Solo)
                        {
                            vaudioTrack.Mute = ve.Value;
                        }
                    }
                }
            });

            // when the AudioTrack.Volume property changes, just set it accordingly on the audio stream
            audioTrack.VolumeChanged += new EventHandler<ValueEventArgs<float>>(
                delegate(object vsender, ValueEventArgs<float> ve) {
                volumeControl.Volume = ve.Value;
            });

            audioTrack.BalanceChanged += new EventHandler<ValueEventArgs<float>>(
                delegate(object vsender, ValueEventArgs<float> ve) {
                volumeControl.Balance = ve.Value;
            });

            audioTrack.InvertedPhaseChanged += new EventHandler<ValueEventArgs<bool>>(
                delegate(object vsender, ValueEventArgs<bool> ve) {
                phaseInversion.Invert = ve.Value;
            });
            audioTrack.MonoDownmixChanged += new EventHandler<ValueEventArgs<bool>>(
                delegate(object vsender, ValueEventArgs<bool> ve) {
                monoStream.Downmix = ve.Value;
            });

            // adjust sample rate to mixer output rate
            ResamplingStream resamplingStream = new ResamplingStream(volumeControl,
                                                                     ResamplingQuality.Medium, audioMixer.Properties.SampleRate);

            IAudioStream trackStream = resamplingStream;

            if (trackStream.Properties.Channels == 1 && audioMixer.Properties.Channels > 1)
            {
                trackStream = new MonoStream(trackStream, audioMixer.Properties.Channels);
            }

            audioMixer.Add(trackStream);
            trackListStreams.Add(audioTrack, trackStream);
        }
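A hypothetical inverse of AddTrack, sketched under the assumption that the mixer exposes a Remove method mirroring Add and that trackListStreams is a Dictionary<AudioTrack, IAudioStream>.

        private void RemoveTrack(AudioTrack audioTrack)
        {
            IAudioStream trackStream;
            if (trackListStreams.TryGetValue(audioTrack, out trackStream))
            {
                audioMixer.Remove(trackStream); // assumed counterpart of audioMixer.Add above
                trackListStreams.Remove(audioTrack);
                audioMixer.UpdateLength();      // recompute total length, as the offset handler does
            }
        }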