/// <summary>
/// Starts playback of the track currently selected in the list view, or
/// resumes it when playback is only paused. Always swaps the play/pause
/// buttons at the end.
/// </summary>
void Play()
{
    bool startFresh = soundOut == null || soundOut.PlaybackState == PlaybackState.Stopped;

    if (startFresh)
    {
        var currentItem = listView1.Items[now];

        MakeLabels(currentItem.SubItems[2].Text);
        soundSource = SoundSource(currentItem.SubItems[2].Text);

        // Playback chain: resample to 32 kHz, route through a 10-band
        // equalizer, then convert back to a wave source for the output device.
        var playbackChain = soundSource
            .ChangeSampleRate(32000)
            .ToSampleSource()
            .AppendSource(Equalizer.Create10BandEqualizer, out equalizer)
            .ToWaveSource();

        soundOut = SoundOut();
        soundOut.Initialize(playbackChain);
        soundOut.Volume = tbar_volume.Value * 0.1f; // trackbar steps scaled into 0.0–1.0
        soundOut.Play();
        Play_Aux();

        // Highlight the row that is now playing.
        currentItem.BackColor = Color.SkyBlue;
        currentItem.ForeColor = Color.DarkSlateGray;

        soundOut.Stopped += Play_Aux_2;
    }
    else if (soundOut.PlaybackState == PlaybackState.Paused)
    {
        soundOut.Resume();
        timer_ctime.Enabled = true;
    }

    btn_Play.Visible = false;
    btn_pause.Visible = true;
}
/// <summary>
/// Mixes a music track and a voice track into one stereo wave source.
/// Each input is resampled to the mixer rate, converted to stereo and wrapped
/// in its own <see cref="VolumeSource"/> so the caller can balance the two
/// signals independently.
/// </summary>
/// <param name="audio">The music input.</param>
/// <param name="vol1">Receives the volume control for <paramref name="audio"/>.</param>
/// <param name="voice">The voice input.</param>
/// <param name="vol2">Receives the volume control for <paramref name="voice"/>.</param>
/// <returns>The mixed output as an <see cref="IWaveSource"/>.</returns>
public IWaveSource MixAudioAndVoice(IWaveSource audio, out VolumeSource vol1, IWaveSource voice, out VolumeSource vol2)
{
    // Stereo mixer at the shared sample rate; pad short reads with silence
    // and do not attenuate the summed inputs.
    mixer = new Mixer.SimpleMixer(2, mixerSampleRate)
    {
        FillWithZeros = true,
        DivideResult = false // you may play around with this
    };

    var audioInput = audio
        .ChangeSampleRate(mixerSampleRate)
        .ToStereo()
        .AppendSource(x => new VolumeSource(x.ToSampleSource()), out vol1);
    mixer.AddSource(audioInput);

    var voiceInput = voice
        .ChangeSampleRate(mixerSampleRate)
        .ToStereo()
        .AppendSource(x => new VolumeSource(x.ToSampleSource()), out vol2);
    mixer.AddSource(voiceInput);

    return mixer.ToWaveSource();
}
/// <summary>
/// Encodes an <see cref="IWaveSource"/> (e.g. the audio of a downloaded .mp4)
/// to a 192 kbps-class .mp3 file at PATH using the video title as file name,
/// then deletes the source .mp4.
/// </summary>
/// <param name="source">Takes a IWaveSource videoSource = CSCore.Codecs.CodecFactory.Instance.GetCodec(new Uri(video.Uri))</param>
/// <param name="videoTitle">Takes the Video Title; used as the output file name.</param>
/// <returns>true when the platform cannot encode mp3 (failure); false on success.</returns>
private static bool ConvertToMp3(IWaveSource source, string videoTitle)
{
    var supportedFormats = MediaFoundationEncoder.GetEncoderMediaTypes(AudioSubTypes.MpegLayer3);
    if (!supportedFormats.Any())
    {
        Console.WriteLine("The current platform does not support mp3 encoding.");
        return true; // NOTE: true signals failure to the caller (existing contract).
    }

    // Resample only when no supported encoder format matches BOTH the current
    // sample rate and the channel count. (BUGFIX: the previous check used
    // '&&' inside All(), which wrongly skipped resampling whenever any
    // supported format merely had a different channel count.)
    bool directlySupported = supportedFormats.Any(
        x => x.SampleRate == source.WaveFormat.SampleRate &&
             x.Channels == source.WaveFormat.Channels);
    if (!directlySupported)
    {
        // Pick the closest supported sample rate among formats with a
        // matching channel count.
        int sampleRate = supportedFormats
            .OrderBy(x => Math.Abs(source.WaveFormat.SampleRate - x.SampleRate))
            .First(x => x.Channels == source.WaveFormat.Channels)
            .SampleRate;

        Console.WriteLine("Samplerate {0} -> {1}", source.WaveFormat.SampleRate, sampleRate);
        Console.WriteLine("Channels {0} -> {1}", source.WaveFormat.Channels, 2);
        source = source.ChangeSampleRate(sampleRate);
    }

    using (source)
    {
        using (var encoder = MediaFoundationEncoder.CreateMP3Encoder(source.WaveFormat, PATH + videoTitle + ".mp3"))
        {
            // Stream roughly one second of audio per iteration into the encoder.
            byte[] buffer = new byte[source.WaveFormat.BytesPerSecond];
            int read;
            while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
            {
                encoder.Write(buffer, 0, read);
            }
        }
    }

    File.Delete(PATH + videoTitle + ".mp4");
    return false; // success
}
/// <summary>
/// Demo entry point: lets the user pick an audio file, then mixes it with a
/// 700 Hz sine (left channel only) and a 300 Hz sine (right channel only)
/// and plays the result through WASAPI until a key is pressed.
/// </summary>
static void Main(string[] args)
{
    Console.WriteLine(
        "This example will mix one audio file with \n" +
        "two sine waves (300Hz and 700Hz).\n" +
        "The 300Hz sine wave will play only on the right\n" +
        "channel and the 700Hz sine wave only on the left channel." +
        "\n\n\nPlease select the audio file!\n"
        );

    // Keep prompting until the user picks a file the codec factory can decode.
    IWaveSource fileWaveSource = null;
    do
    {
        var openFileDialog = new OpenFileDialog()
        {
            Title = "Select any file to mix into",
            Filter = CodecFactory.SupportedFilesFilterEn
        };
        if (openFileDialog.ShowDialog() != DialogResult.OK)
            continue;
        try
        {
            fileWaveSource = CodecFactory.Instance.GetCodec(openFileDialog.FileName);
        }
        catch
        {
            // Undecodable file: loop and ask again.
        }
    } while (fileWaveSource == null);

    const int mixerSampleRate = 44100; //44.1kHz

    var mixer = new SimpleMixer(2, mixerSampleRate) //output: stereo, 44,1kHz
    {
        FillWithZeros = true,
        DivideResult = true //you may play around with this
    };

    /*
     * Channel conversion matrices: the y-axis is the input (one channel,
     * since the SineGenerator is mono), the x-axis the stereo output. The
     * first x value is the volume sent to the left channel, the second the
     * volume sent to the right channel. Mapping 1.0/0.0 therefore plays the
     * mono source on the left only (0.0 means muted), 0.0/1.0 on the right
     * only.
     */
    var monoToLeftOnlyChannelMatrix = new ChannelMatrix(
        ChannelMask.SpeakerFrontCenter,
        ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight);
    monoToLeftOnlyChannelMatrix.SetMatrix(new[,] { { 1.0f, 0.0f } });

    var monoToRightOnlyChannelMatrix = new ChannelMatrix(
        ChannelMask.SpeakerFrontCenter,
        ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight);
    monoToRightOnlyChannelMatrix.SetMatrix(new[,] { { 0.0f, 1.0f } });

    VolumeSource volumeSource1, volumeSource2;

    // The selected audio track.
    mixer.AddSource(
        fileWaveSource
            .ChangeSampleRate(mixerSampleRate)
            .ToStereo()
            .ToSampleSource());

    // 700 Hz sine, amplitude 0.5, left channel only.
    mixer.AddSource(
        new SineGenerator(700, 0.5, 0).ToWaveSource()
            .AppendSource(x => new DmoChannelResampler(x, monoToLeftOnlyChannelMatrix, mixerSampleRate))
            .AppendSource(x => new VolumeSource(x.ToSampleSource()), out volumeSource1));

    // 300 Hz sine, amplitude 0.5, right channel only.
    mixer.AddSource(
        new SineGenerator(300, 0.5, 0).ToWaveSource()
            .AppendSource(x => new DmoChannelResampler(x, monoToRightOnlyChannelMatrix, mixerSampleRate))
            .AppendSource(x => new VolumeSource(x.ToSampleSource()), out volumeSource2));

    // A fairly high latency keeps the mixed stream from underrunning.
    var soundOut = new WasapiOut() { Latency = 200 };
    soundOut.Initialize(mixer.ToWaveSource());
    soundOut.Play();

    // Input volumes default to 100%; attenuate the sines a little.
    volumeSource1.Volume = 0.5f; // 700 Hz sine at 50%
    volumeSource2.Volume = 0.7f; // 300 Hz sine at 70%

    Console.ReadKey();

    mixer.Dispose();
    soundOut.Dispose();
}
/// <summary>
/// Loads a new track into the playback chain, optionally crossfading out of
/// the currently playing one.
/// </summary>
/// <param name="track">The track to open.</param>
/// <param name="openCrossfading">When true, the old output is faded out instead of stopped.</param>
/// <param name="position">Start position handed to the sound source factory.</param>
/// <returns>false when no sound source could be created for the track; otherwise true.</returns>
public async Task<bool> OpenTrack(IPlaySource track, bool openCrossfading, long position)
{
    IsLoading = true;

    if (!openCrossfading)
    {
        StopPlayback();
    }

    if (_crossfadeService.IsFading)
    {
        _crossfadeService.Cancel();
    }

    if (_soundSource != null && !openCrossfading)
    {
        _soundSource.Dispose();
    }

    if (openCrossfading && _soundSource != null)
    {
        // Detach the outgoing pipeline's events and hand its output to the
        // crossfade service; a fresh ISoundOut is created further below.
        _soundOut.Stopped -= SoundOut_Stopped;
        _loopStream.StreamFinished -= LoopStream_StreamFinished;
        _simpleNotificationSource.BlockRead -= SimpleNotificationSource_BlockRead;
        _crossfadeService.CrossfadeOut(_soundOut, CrossfadeDuration).Forget();
        _soundOut = null;
    }

    var newSource = await GetSoundSource(track, position);
    if (newSource == null)
    {
        return false;
    }
    _soundSource = newSource;

    if (_soundSource.WaveFormat.SampleRate < 44100) //Correct sample rate
    {
        _soundSource = _soundSource.ChangeSampleRate(44100);
    }

    // Build the chain: loop -> 10-band equalizer -> periodic read notifications.
    _soundSource = _soundSource
        .AppendSource(x => new LoopStream(x), out _loopStream)
        .AppendSource(x => Equalizer.Create10BandEqualizer(x.ToSampleSource()), out _equalizer)
        .AppendSource(x => new SimpleNotificationSource(x) { Interval = 100 }, out _simpleNotificationSource)
        .ToWaveSource();

    _loopStream.EnableLoop = IsLooping;
    _loopStream.StreamFinished += LoopStream_StreamFinished;
    _simpleNotificationSource.BlockRead += SimpleNotificationSource_BlockRead;

    // Re-apply the stored band values to the freshly created equalizer.
    for (var band = 0; band < EqualizerBands.Count; band++)
    {
        SetEqualizerBandValue(EqualizerBands.Bands[band].Value, band);
    }

    if (_soundOut == null)
    {
        _soundOut = _soundOutProvider.GetSoundOut();
        _soundOut.Stopped += SoundOut_Stopped;
    }

    _soundOut.Initialize(_soundSource);
    _soundOut.Volume = Volume;

    IsLoading = false;
    OnTrackLengthChanged();
    _playTimeStopwatch.Reset();

    if (openCrossfading)
    {
        await TogglePlayPause();
        _fadingService.FadeIn(_soundOut, Volume).Forget();
    }

    CurrentStateChanged();
    OnPositionChanged();
    return true;
}
/// <summary>
/// Opens <paramref name="track"/> for playback. When <paramref name="openCrossfading"/>
/// is set, the previous output is faded out by the crossfade service; otherwise
/// playback is stopped and the old source disposed before the new chain is built.
/// </summary>
/// <param name="track">The track to open.</param>
/// <param name="openCrossfading">Fade the old output instead of stopping it.</param>
/// <param name="position">Start position handed to the sound source factory.</param>
/// <returns>false when the track yields no sound source; otherwise true.</returns>
public async Task <bool> OpenTrack(IPlaySource track, bool openCrossfading, long position)
{
    IsLoading = true;

    if (!openCrossfading) StopPlayback();
    if (_crossfadeService.IsFading) _crossfadeService.Cancel();
    if (_soundSource != null && !openCrossfading) _soundSource.Dispose();

    bool handOffOldOutput = openCrossfading && _soundSource != null;
    if (handOffOldOutput)
    {
        // Unhook the outgoing pipeline and let it fade out on its own; a new
        // ISoundOut is requested from the provider below.
        _soundOut.Stopped -= SoundOut_Stopped;
        _loopStream.StreamFinished -= LoopStream_StreamFinished;
        _simpleNotificationSource.BlockRead -= SimpleNotificationSource_BlockRead;
        _crossfadeService.CrossfadeOut(_soundOut, CrossfadeDuration).Forget();
        _soundOut = null;
    }

    var loadedSource = await GetSoundSource(track, position);
    if (loadedSource == null) return false;

    // Bring low-rate material up to 44.1 kHz before building the chain.
    _soundSource = loadedSource.WaveFormat.SampleRate < 44100
        ? loadedSource.ChangeSampleRate(44100)
        : loadedSource;

    // Chain: loop -> 10-band equalizer -> periodic read notifications.
    _soundSource = _soundSource
        .AppendSource(x => new LoopStream(x), out _loopStream)
        .AppendSource(x => Equalizer.Create10BandEqualizer(x.ToSampleSource()), out _equalizer)
        .AppendSource(x => new SimpleNotificationSource(x) { Interval = 100 }, out _simpleNotificationSource)
        .ToWaveSource();

    _loopStream.EnableLoop = IsLooping;
    _loopStream.StreamFinished += LoopStream_StreamFinished;
    _simpleNotificationSource.BlockRead += SimpleNotificationSource_BlockRead;

    // Push the stored equalizer band values into the new equalizer instance.
    for (var i = 0; i < EqualizerBands.Count; i++)
        SetEqualizerBandValue(EqualizerBands.Bands[i].Value, i);

    if (_soundOut == null)
    {
        _soundOut = _soundOutProvider.GetSoundOut();
        _soundOut.Stopped += SoundOut_Stopped;
    }

    _soundOut.Initialize(_soundSource);
    _soundOut.Volume = Volume;

    IsLoading = false;
    OnTrackLengthChanged();
    _playTimeStopwatch.Reset();

    if (openCrossfading)
    {
        await TogglePlayPause();
        _fadingService.FadeIn(_soundOut, Volume).Forget();
    }

    CurrentStateChanged();
    OnPositionChanged();
    return true;
}