/// <summary>
/// Stops playback of the current wave sound.
/// Forces the reader and WaveOutEvent to be disposed of immediately,
/// rather than waiting for the PlaybackStopped event.
/// </summary>
public void Stop()
{
    // Clean up regardless of playback state: the original only disposed while
    // actively playing, leaking the device and reader when paused or stopped,
    // contrary to the documented intent of immediate disposal.
    if (waveOut != null)
    {
        if (waveOut.PlaybackState == PlaybackState.Playing)
        {
            waveOut.Stop();
        }
        waveOut.Dispose();
    }
    if (reader != null)
    {
        reader.Dispose();
    }
}
// Regenerates the waveform display from the temporary output file,
// releasing any previously loaded reader first.
private void RenderWaveform()
{
    var previousReader = reader;
    if (previousReader != null)
    {
        previousReader.Close();
        previousReader.Dispose();
    }

    GenerateWindow();

    waveRender.SamplesPerPixel = 2;
    waveRender.AutoScaleDimensions = new SizeF();

    reader = new WaveFileReader(tempOutputFilePath);
    waveRender.WaveStream = reader;
}
// Releases all audio playback/recording resources held by this form:
// the output device, reader, writer, and the embedded media player's content.
private void DisposeWave()
{
    if (waveOut != null)
    {
        // Stop before disposing so the device is not torn down mid-playback.
        if (waveOut.PlaybackState == PlaybackState.Playing)
        {
            waveOut.Stop();
        }
        waveOut.Dispose();
        waveOut = null;
    }
    if (waveReader != null)
    {
        waveReader.Flush();
        waveReader.Dispose();
        waveReader = null;
    }
    if (waveWriter != null)
    {
        // Flush pending samples so the file header/length is finalized.
        waveWriter.Flush();
        waveWriter.Dispose();
        waveWriter = null;
    }
    if (axWindowsMediaPlayer1.IsDisposed != true)
    {
        axWindowsMediaPlayer1.URL = null;
        axWindowsMediaPlayer1.currentPlaylist.clear();
        //axWindowsMediaPlayer1.Dispose();
        //axWindowsMediaPlayer1.Visible = true;
        // NOTE(review): a brand-new media player control is added on EVERY call
        // without removing/disposing the previous one, and the new instance is
        // never assigned back to axWindowsMediaPlayer1 — this accumulates
        // orphaned controls. The commented-out lines above suggest an abandoned
        // dispose-and-recreate attempt; confirm the intended behavior.
        AxWMPLib.AxWindowsMediaPlayer myMediaPlayer = new AxWMPLib.AxWindowsMediaPlayer();
        this.Controls.Add(myMediaPlayer);
    }
}
/// <summary>
/// Handles the PlaybackStopped event: releases the output device and the
/// source wave stream once playback has finished.
/// </summary>
private static void Output_PlaybackStopped(object sender, StoppedEventArgs e)
{
    output.Stop();
    // The original called output.Dispose() twice; a single call suffices.
    output.Dispose();
    wav.Dispose();
}
/// <summary>
/// Stops any recording/playback in progress and releases every audio
/// resource, unhooking event handlers so this instance can be collected.
/// Safe to call more than once.
/// </summary>
private void ReleaseResource()
{
    if (_waveWriter != null)
    {
        _waveWriter.Close();
        _waveWriter.Dispose();
        _waveWriter = null;
    }
    if (_inputStream != null)
    {
        _inputStream.StopRecording();
        // Unhook so no callback can arrive on a disposed object.
        _inputStream.DataAvailable -= InputStreamOnDataAvailable;
        _inputStream.Dispose();
        _inputStream = null;
    }
    if (_waveOut != null)
    {
        _waveOut.Stop();
        _waveOut.PlaybackStopped -= WaveOutOnPlaybackStopped;
        _waveOut.Dispose();
        _waveOut = null;
    }
    if (_waveChanel != null)
    {
        _waveChanel.Dispose();
        // Null out for consistency with the other fields (the original left
        // this and _waveReader set, risking double-dispose on a second call).
        _waveChanel = null;
    }
    if (_waveReader != null)
    {
        _waveReader.Dispose();
        _waveReader = null;
    }
}
/// <summary>
/// Merges two single-channel (mono) WAV files into one two-channel (stereo)
/// WAV file: file 1 becomes the left channel, file 2 the right.
/// See ExampleCollection.cs for a detailed example.
/// </summary>
/// <param name="inputWavFilePath1">First mono wav file.</param>
/// <param name="inputWavFilePath2">Second mono wav file.</param>
/// <param name="outputFilePath">Path of the merged stereo output file.</param>
public static void Merge_2FilesWith1Channel_To_OneFileWith2Channel
    (string inputWavFilePath1, string inputWavFilePath2, string outputFilePath)
{
    // using blocks guarantee the readers release their files even when an
    // exception is thrown mid-merge; the original only disposed on success.
    using (var wav1Reader = new WaveFileReader(inputWavFilePath1))
    using (var wav2Reader = new WaveFileReader(inputWavFilePath2))
    {
        List<IWaveProvider> inputs = new List<IWaveProvider>() { wav1Reader, wav2Reader };
        MultiplexingWaveProvider waveProvider = new MultiplexingWaveProvider(inputs, 2);
        waveProvider.ConnectInputToOutput(0, 0); // input 1 -> left
        waveProvider.ConnectInputToOutput(1, 1); // input 2 -> right

        // Roughly one second of interleaved 16-bit audio per read.
        byte[] buffer = new byte[2 * wav1Reader.WaveFormat.SampleRate * wav1Reader.WaveFormat.Channels];
        using (WaveFileWriter writer = new WaveFileWriter(outputFilePath,
                   new WaveFormat(wav1Reader.WaveFormat.SampleRate, 16, 2)))
        {
            int bytesRead;
            while ((bytesRead = waveProvider.Read(buffer, 0, buffer.Length)) > 0)
            {
                writer.Write(buffer, 0, bytesRead);
            }
        }
    }
}
/// <summary>
/// Read audio wavFile.
/// </summary>
/// <param name="wavFile">WAV File in each turn.</param>
public void ReadAudio(string wavFile)
{
    lock (this.BotReplyList)
    {
        this.BotReplyList.Clear();
        this.indexActivityWithAudio = 0;
    }

    if (this.appsettings.PushStreamEnabled)
    {
        int readBytes;
        byte[] dataBuffer = new byte[MaxSizeOfTtsAudioInBytes];

        // using ensures the file handle is released even if a push fails;
        // the original leaked the reader on any exception in the loop.
        using (WaveFileReader waveFileReader = new WaveFileReader(Path.Combine(this.appsettings.InputFolder, wavFile)))
        {
            // Skip the RIFF header so only PCM payload is pushed to the stream.
            int headerBytes = waveFileReader.Read(dataBuffer, 0, WavHeaderSizeInBytes);
            while ((readBytes = waveFileReader.Read(dataBuffer, 0, BytesToRead)) > 0)
            {
                this.pushAudioInputStream.Write(dataBuffer, readBytes);
            }

            // When done, we forcibly write one second (32000 bytes) of silence
            // to the stream, forcing the speech recognition service to segment.
            Array.Clear(dataBuffer, 0, 32000);
            this.pushAudioInputStream.Write(dataBuffer, 32000);
        }
    }
    else
    {
        GlobalPullStream.FilePullStreamCallback.ReadFile(Path.Combine(this.appsettings.InputFolder, wavFile));
    }
}
// Releases the streams owned by this provider.
// NOTE(review): the trailing Dispose() presumably resolves to a separate
// (public or class-level) Dispose method not visible in this chunk; if it
// re-enters this explicit interface implementation it would recurse
// infinitely — confirm against the rest of the class.
void IStreamProvider.Dispose()
{
    //memStream.Dispose();
    sourceStream.Dispose();
    Channel.Dispose();
    Dispose();
}
/// <summary>
/// Mixes two wav files to a single wav file. (Unused).
/// Both inputs are resampled to mono at the higher of the two sample rates,
/// summed, and written to <c>mixfile</c>.
/// </summary>
public void mix()
{
    window.lockForMixing();
    try
    {
        // usings guarantee readers/resamplers are released even on failure;
        // the original leaked them all if any step threw.
        using (WaveFileReader reader1 = new WaveFileReader(file1))
        using (WaveFileReader reader2 = new WaveFileReader(file2))
        {
            int maxSampleRate = Math.Max(reader1.WaveFormat.SampleRate, reader2.WaveFormat.SampleRate);
            WaveFormat format = new WaveFormat(maxSampleRate, 1);

            using (MediaFoundationResampler resampler1 = new MediaFoundationResampler(reader1, format))
            using (MediaFoundationResampler resampler2 = new MediaFoundationResampler(reader2, format))
            {
                var input1 = resampler1.ToSampleProvider();
                var input2 = resampler2.ToSampleProvider();
                ISampleProvider[] provider = { input1, input2 };
                MixingSampleProvider mixer = new MixingSampleProvider(provider);
                WaveFileWriter.CreateWaveFile16(mixfile, mixer);
            }
        }
    }
    finally
    {
        // Always release the UI lock, even when mixing fails (the original
        // left the window locked forever on an exception).
        window.unlock();
    }
}
// Tears down the playback chain: output device first (stopping it if it is
// still playing), then each stream, nulling every field as it goes.
private void DisposeWave()
{
    if (outputSound != null)
    {
        if (outputSound.PlaybackState == NAudio.Wave.PlaybackState.Playing)
        {
            outputSound.Stop();
        }
        outputSound.Dispose();
        outputSound = null;
    }

    waveReader?.Dispose();
    waveReader = null;

    waveSource?.Dispose();
    waveSource = null;

    waveFile?.Dispose();
    waveFile = null;
}
/// <summary>
/// Renders a song's notes to a 22.05 kHz 16-bit mono WAV in memory and
/// encodes it to MP3 at 96 kbps.
/// </summary>
/// <param name="song">Song whose notes are rendered; returns null if it has none.</param>
/// <returns>The MP3 bytes, or null when <paramref name="song"/> has no notes.</returns>
private byte[] GenerateMp3(Song song)
{
    if (song.Notes == null)
    {
        return (null);
    }

    // usings guarantee every stream/reader/writer is released even when
    // generation or encoding throws; the original leaked them all on failure.
    using (var generatedSongStream = new MemoryStream())
    using (var mp3File = new MemoryStream())
    {
        var waveFile = new WaveFile(22050, BitDepth.Bit16, 1, generatedSongStream);
        var sg = new SoundGenerator(waveFile);
        foreach (var note in song.Notes)
        {
            sg.AddSimpleTone(note.Frequency, note.Duration);
        }
        sg.Save();

        generatedSongStream.Position = 0;
        using (var r = new WaveFileReader(generatedSongStream))
        using (var wr = new LameMP3FileWriter(mp3File, r.WaveFormat, 96))
        {
            r.CopyTo(wr);
            wr.Flush();
        }

        // Take the snapshot AFTER the writer is disposed: the LAME encoder
        // emits its final frames during Dispose, which the original missed by
        // calling ToArray before disposal. MemoryStream.ToArray is valid even
        // after the stream is closed.
        return (mp3File.ToArray());
    }
}
/// ------------------------------------------------------------------------------------
/// <summary>
/// The input file must be audio. If it is not or the format cannot be determined,
/// null is returned.
/// </summary>
/// ------------------------------------------------------------------------------------
public static WaveFormat GetNAudioWaveFormat(string audioFilePath)
{
    try
    {
        if (GetDoesFileSeemToBeWave(audioFilePath))
        {
            using (var reader = new WaveFileReader(audioFilePath))
            {
                return (reader.WaveFormat);
            }
        }
    }
    catch
    {
        // Deliberately swallowed: any failure here means the file is not a
        // readable wave file, which the contract maps to a null result.
    }
    return (null);
}
/// <summary>
/// Draws the waveform of the temp recording (glTemp) into pictureBox1,
/// resetting the UI via Return() when the file is missing or too small.
/// </summary>
private void Plot()
{
    var m = new FileInfo(glTemp);
    // Bail out when the temp file is absent or too short to hold audio data.
    if (!m.Exists || m.Length < 10)
    {
        Return();
        return;
    }

    waveViewer1.Size = pictureBox1.Size;

    // using guarantees the reader releases the file even if rendering throws;
    // the original leaked the handle on any exception before wfr.Dispose().
    using (var wfr = new WaveFileReader(glTemp))
    {
        int w = waveViewer1.Size.Width;
        // Scale horizontally so the entire file fits the viewer's width.
        waveViewer1.SamplesPerPixel = (int)Math.Ceiling(Convert.ToDouble(wfr.SampleCount) / w);
        waveViewer1.WaveStream = wfr;

        var bmp = new Bitmap(waveViewer1.Size.Width, waveViewer1.Size.Height);
        Rectangle r = new Rectangle(pictureBox1.Location, pictureBox1.Size);
        waveViewer1.DrawToBitmap(bmp, r);
        pictureBox1.Image = bmp;

        // Detach before disposal so the viewer never holds a dead stream.
        waveViewer1.WaveStream = null;
    }
}
/// <summary>
/// Releases every playback resource owned by this instance and clears the
/// corresponding fields so repeated disposal is harmless.
/// </summary>
public void Dispose()
{
    WaveFileReader?.Dispose();
    WaveFileReader = null;

    WaveChannel?.Dispose();
    WaveChannel = null;

    MemoryStream?.Dispose();
    MemoryStream = null;

    DirectSoundOut?.Dispose();
    DirectSoundOut = null;
}
// Writes the FFT's real components to a temp wav file and plays it back,
// disposing the player and reader once playback stops.
private void btnPlayFFT_Click(object sender, EventArgs e)
{
    // Convert the FFT real components into 16-bit mono samples.
    WaveFormat wf = new WaveFormat(fft.samplingRate, 1);
    int sampleCount = fft.getDataSize();
    short[] samples = new short[sampleCount];
    for (int i = 0; i < sampleCount; i++)
    {
        samples[i] = Convert.ToInt16(fft.X[i + 1].Real * 10);
    }

    using (WaveFileWriter wfw = new WaveFileWriter("temp.wav", wf))
    {
        wfw.WriteSamples(samples, 0, samples.Length);
    }

    WaveOut player = new WaveOut();
    WaveFileReader fileReader = new WaveFileReader("temp.wav");
    player.Init(fileReader);
    player.Play();
    // Clean up once the device reports playback has ended.
    player.PlaybackStopped += (a, b) =>
    {
        player.Stop();
        player.Dispose();
        fileReader.Dispose();
    };
}
/// <summary>
/// Splits a multi-channel WAV file into one mono 16-bit WAV file per channel.
/// Source: https://stackoverflow.com/questions/12075062/saving-each-wav-channel-as-a-mono-channel-wav-file-using-naudio/12149659#12149659
/// </summary>
/// <param name="inputPath">Source wav file (defaults to the original hard-coded path).</param>
/// <param name="outputPathFormat">Output path format; {0} is the 1-based channel number.</param>
public static void SplitEachChannelToFiles(
    string inputPath = @"E:\Dropbox\WorkGrandsys\W\Workarea\20180511音檔 - 複製\1_8690002555DA7B59370000037\0_00001_0000364_0000837.wav",
    string outputPathFormat = @"E:\Dropbox\WorkGrandsys\W\Workarea\20180511音檔 - 複製\output\channel{0}.wav")
{
    // using guarantees the reader releases the source file on any exit path.
    using (var reader = new WaveFileReader(inputPath))
    {
        // Roughly one second of interleaved 16-bit audio per read.
        var buffer = new byte[2 * reader.WaveFormat.SampleRate * reader.WaveFormat.Channels];
        var writers = new WaveFileWriter[reader.WaveFormat.Channels];
        try
        {
            for (int n = 0; n < writers.Length; n++)
            {
                var format = new WaveFormat(reader.WaveFormat.SampleRate, 16, 1);
                writers[n] = new WaveFileWriter(String.Format(outputPathFormat, n + 1), format);
            }

            int bytesRead;
            while ((bytesRead = reader.Read(buffer, 0, buffer.Length)) > 0)
            {
                int offset = 0;
                while (offset < bytesRead)
                {
                    // De-interleave: one 16-bit sample to each channel writer in turn.
                    for (int n = 0; n < writers.Length; n++)
                    {
                        writers[n].Write(buffer, offset, 2);
                        offset += 2;
                    }
                }
            }
        }
        finally
        {
            // Dispose whatever writers were created, even on failure — the
            // original leaked every writer when an exception occurred mid-split.
            foreach (var writer in writers)
            {
                writer?.Dispose();
            }
        }
    }
}
// Splits each channel of the given reader into its own mono 16-bit file
// ("channel1.wav", "channel2.wav", ...), disposing the reader when done.
// NOTE(review): the returned list is never populated — callers always get an
// empty list. Confirm whether per-channel sample data was meant to be
// collected here, or whether the return type should be void.
public static List <float[]> Separate(WaveFileReader reader)
{
    var output = new List <float[]>();
    // Roughly one second of interleaved 16-bit audio per read.
    var buffer = new byte[2 * reader.WaveFormat.SampleRate * reader.WaveFormat.Channels];
    var writers = new WaveFileWriter[reader.WaveFormat.Channels];
    for (int n = 0; n < writers.Length; n++)
    {
        var format = new WaveFormat(reader.WaveFormat.SampleRate, 16, 1);
        writers[n] = new WaveFileWriter(String.Format("channel{0}.wav", n + 1), format);
    }
    int bytesRead;
    while ((bytesRead = reader.Read(buffer, 0, buffer.Length)) > 0)
    {
        int offset = 0;
        // De-interleave: hand one 16-bit sample to each channel writer in turn.
        while (offset < bytesRead)
        {
            for (int n = 0; n < writers.Length; n++)
            {
                // write one sample
                writers[n].Write(buffer, offset, 2);
                offset += 2;
            }
        }
    }
    for (int n = 0; n < writers.Length; n++)
    {
        writers[n].Dispose();
    }
    reader.Dispose();
    return(output);
}
/// <summary>
/// Splits a multi-channel WAV file into one mono 16-bit file per channel
/// ("channel1.wav", "channel2.wav", ...), resolved through GetFullPath.
/// </summary>
/// <param name="FileName">Path of the source wav file.</param>
protected void SplitTwoChannel(string FileName)
{
    // using guarantees the reader releases the source file on any exit path;
    // the original leaked it (and all writers) when an exception was thrown.
    using (var reader = new WaveFileReader(FileName))
    {
        // Roughly one second of interleaved 16-bit audio per read.
        var buffer = new byte[2 * reader.WaveFormat.SampleRate * reader.WaveFormat.Channels];
        var writers = new WaveFileWriter[reader.WaveFormat.Channels];
        try
        {
            for (int n = 0; n < writers.Length; n++)
            {
                var format = new WaveFormat(reader.WaveFormat.SampleRate, 16, 1);
                writers[n] = new WaveFileWriter(GetFullPath(String.Format("channel{0}.wav", n + 1)), format);
            }

            int bytesRead;
            while ((bytesRead = reader.Read(buffer, 0, buffer.Length)) > 0)
            {
                int offset = 0;
                while (offset < bytesRead)
                {
                    // De-interleave: one 16-bit sample to each channel writer in turn.
                    for (int n = 0; n < writers.Length; n++)
                    {
                        writers[n].Write(buffer, offset, 2);
                        offset += 2;
                    }
                }
            }
        }
        finally
        {
            foreach (var writer in writers)
            {
                writer?.Dispose();
            }
        }
    }
}
/// <summary>
/// Copies the byte range [startPos, endPos) from <paramref name="reader"/> to
/// <paramref name="writer"/>, then disposes both streams.
/// </summary>
/// <param name="reader">Source stream; disposed on exit.</param>
/// <param name="writer">Destination stream; disposed on exit.</param>
/// <param name="startPos">Byte offset at which copying starts.</param>
/// <param name="endPos">Byte offset (exclusive) at which copying stops.</param>
private static void TrimWavFile(WaveFileReader reader, WaveFileWriter writer, int startPos, int endPos)
{
    try
    {
        reader.Position = startPos;
        byte[] buffer = new byte[1024];
        while (reader.Position < endPos)
        {
            int bytesRequired = (int)(endPos - reader.Position);
            int bytesToRead = Math.Min(bytesRequired, buffer.Length);
            int bytesRead = reader.Read(buffer, 0, bytesToRead);
            if (bytesRead <= 0)
            {
                // End of stream reached before endPos: stop instead of spinning
                // forever (the original looped infinitely when Read returned 0
                // because Position never advanced).
                break;
            }
            writer.Write(buffer, 0, bytesRead);
        }
    }
    finally
    {
        // Dispose both streams even when copying fails.
        reader?.Dispose();
        writer?.Dispose();
    }
}
// Synthesizes an SSML phrase (with an embedded audio cue) to an in-memory
// wave stream and plays it through WASAPI, blocking until playback stops.
public void TestAudio()
{
    EventWaitHandle playbackDone = new AutoResetEvent(false);
    using (MemoryStream stream = new MemoryStream())
    using (SpeechSynthesizer synth = new SpeechSynthesizer())
    {
        synth.SetOutputToWaveStream(stream);
        synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s><audio src=\"C:\\Users\\jgm\\Desktop\\positive.wav\"/>You are travelling to the <phoneme alphabet=\"ipa\" ph=\"ˈlaʊ.təns\">Luyten's</phoneme> <phoneme alphabet=\"ipa\" ph=\"stɑː\">Star</phoneme> system.</s></speak>");

        // Rewind so the reader starts at the RIFF header.
        stream.Seek(0, SeekOrigin.Begin);
        IWaveSource source = new WaveFileReader(stream);

        var soundOut = new WasapiOut();
        soundOut.Stopped += (s, e) => playbackDone.Set();
        soundOut.Initialize(source);
        soundOut.Play();
        playbackDone.WaitOne();

        soundOut.Dispose();
        source.Dispose();
    }
}
// Builds waveform amplitude data by reading frames alternately from the
// "back" (argument.BackPath) and "forward" (argument.Path) streams of the
// audio, then publishes the result to WaveformData on the UI thread.
// Returns true when the background worker requested cancellation mid-way.
private bool ProcessPairWaveForm(WaveformGenerationParams argument)
{
    bool isCancel = false;
    WaveFileReader forwardStream = new WaveFileReader(new MemoryStream(argument.Path));
    WaveFileReader backStream = new WaveFileReader(new MemoryStream(argument.BackPath));
    WaveChannel32 forwardChannel = new WaveChannel32(forwardStream);
    WaveChannel32 backChannel = new WaveChannel32(backStream);
    // Both channels raise Sample through the same handler — presumably
    // waveStream_Sample feeds waveformAggregator (handler not visible here;
    // confirm).
    backChannel.Sample += waveStream_Sample;
    forwardChannel.Sample += waveStream_Sample;
    // One frame per output point, rounded down to a whole number of blocks.
    long frameLength = 2 * backChannel.Length / argument.Points;
    frameLength = frameLength - frameLength % backChannel.WaveFormat.BlockAlign;
    waveformAggregator = new SampleAggregator((int)(frameLength / backChannel.WaveFormat.BlockAlign));
    float[] numArray = new float[argument.Points];
    byte[] buffer = new byte[frameLength];
    int factPointsCount = argument.Points / 2;
    for (int i = 0; i < factPointsCount; i++)
    {
        // Even indices come from the back stream, odd from the forward stream;
        // each Read advances the aggregator via the Sample events.
        backChannel.Read(buffer, 0, buffer.Length);
        numArray[i * 2] = waveformAggregator.LeftMaxVolume * verticalScale;
        forwardChannel.Read(buffer, 0, buffer.Length);
        numArray[i * 2 + 1] = waveformAggregator.LeftMaxVolume * verticalScale;
        if (this.waveformGenerateWorker.CancellationPending)
        {
            isCancel = true;
            break;
        }
    }
    // Clone before dispatching so the UI thread never observes a buffer this
    // method still owns.
    float[] finalClonedData = (float[])numArray.Clone();
    Application.Current.Dispatcher.Invoke(new Action(() => this.WaveformData = finalClonedData));
    forwardChannel.Close();
    forwardChannel.Dispose();
    forwardChannel = null;
    backChannel.Close();
    backChannel.Dispose();
    backChannel = null;
    forwardStream.Close();
    forwardStream.Dispose();
    forwardStream = null;
    backStream.Close();
    backStream.Dispose();
    backStream = null;
    return(isCancel);
}
// Releases every object involved in the current playback session,
// stopping the output device first if it is still playing.
private void DisposeWavePlay()
{
    if (output != null)
    {
        if (output.PlaybackState == PlaybackState.Playing)
        {
            output.Stop();
        }
        output.Dispose();
        output = null;
    }

    wave?.Dispose();
    wave = null;

    wavechannel?.Dispose();
    wavechannel = null;

    waveSignal?.Dispose();
    waveSignal = null;
}
// Synthesizes an SSML phrase with IPA phoneme hints to an in-memory wave
// stream and plays it via WASAPI, blocking until playback stops.
public void TestPhonemes()
{
    EventWaitHandle playbackDone = new AutoResetEvent(false);
    using (MemoryStream stream = new MemoryStream())
    using (SpeechSynthesizer synth = new SpeechSynthesizer())
    {
        synth.SetOutputToWaveStream(stream);
        // Alternate test phrases kept for reference:
        //synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"UTF-8\"?><speak version=\"1.0\" xmlns=\"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>This is your <phoneme alphabet=\"ipa\" ph=\"leɪkɒn\">Lakon</phoneme>.</s></speak>");
        //synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>You are travelling to the <phoneme alphabet=\"ipa\" ph=\"ˈdɛltə\">delta</phoneme> system.</s></speak>");
        synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>You are travelling to the <phoneme alphabet=\"ipa\" ph=\"ˈlaʊ.təns\">Luyten's</phoneme> <phoneme alphabet=\"ipa\" ph=\"stɑː\">Star</phoneme> system.</s></speak>");
        //synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>You are travelling to the <phoneme alphabet=\"ipa\" ph=\"bliːiː\">Bleae</phoneme> <phoneme alphabet=\"ipa\" ph=\"θuːə\">Thua</phoneme> system.</s></speak>");
        //synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>You are travelling to the Amnemoi system.</s></speak>");
        //synth.Speak("You are travelling to the Barnard's Star system.");

        // Rewind so the reader starts at the RIFF header.
        stream.Seek(0, SeekOrigin.Begin);
        IWaveSource source = new WaveFileReader(stream);

        var soundOut = new WasapiOut();
        soundOut.Stopped += (s, e) => playbackDone.Set();
        soundOut.Initialize(source);
        soundOut.Play();
        playbackDone.WaitOne();

        soundOut.Dispose();
        source.Dispose();
    }
}
// Synthesizes an SSML phrase with an IPA phoneme hint to an in-memory wave
// stream and plays it via WASAPI, blocking until playback stops.
public void TestPhonemes()
{
    EventWaitHandle playbackDone = new AutoResetEvent(false);
    using (MemoryStream stream = new MemoryStream())
    using (SpeechSynthesizer synth = new SpeechSynthesizer())
    {
        synth.SetOutputToWaveStream(stream);
        synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>You are travelling to the <phoneme alphabet=\"ipa\" ph=\"ˈprəʊˌsɪən\">Procyon</phoneme> system.</s></speak>");
        // Alternate test phrases kept for reference:
        //synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>You are travelling to the <phoneme alphabet=\"ipa\" ph=\"ˈkaɪə\">Kaia</phoneme> <phoneme alphabet=\"ipa\" ph=\"ˈbɑːhɑːhɑː\">Bajaja</phoneme> system.</s></speak>");
        //synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>You are travelling to the Amnemoi system.</s></speak>");
        //synth.Speak("Anemoi");

        // Rewind so the reader starts at the RIFF header.
        stream.Seek(0, SeekOrigin.Begin);
        IWaveSource source = new WaveFileReader(stream);

        var soundOut = new WasapiOut();
        soundOut.Stopped += (s, e) => playbackDone.Set();
        soundOut.Initialize(source);
        soundOut.Play();
        playbackDone.WaitOne();

        soundOut.Dispose();
        source.Dispose();
    }
}
// Synthesizes a spoken phrase, routes it through a DMO distortion effect,
// and plays the distorted result via WASAPI, blocking until playback stops.
public void TestDistortion()
{
    EventWaitHandle playbackDone = new AutoResetEvent(false);
    using (MemoryStream stream = new MemoryStream())
    using (SpeechSynthesizer synth = new SpeechSynthesizer())
    {
        // List the available voices for diagnostic purposes.
        foreach (InstalledVoice voice in synth.GetInstalledVoices())
        {
            Console.WriteLine(voice.VoiceInfo.Name);
        }

        synth.SetOutputToWaveStream(stream);
        synth.Speak("Anaconda golf foxtrot lima one niner six eight requesting docking.");
        stream.Seek(0, SeekOrigin.Begin);

        IWaveSource source = new WaveFileReader(stream);
        DmoDistortionEffect distortedSource = new DmoDistortionEffect(source)
        {
            Edge = 10,
            PreLowpassCutoff = 4800
        };

        var soundOut = new WasapiOut();
        soundOut.Stopped += (s, e) => playbackDone.Set();
        soundOut.Initialize(distortedSource);
        soundOut.Play();
        playbackDone.WaitOne();

        soundOut.Dispose();
        distortedSource.Dispose();
        source.Dispose();
    }
}
// Create a disposable sound player: plays filepath on the configured output
// device and blocks until playback finishes (or `stop` is set), then cleans
// up and optionally deletes the file.
void PlaySound()
{
    using (WaveOutEvent tempWave = new WaveOutEvent())
    {
        tempWave.DeviceNumber = outputDeviceID; //Set the Output Device
        WaveFileReader reader = new WaveFileReader(filepath);
        // NOTE: the original also wrapped the reader in a WaveChannel32 that
        // was never passed to Init and never disposed — removed as dead code.
        tempWave.Init(reader);
        tempWave.Volume = volume;
        tempWave.Play();
        while (tempWave.PlaybackState != PlaybackState.Stopped)
        {
            //Wait and continue when finished
            if (stop)
            {
                tempWave.Stop();
            }
            // Yield between polls instead of burning a full core in a
            // busy-wait (the original spun this loop with no pause).
            System.Threading.Thread.Sleep(10);
        }
        AudioMixer.RemoveSpeaker(this);  //Remove Speaker from lists
        reader.Dispose();                //Dispose reader
        tempWave.Dispose();              //Dispose wave
        if (deleteFile)                  //Delete file if deleteFile is true
        {
            File.Delete(filepath);       //Delete used and created file
        }
    }
}
/// <summary>
/// Releases the channel and the reader. Each is disposed independently: the
/// original only disposed _reader when _channels was non-null, leaking the
/// reader otherwise (and throwing if _reader was null while _channels was not).
/// </summary>
public void Dispose()
{
    _channels?.Dispose();
    _reader?.Dispose();
}
/// <summary>
/// Returns the total playback duration of the given WAV file.
/// </summary>
/// <param name="filePath">Path of the wav file to inspect.</param>
/// <returns>The file's duration as reported by its header.</returns>
private TimeSpan GetAudioFileTime(string filePath)
{
    // using guarantees the file handle is released even if reading the
    // header throws; the original leaked the reader on any exception.
    using (var reader = new WaveFileReader(filePath))
    {
        return (reader.TotalTime);
    }
}
/// <summary>
/// Releases the channel and the reader. Each is disposed independently: the
/// original only disposed Reader when Channel was non-null, leaking the
/// reader otherwise (and throwing if Reader was null while Channel was not).
/// </summary>
public void Dispose()
{
    Channel?.Dispose();
    Reader?.Dispose();
}
/// <summary>
/// Replays the currently selected recording file through the sound test
/// form's playback helper.
/// </summary>
private void ReplayButton_Click(object sender, EventArgs e)
{
    soundTestForm helper = new soundTestForm();
    // using guarantees the stream is released even if playback throws; the
    // original leaked the file handle when Playrecording failed. Disposal
    // timing is unchanged — the original disposed immediately after the call.
    using (WaveStream recordStream = new WaveFileReader(selectdFileName()))
    {
        helper.Playrecording(recordStream);
    }
}