/// <summary>
/// Console entry point: loads a DeepSpeech model (and optional language model),
/// runs inference on a 16 kHz WAV file and prints the recognized text with timings.
/// </summary>
static void Main(string[] args)
{
    string model = null;
    string alphabet = null;
    string lm = null;
    string trie = null;
    string audio = null;
    if (args.Length > 0)
    {
        model = GetArgument(args, "--model");
        alphabet = GetArgument(args, "--alphabet");
        lm = GetArgument(args, "--lm");
        trie = GetArgument(args, "--trie");
        audio = GetArgument(args, "--audio");
    }

    const uint N_CEP = 26;
    const uint N_CONTEXT = 9;
    const uint BEAM_WIDTH = 200;
    const float LM_ALPHA = 0.75f;
    const float LM_BETA = 1.85f;

    Stopwatch stopwatch = new Stopwatch();
    using (DeepSpeech sttClient = new DeepSpeech())
    {
        var result = 1;
        Console.WriteLine("Loading model...");
        stopwatch.Start();
        try
        {
            result = sttClient.CreateModel(
                model ?? "output_graph.pbmm",
                N_CEP, N_CONTEXT,
                alphabet ?? "alphabet.txt",
                BEAM_WIDTH);
        }
        catch (IOException ex)
        {
            // BUG FIX: this catch guards model creation, not LM loading —
            // the message previously said "Error loading lm.".
            Console.WriteLine("Error loading the model.");
            Console.WriteLine(ex.Message);
        }
        stopwatch.Stop();

        if (result == 0)
        {
            // BUG FIX: Elapsed.Milliseconds is only the 0-999 ms component of the
            // elapsed time; TotalMilliseconds is the full duration.
            Console.WriteLine($"Model loaded - {stopwatch.Elapsed.TotalMilliseconds} ms");
            stopwatch.Reset();
            if (lm != null)
            {
                Console.WriteLine("Loading LM..."); // typo fix: was "Loadin LM..."
                try
                {
                    result = sttClient.EnableDecoderWithLM(
                        alphabet ?? "alphabet.txt",
                        lm ?? "lm.binary",
                        trie ?? "trie",
                        LM_ALPHA, LM_BETA);
                }
                catch (IOException ex)
                {
                    Console.WriteLine("Error loading lm.");
                    Console.WriteLine(ex.Message);
                }
            }

            string audioFile = audio ?? "arctic_a0024.wav";
            var waveBuffer = new WaveBuffer(File.ReadAllBytes(audioFile));
            using (var waveInfo = new WaveFileReader(audioFile))
            {
                Console.WriteLine("Running inference....");
                stopwatch.Start();
                // 16-bit samples: MaxSize bytes / 2 = sample count; 16000 = expected rate.
                string speechResult = sttClient.SpeechToText(
                    waveBuffer.ShortBuffer,
                    Convert.ToUInt32(waveBuffer.MaxSize / 2),
                    16000);
                stopwatch.Stop();
                Console.WriteLine($"Audio duration: {waveInfo.TotalTime.ToString()}");
                Console.WriteLine($"Inference took: {stopwatch.Elapsed.ToString()}");
                Console.WriteLine($"Recognized text: {speechResult}");
            }
            waveBuffer.Clear();
        }
        else
        {
            Console.WriteLine("Error loading the model."); // typo fix: was "loding"
        }
    }
}
// Removes the byte range [startPos, endPos) from a wave stream by copying the
// remaining audio into a temp file, then reloading that audio into whichever
// waveform view (#1 or #2) owned the source stream.
// (Original comments were in Vietnamese; translated to English.)
private void DeleteWaveFile(WaveFileReader sou, long startPos, long endPos)
{
    // Create the temp file with the same format as the source.
    WaveFileWriter temp = new WaveFileWriter("temp.wav", sou.WaveFormat);
    sou.Position = 0;
    var buffer = new byte[1024];
    // Copy from the start of the file up to startPos.
    while (sou.Position < startPos)
    {
        var bytesRequired = (int)(startPos - sou.Position);
        if (bytesRequired <= 0)
        {
            break; // BUG FIX: was `continue`, which spins forever since nothing advances the position
        }
        var bytesToRead = Math.Min(bytesRequired, buffer.Length);
        var bytesRead = sou.Read(buffer, 0, bytesToRead);
        if (bytesRead <= 0)
        {
            break; // BUG FIX: unexpected EOF previously caused an infinite loop
        }
        temp.Write(buffer, 0, bytesRead);
    }
    // Copy from endPos to the end of the file.
    sou.Position = endPos;
    while (sou.Position < sou.Length)
    {
        var bytesRequired = (int)(sou.Length - sou.Position);
        if (bytesRequired <= 0)
        {
            break; // BUG FIX: was `continue` (see above)
        }
        var bytesToRead = Math.Min(bytesRequired, buffer.Length);
        var bytesRead = sou.Read(buffer, 0, bytesToRead);
        if (bytesRead <= 0)
        {
            break; // BUG FIX: unexpected EOF previously caused an infinite loop
        }
        temp.Write(buffer, 0, bytesRead);
    }
    // Flush the temp file and release the source, then overwrite the original.
    temp.Dispose();
    sou.Dispose();
    if (sou.Equals(cwvNumber1.WaveStream))
    {
        CopyWaveFile(fileName1, temp.Filename);
        Wave = new WaveFileReader(fileName1);
        cwvNumber1.WaveStream = wave;
        cwvNumber1.Painting();
        cwvNumber1.FitToScreen();
        cwvNumber1.WaveStream.Position = 0;
        lbMax.Text = wave.TotalTime.Minutes.ToString() + ":" + wave.TotalTime.Seconds.ToString();
        lbCur.Text = "0 : 0";
    }
    else if (sou.Equals(cwvNumber2.WaveStream))
    {
        CopyWaveFile(fileName2, temp.Filename);
        Wave = new WaveFileReader(fileName2);
        cwvNumber2.WaveStream = wave;
        cwvNumber2.Painting();
        cwvNumber2.FitToScreen();
        cwvNumber2.WaveStream.Position = 0;
        lbMax.Text = wave.TotalTime.Minutes.ToString() + ":" + wave.TotalTime.Seconds.ToString();
        lbCur.Text = "0 : 0";
    }
}
/// <summary>
/// Loads a sample asset for a patch, unless one with the same base name is
/// already registered. The asset is searched for in several conventional
/// locations, in order, and parsed according to its extension.
/// </summary>
/// <param name="assetName">Asset file name; ".wav" is assumed when no extension is given.</param>
/// <param name="patchName">Patch requesting the sample (used for lookup and error reporting).</param>
/// <param name="directory">Base directory to probe for the asset.</param>
public void LoadSampleAsset(string assetName, string patchName, string directory)
{
    string baseName;
    string ext;
    if (Path.HasExtension(assetName))
    {
        baseName = Path.GetFileNameWithoutExtension(assetName);
        ext = Path.GetExtension(assetName).ToLower();
    }
    else
    {
        baseName = assetName;
        assetName += ".wav"; //assume .wav
        ext = ".wav";
    }
    if (FindSample(baseName) != null)
    {
        return; // already loaded
    }
    char sep = Path.DirectorySeparatorChar;
    // Probe the candidate locations in the same order as before.
    string[] candidates =
    {
        assetName,                                     // ex. "asset.wav"
        directory + sep + assetName,                   // ex. "C:\asset.wav"
        directory + "/SAMPLES/" + assetName,           // ex. "C:\SAMPLES\asset.wav"
        directory + sep + patchName + sep + assetName, // ex. "C:\Piano\asset.wav"
    };
    string resolvedPath = null;
    foreach (string candidate in candidates)
    {
        if (CrossPlatformHelper.ResourceExists(candidate))
        {
            resolvedPath = candidate;
            break;
        }
    }
    if (resolvedPath == null)
    {
        throw new IOException("Could not find sample asset: (" + assetName + ") required for patch: " + patchName);
    }
    using (BinaryReader reader = new BinaryReader(CrossPlatformHelper.OpenResource(resolvedPath)))
    {
        switch (ext)
        {
            case ".wav":
                sampleAssets.Add(new SampleDataAsset(baseName, WaveFileReader.ReadWaveFile(reader)));
                break;
        }
    }
}
/// <summary>
/// Plays a WAV file through libsoundio.
/// Usage: sio_play [--backend name] [--device id] file.wav
/// </summary>
/// <returns>0 on success, 1 on any setup or playback error.</returns>
public static int Main(string[] args)
{
    int sampleRate;
    string filename = null;
    SoundIoBackend backend = SoundIoBackend.None;
    string deviceId = null;
    bool isRaw = false;
    SoundIoError err;

    try
    {
        if (args.Length < 1)
        {
            Usage();
            return (1);
        }
        for (int i = 0; i < args.Length; i++)
        {
            string arg = args[i];
            if (arg.StartsWith("--"))
            {
                // BUG FIX: was `++i > args.Length`, which let i == args.Length
                // through and then indexed past the end of the array.
                if (++i >= args.Length)
                {
                    return (Usage());
                }
                else if (arg.CompareTo("--backend") == 0)
                {
                    backend = (SoundIoBackend)Enum.Parse(typeof(SoundIoBackend), args[i]);
                }
                else if (arg.CompareTo("--device") == 0)
                {
                    deviceId = args[i];
                }
                else
                {
                    return (Usage());
                }
            }
            else
            {
                if (File.Exists(args[i]))
                {
                    filename = args[i];
                }
                else
                {
                    Usage();
                    return (1);
                }
            }
        }
        if (string.IsNullOrEmpty(filename))
        {
            throw new Exception("Input file name can not null.");
        }
    }
    catch (IndexOutOfRangeException)
    {
        Usage();
        return (1);
    }

    using (_soundIO = new SoundIO())
    {
        using (_waveFile = new WaveFileReader(filename))
        {
            _channels = _waveFile.WaveFormat.Channels;
            sampleRate = _waveFile.WaveFormat.SampleRate;

            _soundIO.Connect();
            _soundIO.FlushEvents();

            // Resolve the output device: an explicit id, or the system default.
            SoundIODevice device = null;
            if (deviceId != null)
            {
                foreach (var dev in _soundIO)
                {
                    if (dev.Aim == SoundIoDeviceAim.Output && dev.Id.Equals(deviceId) && dev.IsRaw == isRaw)
                    {
                        device = dev;
                        break;
                    }
                }
                if (device == null)
                {
                    Console.Error.WriteLine("Output device not found.");
                    return (1);
                }
                device.AddRef(); // Enumerator cleans up itself on dispose
            }
            else
            {
                device = _soundIO.GetDefaultOutputDevice();
            }

            // NOTE: the original probed and printed the device twice in a row;
            // the copy-pasted duplicate block was removed.
            Console.WriteLine("Output device: {0}", device.Name);
            if (device.ProbeError != SoundIoError.None)
            {
                Console.WriteLine("Cannot probe device: {0}", device.ProbeError);
                return (1);
            }

            var outstream = new SoundIOOutStream(device)
            {
                OnWriteCallback = WriteCallback,
                OnUnderflowCallback = UnderflowCallback,
                Name = "sio_play",
                SampleRate = sampleRate
            };

            // look for matching layout for wav file...
            var foundLayout = false;
            foreach (var layout in device.Layouts)
            {
                if (layout.ChannelCount == _channels)
                {
                    outstream.Layout = layout;
                    foundLayout = true;
                    break;
                }
            }

            // TODO: may need to look at endian issues and other formats...
            // when paired with NAudioLite, ISampleProvider the conversion to Float32 is automatic.
            if (device.SupportsFormat(SoundIoFormats.Float32NE))
            {
                outstream.Format = SoundIoFormats.Float32NE;
            }
            else if (device.SupportsFormat(SoundIoFormats.Float64NE))
            {
                outstream.Format = SoundIoFormats.Float64NE;
            }
            else if (device.SupportsFormat(SoundIoFormats.S32NE))
            {
                outstream.Format = SoundIoFormats.S32NE;
            }
            else if (device.SupportsFormat(SoundIoFormats.S16NE))
            {
                outstream.Format = SoundIoFormats.S16NE;
            }
            else
            {
                Console.WriteLine("No suitable device format available.");
                return (1);
            }

            Console.WriteLine();
            Console.WriteLine("Playing file: {0}, Format: {1}", Path.GetFullPath(filename), _waveFile.WaveFormat);

            err = outstream.Open();
            if (err != SoundIoError.None)
            {
                // BUG FIX: the message interpolated outstream.LayoutError where the
                // sample rate was intended.
                Console.WriteLine($"Unable to open device: {err.GetErrorMessage()}, with sample rate: {outstream.SampleRate}");
                return (1);
            }
            if (outstream.LayoutError != SoundIoError.None)
            {
                // BUG FIX: `err` is None when we reach here (Open succeeded);
                // report the layout error itself.
                Console.WriteLine($"Unable to set channel layout: {outstream.LayoutError.GetErrorMessage()}");
            }

            // revisit layout... if no suitable layout found
            if (!foundLayout)
            {
                Console.WriteLine("No native channel layout found, Device Channels: {0}, Wav File Channels: {1}, requires sampler...",
                                  outstream.Layout.ChannelCount, _channels);
            }

            // get sample provider that matches outstream.Layout
            if (outstream.Layout.ChannelCount == 1)
            {
                // mono
                if (_waveFile.WaveFormat.Channels == 1)
                {
                    _sampleProvider = _waveFile.ToSampleProvider();
                }
                else
                {
                    _sampleProvider = _waveFile.ToSampleProvider().ToMono();
                }
            }
            else if (outstream.Layout.ChannelCount == 2)
            {
                //stereo
                if (_waveFile.WaveFormat.Channels == 1)
                {
                    _sampleProvider = _waveFile.ToSampleProvider().ToStereo();
                }
                else
                {
                    _sampleProvider = _waveFile.ToSampleProvider();
                }
            }

            outstream.Start();
            _soundIO.OnBackendDisconnect += SoundIo_OnBackendDisconnected;

            // Pump libsoundio events until the write callback signals completion.
            while (!_fileDone)
            {
                System.Threading.Thread.Sleep(100);
                _soundIO.FlushEvents();
            }
            // Grace period so the last buffered audio drains before teardown.
            System.Threading.Thread.Sleep(500);

            if (_fileDone && outstream != null)
            {
                outstream.Dispose();
                outstream = null;
            }
            Console.WriteLine("End Program");
            return (0);
        }
    }
}
/// <summary>
/// Marks waveform view #2 as the selected one and exposes its stream
/// (or null when the stream is not a WaveFileReader) via Wave.
/// </summary>
private void cwvNumber2_Click(object sender, EventArgs e)
{
    selectedWave = 2;
    Wave = cwvNumber2.WaveStream is WaveFileReader reader ? reader : null;
}
// Cut a fragment of the signal: saves the selected sample range of OpenFileName
// into TrimFileName, going through a temporary file.
// (Original comments were in Russian; translated to English.)
private void TrimWav(String OpenFileName, String TrimFileName, int StartSample, int EndSample)
{
    // Temporary file path
    string fileName = System.IO.Path.GetTempPath() + Guid.NewGuid().ToString() + ".wav";
    // BUG FIX: the source reader was never disposed (resource leak).
    using (WaveStream wave = new WaveFileReader(OpenFileName))
    {
        WaveFormat Format = wave.WaveFormat;
        // Writer for the new file, same format as the source (field, closed below).
        write = new WaveFileWriter(fileName, Format);
        var buffer = new byte[wave.Length];
        int _bufferSize = Convert.ToInt32(wave.Length);
        var read = 0;
        read = wave.Read(buffer, 0, _bufferSize); // read the whole file
        if (wavePlayer.WaveFormat == "IeeeFloat") // IEEE Float encoded file
        {
            // NOTE(review): this branch uses scrollbar positions instead of
            // StartSample/EndSample, unlike the PCM branch — confirm intent.
            write.Write(buffer, hScrollBar1.Value * 32, hScrollBar2.Value * 32);
        }
        else // PCM file
        {
            // The original switch repeated the same formula for 8/16/24/32-bit,
            // mono/stereo: offset and count scale by bytes-per-sample * channels.
            int scale = (wavePlayer.BitsPerSample / 8) * wavePlayer.Channels;
            write.Write(buffer, StartSample * scale, EndSample * scale);
        }
    }
    write.Close();                             // close the writer
    File.Copy(fileName, TrimFileName, true);   // copy the temp file to the user-chosen location
    File.Delete(fileName);                     // delete the temp file
}
/// <summary>
/// Creates an input WaveChannel (Audio file reader for MP3/WAV/OGG/FLAC/WMA/AIFF/Other formats in the future)
/// </summary>
/// <param name="filename">Path of the audio file to open; format chosen by extension.</param>
private void CreateInputWaveChannel(string filename)
{
    string fileExt = Path.GetExtension(filename.ToLower());
    if (fileExt == MP3Extension)
    {
        m_waveReader = new Mp3FileReader(filename);
        m_blockAlignedStream = new BlockAlignReductionStream(m_waveReader);
        // Wave channel - reads from file and returns raw wave blocks
        m_waveChannel = new WaveChannel32(m_blockAlignedStream);
    }
    else if (fileExt == WAVExtension)
    {
        OpenAsPcm16WaveChannel(new WaveFileReader(filename));
    }
    else if (fileExt == OGGVExtension)
    {
        OpenAsPcm16WaveChannel(new OggVorbisFileReader(filename));
    }
    else if (fileExt == FLACExtension)
    {
        OpenAsPcm16WaveChannel(new FLACFileReader(filename));
    }
    else if (fileExt == WMAExtension)
    {
        OpenAsPcm16WaveChannel(new WMAFileReader(filename));
    }
    else if (fileExt == AIFFExtension)
    {
        m_waveReader = new AiffFileReader(filename);
        m_waveChannel = new WaveChannel32(m_waveReader);
    }
    else
    {
        throw new ApplicationException("Cannot create Input WaveChannel - Unknown file type: " + fileExt);
    }
}

// Normalizes a reader to 16-bit PCM (converting encoding and bit depth when
// needed) and wraps it in a WaveChannel32. Factored out of four identical
// copy-pasted branches in CreateInputWaveChannel.
private void OpenAsPcm16WaveChannel(WaveStream reader)
{
    m_waveReader = reader;
    if (m_waveReader.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
    {
        m_waveReader = WaveFormatConversionStream.CreatePcmStream(m_waveReader);
        m_waveReader = new BlockAlignReductionStream(m_waveReader);
    }
    if (m_waveReader.WaveFormat.BitsPerSample != 16)
    {
        var format = new WaveFormat(m_waveReader.WaveFormat.SampleRate, 16, m_waveReader.WaveFormat.Channels);
        m_waveReader = new WaveFormatConversionStream(format, m_waveReader);
    }
    m_waveChannel = new WaveChannel32(m_waveReader);
}
/// <summary>
/// Splits the input text into sentences with their trailing punctuation and
/// quote state, feeds each to BuildPrompt, then (when an output path is set)
/// converts the accumulated WAV stream in `ms` to MP3.
/// </summary>
/// <param name="text">Raw input text; lower-cased and normalized before parsing.</param>
public void ParseText(string text)
{
    // Normalize: lower-case (en-US), apply the replacement table, unify curly quotes.
    inputText = text.ToLower(new CultureInfo("en-US", false));
    inputText = TextReplace(inputText);
    //this.inputText = inputText.Replace("\n", "/");
    inputText = inputText.Replace("“", "\"");
    inputText = inputText.Replace("”", "\"");
    string[] lines = Regex.Split(inputText, "\n");
    List <string> sentences = new List <string>();
    List <string> symbols = new List <string>();  // trailing punctuation per sentence
    List <bool> quotes = new List <bool>();       // whether each sentence was inside quotes
    foreach (string line in lines)
    {
        // NOTE(review): -842352733 is presumably the .NET Framework hash of a
        // lone "\r" left over after splitting CRLF text on "\n" — but
        // string.GetHashCode() is randomized per process on .NET Core/5+, so
        // this check silently breaks there. Confirm and compare the literal
        // character instead.
        if (line.Length == 1 && line.GetHashCode() == -842352733)
        {
            continue;
        }
        else if (line.Length == 0)
        {
            continue;
        }
        // If any configured marker appears in the line, apply its delete rule.
        DeleteOperation operation = DeleteOperation.None;
        foreach (KeyValuePair <string, DeleteOperation> kvp in deleteDictionary)
        {
            if (line.Contains(kvp.Key))
            {
                operation = kvp.Value;
                break;
            }
        }
        if (operation == DeleteOperation.DeleteAllAfter)
        {
            break;    // drop this line and everything after it
        }
        else if (operation == DeleteOperation.DeleteLine)
        {
            continue; // drop just this line
        }
        // Odd-indexed fragments of the quote split are the quoted spans.
        string[] quoteSplit = Regex.Split(line, "\"");
        for (int q = 0; q < quoteSplit.Length; q++)
        {
            bool quoted = q % 2 == 1 ? true : false;
            // Split on sentence punctuation, keeping the delimiters as parts.
            string[] parts = Regex.Split(quoteSplit[q], "([,…。:?!])");
            string sentence = "";
            string symbol = "";
            //Console.WriteLine("======" + parts.Length);
            for (int i = 0; i < parts.Length; i++)
            {
                if (parts[i].Length == 0)
                {
                    continue;
                }
                sentence = parts[i];
                // Collect the run of punctuation that follows the sentence text.
                while (i + 1 < parts.Length)
                {
                    //Console.WriteLine("Symbol " + parts[i + 1] + " " + parts[i + 1].GetHashCode());
                    // NOTE(review): same hash-code fragility as above —
                    // 757602046 presumably identifies one specific character;
                    // verify which, and compare it directly.
                    if (parts[i + 1].GetHashCode() == 757602046)
                    {
                        i++;
                        continue;
                    }
                    if (",…。:?!".Contains(parts[i + 1]))
                    {
                        symbol += parts[i + 1];
                        i++;
                        continue;
                    }
                    break;
                }
                if (symbol.Length == 0)
                {
                    symbol = "。"; // default terminator when none was found
                }
                //BuildPrompt(sentence, symbol, quoted);
                sentences.Add(sentence);
                symbols.Add(symbol);
                quotes.Add(quoted);
                sentence = "";
                symbol = "";
            }
        }
    }
    // Synthesize each sentence and report progress as a percentage.
    for (int i = 0; i < sentences.Count; i++)
    {
        BuildPrompt(sentences[i], symbols[i], quotes[i]);
        int percent = (int)(100.0f * (float)(i + 1) / (float)sentences.Count);
        //Console.Write("\r " + fileName + " Progress: {0}% ", percent);
        Console.WriteLine(percent);
    }
    if (outputFileLocation != null)
    {
        ms.Seek(0, SeekOrigin.Begin);
        //Convert from wav to mp3 to save space
        using (var rdr = new WaveFileReader(ms))
        using (var wtr = new LameMP3FileWriter(outputFileLocation, rdr.WaveFormat, LAMEPreset.VBR_90))
        {
            rdr.CopyTo(wtr);
        }
    }
}
/// <summary>
/// Converts an MP3/WAV file to signed 8-bit mono PCM at a game-specific sample
/// rate and injects it into a GBA ROM as Sappy assembly data.
/// Args: originalRom newRom srcAudio asmFile title destAddr pointerAddr repeat
/// </summary>
static void Main(string[] args)
{
    // BUG FIX: eight positional arguments are consumed below (argC++ x8), but
    // the guard only required five, crashing with IndexOutOfRangeException.
    if (args.Length < 8)
    {
        Console.WriteLine("Not enough arguments");
        PrintUsage();
        return;
    }
    int argC = 0;
    string originalRomName = args[argC++];
    string newRomName = args[argC++];
    string srcFile = args[argC++];
    string asmFile = args[argC++];
    string title = args[argC++];
    string destAddress = args[argC++];
    string pointerAddress = args[argC++];
    string repeat = args[argC++];
    bool bRepeat = bool.Parse(repeat);

    //Add to support other games
    int mffreq = 10512; // Metroid Fusion playback rate
    int zmfreq = 13379; // Zero Mission playback rate
    int freq = 0;
    if (title.ToLower() == "mf")
    {
        freq = mffreq;
    }
    else if (title.ToLower() == "zm")
    {
        freq = zmfreq;
    }
    else
    {
        // Unknown title: treat the argument as an explicit frequency.
        Console.WriteLine("Not Fusion or ZM, using freq " + title);
        freq = int.Parse(title);
    }

    FileInfo srcAudio = new FileInfo(srcFile);
    //we need to be 8bit and mono channel, apply desired frequency.
    var outFormat = new WaveFormat(freq, 8, 1);

    //Find out level of decode
    WaveStream srcStream = null;
    if (srcAudio.Extension.ToLower() == ".mp3")
    {
        Console.WriteLine("Decoding mp3.");
        srcStream = new Mp3FileReader(srcFile);
    }
    if (srcAudio.Extension.ToLower() == ".wav")
    {
        Console.WriteLine("Decoding wav.");
        srcStream = new WaveFileReader(srcFile);
    }
    if (srcStream == null)
    {
        Console.WriteLine($"{srcAudio.Extension} is an unsupported format");
        return;
    }

    //Convert either source to wave.
    using (WaveFormatConversionStream conversionStream = new WaveFormatConversionStream(outFormat, srcStream))
    {
        using (RawSourceWaveStream raw = new RawSourceWaveStream(conversionStream, outFormat))
        {
            //Convert unsigned 8-bit PCM bytes to signed 8-bit by recentering on 0.
            raw.Seek(0, SeekOrigin.Begin);
            int len = 0;
            List<sbyte> data = new List<sbyte>();
            for (; len < raw.Length; len++)
            {
                sbyte n = Convert.ToSByte(raw.ReadByte() - 128);
                data.Add(n);
            }
            //Generate
            AudioToSappyASM(originalRomName, newRomName, asmFile, data, bRepeat, freq, pointerAddress, destAddress);
        }
    }
}
/// <summary>
/// Opens a wave sample from a virtual-file-provider URL; the opened stream is
/// kept in fileStream so it can be released with the reader.
/// </summary>
public WaveSampleReader(IVirtualFileProvider fileProvider, string url)
{
    var stream = fileProvider.OpenStream(url, VirtualFileMode.Open, VirtualFileAccess.Read);
    fileStream = stream;
    waveFileReader = new WaveFileReader(stream);
}
/// <summary>
/// Wraps an existing stream in a wave-file reader; the caller retains
/// ownership of the stream (fileStream is left unset).
/// </summary>
public WaveSampleReader(Stream stream) => waveFileReader = new WaveFileReader(stream);
/// <summary>
/// Opens the wave file at <paramref name="filePath"/> for reading.
/// </summary>
public WaveSampleReader(string filePath)
{
    // BUG FIX: File.Open(path, FileMode.Open) requests ReadWrite access, which
    // fails on read-only files; this reader only needs read access (and allows
    // other readers concurrently).
    fileStream = File.Open(filePath, FileMode.Open, FileAccess.Read, FileShare.Read);
    waveFileReader = new WaveFileReader(fileStream);
}
/// ------------------------------------------------------------------------------------
/// <summary>
/// Convenience overload: paints the given wave stream with no owning control
/// (delegates to the (control, stream) constructor with a null control).
/// </summary>
public WavePainterBasic(WaveFileReader stream) : this(null, stream)
{
}
/// <summary>
/// Switches playback to the song at <paramref name="songNumber"/> (an index
/// into the shuffled play order), after pruning playlist entries whose files
/// have gone missing and re-compacting the play-order indices.
/// </summary>
private void SetSongToPlay(int songNumber)
{
    // Drop broken paths and shift the play order so its indices stay valid.
    string[] brokenPaths = CurrentPlaylist?.CheckPathsActuality();
    if (brokenPaths != null)
    {
        foreach (string path in _audioPathsToPlay.ToArray())
        {
            foreach (string brokenPath in brokenPaths)
            {
                if (path == brokenPath)
                {
                    int brokenIndex = _audioPathsToPlay.IndexOf(path);
                    _audioPathsToPlay.Remove(path);
                    int brokenItem = _playOrder[brokenIndex];
                    _playOrder.RemoveAt(brokenIndex);
                    int[] newPlayOrder = new int[_playOrder.Count];
                    for (int i = 0; i < _playOrder.Count; i++)
                    {
                        // Indices above the removed entry slide down by one.
                        if (_playOrder[i] > brokenItem)
                        {
                            newPlayOrder[i] = _playOrder[i] - 1;
                        }
                        else
                        {
                            newPlayOrder[i] = _playOrder[i];
                        }
                    }
                    _playOrder = newPlayOrder.ToList();
                }
            }
        }
    }
    try
    {
        // Wrap both indices around the ends of the list.
        if (songNumber >= _audioPathsToPlay.Count)
        {
            songNumber = 0;
        }
        else if (songNumber < 0)
        {
            songNumber = _audioPathsToPlay.Count - 1;
        }
        if (_playingSongNumber >= _audioPathsToPlay.Count)
        {
            _playingSongNumber = 0;
        }
        else if (_playingSongNumber < 0)
        {
            _playingSongNumber = _audioPathsToPlay.Count - 1;
        }
        // Release the previous output device and readers before re-init.
        _waveOut.Dispose();
        if (_mp3Reader != null)
        {
            _mp3Reader.Dispose();
        }
        if (_wavReader != null)
        {
            _wavReader.Dispose();
        }
        songNumber = _playOrder[songNumber];
        switch (Path.GetExtension(_audioPathsToPlay[songNumber]))
        {
            case ".mp3":
                _mp3Reader = new Mp3FileReader(_audioPathsToPlay[songNumber]);
                _waveOut.Init(_mp3Reader);
                // BUG FIX: was (int)double.Parse(TotalSeconds.ToString()) — a
                // culture-sensitive round-trip through a string; cast directly.
                timeline.Maximum = (int)_mp3Reader.TotalTime.TotalSeconds;
                break;
            case ".wav":
                _wavReader = new WaveFileReader(_audioPathsToPlay[songNumber]);
                _waveOut.Init(_wavReader);
                timeline.Maximum = (int)_wavReader.TotalTime.TotalSeconds;
                break;
        }
        MusicName.Text = Path.GetFileNameWithoutExtension(_audioPathsToPlay[songNumber]);
        File.WriteAllText(Path.Combine(_obsFilePath, "!OBS.txt"), MusicName.Text);
        _errorsLabel.Text = "";
    }
    catch (NullReferenceException)
    {
        _errorsLabel.Text = "Error: Path is incorrect";
    }
    catch (ArgumentOutOfRangeException)
    {
        _errorsLabel.Text = "Error: Folder does not contain any audio";
    }
}
private static void OpenFiles(out WaveFileReader inputFile, out Stream?outputFile, in RunParameters parameters)
/// <summary>
/// Console entry point: loads a DeepSpeech model (and optional language model),
/// runs inference on a WAV file, optionally producing word-timing metadata
/// when --extended is supplied.
/// </summary>
static void Main(string[] args)
{
    string model = null;
    string lm = null;
    string trie = null;
    string audio = null;
    bool extended = false;
    if (args.Length > 0)
    {
        model = GetArgument(args, "--model");
        lm = GetArgument(args, "--lm");
        trie = GetArgument(args, "--trie");
        audio = GetArgument(args, "--audio");
        extended = !string.IsNullOrWhiteSpace(GetArgument(args, "--extended"));
    }

    const uint BEAM_WIDTH = 500;
    const float LM_ALPHA = 0.75f;
    const float LM_BETA = 1.85f;

    Stopwatch stopwatch = new Stopwatch();
    try
    {
        Console.WriteLine("Loading model...");
        stopwatch.Start();
        using (IDeepSpeech sttClient = new DeepSpeech(model ?? "output_graph.pbmm", BEAM_WIDTH))
        {
            stopwatch.Stop();
            // BUG FIX: Elapsed.Milliseconds is only the 0-999 ms component of the
            // elapsed time; TotalMilliseconds is the full duration.
            Console.WriteLine($"Model loaded - {stopwatch.Elapsed.TotalMilliseconds} ms");
            stopwatch.Reset();
            if (lm != null)
            {
                Console.WriteLine("Loading LM..."); // typo fix: was "Loadin LM..."
                sttClient.EnableDecoderWithLM(
                    lm ?? "lm.binary",
                    trie ?? "trie",
                    LM_ALPHA, LM_BETA);
            }

            string audioFile = audio ?? "arctic_a0024.wav";
            var waveBuffer = new WaveBuffer(File.ReadAllBytes(audioFile));
            using (var waveInfo = new WaveFileReader(audioFile))
            {
                Console.WriteLine("Running inference....");
                stopwatch.Start();
                string speechResult;
                // 16-bit samples: MaxSize bytes / 2 = sample count.
                if (extended)
                {
                    Metadata metaResult = sttClient.SpeechToTextWithMetadata(waveBuffer.ShortBuffer,
                                                                             Convert.ToUInt32(waveBuffer.MaxSize / 2));
                    speechResult = MetadataToString(metaResult);
                }
                else
                {
                    speechResult = sttClient.SpeechToText(waveBuffer.ShortBuffer,
                                                          Convert.ToUInt32(waveBuffer.MaxSize / 2));
                }
                stopwatch.Stop();
                Console.WriteLine($"Audio duration: {waveInfo.TotalTime.ToString()}");
                Console.WriteLine($"Inference took: {stopwatch.Elapsed.ToString()}");
                Console.WriteLine((extended ? "Extended result: " : "Recognized text: ") + speechResult);
            }
            waveBuffer.Clear();
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
    }
}
// Plot the signal waveform (original comment was in Russian).
// Decodes the WAV byte stream according to bit depth and channel count and
// adds normalized [-1, 1) sample points to chart series 0 (and 1 for stereo).
public void plottATR_wav(String FileName)
{
    // BUG FIX: the reader was never disposed (resource leak).
    using (WaveStream wave = new WaveFileReader(FileName))
    {
        int sampleSize = wavePlayer.BitsPerSample;
        var bufferSize = Convert.ToInt32(wave.Length);
        var buffer = new byte[wave.Length];
        var read = wave.Read(buffer, 0, bufferSize);
        if (wavePlayer.WaveFormat != "IeeeFloat")
        {
            switch (sampleSize)
            {
                case 8: // unsigned 8-bit PCM, recentered on 0
                    if (wavePlayer.Channels == 2)
                    {
                        for (int i = 0; i < bufferSize; i += 2)
                        {
                            uint sampleChen1 = buffer[i];
                            float sample8Chen1 = (sampleChen1 - 128f) / 256f;
                            chart1.Series[0].Points.Add(sample8Chen1);
                            uint sampleChen2 = buffer[i + 1];
                            float sample8Chen2 = (sampleChen2 - 128f) / 256f;
                            chart1.Series[1].Points.Add(sample8Chen2);
                        }
                        N_Output = bufferSize / 2;
                        _ScrollBar(N_Output);
                    }
                    else
                    {
                        for (int i = 0; i < bufferSize; i++)
                        {
                            ushort sample = (buffer[i]);
                            float sample8 = (sample - 128) / 256f;
                            chart1.Series[0].Points.Add(sample8);
                        }
                        N_Output = bufferSize;
                        _ScrollBar(N_Output);
                    }
                    break;
                case 16: // little-endian signed 16-bit PCM
                    if (wavePlayer.Channels == 2)
                    {
                        for (int i = 0; i < bufferSize; i += 4)
                        {
                            long sampleChen1 = (short)((buffer[i + 1] << 8) | buffer[i]);
                            float sample16Chen1 = sampleChen1 / 32768f;
                            chart1.Series[0].Points.Add(sample16Chen1);
                            long sampleChen2 = (short)((buffer[i + 3] << 8) | buffer[i + 2]);
                            float sample16Chen2 = sampleChen2 / 32768f;
                            chart1.Series[1].Points.Add(sample16Chen2);
                        }
                        N_Output = bufferSize / 4;
                        _ScrollBar(N_Output);
                    }
                    else
                    {
                        for (int i = 0; i < bufferSize; i += 2)
                        {
                            short sample = (short)((buffer[i + 1] << 8) | buffer[i]);
                            float sample16 = sample / 32768f;
                            chart1.Series[0].Points.Add(sample16);
                        }
                        N_Output = bufferSize / 2;
                        _ScrollBar(N_Output);
                    }
                    break;
                case 24: // little-endian signed 24-bit PCM
                    // BUG FIX: samples were assembled with << 12 shifts, which
                    // neither packs 8-bit bytes correctly nor matches the 2^31
                    // divisor. Pack the 3 bytes into the top of a signed 32-bit
                    // int (sign carried by the high byte) so /2^31 normalizes.
                    if (wavePlayer.Channels == 2)
                    {
                        for (int i = 0; i < bufferSize; i += 6)
                        {
                            int sampleChen1 = (buffer[i + 2] << 24) | (buffer[i + 1] << 16) | (buffer[i] << 8);
                            float sample24Chen1 = sampleChen1 / 2147483648f;
                            chart1.Series[0].Points.Add(sample24Chen1);
                            int sampleChen2 = (buffer[i + 5] << 24) | (buffer[i + 4] << 16) | (buffer[i + 3] << 8);
                            float sample24Chen2 = sampleChen2 / 2147483648f;
                            chart1.Series[1].Points.Add(sample24Chen2);
                        }
                        N_Output = bufferSize / 6;
                        _ScrollBar(N_Output);
                    }
                    else
                    {
                        for (int i = 0; i < bufferSize; i += 3)
                        {
                            int sample = (buffer[i + 2] << 24) | (buffer[i + 1] << 16) | (buffer[i] << 8);
                            float sample24 = sample / 2147483648f;
                            chart1.Series[0].Points.Add(sample24);
                        }
                        N_Output = bufferSize / 3;
                        _ScrollBar(N_Output);
                    }
                    break;
                case 32: // little-endian signed 32-bit PCM
                    if (wavePlayer.Channels == 2)
                    {
                        for (int i = 0; i < bufferSize; i += 8)
                        {
                            int sampleChen1 = ((((buffer[i + 3] << 8) | buffer[i + 2]) << 8 | buffer[i + 1]) << 8 | buffer[i + 0]);
                            float sample32Chen1 = sampleChen1 / 2147483648f;
                            chart1.Series[0].Points.Add(sample32Chen1);
                            int sampleChen2 = ((((buffer[i + 7] << 8) | buffer[i + 6]) << 8 | buffer[i + 5]) << 8 | buffer[i + 4]);
                            float sample32Chen2 = sampleChen2 / 2147483648f;
                            chart1.Series[1].Points.Add(sample32Chen2);
                        }
                        N_Output = bufferSize / 8;
                        _ScrollBar(N_Output);
                    }
                    else
                    {
                        for (int i = 0; i < bufferSize; i += 4)
                        {
                            int sample = ((((buffer[i + 3] << 8) | buffer[i + 2]) << 8 | buffer[i + 1]) << 8 | buffer[i]);
                            float sample32 = sample / 2147483648f;
                            chart1.Series[0].Points.Add(sample32);
                        }
                        N_Output = bufferSize / 4;
                        _ScrollBar(N_Output);
                    }
                    break;
                default:
                    break;
            }
        }
        else
        {
            // IEEE float: 4 bytes per sample, interleaved when stereo.
            // BUG FIX: offsets and counts were computed with BitsPerSample (32)
            // as if it were a byte stride; use byte offsets of 4 per sample,
            // 8 per stereo frame.
            if (wavePlayer.Channels == 2)
            {
                for (int i = 0; i + 8 <= bufferSize; i += 8)
                {
                    chart1.Series[0].Points.Add(BitConverter.ToSingle(buffer, i));
                    chart1.Series[1].Points.Add(BitConverter.ToSingle(buffer, i + 4));
                }
                N_Output = bufferSize / 8;
                _ScrollBar(N_Output);
            }
            else
            {
                for (int i = 0; i + 4 <= bufferSize; i += 4)
                {
                    chart1.Series[0].Points.Add(BitConverter.ToSingle(buffer, i));
                }
                N_Output = bufferSize / 4;
                _ScrollBar(N_Output);
            }
        }
    }
}
/// ------------------------------------------------------------------------------------
/// <summary>
/// Factory override: this control paints with range-selection support.
/// </summary>
protected override WavePainterBasic GetNewWavePainter(WaveFileReader stream) =>
    new WavePainterWithRangeSelection(this, stream);
/// <summary>
/// Builds an output WAV: copies the keyword source, and at each sample offset
/// listed in the cut-location file splices in the corresponding task file
/// followed by five seconds of silence. (A large commented-out concat test
/// block was removed from the original.)
/// </summary>
private void Start_AddTask_Btn_Click(object sender, EventArgs e)
{
    SaveFileDialog dialog = new SaveFileDialog();
    dialog.Title = "Save";
    dialog.InitialDirectory = ".\\";
    dialog.SupportMultiDottedExtensions = true;
    dialog.Filter = "wav files (*.wav)|*.wav|All files (*.*)|*.*";
    dialog.FileName = "Untitled.wav";
    dialog.OverwritePrompt = true;
    if (dialog.ShowDialog() == DialogResult.OK)
    {
        try
        {
            //read the cut location (sample offsets), one per line
            var location = new List<int>();
            using (StreamReader sr = new StreamReader(Keyword_Sample_TextBox.Text.Trim()))
            {
                String line;
                while ((line = sr.ReadLine()) != null)
                {
                    location.Add(Int32.Parse(line));
                }
            }
            //read the task filenames, skipping blank lines
            var taskFilenames = new List<String>();
            using (StreamReader sr = new StreamReader(Task_Source_TextBox.Text.Trim()))
            {
                String line;
                while ((line = sr.ReadLine()) != null)
                {
                    if (line != "")
                    {
                        taskFilenames.Add(line.Trim());
                    }
                }
            }
            if (location.Count > taskFilenames.Count)
            {
                throw new ArgumentException("the number cut location must equal or less than the number of input task files");
            }
            using (var keywordWaveReader = new WaveFileReader(Keyword_Source_TextBox.Text.Trim()))
            {
                var keywordSampleProvider = WaveExtensionMethods.ToSampleProvider(keywordWaveReader);
                //set writer: keyword's rate/bits, forced to mono
                var sampleRate = keywordWaveReader.WaveFormat.SampleRate;
                var bit = keywordWaveReader.WaveFormat.BitsPerSample;
                var channel = 1;
                var outFormat = new WaveFormat(sampleRate, bit, channel);
                // BUG FIX: the writer is now in a using block so it is disposed
                // even when an exception aborts the copy mid-way.
                using (var writer = new WaveFileWriter(dialog.FileName, outFormat))
                {
                    //start writing file
                    float[] buffer = new float[2 * keywordWaveReader.WaveFormat.Channels]; //2 samples per buffer
                    int samplesRead;
                    int currentLocation = 0; //number of samples has been read
                    int taskCount = 0;
                    // BUG FIX: location[0] threw on an empty cut file; park the
                    // threshold at MaxValue when there are no cuts.
                    int cutLocation = location.Count > 0 ? location[0] : int.MaxValue;
                    while ((samplesRead = keywordSampleProvider.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        writer.WriteSamples(buffer, 0, samplesRead);
                        currentLocation += samplesRead;
                        //when encounter the cut location, write the task stream
                        if (currentLocation > cutLocation)
                        {
                            //convert the task stream into the same waveFormat as keyword stream's
                            var taskFilename = $@"task/{taskFilenames[taskCount]}";
                            using (WaveFormatConversionStream taskStream = new WaveFormatConversionStream(outFormat, new WaveFileReader(taskFilename)))
                            {
                                byte[] taskBuffer = new byte[taskStream.WaveFormat.BitsPerSample * taskStream.WaveFormat.Channels / 4];
                                int taskBytesRead;
                                while ((taskBytesRead = taskStream.Read(taskBuffer, 0, taskBuffer.Length)) > 0)
                                {
                                    writer.Write(taskBuffer, 0, taskBytesRead);
                                }
                                //insert 5sec silence
                                for (int sec = 0; sec < 5; sec++)
                                {
                                    float[] silenceBuffer = Enumerable.Repeat((float)0, writer.WaveFormat.SampleRate).ToArray();
                                    writer.WriteSamples(silenceBuffer, 0, silenceBuffer.Length);
                                }
                            }
                            taskCount++;
                            // BUG FIX: after the last cut was consumed,
                            // location[taskCount] threw IndexOutOfRangeException.
                            cutLocation = taskCount < location.Count ? location[taskCount] : int.MaxValue;
                        }
                    }
                }
            }
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.ToString());
        }
    }
}
/// <summary>
/// Worker for async audio sending: reads an MP3 or WAV file, resamples it to
/// the format Discord accepts, and streams it to the voice client in 20 ms
/// blocks. Sets <c>badfile</c> on failure and clears <c>sendingAudio</c> when done.
/// </summary>
public void SendAudioTask(string filePath)
{
    // Flag to prevent multiple simultaneous audio sends.
    sendingAudio = true;
    try
    {
        // Number of channels our AudioService has been configured to use.
        var channelCount = client.GetService<AudioService>().Config.Channels;
        // Output format Discord accepts: 48 kHz, 16-bit, client channel count.
        var OutFormat = new WaveFormat(48000, 16, channelCount);
        // Select the reader from the actual file extension. (The original
        // matched ".mp3"/".wav" anywhere in the path, which misfires on
        // names like "my.mp3.bak" or a folder called "wav.files".)
        WaveStream reader = null;
        switch (Path.GetExtension(filePath).ToLowerInvariant())
        {
            case ".mp3":
                reader = new Mp3FileReader(filePath);
                break;
            case ".wav":
                reader = new WaveFileReader(filePath);
                break;
        }
        // Only attempt output if we have a supported file type.
        if (reader != null)
        {
            // BUG FIX: the reader and resampler were never disposed, leaking
            // the file handle and the Media Foundation resampler instance.
            using (reader)
            using (var resampler = new MediaFoundationResampler(reader, OutFormat))
            {
                // 60 is the highest resampler quality.
                resampler.ResamplerQuality = 60;
                // Establish the size of our audio buffer: 20 ms per block.
                int blockSize = OutFormat.AverageBytesPerSecond / 50;
                byte[] buffer = new byte[blockSize];
                int byteCount;
                try
                {
                    // Keep reading and sending while data is present.
                    while ((byteCount = resampler.Read(buffer, 0, blockSize)) > 0)
                    {
                        if (byteCount < blockSize)
                        {
                            // Incomplete frame: zero-pad the tail.
                            for (int i = byteCount; i < blockSize; i++)
                            {
                                buffer[i] = 0;
                            }
                        }
                        // Send the buffer to Discord.
                        vClient.Send(buffer, 0, blockSize);
                    }
                    vClient.Wait();
                }
                catch (Exception)
                {
                    // Absorb deliberately - likely as not we were cancelled mid-send.
                }
            }
        }
    }
    catch (Exception)
    {
        // Flag the failure so the caller can announce a bad file.
        badfile = true;
    }
    // We're no longer sending audio.
    sendingAudio = false;
}
public static void WaveToMP3(string waveFileName, string mp3FileName, LAMEPreset bitRate = LAMEPreset.ABR_128) { using (var reader = new WaveFileReader(waveFileName)) using (var writer = new LameMP3FileWriter(mp3FileName, reader.WaveFormat, bitRate)) reader.CopyTo(writer); }
/// <summary>
/// Generates a WAVE file by running the Open JTalk TTS engine as an external
/// process, then optionally applies a volume gain to the result.
/// </summary>
/// <param name="textToSpeak">
/// Text to speak.</param>
/// <param name="wave">
/// Path of the WAVE file to produce.</param>
private void CreateWave(
    string textToSpeak,
    string wave)
{
    // Resolve the Open JTalk installation paths.
    var openJTalkDir = Settings.Default.OpenJTalkSettings.OpenJTalkDirectory;
    if (string.IsNullOrWhiteSpace(openJTalkDir))
    {
        openJTalkDir = "OpenJTalk";
    }

    var openJTalk = Path.Combine(openJTalkDir, @"open_jtalk.exe");
    var dic = Path.Combine(openJTalkDir, @"dic");
    var voice = Path.Combine(openJTalkDir, @"voice\" + this.Config.Voice);

    // Temp output file for the raw TTS wave.
    var waveTemp = Path.GetTempFileName();
    if (File.Exists(waveTemp))
    {
        File.Delete(waveTemp);
    }

    // Open JTalk reads its input text from a file; it must be Shift_JIS.
    var textFile = Path.GetTempFileName();
    File.WriteAllText(textFile, textToSpeak, Encoding.GetEncoding("Shift_JIS"));

    var args = new string[]
    {
        $"-x \"{dic}\"",
        $"-m \"{voice}\"",
        $"-ow \"{waveTemp}\"",
        $"-s 48000",
        $"-p 240",
        $"-g {this.Config.Volume.ToString("N2")}",
        $"-a {this.Config.AllPass.ToString("N2")}",
        $"-b {this.Config.PostFilter.ToString("N2")}",
        $"-r {this.Config.Rate.ToString("N2")}",
        $"-fm {this.Config.HalfTone.ToString("N2")}",
        $"-u {this.Config.UnVoice.ToString("N2")}",
        $"-jm {this.Config.Accent.ToString("N2")}",
        $"-jf {this.Config.Weight.ToString("N2")}",
        $"\"{textFile}\""
    };

    var pi = new ProcessStartInfo()
    {
        FileName = openJTalk,
        CreateNoWindow = true,
        UseShellExecute = false,
        Arguments = string.Join(" ", args),
        RedirectStandardError = true,
        RedirectStandardOutput = true,
    };

    Debug.WriteLine(pi.FileName + " " + pi.Arguments);

    using (var p = Process.Start(pi))
    {
        // BUG FIX: reading both redirected streams with synchronous
        // ReadToEnd() calls can deadlock when the child fills the stdout pipe
        // while we are blocked on stderr. Drain stderr asynchronously while
        // reading stdout synchronously (per the Process class docs).
        var stderrTask = p.StandardError.ReadToEndAsync();
        var stdout = p.StandardOutput.ReadToEnd();
        var stderr = stderrTask.Result;

        if (!string.IsNullOrWhiteSpace(stderr))
        {
            Debug.WriteLine(stderr);
        }

        if (!string.IsNullOrWhiteSpace(stdout))
        {
            Debug.WriteLine(stdout);
        }

        p.WaitForExit();
    }

    if (File.Exists(textFile))
    {
        File.Delete(textFile);
    }

    if (this.Config.Gain != 1.0f)
    {
        // Apply the configured gain while copying to the final output file.
        using (var reader = new WaveFileReader(waveTemp))
        {
            var prov = new VolumeWaveProvider16(reader);
            prov.Volume = this.Config.Gain;

            WaveFileWriter.CreateWaveFile(
                wave,
                prov);
        }
    }
    else
    {
        // No gain needed: just move the temp file into place.
        File.Move(waveTemp, wave);
    }

    if (File.Exists(waveTemp))
    {
        File.Delete(waveTemp);
    }
}
public void OpenFile(string path) { Stop(); if (ActiveStream != null) { SelectionBegin = TimeSpan.Zero; SelectionEnd = TimeSpan.Zero; ChannelPosition = 0; } StopAndCloseStream(); if (System.IO.File.Exists(path)) { try { waveOutDevice = new WaveOut() { DesiredLatency = 100 }; var extension = Path.GetExtension(path); if (extension == ".mp3") { ActiveStream = new Mp3FileReader(path); } else if (extension == ".wav") { ActiveStream = new WaveFileReader(path); } else { throw new ArgumentException("Wrong file extension."); } inputStream = new WaveChannel32(ActiveStream); effectStream = new EffectStream(inputStream); outputStream = new WaveChannel32(effectStream); waveFormStream = new WaveChannel32(effectStream); sampleAggregator = new SampleAggregator(fftDataSize); outputStream.Sample += outputStream_Sample; waveOutDevice.Init(outputStream); ChannelLength = outputStream.TotalTime.TotalSeconds; FileTag = TagLib.File.Create(path); GenerateWaveformData(waveFormStream); CanPlay = true; } catch { ActiveStream = null; CanPlay = false; } } }
/// <summary>
/// Uses the Windows TTS library to get the bytes of a PCM Wave file from a text messages.
/// The voice track is mixed with a radio-static loop and bracketed by radio in/out
/// sound effects before being returned as a complete in-memory WAV file.
/// </summary>
/// <param name="message">Text to speak; null is treated as an empty message.</param>
/// <param name="voiceName">TTS voice to use; falls back to DefaultVoice when null.</param>
/// <returns>The wave file's bytes, or an empty array if something went wrong.</returns>
public byte[] GenerateRadioMessageWavBytes(string message, string voiceName = null)
{
    // Media files are stored in the Release build directory, so if we're running a Debug build, we have to look for them here.
    string debugPathToRelease = "";
    //#if DEBUG
    // debugPathToRelease = "..\\Release\\";
    //#endif

    if (string.IsNullOrEmpty(message))
    {
        message = ""; // Make sure message is not null
    }

    // No voice name provided, use the default voice instead.
    if (voiceName == null)
    {
        if (DefaultVoice == null) // Default voice not set/doesn't exist
        {
            return(new byte[0]);
        }
        voiceName = DefaultVoice;
    }

    // If the requested voice is unavailable, bail out with an empty array
    // rather than throwing at the caller.
    try
    {
        Reader.SelectVoice(voiceName);
    }
    catch (Exception)
    {
        return(new byte[0]);
    }

    // Text-to-speech: render the message into an in-memory WAV stream.
    MemoryStream ttsStream = new MemoryStream();           // create a new memory stream
    Reader.SetOutputToWaveStream(ttsStream);               // set the stream as output for the TTS reader
    Reader.Volume = 35;
    Reader.Speak(message);                                 // read the text into the stream
    ttsStream.Seek(0, SeekOrigin.Begin);                   // rewind the stream to position 0
    WaveFileReader waveTTS = new WaveFileReader(ttsStream); // read the stream into a WaveFileReader object

    // Mix voice with radio static.
    WaveFileReader waveStatic = new WaveFileReader(debugPathToRelease + "Media/Loop.wav"); // load the static sound loop
    ISampleProvider providerSpeech = new AMRadioFilter(waveTTS.ToSampleProvider(), FXIntensity * 250); // TTS provider with a radio filter applied
    ISampleProvider providerStatic = waveStatic.ToSampleProvider(); // get the sample provider for the static

    // Clamp the mix duration to [MIN_SPEECH_DURATION, MAX_SPEECH_DURATION] seconds.
    TimeSpan ttsDuration = waveTTS.TotalTime; // get the tts wave duration
    if (ttsDuration < TimeSpan.FromSeconds(MIN_SPEECH_DURATION))
    {
        ttsDuration = TimeSpan.FromSeconds(MIN_SPEECH_DURATION); // check min value
    }
    if (ttsDuration > TimeSpan.FromSeconds(MAX_SPEECH_DURATION))
    {
        ttsDuration = TimeSpan.FromSeconds(MAX_SPEECH_DURATION); // check max value
    }

    ISampleProvider[] sources = new[] { providerSpeech.Take(ttsDuration), providerStatic.Take(ttsDuration) }; // both providers, each limited to ttsDuration
    MixingSampleProvider mixingSampleProvider = new MixingSampleProvider(sources); // mix both channels
    IWaveProvider radioMix = mixingSampleProvider.ToWaveProvider16(); // convert the mix output to 16-bit PCM

    // Concatenate radio in/out sounds around the mixed voice.
    WaveFileReader waveRadioIn = new WaveFileReader(debugPathToRelease + "Media/In.wav");   // load the radio in FX
    WaveFileReader waveRadioOut = new WaveFileReader(debugPathToRelease + "Media/Out.wav"); // load the radio out FX
    IWaveProvider[] radioFXParts = new IWaveProvider[] { waveRadioIn, radioMix, waveRadioOut }; // all 3 parts, in playback order

    byte[] buffer = new byte[1024];                 // scratch buffer for the copy loop
    MemoryStream finalWavStr = new MemoryStream();  // stream for the final concatenated wav
    WaveFileWriter waveFileWriter = null;           // writer that fills the stream
    foreach (IWaveProvider wav in radioFXParts)     // iterate all three parts
    {
        if (waveFileWriter == null) // no writer yet: first part defines the output format
        {
            waveFileWriter = new WaveFileWriter(finalWavStr, wav.WaveFormat);
        }
        else if (!wav.WaveFormat.Equals(waveFileWriter.WaveFormat)) // later parts must match that format
        {
            // NOTE(review): a format mismatch silently drops this part from the
            // output instead of reporting an error — confirm this is intended.
            continue;
        }
        int read; // bytes read
        while ((read = wav.Read(buffer, 0, buffer.Length)) > 0) // copy the part's data
        {
            waveFileWriter.Write(buffer, 0, read);
        }
    }

    // Copy the stream to a byte array.
    // NOTE(review): this relies on Flush() updating the RIFF header sizes
    // before the bytes are copied (the writer is Close()d only afterwards) —
    // verify against the NAudio version in use.
    waveFileWriter.Flush();
    finalWavStr.Seek(0, SeekOrigin.Begin);
    byte[] waveBytes = new byte[finalWavStr.Length];
    finalWavStr.Read(waveBytes, 0, waveBytes.Length);

    // Close/dispose of everything.
    // NOTE(review): none of these disposals run if an exception is thrown
    // above — the readers and streams leak on error paths.
    ttsStream.Close();
    ttsStream.Dispose();
    waveTTS.Close();
    waveTTS.Dispose();
    waveStatic.Close();
    waveStatic.Dispose();
    waveRadioIn.Close();
    waveRadioIn.Dispose();
    waveRadioOut.Close();
    waveRadioOut.Dispose();
    waveFileWriter.Close();
    waveFileWriter.Dispose();
    finalWavStr.Close();
    finalWavStr.Dispose();

    // Return the bytes.
    return(waveBytes);
}
private void ConCatWaveFile(WaveFileReader sou, WaveFileReader des, long startPos, long endPos) { // lưu toàn bộ file đích vào temp WaveFileWriter temp = new WaveFileWriter("temp.wav", des.WaveFormat); // des.Filename = ""; des.Position = 0; var end = (int)des.Length; var buffer = new byte[1024]; while (des.Position < end) { var bytesRequired = (int)(end - des.Position); if (bytesRequired <= 0) { continue; } var bytesToRead = Math.Min(bytesRequired, buffer.Length); var bytesRead = des.Read(buffer, 0, bytesToRead); if (bytesRead > 0) { temp.Write(buffer, 0, bytesRead); } } // lưu 1 phần thêm file nguồn vào temp sou.Position = startPos; buffer = new byte[1024]; while (sou.Position < endPos) { int bytesRequired = (int)(endPos - sou.Position); if (bytesRequired > 0) { int bytesToRead = Math.Min(bytesRequired, buffer.Length); int bytesRead = sou.Read(buffer, 0, bytesToRead); if (bytesRead > 0) { temp.Write(buffer, 0, bytesRead); } } } temp.Dispose(); des.Dispose(); // xoá file đích if (des.Equals(cwvNumber1.WaveStream)) { CopyWaveFile(fileName1, temp.Filename); Wave = new WaveFileReader(fileName1); cwvNumber1.WaveStream = wave; cwvNumber1.Painting(); cwvNumber1.FitToScreen(); cwvNumber1.WaveStream.Position = 0; lbMax.Text = wave.TotalTime.Minutes.ToString() + ":" + wave.TotalTime.Seconds.ToString(); lbCur.Text = "0 : 0"; } else if (des.Equals(cwvNumber2.WaveStream)) { CopyWaveFile(fileName2, temp.Filename); Wave = new WaveFileReader(fileName2); cwvNumber2.WaveStream = wave; cwvNumber2.Painting(); cwvNumber2.FitToScreen(); cwvNumber2.WaveStream.Position = 0; lbMax.Text = wave.TotalTime.Minutes.ToString() + ":" + wave.TotalTime.Seconds.ToString(); lbCur.Text = "0 : 0"; } }
/// ------------------------------------------------------------------------------------ protected virtual WavePainterBasic GetNewWavePainter(WaveFileReader stream) { return(new WavePainterBasic(this, stream)); }
/// <summary>
/// Validates the selected audio file (if any), populates AudioInformation with
/// its format details, and returns its duration. An empty file name is allowed
/// (Preview generation works without audio) and yields a 5-minute default.
/// </summary>
/// <returns>The audio duration, or 5 minutes when no length can be determined.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown for unsupported extensions or files with more than 2 channels.
/// </exception>
private TimeSpan CheckAudioFile()
{
    AudioInformation.Text = string.Empty;

    // Going to allow this call to work with out an audio file
    // As it's ok to generate the Preview.
    // But if there is a file selected, and it's not a valid format, we will throw an error.
    if (!string.IsNullOrEmpty(this.AudioFileName.Text))
    {
        // Open the Audio file to get it's length and set the end time.
        var ext = Path.GetExtension(this.AudioFileName.Text);
        if (string.Equals(ext, ".wav", StringComparison.OrdinalIgnoreCase))
        {
            using (var reader = new WaveFileReader(this.AudioFileName.Text))
            {
                var knownGoodEncoding = false;

                // This is the list of encoding that we have tested and fully support
                // other encodings may work, just haven't been tested.
                switch (reader.WaveFormat.Encoding)
                {
                    case (WaveFormatEncoding)0xA100:
                    case WaveFormatEncoding.IeeeFloat:
                    case WaveFormatEncoding.Pcm:
                    case WaveFormatEncoding.ALaw:
                    case WaveFormatEncoding.MuLaw:
                        knownGoodEncoding = true;
                        break;
                }

                AudioInformation.Text =
                    $"Encoding: {reader.WaveFormat.Encoding}{(knownGoodEncoding ? string.Empty : " (!!un-tested encoding!!)")}, Length: {reader.TotalTime}, Sample Size: {reader.WaveFormat.BitsPerSample} bits, Sample Rate: {reader.WaveFormat.SampleRate:N0}";

                if (reader.WaveFormat.Channels > 2)
                {
                    // BUG FIX: corrected typos in the user-facing message
                    // ("mulit-channel auido" -> "multi-channel audio").
                    throw new InvalidOperationException(
                        "While the Tethr API does support multi-channel audio files, this application is limited to 2 channels at this time.");
                }

                return(reader.TotalTime);
            }
        }

        if (string.Equals(ext, ".mp3", StringComparison.OrdinalIgnoreCase))
        {
            using (var reader = new Mp3FileReader(this.AudioFileName.Text))
            {
                AudioInformation.Text =
                    $"Encoding: {reader.Mp3WaveFormat.Encoding}, Length: {reader.TotalTime}, Avg Rate: {reader.Mp3WaveFormat.AverageBytesPerSecond / 1024:N0} KBps";

                if (reader.WaveFormat.Channels > 2)
                {
                    throw new InvalidOperationException(
                        "While the Tethr API does support multi-channel audio files, this application is limited to 2 channels at this time.");
                }

                return(reader.TotalTime);
            }
        }

        if (string.Equals(ext, ".ogg", StringComparison.OrdinalIgnoreCase))
        {
            // Naudio doesn't support ogg/opus out of the box, so we need to let Tethr tell us if something is wrong.
            AudioInformation.Text = $"Encoding: ogg, Length: ???, Sample Size: ???, Sample Rate: ???";
            return(TimeSpan.FromMinutes(5));
        }

        throw new InvalidOperationException($"{ext} is not a supported audio file type.");
    }

    return(TimeSpan.FromMinutes(5));
}
/// <summary>
/// Plays the sound bound to this view. External (non-embedded, non-compressed)
/// sounds are loaded from a file next to the data file; otherwise the audio is
/// resolved from an audio group .dat file or the embedded audio entry and
/// played from memory. Any failure is reported via a message box.
/// </summary>
private void Play_Click(object sender, RoutedEventArgs e)
{
    UndertaleSound sound = DataContext as UndertaleSound;
    // External file path: the sound is neither embedded nor compressed.
    if ((sound.Flags & UndertaleSound.AudioEntryFlags.IsEmbedded) != UndertaleSound.AudioEntryFlags.IsEmbedded &&
        (sound.Flags & UndertaleSound.AudioEntryFlags.IsCompressed) != UndertaleSound.AudioEntryFlags.IsCompressed)
    {
        try
        {
            string filename;
            // A name with no extension defaults to .ogg.
            if (!sound.File.Content.Contains("."))
            {
                filename = sound.File.Content + ".ogg";
            }
            else
            {
                filename = sound.File.Content;
            }
            // The audio file is expected next to the currently open data file.
            string audioPath = System.IO.Path.Combine(System.IO.Path.GetDirectoryName((Application.Current.MainWindow as MainWindow).FilePath), filename);
            if (File.Exists(audioPath))
            {
                // Pick the reader by extension, then initialize and start playback.
                switch (System.IO.Path.GetExtension(filename).ToLower())
                {
                    case ".wav":
                        wavReader = new WaveFileReader(audioPath);
                        InitAudio();
                        waveOut.Init(wavReader);
                        waveOut.Play();
                        break;
                    case ".ogg":
                        oggReader = new VorbisWaveReader(audioPath);
                        InitAudio();
                        waveOut.Init(oggReader);
                        waveOut.Play();
                        break;
                    case ".mp3":
                        mp3Reader = new Mp3FileReader(audioPath);
                        InitAudio();
                        waveOut.Init(mp3Reader);
                        waveOut.Play();
                        break;
                    default:
                        throw new Exception("Unknown file type.");
                }
            }
            else
            {
                throw new Exception("Failed to find audio file.");
            }
        }
        catch (Exception ex)
        {
            waveOut = null;
            MessageBox.Show("Failed to play audio!\r\n" + ex.Message, "Audio failure", MessageBoxButton.OK, MessageBoxImage.Warning);
        }
        return;
    }
    // Embedded/compressed path: resolve the embedded audio entry.
    UndertaleEmbeddedAudio target;
    if (sound.GroupID != 0 && sound.AudioID != -1)
    {
        // Audio lives in a separate audio group file ("audiogroupN.dat").
        try
        {
            string path = System.IO.Path.Combine(System.IO.Path.GetDirectoryName((Application.Current.MainWindow as MainWindow).FilePath), "audiogroup" + sound.GroupID + ".dat");
            if (File.Exists(path))
            {
                // Cache the parsed group file; only re-read when the path changes.
                if (loadedPath != path)
                {
                    loadedPath = path;
                    using (var stream = new FileStream(path, FileMode.Open, FileAccess.Read))
                    {
                        // Any parse warning is escalated to an error.
                        audioGroupData = UndertaleIO.Read(stream, warning =>
                        {
                            throw new Exception(warning);
                        });
                    }
                }
                target = audioGroupData.EmbeddedAudio[sound.AudioID];
            }
            else
            {
                throw new Exception("Failed to find audio group file.");
            }
        }
        catch (Exception ex)
        {
            waveOut = null;
            MessageBox.Show("Failed to play audio!\r\n" + ex.Message, "Audio failure", MessageBoxButton.OK, MessageBoxImage.Warning);
            return;
        }
    }
    else
    {
        // No audio group: the audio is embedded directly in the sound entry.
        target = sound.AudioFile;
    }
    if (target != null)
    {
        if (target.Data.Length > 4)
        {
            try
            {
                // Sniff the container from the magic bytes: "RIFF" = WAV, "OggS" = OGG.
                if (target.Data[0] == 'R' && target.Data[1] == 'I' && target.Data[2] == 'F' && target.Data[3] == 'F')
                {
                    wavReader = new WaveFileReader(new MemoryStream(target.Data));
                    InitAudio();
                    waveOut.Init(wavReader);
                    waveOut.Play();
                }
                else if (target.Data[0] == 'O' && target.Data[1] == 'g' && target.Data[2] == 'g' && target.Data[3] == 'S')
                {
                    oggReader = new VorbisWaveReader(new MemoryStream(target.Data));
                    InitAudio();
                    waveOut.Init(oggReader);
                    waveOut.Play();
                }
                else
                {
                    MessageBox.Show("Failed to play audio!\r\nNot a WAV or OGG.", "Audio failure", MessageBoxButton.OK, MessageBoxImage.Warning);
                }
            }
            catch (Exception ex)
            {
                waveOut = null;
                MessageBox.Show("Failed to play audio!\r\n" + ex.Message, "Audio failure", MessageBoxButton.OK, MessageBoxImage.Warning);
            }
        }
    }
    else
    {
        MessageBox.Show("Failed to play audio!\r\nNo options for playback worked.", "Audio failure", MessageBoxButton.OK, MessageBoxImage.Warning);
    }
}
/// <summary> /// 开始播放音频 /// </summary> public void Play() { OptReturn optReturn; if (string.IsNullOrEmpty(mUrl)) { optReturn = new OptReturn(); optReturn.Code = Defines.RET_STRING_EMPTY; optReturn.Message = string.Format(("Url is empty")); ShowException(optReturn); return; } try { WaveStream reader; if (mUrl.StartsWith("http", StringComparison.InvariantCultureIgnoreCase)) { if (mUrl.EndsWith(".mp3", StringComparison.InvariantCultureIgnoreCase)) { reader = new Mp3NetworkStream(mUrl); } else { reader = new NetWorkWaveReader(mUrl); if (reader.WaveFormat.Encoding != WaveFormatEncoding.Pcm && reader.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat) { reader = WaveFormatConversionStream.CreatePcmStream(reader); reader = new BlockAlignReductionStream(reader); } } } else { if (mUrl.EndsWith(".mp3", StringComparison.InvariantCultureIgnoreCase)) { reader = new Mp3FileReader(mUrl); } else { reader = new WaveFileReader(mUrl); if (reader.WaveFormat.Encoding != WaveFormatEncoding.Pcm && reader.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat) { reader = WaveFormatConversionStream.CreatePcmStream(reader); reader = new BlockAlignReductionStream(reader); } } } if (mWaveStream != null) { mWaveStream.Dispose(); } mWaveStream = reader; SubPlayerEvent(Defines.EVT_MEDIAOPENED, mWaveStream.TotalTime); if (!CreateWaveImage(mWaveStream)) { return; } LbTotalTime.Content = GetTimeString(mWaveStream.TotalTime); LbCurrentTime.Content = "00:00:00"; SliderPosition.Maximum = mWaveStream.TotalTime.TotalSeconds; if (!CreateWaveStream()) { return; } if (!CreateWaveOut()) { return; } if (mWaveOut != null) { mWaveOut.Play(); mTimer.Start(); SubPlayerEvent(Defines.EVT_PLAYBACKSTARTED, mUrl); } } catch (Exception ex) { optReturn = new OptReturn(); optReturn.Code = Defines.RET_FAIL; optReturn.Message = ex.Message; ShowException(optReturn); } }
//not used here but its useful to get the length of wav file public static TimeSpan GetSoundLength(string fileName) { WaveFileReader wf = new WaveFileReader(fileName); return(wf.TotalTime); }
/// <summary> /// Checks if the cue and list chunks exist and if so, creates a cue list /// </summary> internal static CueList FromChunks(WaveFileReader reader) { CueList cueList = null; byte[] cueChunkData = null; byte[] listChunkData = null; foreach (RiffChunk chunk in reader.ExtraChunks) { if (chunk.IdentifierAsString.ToLower() == "cue ") { cueChunkData = reader.GetChunkData(chunk); } else if (chunk.IdentifierAsString.ToLower() == "list") { listChunkData = reader.GetChunkData(chunk); } } if (cueChunkData != null && listChunkData != null) { cueList = new CueList(cueChunkData, listChunkData); } return cueList; }
public Play(string file) { WaveFileReader reader = new WaveFileReader(file); sampleBuffer = new double[reader.Length]; float[] buffer; while ((buffer = reader.ReadNextSampleFrame()) != null) { for (int i = 0; i < buffer.Length; i++) { sampleBuffer[n++] = buffer[i]; } } WaveFormat format = reader.WaveFormat; ch = format.Channels; frame = sampleBuffer.Length / ch; n = 0; speed = 1; }