/// <summary>
/// Loads an audio file into the list view: decodes each channel of the first format
/// to 16-bit PCM WAV data and prepares a per-channel audio player.
/// </summary>
/// <param name="audioData">Decoded audio whose first format's channels are loaded.</param>
/// <param name="fileFormat">Source file format; added to <c>AudioFileFormats</c>.</param>
public void LoadFile(AudioData audioData, IFileFormat fileFormat)
{
    AudioFileFormats.Add(fileFormat);

    // Load channel info.
    AudioFile file = new AudioFile();
    file.Title = fileFormat.FileName;
    if (fileFormat is VGAdudioFile vgAudioFile)
    {
        file.vgAdudioFile = vgAudioFile;
    }

    // Loop through each channel and give it its own WAV stream and player.
    var format = audioData.GetAllFormats().First();
    for (int c = 0; c < format.ChannelCount; c++)
    {
        using (var memWav = new MemoryStream())
        {
            AudioChannel audioChannel = new AudioChannel();
            audioChannel.Name = $"Channel [{c}]";
            file.Channels.Add(audioChannel);

            // Decode this channel to PCM16 and render it as WAV into the stream.
            var audio = format.GetChannels(c).ToPcm16();
            var writer = new WaveWriter();
            writer.WriteToStream(audio, memWav);
            audioChannel.Data = memWav.ToArray();

            // Open the player on its own copy of the data so it owns the stream's lifetime.
            audioChannel.audioPlayer.Open(new MemoryStream(audioChannel.Data), "wav", activeDevice);

            audioChannel.audioPlayer.PlaybackStopped += (s, args) =>
            {
                // WasapiOut uses SynchronizationContext.Post to raise the event;
                // there might already be a new WasapiOut instance in the background
                // when the async Post delivers the PlaybackStopped event to us.
                if (audioChannel.audioPlayer.PlaybackState != PlaybackState.Stopped)
                {
                    // NOTE(review): intentionally empty in the original — presumably a
                    // placeholder for restart/cleanup handling; confirm before removing
                    // the subscription entirely.
                }
            };
        }
    }

    audioListView.AddObject(file);
    if (audioListView.Items.Count != 0)
    {
        audioListView.SelectedIndex = 0;
    }
}
/// <summary>
/// Converts this file's audio data to a WAV file held entirely in memory.
/// </summary>
/// <returns>The complete WAV file contents as a byte array.</returns>
public byte[] BfwavToWav()
{
    // 'using' declaration (already the style used elsewhere in this file) guarantees
    // the stream is disposed; the original never disposed it.
    using MemoryStream mem = new MemoryStream();
    WaveWriter writer = new WaveWriter();
    AudioData audioData = GetAudioData();
    writer.WriteToStream(audioData, mem);
    return mem.ToArray();
}
/// <summary>
/// Decodes NX Opus archive data and renders it as a WAV byte array.
/// </summary>
/// <param name="archData">Archive entry whose raw payload holds the Opus data.</param>
/// <param name="context">Optional conversion context; not used by this converter.</param>
/// <returns>WAV file contents encoded as 16-bit PCM (the only codec supported here).</returns>
public byte[] ToWave(IArchData archData, Dictionary<string, object> context = null)
{
    var opusReader = new NxOpusReader();
    var decoded = opusReader.Read(archData.Data.Data);

    using MemoryStream outStream = new MemoryStream();
    var waveWriter = new WaveWriter();
    // Only 16-bit PCM output is supported.
    var config = new WaveConfiguration { Codec = WaveCodec.Pcm16Bit };
    waveWriter.WriteToStream(decoded, outStream, config);
    return outStream.ToArray();
}
/// <summary>
/// Decodes a raw DSP-ADPCM test fixture from the Res directory and writes the
/// decoded audio next to it as a 16-bit PCM WAV file.
/// </summary>
public void TestDspDecode()
{
    var resourceDir = Path.Combine(Environment.CurrentDirectory, @"..\..\Res");
    var rawPath = Path.Combine(resourceDir, "[nx][adpcm]syssearc", "1.raw");

    var dspReader = new DspReader();
    var decoded = dspReader.Read(File.ReadAllBytes(rawPath));

    using MemoryStream buffer = new MemoryStream();
    var waveWriter = new WaveWriter();
    // Only 16-bit PCM output is supported.
    waveWriter.WriteToStream(decoded, buffer, new WaveConfiguration { Codec = WaveCodec.Pcm16Bit });
    File.WriteAllBytes(rawPath + ".wav", buffer.ToArray());
}
/// <summary>
/// Extracts every embedded ".bfwav" entry into the given directory as a .wav file.
/// </summary>
/// <param name="file">Target directory; created if it does not already exist.</param>
public void unpackWav(string file)
{
    Directory.CreateDirectory(file);
    for (int i = 0; i < amtaData.Count; i++)
    {
        if (audioIdntr[i] != ".bfwav")
        {
            continue;
        }

        // 'using' guarantees the file handle is released even if decoding throws;
        // the original only called Close() on the success path and leaked on exception.
        using FileStream f = File.Create(Path.Combine(file, strgList[i].name + ".wav"));
        BCFstmReader reader = new BCFstmReader();
        WaveWriter writer = new WaveWriter();
        VGAudio.Formats.AudioData convertedWav = reader.Read(audioData[i]);
        writer.WriteToStream(convertedWav, f);
    }
}
/// <summary>
/// Finds the named ".bfwav"/".bfstp"/".bfstm" entry and returns its audio decoded
/// as an in-memory WAV stream.
/// </summary>
/// <param name="file">Entry name to match against <c>strgList[i].name</c>.</param>
/// <returns>A stream positioned at the start of the WAV data, or null if no entry matches.</returns>
public Stream unpackWavStream(string file)
{
    for (int i = 0; i < amtaData.Count; i++)
    {
        bool isAudioEntry = audioIdntr[i] == ".bfwav" || audioIdntr[i] == ".bfstp" || audioIdntr[i] == ".bfstm";
        if (isAudioEntry && strgList[i].name == file)
        {
            Stream f = new MemoryStream();
            BCFstmReader reader = new BCFstmReader();
            WaveWriter writer = new WaveWriter();
            VGAudio.Formats.AudioData convertedWav = reader.Read(audioData[i]);
            writer.WriteToStream(convertedWav, f);
            // Rewind so callers can read the WAV from the beginning — WriteToStream
            // leaves the position at end-of-data (LoadFile in this project resets
            // position the same way before reusing a stream).
            f.Position = 0;
            return f;
        }
    }
    return null;
}