/// <summary>
/// Demo routine: mixes a background MP3 with a "message" MP3 (delayed by
/// 1.5 s and cut 1 s short) and writes the result to "mycomposed.wav" as
/// 16-bit PCM. NOTE(review): input paths are hard-coded to a developer box.
/// </summary>
public TestMix()
{
    var background = new Mp3FileReader("C:\\Users\\mtemkine\\Desktop\\snd\\ferriss.mp3");
    var message = new Mp3FileReader("C:\\Users\\mtemkine\\Desktop\\snd\\guitar1.mp3");

    // Mixer stops producing samples once its inputs are exhausted.
    var mixer = new WaveMixerStream32();
    mixer.AutoStop = true;

    // Delay the message by 1.5 s and trim 1 s off its tail.
    var messageOffsetted = new WaveOffsetStream(
        message,
        TimeSpan.FromSeconds(1.5),
        TimeSpan.Zero,
        message.TotalTime.Subtract(TimeSpan.FromSeconds(1)));

    var background32 = new WaveChannel32(background);
    background32.PadWithZeroes = false;
    background32.Volume = 0.9f;

    var message32 = new WaveChannel32(messageOffsetted);
    message32.PadWithZeroes = false;
    message32.Volume = 0.7f;

    // Relabel both 32-bit float channels as raw 8 kHz/16-bit mono streams.
    // NOTE(review): RawSourceWaveStream does not convert audio, it only
    // reinterprets the bytes — confirm this relabelling is intentional.
    var s1 = new RawSourceWaveStream(background32, new WaveFormat(8000, 16, 1));
    var s2 = new RawSourceWaveStream(message32, new WaveFormat(8000, 16, 1));

    mixer.AddInputStream(s1);
    mixer.AddInputStream(s2);

    WaveFileWriter.CreateWaveFile("mycomposed.wav", new Wave32To16Stream(mixer));
}
/// <summary>
/// Plays a headerless mu-law encoded file, blocking until playback ends or
/// the user presses a key (any key with KeyChar > 1 stops playback).
/// </summary>
/// <param name="filename">Path to the raw mu-law audio file.</param>
/// <param name="frequency">Base frequency; the mu-law sample rate used is frequency * 2.</param>
public static void PlayFromFile(string filename, int frequency)
{
    using (FileStream stream = new FileStream(filename, FileMode.Open))
    {
        var waveFormat = WaveFormat.CreateMuLawFormat(frequency * 2, 1);
        var reader = new NAudio.Wave.RawSourceWaveStream(stream, waveFormat);
        using (WaveStream convertedStream = WaveFormatConversionStream.CreatePcmStream(reader))
        using (WaveOutEvent player = new WaveOutEvent()) // BUGFIX: player was never disposed
        {
            convertedStream.Seek(0, 0);
            WaveChannel32 volumeStream = new WaveChannel32(convertedStream);
            player.Init(volumeStream);
            player.Play();
            while (player.PlaybackState == PlaybackState.Playing)
            {
                System.Threading.Thread.Sleep(100);
                var input = Console.ReadKey();
                // BUGFIX: the original had a stray ';' after this 'if', so the
                // following block executed unconditionally and Stop() fired on
                // every key press regardless of the KeyChar test.
                if (input.KeyChar > 1)
                {
                    player.Stop();
                }
            }
        }
    }
}
/// <summary>
/// Starts playback of a raw audio file (interpreted as 44.1 kHz stereo PCM)
/// on the default output device. Returns immediately; playback continues
/// asynchronously on the waveOut field.
/// </summary>
/// <param name="audioFilePath">Path to the raw PCM audio file.</param>
public void Play(string audioFilePath)
{
    try
    {
        // Load the entire file into memory; the data is assumed to be
        // headerless 44.1 kHz / 2-channel PCM — TODO confirm with callers.
        MemoryStream audioData = new MemoryStream(File.ReadAllBytes(audioFilePath));
        WaveFormat waveFormat = new WaveFormat(44100, 2);
        RawSourceWaveStream waveStream = new RawSourceWaveStream(audioData, waveFormat);
        waveOut = new WaveOut();
        waveOut.DeviceNumber = AudioController.getInstance().GetDefaultOutputDeviceNumber();
        waveOut.Init(waveStream);
        waveOut.Play();
    }
    // FIX: dropped the unused exception locals (CS0168 warnings).
    catch (FileNotFoundException)
    {
        // TODO: surface the missing-file error to the caller/UI.
    }
    catch (IOException)
    {
        // TODO: surface the I/O error to the caller/UI.
    }
}
/// <summary>
/// Reads every wave entry from an SSCF container and exposes each one as a
/// WaveStream. Entries whose format cannot be mapped are left null.
/// </summary>
public WaveStream[] Read()
{
    // Skip past the section header, then read the SSCF header proper.
    SectionHeader sectionHeader = _input.ReadContent<SectionHeader>();
    SscfHeader sscfHeader = _input.ReadContent<SscfHeader>();
    var reader = new BinaryReader(_input);

    // The wave table is a flat array of int32 file offsets.
    _input.SetPosition(sscfHeader.WavesOffset);
    var waveOffsets = new int[sscfHeader.NumWaves];
    for (int index = 0; index < waveOffsets.Length; index++)
    {
        waveOffsets[index] = reader.ReadInt32();
    }

    var streams = new WaveStream[sscfHeader.NumWaves];
    for (int index = 0; index < waveOffsets.Length; index++)
    {
        _input.SetPosition(waveOffsets[index]);
        SscfWaveHeader waveHeader = _input.ReadContent<SscfWaveHeader>();

        if (waveHeader.Format == SscfWaveFormat.Vorbis)
        {
            // Vorbis payloads go through their own decoder.
            _input.SetPosition(waveHeader.DataOffset);
            byte[] payload = _input.EnsureRead(waveHeader.DataLength);
            var payloadStream = new MemoryStream(payload, 0, payload.Length, false);
            streams[index] = new VorbisWaveReader(payloadStream);
        }
        else
        {
            WaveFormat format = ReadWaveFormat(waveHeader);
            if (format != null)
            {
                // Known PCM-style format: hand the raw bytes to NAudio.
                _input.SetPosition(waveHeader.DataOffset);
                byte[] payload = _input.EnsureRead(waveHeader.DataLength);
                var payloadStream = new MemoryStream(payload, 0, payload.Length, false);
                streams[index] = new RawSourceWaveStream(payloadStream, format);
            }
            // Unknown format: slot stays null, matching previous behaviour.
        }
    }
    return streams;
}
/// <summary>
/// Mixes the audio payloads of two RTP packets into a single PCM buffer.
/// Both payloads are decoded from this.codec to pcmFormat16, mixed as 32-bit
/// float, then converted down via pcmFormat8.
/// </summary>
/// <param name="item1">First RTP packet (header + payload).</param>
/// <param name="item2">Second RTP packet (header + payload).</param>
/// <returns>Mixed PCM bytes, or null when either packet is missing/empty.</returns>
private byte[] RealMix(ReceivedRtp item1, ReceivedRtp item2)
{
    if (item1 == null || item2 == null)
        return null;
    if (item1.size == 0 || item2.size == 0)
        return null;

    // Strip the RTP header from both packets.
    byte[] wavSrc1 = new byte[item1.size - headersize];
    byte[] wavSrc2 = new byte[item2.size - headersize];
    Array.Copy(item1.buff, headersize, wavSrc1, 0, (item1.size - headersize));
    Array.Copy(item2.buff, headersize, wavSrc2, 0, (item2.size - headersize));

    // BUGFIX: the original reassigned the stream variables for the second
    // input, so the first decode chain (MemoryStream/RawSourceWaveStream/
    // conversion/channel) was never closed. Track every chain and dispose
    // all of them in a finally block.
    byte[][] sources = new byte[][] { wavSrc1, wavSrc2 };
    WaveChannel32[] chains = new WaveChannel32[sources.Length];
    WaveMixerStream32 mixer = new WaveMixerStream32();
    // mixer.AutoStop = true;
    try
    {
        for (int i = 0; i < sources.Length; i++)
        {
            // Disposing the WaveChannel32 later disposes the wrapped streams.
            var rawStream = new RawSourceWaveStream(new MemoryStream(sources[i]), this.codec);
            var pcmStream = new WaveFormatConversionStream(pcmFormat16, rawStream);
            chains[i] = new WaveChannel32(pcmStream);
            mixer.AddInputStream(chains[i]);
        }

        mixer.Position = 0;
        using (var to16 = new Wave32To16Stream(mixer))
        using (var convStm = new WaveFormatConversionStream(pcmFormat8, to16))
        {
            byte[] mixedbytes = new byte[(int)convStm.Length];
            int chk = convStm.Read(mixedbytes, 0, (int)convStm.Length);
            return mixedbytes;
        }
    }
    finally
    {
        foreach (var chain in chains)
        {
            if (chain != null)
                chain.Dispose();
        }
        mixer.Dispose();
    }
}
/// <summary>
/// Renders this sound's buffer (Data/Format, assumed stereo) with panning and
/// volume applied, returning a new byte array of the same length. On any
/// failure the unmodified Data buffer is returned.
/// </summary>
/// <param name="masterVolume">Linear master gain multiplied onto both channels.</param>
/// <returns>Processed interleaved sample bytes, or Data on error.</returns>
public byte[] Render(float masterVolume)
{
    // due to the way NAudio works, the source files must be provided twice.
    // this is because all channels are kept in sync by the mux, and the unused
    // channel data is discarded. If we tried to use the same source for both
    // muxes, it would try to read 2x the data present in the buffer!
    // If only we had a way to create separate WaveProviders from within the
    // MultiplexingWaveProvider..
    try
    {
        // IgnoreDisposeStream keeps the inner MemoryStreams alive until the
        // outer using blocks release them.
        using (MemoryStream sourceLeft = new MemoryStream(Data), sourceRight = new MemoryStream(Data))
        {
            using (RawSourceWaveStream waveLeft = new RawSourceWaveStream(new IgnoreDisposeStream(sourceLeft), Format),
                   waveRight = new RawSourceWaveStream(new IgnoreDisposeStream(sourceRight), Format))
            {
                // step 1: separate the stereo stream — each demux extracts one
                // channel (input 0 = left, input 1 = right) into a mono provider.
                MultiplexingWaveProvider demuxLeft = new MultiplexingWaveProvider(new IWaveProvider[] { waveLeft }, 1);
                MultiplexingWaveProvider demuxRight = new MultiplexingWaveProvider(new IWaveProvider[] { waveRight }, 1);
                demuxLeft.ConnectInputToOutput(0, 0);
                demuxRight.ConnectInputToOutput(1, 0);

                // step 2: adjust the volume of a stereo stream
                VolumeWaveProvider16 volLeft = new VolumeWaveProvider16(demuxLeft);
                VolumeWaveProvider16 volRight = new VolumeWaveProvider16(demuxRight);

                // note: use logarithmic scale (sqrt = equal-power pan law)
#if (true)
                // log scale is applied to each operation
                float volumeValueLeft = (float)Math.Pow(1.0f - Panning, 0.5f);
                float volumeValueRight = (float)Math.Pow(Panning, 0.5f);
                // ensure 1:1 conversion — center pan (0.5) maps to unity gain
                volumeValueLeft /= (float)Math.Sqrt(0.5);
                volumeValueRight /= (float)Math.Sqrt(0.5);
                // apply volume
                volumeValueLeft *= (float)Math.Pow(Volume, 0.5f);
                volumeValueRight *= (float)Math.Pow(Volume, 0.5f);
                // clamp to the [0, 1] range VolumeWaveProvider16 expects
                volumeValueLeft = Math.Min(Math.Max(volumeValueLeft, 0.0f), 1.0f);
                volumeValueRight = Math.Min(Math.Max(volumeValueRight, 0.0f), 1.0f);
#else
                // log scale is applied to the result of the operations
                float volumeValueLeft = (float)Math.Pow(1.0f - Panning, 0.5f);
                float volumeValueRight = (float)Math.Pow(Panning, 0.5f);
                // ensure 1:1 conversion
                volumeValueLeft /= (float)Math.Sqrt(0.5);
                volumeValueRight /= (float)Math.Sqrt(0.5);
                // apply volume
                volumeValueLeft *= Volume;
                volumeValueRight *= Volume;
                // apply log scale
                volumeValueLeft = (float)Math.Pow(volumeValueLeft, 0.5f);
                volumeValueRight = (float)Math.Pow(volumeValueRight, 0.5f);
                // clamp
                volumeValueLeft = Math.Min(Math.Max(volumeValueLeft, 0.0f), 1.0f);
                volumeValueRight = Math.Min(Math.Max(volumeValueRight, 0.0f), 1.0f);
#endif
                // use linear scale for master volume
                volLeft.Volume = volumeValueLeft * masterVolume;
                volRight.Volume = volumeValueRight * masterVolume;

                // step 3: combine them again into one stereo provider
                IWaveProvider[] tracks = new IWaveProvider[] { volLeft, volRight };
                MultiplexingWaveProvider mux = new MultiplexingWaveProvider(tracks, 2);

                // step 4: export them to a byte array (same size as the input)
                byte[] finalData = new byte[Data.Length];
                mux.Read(finalData, 0, finalData.Length);

                // cleanup (providers are not IDisposable; nulling is cosmetic)
                demuxLeft = null;
                demuxRight = null;
                volLeft = null;
                volRight = null;
                mux = null;

                return finalData;
            }
        }
    }
    catch
    {
        // Best-effort: on any processing failure fall back to the raw buffer.
        return Data;
    }
}
/// <summary>
/// Decodes a sound buffer to PCM, forces it to two channels via a mux, and
/// stores the result in Data/Format. On any conversion failure the original
/// buffer and format are kept unchanged (best-effort fallback).
/// </summary>
/// <param name="data">Encoded audio bytes described by <paramref name="sourceFormat"/>.</param>
/// <param name="sourceFormat">Wave format of the incoming buffer.</param>
public void SetSound(byte[] data, WaveFormat sourceFormat)
{
    MemoryStream dataStream = new MemoryStream(data);
    RawSourceWaveStream wavStream = new RawSourceWaveStream(dataStream, sourceFormat);
    WaveStream wavConvertStream = null;
    try
    {
        wavConvertStream = WaveFormatConversionStream.CreatePcmStream(wavStream);

        // using a mux, we force all sounds to be 2 channels
        MultiplexingWaveProvider sourceProvider = new MultiplexingWaveProvider(new IWaveProvider[] { wavConvertStream }, 2);

        // stereo output is (2 / source channels) times the converted length
        int bytesToRead = (int)((wavConvertStream.Length * 2) / wavConvertStream.WaveFormat.Channels);
        byte[] rawWaveData = new byte[bytesToRead];
        int bytesRead = sourceProvider.Read(rawWaveData, 0, bytesToRead);

        // BUGFIX: Read() may return fewer bytes than requested; trim so Data
        // does not carry a tail of uninitialized (silent) padding.
        if (bytesRead < bytesToRead)
        {
            Array.Resize(ref rawWaveData, bytesRead);
        }

        Data = rawWaveData;
        Format = sourceProvider.WaveFormat;
    }
    catch
    {
        // Conversion failed (e.g. no ACM codec installed): keep the raw input.
        Data = data;
        Format = sourceFormat;
    }
    finally
    {
        if (wavConvertStream != null)
            wavConvertStream.Dispose();
        wavStream.Dispose();
        dataStream.Dispose();
    }
}
/// <summary>
/// Converts raw PCM from one sample rate / bit depth / channel layout to another.
/// </summary>
/// <param name="pcm">Source PCM bytes.</param>
/// <param name="fromRate">Source sample rate in Hz.</param>
/// <param name="fromDepth">Source bits per sample.</param>
/// <param name="fromChannels">Source channel count.</param>
/// <param name="toRate">Target sample rate in Hz.</param>
/// <param name="toDepth">Target bits per sample.</param>
/// <param name="toChannels">Target channel count.</param>
/// <returns>The converted PCM bytes.</returns>
public static byte[] Resample(byte[] pcm, int fromRate, short fromDepth, short fromChannels, int toRate, short toDepth, short toChannels)
{
    using (MemoryStream mem = new MemoryStream(pcm))
    {
        using (RawSourceWaveStream stream = new RawSourceWaveStream(mem, new WaveFormat(fromRate, fromDepth, fromChannels)))
        {
            // BUGFIX: the original built the target format from toRate plus
            // the *input* channel count and an implicit 16-bit depth, silently
            // ignoring the toDepth and toChannels parameters.
            var outFormat = new WaveFormat(toRate, toDepth, toChannels);
            using (var resampler = new WaveFormatConversionStream(outFormat, stream))
            {
                // BUGFIX: size the output by the ratio of byte rates so depth
                // and channel changes are accounted for, not just sample rate.
                long inputBytesPerSecond = (long)fromRate * fromDepth / 8 * fromChannels;
                int resampledLength = (int)((long)pcm.Length * outFormat.AverageBytesPerSecond / inputBytesPerSecond);

                byte[] converted = new byte[resampledLength];
                // BUGFIX: honor the number of bytes actually produced instead
                // of assuming Read() filled the whole buffer.
                int produced = resampler.Read(converted, 0, resampledLength);
                if (produced < resampledLength)
                {
                    Array.Resize(ref converted, produced);
                }
                return converted;
            }
        }
    }
}
/// <summary>
/// Appends a raw PCM buffer to the wave file at fn, creating the file when it
/// does not exist. The existing file's bytes are read back in full and the
/// new buffer concatenated before the file is rewritten.
/// </summary>
private void WaveFileWriting(byte[] buff, string fn)
{
    if (buff.Length < 1)
        return;

    byte[] payload;
    if (File.Exists(fn))
    {
        // Pull the current file contents back into memory...
        var existingReader = new WaveFileReader(fn);
        byte[] existing = new byte[(int)existingReader.Length];
        int bytesRead = existingReader.Read(existing, 0, existing.Length);
        existingReader.Close();

        // ...and append the new samples after them.
        payload = new byte[existing.Length + buff.Length];
        Buffer.BlockCopy(existing, 0, payload, 0, existing.Length);
        Buffer.BlockCopy(buff, 0, payload, existing.Length, buff.Length);
    }
    else
    {
        payload = buff;
    }

    // Rewrap the combined bytes as a PCM stream and rewrite the file.
    using (MemoryStream memoryStream = new MemoryStream(payload))
    using (BufferedStream bufferedStream = new BufferedStream(memoryStream, 2048))
    using (RawSourceWaveStream rawStream = new RawSourceWaveStream(bufferedStream, pcmFormat))
    {
        WaveFileWriter.CreateWaveFile(fn, rawStream);
    }
}
/// <summary>
/// Save-button handler: picks a unique default file name based on the source
/// file and the swap options, shows the save dialog, then writes the result
/// either as raw bytes (WAV filter) or as an MP3 via Media Foundation.
/// </summary>
private void saveButton_Click(object sender, EventArgs e)
{
    reworkFile();
    string fileName = null;

    // Preselect the dialog filter from the source file's extension.
    if (Path.GetExtension(textBox1.Text).ToLower() == ".wav" || Path.GetExtension(textBox1.Text).ToLower() == ".flac")
    {
        saveFileDialog1.FilterIndex = 1;
    }
    else if (Path.GetExtension(textBox1.Text).ToLower() == ".mp3")
    {
        saveFileDialog1.FilterIndex = 2;
    }

    string extension = Path.GetExtension(textBox1.Text);
    saveFileDialog1.InitialDirectory = Path.GetDirectoryName(textBox1.Text);
    // FLAC sources are saved as WAV.
    if (extension == ".flac")
    {
        extension = ".wav";
    }

    if (checkBox1.Checked)
    {
        // Find the first unused "_swapped_…_<i>" name.
        // NOTE(review): if neither radio button is checked this do/while never
        // breaks and spins forever — confirm the UI guarantees one is checked.
        int i = 0;
        do
        {
            if (radioButton1.Checked)
            {
                if (!File.Exists(Path.GetDirectoryName(textBox1.Text) + "\\" + Path.GetFileNameWithoutExtension(textBox1.Text) + "_swapped_1_3_" + i + extension))
                {
                    fileName = Path.GetFileNameWithoutExtension(textBox1.Text) + "_swapped_1_3_" + i + extension;
                    break;
                }
                else
                {
                    i++;
                }
            }
            else if (radioButton2.Checked)
            {
                if (!File.Exists(Path.GetDirectoryName(textBox1.Text) + "\\" + Path.GetFileNameWithoutExtension(textBox1.Text) + "_swapped_2_4_" + i + extension))
                {
                    fileName = Path.GetFileNameWithoutExtension(textBox1.Text) + "_swapped_2_4_" + i + extension;
                    break;
                }
                else
                {
                    i++;
                }
            }
        } while (true);
        //if (radioButton2.Checked) fileName = Path.GetFileNameWithoutExtension(textBox1.Text) + "_swapped_2_4" + Path.GetExtension(textBox1.Text);
        //else if (radioButton1.Checked) fileName = Path.GetFileNameWithoutExtension(textBox1.Text) + "_swapped_1_3" + Path.GetExtension(textBox1.Text);
    }
    else
    {
        // Find the first unused "_<i>" name.
        int i = 0;
        do
        {
            if (!File.Exists(Path.GetDirectoryName(textBox1.Text) + "\\" + Path.GetFileNameWithoutExtension(textBox1.Text) + "_" + i + extension))
            {
                fileName = Path.GetFileNameWithoutExtension(textBox1.Text) + "_" + i + extension;
                break;
            }
            else
            {
                i++;
            }
        } while (true);
    }

    saveFileDialog1.FileName = fileName;
    if (saveFileDialog1.ShowDialog() == DialogResult.OK)
    {
        reworkFile();
        do
        {
            try
            {
                if (saveFileDialog1.FilterIndex == 1)
                {
                    // WAV: write the in-memory buffer directly.
                    if (!checkBox1.Checked)
                    {
                        File.WriteAllBytes(saveFileDialog1.FileName, originalFile);
                    }
                    else
                    {
                        File.WriteAllBytes(saveFileDialog1.FileName, swappedFile);
                    }
                }
                else if (saveFileDialog1.FilterIndex == 2)
                {
                    // MP3: Media Foundation encoder only supports a narrow set
                    // of rates/depths/channel counts — validate first.
                    if (Convert.ToInt32(freq) != 44100 && Convert.ToInt32(freq) != 48000)
                    {
                        MessageBox.Show("Saving into MP3 possible only with 44100 and 48000 Hz! Save it into WAV and convert it with another programm!", "Error!", MessageBoxButtons.OK, MessageBoxIcon.Error);
                        return;
                    }
                    if (bitDepth != 16)
                    {
                        MessageBox.Show("Saving into MP3 possible only with 16 bits! Save it into WAV and convert it with another programm!", "Error!", MessageBoxButtons.OK, MessageBoxIcon.Error);
                        return;
                    }
                    if (channels > 2)
                    {
                        MessageBox.Show("Saving into MP3 possible only with 1 and 2 channels! Save it into WAV and convert it with another programm!", "Error!", MessageBoxButtons.OK, MessageBoxIcon.Error);
                        return;
                    }

                    // Let the user pick a bitrate; cancel aborts the save.
                    bitrate bit = new bitrate();
                    bit.ShowDialog();
                    if (bit.DialogResult == DialogResult.Cancel)
                    {
                        return;
                    }

                    NAudio.MediaFoundation.MediaFoundationInterop.MFStartup(0);
                    WaveFormat wav = new WaveFormat(Convert.ToInt32(freq), channels);
                    // Skip the 44-byte RIFF header plus data offset when
                    // wrapping the buffer as a raw stream for the encoder.
                    if (!checkBox1.Checked)
                    {
                        using (var writer = new NAudio.Wave.RawSourceWaveStream(originalFile, 44 + dataStartOffset, dataSize, wav))
                        {
                            NAudio.Wave.MediaFoundationEncoder.EncodeToMp3(writer, saveFileDialog1.FileName, mp3Bitrate);
                        }
                    }
                    else
                    {
                        using (var writer = new NAudio.Wave.RawSourceWaveStream(swappedFile, 44 + dataStartOffset, dataSize, wav))
                        {
                            NAudio.Wave.MediaFoundationEncoder.EncodeToMp3(writer, saveFileDialog1.FileName, mp3Bitrate);
                        }
                    }
                }
                break;
            }
            catch
            {
                MessageBox.Show("File is busy, or there's an error! Try again or choose another name!\nIf you saving to MP3 and using Windows 7, choose WAV!", "Error!", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return;
            }
        } while (true);
    }
}
/// <summary>
/// Plays the compressed sample buffer once, blocking until the output device
/// reports that playback has finished.
/// </summary>
public void PlayOnce()
{
    Recalculate();

    // TODO: Look into doing this properly
    // (http://mark-dot-net.blogspot.de/2014/02/fire-and-forget-audio-playback-with.html)
    // Maybe do this with async?
    var monoFormat = new WaveFormat(SampleRate, 1);
    using (var device = new WaveOutEvent())
    using (RawSourceWaveStream sampleStream = new RawSourceWaveStream(new MemoryStream(compressed), monoFormat))
    {
        device.Init(sampleStream);
        device.Play();

        // Poll until the device goes idle.
        while (device.PlaybackState == PlaybackState.Playing)
        {
            Thread.Sleep(50);
        }
    }
}
/// <summary>
/// Runs a wave file offline through a VST effect plugin and writes the
/// processed audio to a new wave file. The loaded plugin is cached across
/// calls (batch processing) and only reloaded when pluginPath changes.
/// </summary>
/// <param name="waveInputFilePath">Input wave file to process.</param>
/// <param name="waveOutputFilePath">Destination wave file.</param>
/// <param name="pluginPath">Path to the VST plugin dll.</param>
/// <param name="fxpFilePath">Optional preset (fxp/fxb) file to load.</param>
/// <param name="volume">Input gain applied when feeding the plugin.</param>
/// <returns>true on success; false when the plugin cannot be used.</returns>
public bool ProcessOffline(String waveInputFilePath, String waveOutputFilePath, String pluginPath, String fxpFilePath = null, float volume = 1.0f)
{
    // BUGFIX: the original opened a WaveFileReader and never disposed it,
    // leaking a file handle per call. Only the format is needed, so read it
    // and close the reader immediately.
    int inputSampleRate;
    int inputChannels;
    using (WaveFileReader wavFileReader = new WaveFileReader(waveInputFilePath))
    {
        inputSampleRate = wavFileReader.WaveFormat.SampleRate;
        inputChannels = wavFileReader.WaveFormat.Channels;
    }

    // reuse if batch processing
    bool doUpdateVstPlugin = false;
    if (_pluginPath != null)
    {
        if (!_pluginPath.Equals(pluginPath))
        {
            // plugin has changed!
            doUpdateVstPlugin = true;
        }
    }
    else
    {
        _pluginPath = pluginPath;
        doUpdateVstPlugin = true;
    }

    if (doUpdateVstPlugin)
    {
        HostCommandStub hcs = new HostCommandStub();
        hcs.Directory = System.IO.Path.GetDirectoryName(pluginPath);
        vst = new VST();
        try
        {
            vst.PluginContext = VstPluginContext.Create(pluginPath, hcs);
            if (vst.PluginContext == null)
            {
                Console.Out.WriteLine("Could not open up the plugin specified by {0}!", pluginPath);
                return false;
            }

            // plugin does not support processing audio
            if ((vst.PluginContext.PluginInfo.Flags & VstPluginFlags.CanReplacing) == 0)
            {
                Console.Out.WriteLine("This plugin does not process any audio.");
                return false;
            }

            // check if the plugin supports offline proccesing
            if (vst.PluginContext.PluginCommandStub.CanDo(VstCanDoHelper.ToString(VstPluginCanDo.Offline)) == VstCanDoResult.No)
            {
                Console.Out.WriteLine("This plugin does not support offline processing.");
                Console.Out.WriteLine("Try use realtime (-play) instead!");
                return false;
            }

            // add custom data to the context
            vst.PluginContext.Set("PluginPath", pluginPath);
            vst.PluginContext.Set("HostCmdStub", hcs);

            // actually open the plugin itself
            vst.PluginContext.PluginCommandStub.Open();

            Console.Out.WriteLine("Enabling the audio output on the VST!");
            vst.PluginContext.PluginCommandStub.MainsChanged(true);

            // setup the VSTStream with the input file's format
            vstStream = new VSTStream();
            vstStream.ProcessCalled += new EventHandler<VSTStreamEventArgs>(vst_ProcessCalled);
            vstStream.PlayingStarted += new EventHandler(vst_PlayingStarted);
            vstStream.PlayingStopped += new EventHandler(vst_PlayingStopped);
            vstStream.pluginContext = vst.PluginContext;
            vstStream.SetWaveFormat(inputSampleRate, inputChannels);
        }
        catch (Exception ex)
        {
            Console.Out.WriteLine("Could not load VST! ({0})", ex.Message);
            return false;
        }
    }

    // Preset is optional; File.Exists(null) is simply false.
    if (File.Exists(fxpFilePath))
    {
        vst.LoadFXP(fxpFilePath);
    }
    else
    {
        Console.Out.WriteLine("Could not find preset file (fxp|fxb) ({0})", fxpFilePath);
    }

    // each float is 4 bytes
    byte[] buffer = new byte[512 * 4];
    using (MemoryStream ms = new MemoryStream())
    {
        vstStream.SetInputWave(waveInputFilePath, volume);
        vstStream.DoProcess = true;

        // wait a little while
        Thread.Sleep(1000);

        // keep on reading until it stops playing.
        while (!stoppedPlaying)
        {
            int read = vstStream.Read(buffer, 0, buffer.Length);
            if (read <= 0)
            {
                break;
            }
            ms.Write(buffer, 0, read);
        }

        // save the captured output as a wave file
        using (WaveStream ws = new RawSourceWaveStream(ms, vstStream.WaveFormat))
        {
            ws.Position = 0;
            WaveFileWriter.CreateWaveFile(waveOutputFilePath, ws);
        }
    }

    // reset the input wave file
    vstStream.DoProcess = false;
    vstStream.DisposeInputWave();

    // reset if calling this method multiple times
    stoppedPlaying = false;

    return true;
}
/// <summary>
/// Toggle handler: stops playback when a player is active, otherwise builds
/// an EAS message from the form fields, encodes it, and starts playing it.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    // Acts as a stop button while something is already playing.
    if (player != null)
    {
        player.Stop();
        return;
    }

    if (!ValidateInput())
    {
        return;
    }

    // Snapshot the form state used to build the message.
    _start = dateStart.Value.ToUniversalTime();
    _senderId = txtSender.Text;
    _length = ZeroPad(comboLengthHour.Text, 2) + ZeroPad(comboLengthMinutes.Text, 2);

    var easMessage = new EASMessage(_selectedOriginator.Id, _selectedAlertCode.Id, Regions, _length, _start, _senderId);
    var encodedStream = EASEncoder.EASEncoder.GetMemoryStreamFromNewMessage(
        easMessage,
        chkEbsTones.Checked,
        chkNwsTone.Checked,
        formatAnnouncement(txtAnnouncement.Text));

    btnGeneratePlay.Text = "Stop Playing";

    // Wrap the encoded samples (NAudio's default WaveFormat) for output.
    WaveStream rawStream = new RawSourceWaveStream(encodedStream, new WaveFormat());
    var channel = new WaveChannel32(rawStream);
    channel.PadWithZeroes = false;

    player = new WaveOutEvent();
    player.PlaybackStopped += (o, args) =>
    {
        // Tear down and restore the button label when playback ends.
        player.Dispose();
        player = null;
        btnGeneratePlay.Text = "Generate && Play";
    };
    player.Init(channel);
    player.Play();
}
/// <summary>
/// Pairs up received voice packets from two extensions by sequence number,
/// mixes each matching pair (mu-law -> PCM -> 32-bit mix -> 16-bit PCM), and
/// appends the mixed audio to a per-call wave file on e:\.
/// NOTE(review): the unmatched-sequence branches are stubs — packets are
/// dropped/duplicated there without any audio being written.
/// </summary>
private void ProcessMixingFinal(RcvData data, int dataSize)
{
    string processingFn = string.Format("e:\\{0}_{1}_{2}.wav", data.seqnum, data.extension, data.peernumber);

    // Collect this call's packets per extension and sort them.
    List<RecInfos> ls0 = lExtension0.FindAll(
        delegate(RecInfos list) { return list.rcvData.Equals(data) && list.isExtension == 0; });
    List<RecInfos> ls1 = lExtension1.FindAll(
        delegate(RecInfos list) { return list.rcvData.Equals(data) && list.isExtension == 1; });
    IsExtensionComparer isExtensionCompare = new IsExtensionComparer();
    ls0.Sort(isExtensionCompare);
    ls1.Sort(isExtensionCompare);

    // Only iterate over as many packets as the shorter side has.
    int count = 0;
    int count0 = ls0.Count();
    int count1 = ls1.Count();
    if (count0 - count1 < 0) count = count0;
    else count = count1;

    for (int i = 0; i < count; i++)
    {
        if (ls0[i].seq == ls1[i].seq)
        {
            // Mixing: matching sequence numbers on both sides.
            // 160 bytes = one 20 ms mu-law frame; 12 = RTP header size.
            byte[] wavSrc0 = new byte[160];
            byte[] wavSrc1 = new byte[160];
            Array.Copy(ls0[i].voice, 12, wavSrc0, 0, wavSrc0.Length);
            Array.Copy(ls1[i].voice, 12, wavSrc1, 0, wavSrc1.Length);

            WaveMixerStream32 mixer = new WaveMixerStream32();
            //mixer.AutoStop = true;
            WaveChannel32 channelStm = null;
            // Build one decode chain per side and feed both into the mixer.
            for (int j = 0; j < 2; j++)
            {
                MemoryStream memStm = null;
                BufferedStream bufStm = null;
                RawSourceWaveStream rawSrcStm = null;
                WaveFormatConversionStream conversionStm = null;
                if (j == 0)
                    memStm = new MemoryStream(wavSrc0);
                else
                    memStm = new MemoryStream(wavSrc1);
                bufStm = new BufferedStream(memStm);
                rawSrcStm = new RawSourceWaveStream(bufStm, mulawFormat);
                conversionStm = new WaveFormatConversionStream(pcmFormat, rawSrcStm);
                channelStm = new WaveChannel32(conversionStm);
                mixer.AddInputStream(channelStm);
            }
            mixer.Position = 0;

            if (File.Exists(processingFn))
            {
                // Append: read the existing file back, convert the freshly
                // mixed frame to PCM, concatenate, and rewrite the file.
                var wavefilestream = new WaveFileReader(processingFn);
                byte[] wavefilebyte = new byte[(int)wavefilestream.Length];
                int chk0 = wavefilestream.Read(wavefilebyte, 0, wavefilebyte.Length);
                Wave32To16Stream to16 = new Wave32To16Stream(mixer);
                var conversionStm = new WaveFormatConversionStream(pcmFormat, to16);
                byte[] tobyte = new byte[(int)conversionStm.Length];
                int chk1 = conversionStm.Read(tobyte, 0, (int)conversionStm.Length);
                byte[] desByte = new byte[wavefilebyte.Length + tobyte.Length];
                conversionStm.Close();
                wavefilestream.Close();
                Buffer.BlockCopy(wavefilebyte, 0, desByte, 0, wavefilebyte.Length);
                Buffer.BlockCopy(tobyte, 0, desByte, wavefilebyte.Length, tobyte.Length);
                using (MemoryStream memStm = new MemoryStream(desByte))
                using (BufferedStream buffStm = new BufferedStream(memStm))
                using (RawSourceWaveStream rawSrcStm = new RawSourceWaveStream(buffStm, pcmFormat))
                {
                    WaveFileWriter.CreateWaveFile(processingFn, rawSrcStm);
                }
            }
            else
            {
                // First frame: create the file directly from the mixed stream.
                var mixedStm = new Wave32To16Stream(mixer);
                var convStm = new WaveFormatConversionStream(pcmFormat, mixedStm);
                WaveFileWriter.CreateWaveFile(processingFn, convStm);
                convStm.Close();
                mixedStm.Close();
            }
            mixer.Close();

            // Remove the consumed packets from both pending lists.
            lExtension0.Remove(ls0[i]);
            lExtension1.Remove(ls1[i]);
        }
        else if (ls0[i].seq - ls1[i].seq < 0)
        {
            // ls0 side only (missing peer packet): append ls0's bytes to the
            // original, convert to a wave stream, and save — NOT IMPLEMENTED.
            if (File.Exists(processingFn))
            {
                //wavefilestream = new WaveFileReader(processingFn);
            }
            else
            {
            }
            // Drop the unmatched ls0 packet and re-test ls1[i] next iteration.
            lExtension0.Remove(ls0[i]);
            ls1.Insert(i + 1, ls1[i]);
        }
        else if (ls0[i].seq - ls1[i].seq > 0)
        {
            // ls1 side only (missing peer packet): append ls1's bytes to the
            // original, convert to a wave stream, and save — NOT IMPLEMENTED.
            if (File.Exists(processingFn))
            {
                //wavefilestream = new WaveFileReader(processingFn);
            }
            else
            {
            }
            // Drop the unmatched ls1 packet and re-test ls0[i] next iteration.
            lExtension1.Remove(ls1[i]);
            ls0.Insert(i + 1, ls0[i]);
        }
    }
}
/// <summary>
/// Sets the track to play: looks the id up in the library, opens an
/// appropriate decoder for it, and initializes the output device. Falls
/// through to the next-track callback when the track is unknown or the
/// device rejects the stream.
/// </summary>
/// <param name="trackId">The track identifier.</param>
public void SetTrack(string trackId)
{
    CurrentSong = trackId;

    LibraryTrack track = library.GetTrack(trackId);
    if (track == null)
    {
        // Unknown id: skip ahead.
        nextCallback();
        return;
    }

    WaveStream stream;
    if (Path.GetExtension(track.Path) == ".flac")
    {
        // FLAC goes through FlacBox and is exposed as a raw wave stream.
        // NOTE(review): assumes the decoded output matches NAudio's default
        // WaveFormat — confirm for non-CD-quality FLACs.
        var flacStream = new FlacBox.WaveOverFlacStream(
            new FlacBox.FlacReader(File.OpenRead(track.Path), false));
        stream = new RawSourceWaveStream(flacStream, new WaveFormat());
    }
    else
    {
        // Everything else is handled by Media Foundation.
        stream = new MediaFoundationReader(track.Path);
    }

    try
    {
        waveOutDevice.Init(stream);
    }
    catch
    {
        // Device could not take this stream: move on to the next track.
        nextCallback();
    }
}
/// <summary>
/// Resamples the mono 'preprocessed' buffer from SourceSampleRate to
/// TargetSampleRate, storing the converted bytes in 'resampled'.
/// </summary>
private void Resample()
{
    // If this causes problems, check at
    // http://mark-dot-net.blogspot.de/2014/05/how-to-resample-audio-with-naudio.html
    using (RawSourceWaveStream source = new RawSourceWaveStream(new MemoryStream(preprocessed), new WaveFormat(SourceSampleRate, 1)))
    using (WaveFormatConversionStream resampler = new WaveFormatConversionStream(new WaveFormat(TargetSampleRate, 1), source))
    {
        byte[] buffer = new byte[resampler.Length];

        // BUGFIX: a single Read() is not guaranteed to fill the buffer, and
        // Length is only an estimate for ACM conversions. Drain the stream in
        // a loop and trim to the number of bytes actually produced.
        int total = 0;
        int read;
        while (total < buffer.Length
               && (read = resampler.Read(buffer, total, buffer.Length - total)) > 0)
        {
            total += read;
        }
        if (total < buffer.Length)
        {
            Array.Resize(ref buffer, total);
        }

        resampled = buffer;
    }
}
/// <summary>
/// Discord music command: queues a YouTube video when something is already
/// playing, otherwise downloads/converts the next queued video to WAV and
/// streams it to the connected voice channel, then recurses into the queue.
/// </summary>
/// <param name="Context">Command context (channel, guild, client).</param>
/// <param name="youtubevid">Link or search text; null/empty to resume the queue.</param>
public async Task Play(ICommandContext Context, [Remainder] string youtubevid = null)
{
    try
    {
        // Nothing specified and already playing: nothing to do.
        if (string.IsNullOrEmpty(youtubevid) && IsPlaying)
        {
            await Context.Channel.SendMessageAsync("You did not specify a link or search.");
            return;
        }
        // Nothing specified, idle, and the queue is empty: nothing to play.
        if (string.IsNullOrEmpty(youtubevid) && !IsPlaying && Queue.Count == 0)
        {
            await Context.Channel.SendMessageAsync("You did not specify a link or search.");
            return;
        }
        // Already playing: just enqueue the requested video and report it.
        if (IsPlaying && !string.IsNullOrEmpty(youtubevid))
        {
            TimeSpan dur = TimeSpan.Zero;
            string thm = "";
            string message = "";
            YouTubeVideo v = uvid(youtubevid, @"queue", out thm, out dur, out message);
            if (v == null)
            {
                await Context.Channel.SendMessageAsync("Hmm, that didn't work. info: " + message);
                return;
            }
            MusicQueueItem item = new MusicQueueItem(v, @"queue\" + v.FullName, dur, thm);
            Queue.Enqueue(item);
            EmbedBuilder qvidinfo = new EmbedBuilder();
            qvidinfo.WithAuthor("Added to Queue!");
            qvidinfo.AddField(v.Title, dur);
            qvidinfo.ImageUrl = thm;
            // NOTE(review): the builder is passed without .Build(), unlike the
            // NowPlayingInfo embed below — confirm the overload accepts it.
            await Context.Channel.SendMessageAsync("", false, qvidinfo);
            return;
        }
        // Idle with a request: enqueue it, then fall through to play it.
        if (!IsPlaying && !string.IsNullOrEmpty(youtubevid))
        {
            TimeSpan dur = TimeSpan.Zero;
            string thm = "";
            string message = "";
            YouTubeVideo v = uvid(youtubevid, @"queue", out thm, out dur, out message);
            if (v == null)
            {
                await Context.Channel.SendMessageAsync("Hmm, that didn't work. info: " + message);
                return;
            }
            // NOTE(review): path is @"queue" + FullName here but @"queue\" +
            // FullName in the branch above — missing separator looks like a bug.
            MusicQueueItem item = new MusicQueueItem(v, @"queue" + v.FullName, dur, thm);
            Queue.Enqueue(item);
            if (Queue.Count == 1)
            {
                await Context.Channel.SendMessageAsync("Added to queue, but wait, Only one song. Let's play it now.");
            }
        }

        // Pull the next item, write its bytes to disk, and convert to WAV.
        var vidinfo = Queue.Dequeue();
        string source = @"queue";
        var vide = vidinfo.video;
        byte[] b = vidinfo.video.GetBytes();
        System.IO.File.WriteAllBytes(System.IO.Path.GetFullPath(source) + "\\" + vide.FullName, b);
        var inputFile = new MediaFile { Filename = source + "\\" + vide.FullName };
        var outputFile = new MediaFile { Filename = $"{source + "\\" + vide.FullName}.wav" };
        using (var engine = new Engine())
        {
            engine.GetMetadata(inputFile);
            engine.Convert(inputFile, outputFile);
        }

        // Announce what is now playing.
        NowPlayingInfo = new EmbedBuilder();
        NowPlayingInfo.WithAuthor("Now Playing", Context.Client.CurrentUser.GetAvatarUrl(ImageFormat.Auto));
        NowPlayingInfo.AddField(vide.Title, vidinfo.duration);
        NowPlayingInfo.AddField("Voice Channel", "`" + (await Context.Guild.GetVoiceChannelAsync(ChannelID)).Name + "`");
        NowPlayingInfo.ImageUrl = vidinfo.ThumbnailURL;
        NowPlayingInfo.Color = Color.Purple;
        await Context.Channel.SendMessageAsync("", false, NowPlayingInfo.Build());

        // Resample to Discord's 48 kHz stereo and pump it into the PCM stream.
        using (var output = new NAudio.Wave.WaveFileReader(outputFile.Filename))
        {
            using (var ms = new System.IO.MemoryStream())
            {
                using (var PlayingStream = aclient.CreatePCMStream(AudioApplication.Mixed, null, 150, 0))
                {
                    using (var resampledAudio = new MediaFoundationResampler(output, new WaveFormat(48000, 16, 2)))
                    {
                        resampledAudio.ResamplerQuality = 50;
                        WaveFileWriter.WriteWavFileToStream(ms, resampledAudio);
                        // NOTE(review): ms is NOT rewound after writing, so the
                        // raw stream below starts reading at end-of-stream —
                        // confirm a ms.Position = 0 is not missing here.
                        using (var cvt = new NAudio.Wave.RawSourceWaveStream(ms, new WaveFormat(48000, 2)))
                        {
                            IsPlaying = true;
                            while (true)
                            {
                                // Busy-wait while paused instead of reading.
                                if (paused)
                                {
                                    await Task.Delay(20);
                                    continue;
                                }
                                byte[] buffer = new byte[81920];
                                int r = await cvt.ReadAsync(buffer, 0, buffer.Length);
                                await PlayingStream.WriteAsync(buffer, 0, r);
                                if (Skipped)
                                {
                                    Skipped = false;
                                    break;
                                }
                                //await PlayingStream.FlushAsync(new CancellationToken(paused));
                                if (r == 0)
                                {
                                    break;
                                }
                            }
                            await PlayingStream.FlushAsync();
                        }
                    }
                }
            }
        }
        IsPlaying = false;
        LogMessage log = new LogMessage(LogSeverity.Info, "VoiceMOD", "End of Stream. Off to next one");
        Console.WriteLine(log);

        // Drain the rest of the queue by re-entering this command with no arg.
        while (Queue.Count >= 1)
        {
            if (Queue.Count == 0)
            {
                break;
            }
            //System.Threading.SpinWait.SpinUntil(Stopped.);//wait until it is stopped for sure before calling the next play loop.
            if (IsPlaying)
            {
                await Task.Delay(20);
                continue;
            }
            await Play(Context);
        }
    }
    catch (Exception ex)
    {
        // Report any failure back to the channel and the console.
        await Context.Channel.SendMessageAsync(ex.Message);
        Console.WriteLine(ex.ToString());
    }
}
/// <summary>
/// Form startup: hides the window, lists installed TTS voices, and wires up
/// two 250 ms timers — one that drains the concurrent request queue into the
/// speech queue (handling interrupts/resets and emitting status events), and
/// one that synthesizes the next queued request to an A-law stream and plays
/// it through NAudio. Finally starts the HTTP control server.
/// </summary>
protected override void OnLoad(EventArgs e)
{
    // Run headless: no visible window, no taskbar entry.
    Visible = false;
    ShowInTaskbar = false;
    base.OnLoad(e);

    /*
     * Get all installed voices
     * */
    var voices = speech.GetInstalledVoices();
    string voice = "";
    foreach (InstalledVoice v in voices)
    {
        if (v.Enabled)
            //voice = v.VoiceInfo.Name;
            Console.WriteLine(v.VoiceInfo.Name);
    }

    // Timer 1: move requests from the concurrent inbox to the speech queue.
    queuetimer = new System.Timers.Timer(250);
    queuetimer.Elapsed += (object sender, ElapsedEventArgs ev) =>
    {
        TTSRequest r;
        if (Queue.TryDequeue(out r))
        {
            Console.WriteLine("dequeing off of concurrent queue...");
            if (r.Interrupt)
            {
                // stop current TTS
                if (IsSpeaking)
                {
                    //speech.StopSpeaking();
                }
                if (IsSounding)
                {
                    //sound.Stop();
                    if (sound.PlaybackState == PlaybackState.Playing)
                    {
                        sound.Stop();
                    }
                }
                // clear queue
                SpeechQueue.Clear();
            }
            if (!r.Reset)
            {
                SpeechQueue.Enqueue(r);
            }
            RequestCount++;
        }

        // Publish a status snapshot to the GUI on every tick.
        var eventdata = new Hashtable();
        eventdata.Add("ProcessedRequests", RequestCount);
        eventdata.Add("QueuedRequests", SpeechQueue.Count);
        eventdata.Add("IsSpeaking", IsSounding);
        InstrumentationEvent blam = new InstrumentationEvent();
        blam.EventName = "status";
        blam.Data = eventdata;
        NotifyGui(blam.EventMessage());
    };

    // when this timer fires, it will pull off of the speech queue and speak it
    // the long delay also adds a little pause between tts requests.
    speechtimer = new System.Timers.Timer(250);
    speechtimer.Elapsed += (object sender, ElapsedEventArgs ev) =>
    {
        if (IsSpeaking.Equals(false))
        {
            if (SpeechQueue.Count > 0)
            {
                TTSRequest r = SpeechQueue.Dequeue();
                Console.WriteLine("dequeuing off of speech queue");
                IsSpeaking = true;
                // Disable ourselves while synthesizing/playing; re-enabled below.
                speechtimer.Enabled = false;
                //speech.SpeakAsync(r.Text);
                //using (speech = new SpeechSynthesizer()) {

                // A fresh synthesizer per request, rendering A-law 8 kHz mono
                // into an in-memory stream.
                speech = new SpeechSynthesizer();
                speech.SpeakCompleted += speech_SpeakCompleted;
                format = new SpeechAudioFormatInfo(EncodingFormat.ALaw, 8000, 8, 1, 1, 2, null);
                //format = new SpeechAudioFormatInfo(11025, AudioBitsPerSample.Sixteen, AudioChannel.Mono);
                // var si = speech.GetType().GetMethod("SetOutputStream", BindingFlags.Instance | BindingFlags.NonPublic);
                stream = new MemoryStream();
                //si.Invoke(speech, new object[] { stream, format, true, true });
                //speech.SetOutputToWaveStream(stream);
                speech.SetOutputToAudioStream(stream, format);
                speech.SelectVoice(config.getVoice(r.Language, r.Voice));

                // Map request speed (presumably 0..10 — TODO confirm) onto the
                // synthesizer's -10..10 rate range, falling back to 0.
                int rate = (r.Speed * 2 - 10);
                Console.WriteLine(rate);
                try
                {
                    speech.Rate = rate;
                }
                catch (ArgumentOutOfRangeException ex)
                {
                    speech.Rate = 0;
                }
                speech.SpeakAsync(r.Text);
                //}

                // Block this timer thread until SpeakCompleted signals.
                synthesis.WaitOne();
                speech.SpeakCompleted -= speech_SpeakCompleted;
                speech.SetOutputToNull();
                speech.Dispose();
                //IsSpeaking = false;
                IsSounding = true;
                stream.Position = 0;
                //WaveFormat.CreateCustomFormat(WaveFormatEncoding.WmaVoice9, 11025, 1, 16000, 2, 16)

                // Decode the A-law buffer to PCM and play it on the configured device.
                using (RawSourceWaveStream reader = new RawSourceWaveStream(stream, WaveFormat.CreateALawFormat(8000, 1)))
                {
                    WaveStream ws = WaveFormatConversionStream.CreatePcmStream(reader);
                    //var waveProvider = new MultiplexingWaveProvider(new IWaveProvider[] { ws }, 4);
                    //waveProvider.ConnectInputToOutput(0, 3);
                    sound = new WaveOutEvent();
                    // set output device *before* init
                    Console.WriteLine("Output Device: " + OutputDeviceId);
                    sound.DeviceNumber = OutputDeviceId;
                    sound.Init(ws);
                    //sound.Init(waveProvider);
                    sound.PlaybackStopped += output_PlaybackStopped;
                    // Console.WriteLine("playing here " + ws.Length);
                    sound.Play();
                }

                // Block until PlaybackStopped signals, then resume the timer.
                playback.WaitOne();
                //IsSounding = false;
                speechtimer.Enabled = true;
            }
        }
    };

    queuetimer.Enabled = true;
    queuetimer.Start();
    speechtimer.Enabled = true;
    speechtimer.Start();

    InitHTTPServer();
}
/// <summary>
/// Mixes the two call legs (extension 0 / extension 1) of <paramref name="data"/>
/// frame-by-frame and writes the result to a WAV file on the D: drive.
/// Frames are paired by sequence number; when one side is missing a sequence,
/// the other side's frame is replayed against the next one so the legs stay aligned.
/// </summary>
/// <param name="data">Call record identifying the frames to mix (project type).</param>
/// <param name="dataSize">Unused; kept for interface compatibility with callers.</param>
private void ProcessMixing2(RcvData data, int dataSize)
{
    string processingFn = string.Format("d:\\{0}_{1}_{2}.wav", data.seqnum, data.extension, data.peernumber);

    // Collect this call's frames, one list per leg.
    List<RecInfos> ls0 = lExtension0.FindAll(
        delegate(RecInfos list) { return list.rcvData.Equals(data) && list.isExtension == 0; });
    List<RecInfos> ls1 = lExtension1.FindAll(
        delegate(RecInfos list) { return list.rcvData.Equals(data) && list.isExtension == 1; });

    IsExtensionComparer isExtensionCompare = new IsExtensionComparer();
    ls0.Sort(isExtensionCompare);
    ls1.Sort(isExtensionCompare);

    // Mix only as many frames as both legs can supply.
    // (Was: manual min via subtraction; Count() LINQ calls replaced by the Count property.)
    int count = Math.Min(ls0.Count, ls1.Count);

    // 320 bytes of mixed 16-bit PCM output per 160-byte mu-law input frame.
    byte[] buffWriting = new byte[320 * count];

    for (int i = 0; i < count; i++)
    {
        // Strip the 12-byte header (presumably RTP) from each frame — TODO confirm header size
        // against the capture format; codec payload length is assumed to be 160 bytes.
        byte[] wavSrc0 = ExtractVoicePayload(ls0[i].voice);
        byte[] wavSrc1 = ExtractVoicePayload(ls1[i].voice);

        // NOTE(review): the original mixed both payloads identically in all three seq branches
        // (despite comments suggesting single-leg mixing for mismatches), so the mix is hoisted
        // out of the branches; behavior is unchanged.
        MixFramePair(wavSrc0, wavSrc1, buffWriting, i);

        // Direct comparisons instead of the original "seq0 - seq1 < 0" subtraction,
        // which could overflow for extreme sequence values; branches are now exhaustive.
        if (ls0[i].seq == ls1[i].seq)
        {
            // Both legs consumed.
            lExtension0.Remove(ls0[i]);
            lExtension1.Remove(ls1[i]);
        }
        else if (ls0[i].seq < ls1[i].seq)
        {
            // Leg 0 is behind: consume it and replay leg 1's frame next iteration.
            lExtension0.Remove(ls0[i]);
            ls1.Insert(i + 1, ls1[i]);
        }
        else
        {
            // Leg 1 is behind: consume it and replay leg 0's frame next iteration.
            lExtension1.Remove(ls1[i]);
            ls0.Insert(i + 1, ls0[i]);
        }
    }

    // Flush the accumulated PCM frames out as a WAV file.
    WaveFileWriting(buffWriting, processingFn);
}

/// <summary>Copies the 160-byte codec payload out of a captured frame, skipping its 12-byte header.</summary>
private static byte[] ExtractVoicePayload(byte[] voice)
{
    byte[] payload = new byte[160];
    Array.Copy(voice, 12, payload, 0, payload.Length);
    return payload;
}

/// <summary>
/// Mixes two mu-law payloads through the NAudio pipeline
/// (mu-law → PCM → 32-bit float mix → 16-bit PCM) and block-copies the mixed
/// frame into <paramref name="buffWriting"/> at frame slot <paramref name="frameIndex"/>.
/// </summary>
private void MixFramePair(byte[] wavSrc0, byte[] wavSrc1, byte[] buffWriting, int frameIndex)
{
    WaveMixerStream32 mixer = new WaveMixerStream32();
    //mixer.AutoStop = true;

    // Track every stream we open so all of them get closed.
    // (The original reassigned the locals inside the loop and closed only the
    // second iteration's streams, leaking the first set.)
    List<Stream> opened = new List<Stream>();

    foreach (byte[] src in new[] { wavSrc0, wavSrc1 })
    {
        MemoryStream memStm = new MemoryStream(src);
        BufferedStream bufStm = new BufferedStream(memStm);
        RawSourceWaveStream rawSrcStm = new RawSourceWaveStream(bufStm, mulawFormat);
        WaveFormatConversionStream conversionStm = new WaveFormatConversionStream(pcmFormat, rawSrcStm);
        WaveChannel32 channelStm = new WaveChannel32(conversionStm);
        mixer.AddInputStream(channelStm);

        opened.Add(memStm);
        opened.Add(bufStm);
        opened.Add(rawSrcStm);
        opened.Add(conversionStm);
        opened.Add(channelStm);
    }

    mixer.Position = 0;
    Wave32To16Stream to16 = new Wave32To16Stream(mixer);
    var convStm = new WaveFormatConversionStream(pcmFormat, to16);

    byte[] frame = new byte[(int)convStm.Length];
    int read = convStm.Read(frame, 0, frame.Length);
    // NOTE(review): 'read' may be short of frame.Length; the original ignored it too — confirm
    // whether a short read can occur with these fixed-size inputs.
    Buffer.BlockCopy(frame, 0, buffWriting, frameIndex * frame.Length, frame.Length);

    // Tear down in reverse construction order.
    convStm.Close();
    to16.Close();
    mixer.Close();
    for (int k = opened.Count - 1; k >= 0; k--)
    {
        opened[k].Close();
    }
}