/// <summary>
/// Renders the given text to speech and writes it to a WAVE file,
/// amplifying the result by the configured Sasara gain.
/// </summary>
/// <param name="textToSpeak">Text to speak; blank input is ignored.</param>
/// <param name="waveFile">Destination WAVE file path.</param>
public void OutputWaveToFile(
    string textToSpeak,
    string waveFile)
{
    if (string.IsNullOrWhiteSpace(textToSpeak))
    {
        return;
    }

    this.StartSasara();

    // Render to a temporary file first; the amplified copy goes to waveFile.
    var tempWave = Path.GetTempFileName();
    try
    {
        talker.OutputWaveToFile(
            textToSpeak,
            tempWave);

        // Sasara's output is on the quiet side, so amplify it while
        // copying to the final destination.
        using (var reader = new WaveFileReader(tempWave))
        {
            var prov = new VolumeWaveProvider16(reader);
            prov.Volume = Settings.Default.SasaraGain;

            WaveFileWriter.CreateWaveFile(
                waveFile,
                prov);
        }
    }
    finally
    {
        // BUGFIX: delete the temp file even when rendering or
        // amplification throws (it previously leaked on exceptions).
        if (File.Exists(tempWave))
        {
            File.Delete(tempWave);
        }
    }
}
//--------------------controls----------------------------------------
/// <summary>
/// Starts playback of the current song file.
/// </summary>
public void play()
{
    try
    {
        // Reset any previous playback before starting fresh.
        stop();

        // Decode the MP3 into an uncompressed PCM stream.
        var pcmStream = NAudio.Wave.WaveFormatConversionStream.CreatePcmStream(
            new NAudio.Wave.Mp3FileReader(this.songPath));
        stream = new NAudio.Wave.BlockAlignReductionStream(pcmStream);

        // Volume stage on top of the PCM stream.
        volProvider = new VolumeWaveProvider16(stream);
        volProvider.Volume = vol;

        output = new NAudio.Wave.WaveOut(); //new NAudio.Wave.DirectSoundOut();
        output.PlaybackStopped += output_PlaybackStopped;
        output.Init(volProvider);
        output.Play();

        checkPlayback();
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
    }
}
//------------------------------------------------------------------------------------------------------------------------
#endregion

#region Constructor
//------------------------------------------------------------------------------------------------------------------------
/// <summary>
/// Creates the speaker: an 8 kHz, 16-bit stereo buffered source routed
/// through a volume stage into a WaveOut device.
/// </summary>
public Speaker()
{
    waveout = new WaveOut();
    waveout.PlaybackStopped += Waveout_PlaybackStopped;

    bufferedWaveProvider = new BufferedWaveProvider(new WaveFormat(8000, 16, 2));
    volumeProvider = new VolumeWaveProvider16(bufferedWaveProvider);

    waveout.Init(volumeProvider);
}
/// <summary>
/// At the default volume (1.0) the provider must be a pure pass-through.
/// </summary>
public void PassesThroughDataUnchangedAtVolume1()
{
    var source = new TestWaveProvider(new WaveFormat(44100, 16, 2));
    var volumeProvider = new VolumeWaveProvider16(source);

    var buffer = new byte[20];
    var bytesRead = volumeProvider.Read(buffer, 0, buffer.Length);

    Assert.AreEqual(buffer.Length, bytesRead);
    var expected = new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19 };
    Assert.AreEqual(expected, buffer);
}
/// <summary>
/// Reading a constant source at 50% volume must halve every sample.
/// </summary>
public void HalfVolumeWorks()
{
    var source = new TestWaveProvider(new WaveFormat(44100, 16, 2));
    source.ConstValue = 100;

    var volumeProvider = new VolumeWaveProvider16(source);
    volumeProvider.Volume = 0.5f;

    var buffer = new byte[4];
    int bytesRead = volumeProvider.Read(buffer, 0, buffer.Length);

    Assert.AreEqual(new byte[] { 50, 50, 50, 50 }, buffer);
}
/// <summary>
/// Reading a constant source at 200% volume must double every 16-bit sample.
/// </summary>
public void DoubleVolumeWorks()
{
    var source = new TestWaveProvider(new WaveFormat(44100, 16, 1));
    source.ConstValue = 2;

    // Expected: the 16-bit sample built from bytes {2, 2}, doubled.
    var expectedSample = (short)(BitConverter.ToInt16(new byte[] { 2, 2 }, 0) * 2);

    var volumeProvider = new VolumeWaveProvider16(source);
    volumeProvider.Volume = 2f;

    var buffer = new byte[2];
    int bytesRead = volumeProvider.Read(buffer, 0, buffer.Length);

    Assert.AreEqual(BitConverter.GetBytes(expectedSample), buffer);
}
/// <summary>
/// Builds the playback chain: the shared buffer wrapped in a volume
/// stage, played through DirectSound with 70 ms desired latency.
/// </summary>
private void CreateOutput()
{
    var vwp = new VolumeWaveProvider16(_buffer);
    // Set the gain once, before Init (the original assigned it twice,
    // redundantly, before and after Play).
    vwp.Volume = _currentVolume;

    var dso = new DirectSoundOut(70);
    dso.Init(vwp);
    dso.Play();

    // BUGFIX: log the volume we actually applied; the device's own
    // Volume property is unrelated to the provider gain that was set.
    Debug.WriteLine("Now playing at {0}% volume", _currentVolume * 100);

    _currentOut = dso;
    _volumeWaveProvider = vwp;
}
/// <summary>
/// Queues raw PCM samples for playback, lazily creating the output
/// chain (buffer -> volume -> DirectSound) on the first call.
/// </summary>
/// <param name="channels">Number of audio channels.</param>
/// <param name="rate">Sample rate in Hz.</param>
/// <param name="samples">Raw sample bytes to enqueue.</param>
/// <param name="frames">Number of frames represented by <paramref name="samples"/>.</param>
/// <returns><paramref name="frames"/> when the data was accepted; 0 when the buffer was too full.</returns>
public int EnqueueSamples(int channels, int rate, byte[] samples, int frames)
{
    if (bufferedWaveProvider == null)
    {
        bufferedWaveProvider = new BufferedWaveProvider(new WaveFormat(rate, channels));
        volumeWaveProvider = new VolumeWaveProvider16(bufferedWaveProvider)
        {
            Volume = volumeControl.CurrentVolume
        };
        //Output = new WasapiOut(AudioClientShareMode.Shared, false, 0);
        Output = new DirectSoundOut(70);
        Output.Init(volumeWaveProvider);
        Output.Play();
    }

    int space = bufferedWaveProvider.BufferLength - bufferedWaveProvider.BufferedBytes;

    // BUGFIX: use >= so a payload that exactly fills the remaining space
    // is accepted rather than silently dropped (original used >).
    if (space >= samples.Length)
    {
        bufferedWaveProvider.AddSamples(samples, 0, samples.Length);
        return frames;
    }

    return 0;
}
/// <summary>
/// Constructs a processor for one channel of a WaveIn capture source.
/// Stores the configuration, builds a mono 3-second buffered provider,
/// wraps it in a ProcessorWaveProvider (recording / noise removal /
/// MDC1200, GE-Star, FleetSync and P25 decoding per the flags) and a
/// volume stage, registers itself with the WaveInManager, then starts
/// playback on the output device at the given initial volume.
/// </summary>
/// <param name="streamName">Display name of this stream.</param>
/// <param name="waveInSourceName">Name of the WaveIn capture source.</param>
/// <param name="waveInChannel">Channel index within the source.</param>
/// <param name="sigDelegate">Callback for decoded radio signaling items.</param>
/// <param name="propertyChangedAction">Invoked when processor state changes.</param>
/// <param name="initialVolume">Initial playback gain for the volume provider.</param>
/// <param name="recordingEnabled">Whether to record the processed audio.</param>
/// <param name="recordingType">How recordings are segmented.</param>
/// <param name="recordingKickTime">Recording keep-alive time.</param>
/// <param name="noiseFloor">Noise floor mode.</param>
/// <param name="customNoiseFloor">Custom noise floor value (when applicable).</param>
/// <param name="removeNoise">Whether to apply noise removal.</param>
/// <param name="decodeMDC1200">Enable MDC1200 decoding.</param>
/// <param name="decodeGEStar">Enable GE-Star decoding.</param>
/// <param name="decodeFleetSync">Enable FleetSync decoding.</param>
/// <param name="decodeP25">Enable P25 decoding.</param>
/// <param name="waveOutDevName">Name of the output device to play on.</param>
public WaveInChannelProcessor(string streamName, string waveInSourceName, int waveInChannel, Common.ProcessRadioSignalingItemDelegate sigDelegate, Action<bool> propertyChangedAction, float initialVolume, bool recordingEnabled, Common.SignalRecordingType recordingType, int recordingKickTime, Common.NoiseFloor noiseFloor, int customNoiseFloor, bool removeNoise, bool decodeMDC1200, bool decodeGEStar, bool decodeFleetSync, bool decodeP25, string waveOutDevName) { _streamShouldPlay = true; _streamName = streamName; _waveInSourceName = waveInSourceName; _waveInChannel = waveInChannel; _sigDelegate = sigDelegate; _propertyChangedAction = propertyChangedAction; sourceActive = false; _recordingEnabled = recordingEnabled; _recordingType = recordingType; _recordingKickTime = recordingKickTime; _noiseFloor = noiseFloor; _customNoiseFloor = customNoiseFloor; _removeNoise = removeNoise; _decodeMDC1200 = decodeMDC1200; _decodeGEStar = decodeGEStar; _decodeFleetSync = decodeFleetSync; _decodeP25 = decodeP25; _waveOutDevName = waveOutDevName; bufferedWaveProvider = new BufferedWaveProvider(AudioProcessingGlobals.GetWaveFormatForChannels(1)); bufferedWaveProvider.BufferDuration = TimeSpan.FromSeconds(3); processorWaveProvider = new ProcessorWaveProvider(streamName, bufferedWaveProvider, ProcessRadioSignalingItem, propertyChangedAction, recordingEnabled, recordingType, recordingKickTime, noiseFloor, customNoiseFloor, removeNoise, decodeMDC1200, decodeGEStar, decodeFleetSync, decodeP25); volumeProvider = new VolumeWaveProvider16(processorWaveProvider); volumeProvider.Volume = initialVolume; FirePropertyChangedAction(true); WaveInManager.Instance.SetupForProcessor(this); sourceActive = true; waveOut = CreateWaveOut(); waveOut.Init(volumeProvider); waveOut.Play(); }
/// <summary>
/// Streams an internet radio MP3 station: issues an ICY HTTP request,
/// reads MP3 frames from the response, decompresses them into the
/// buffered provider and reports song metadata until playback stops.
/// </summary>
/// <param name="url">Base URL of the stream (a trailing "/;" is appended).</param>
public void Stream(string url)
{
    webRequest = (HttpWebRequest)WebRequest.Create(url + "/;");
    int metaInt = 0; // blocksize of mp3 data
    webRequest.Headers.Clear();
    webRequest.Method = "GET";
    // needed to receive metadata informations
    webRequest.Headers.Add("Icy-MetaData", "1");
    webRequest.UserAgent = "WinampMPEG/5.09";
    HttpWebResponse resp = null;
    try
    {
        resp = (HttpWebResponse)webRequest.GetResponse();
    }
    catch (WebException e)
    {
        if (e.Status != WebExceptionStatus.RequestCanceled)
        {
            //ShowError(e.Message);
        }
        return;
    }
    byte[] buffer = new byte[16384 * 4]; // needs to be big enough to hold a decompressed frame
    try
    {
        // read blocksize to find metadata block
        metaInt = Convert.ToInt32(resp.GetResponseHeader("icy-metaint"));
    }
    catch
    {
        // header absent: stream carries no inline metadata
    }
    decompressor = null;
    try
    {
        using (var responseStream = resp.GetResponseStream())
        {
            var readFullyStream = new ReadFullyStream(responseStream);
            readFullyStream.metaInt = metaInt;
            // NOTE(review): the output device is created on a 10-second
            // delayed continuation; if no audio has buffered by then,
            // bufferedWaveProvider may still be null here - confirm intent.
            System.Threading.Tasks.Task.Factory.StartNew(() => Thread.Sleep(10000)).ContinueWith(x =>
            {
                volumeProvider = new VolumeWaveProvider16(bufferedWaveProvider);
                waveOut = new WaveOut();
                waveOut.Init(volumeProvider);
            });
            do
            {
                // Throttle when less than a quarter second of space remains.
                if (bufferedWaveProvider != null && bufferedWaveProvider.BufferLength - bufferedWaveProvider.BufferedBytes < bufferedWaveProvider.WaveFormat.AverageBytesPerSecond / 4)
                {
                    Debug.WriteLine("Buffer getting full, taking a break");
                    Thread.Sleep(500);
                }
                else
                {
                    Mp3Frame frame = null;
                    try
                    {
                        frame = Mp3Frame.LoadFromStream(readFullyStream, true);
                        if (metaInt > 0)
                            songCallback(readFullyStream.SongName);
                        else
                            songCallback("No Song Info in Stream...");
                    }
                    catch (EndOfStreamException)
                    {
                        this.fullyDownloaded = true;
                        // reached the end of the MP3 file / stream
                        break;
                    }
                    catch (WebException)
                    {
                        // probably we have aborted download from the GUI thread
                        break;
                    }
                    if (decompressor == null)
                    {
                        // don't think these details matter too much - just help ACM select the right codec
                        // however, the buffered provider doesn't know what sample rate it is working at
                        // until we have a frame
                        WaveFormat waveFormat = new Mp3WaveFormat(frame.SampleRate, frame.ChannelMode == ChannelMode.Mono ? 1 : 2, frame.FrameLength, frame.BitRate);
                        decompressor = new AcmMp3FrameDecompressor(waveFormat);
                        this.bufferedWaveProvider = new BufferedWaveProvider(decompressor.OutputFormat);
                        this.bufferedWaveProvider.BufferDuration = TimeSpan.FromSeconds(20); // allow us to get well ahead of ourselves
                        //this.bufferedWaveProvider.BufferedDuration = 250;
                    }
                    int decompressed = decompressor.DecompressFrame(frame, buffer, 0);
                    //Debug.WriteLine(String.Format("Decompressed a frame {0}", decompressed));
                    bufferedWaveProvider.AddSamples(buffer, 0, decompressed);
                }
            } while (playbackState != StreamingPlaybackState.Stopped);
            Debug.WriteLine("Exiting");
            // was doing this in a finally block, but for some reason
            // we are hanging on response stream .Dispose so never get there
            Dispose();
        }
    }
    finally
    {
        Dispose();
    }
}
/// <summary>
/// Periodic UI tick: lazily creates the output device once data has
/// buffered, and manages pause/resume/stop based on buffered duration.
/// </summary>
private void timer1_Tick(object sender, EventArgs e)
{
    if (playbackState == StreamingPlaybackState.Stopped)
    {
        return;
    }

    if (waveOut == null && bufferedWaveProvider != null)
    {
        Debug.WriteLine("Creating WaveOut Device");
        waveOut = CreateWaveOut();
        //waveOut.PlaybackStopped += OnPlaybackStopped;
        volumeProvider = new VolumeWaveProvider16(bufferedWaveProvider);
        //volumeProvider.Volume = volumeSlider1.Volume;
        waveOut.Init(volumeProvider);
    }
    else if (bufferedWaveProvider != null)
    {
        double bufferedSeconds = bufferedWaveProvider.BufferedDuration.TotalSeconds;
        //ShowBufferState(bufferedSeconds);

        // Stutter less: pause when nearly empty, resume once a decent
        // amount has buffered, stop when a finished download drains.
        if (bufferedSeconds < 0.5 && playbackState == StreamingPlaybackState.Playing && !fullyDownloaded)
        {
            Pause();
        }
        else if (bufferedSeconds > 4 && playbackState == StreamingPlaybackState.Buffering)
        {
            Play();
        }
        else if (fullyDownloaded && bufferedSeconds == 0)
        {
            Debug.WriteLine("Reached end of stream");
            playbackState = StreamingPlaybackState.Stopped;
            StopPlayback();
        }
    }
}
/// <summary>
/// (Re)initializes the playback backend from the format of the given
/// MP3 frame: output device, decompressor, buffer and volume stage.
/// </summary>
public void SetupBackend(Mp3Frame frame)
{
    // Tear down first; the previous stream's MP3 parameters may differ.
    StopPlayer();
    bFileEnding = false;

    waveOut = new NAudio.Wave.DirectSoundOut();
    SetWaveFormat(frame);
    decompressor = new AcmMp3FrameDecompressor(waveFormat);

    out_buffer = new BufferedWaveProvider(decompressor.OutputFormat);
    volumeHandler = new VolumeWaveProvider16(out_buffer);
    volumeHandler.Volume = volume; // 1.0 = full volume, 0.0 = silence

    waveOut.Init(volumeHandler);

    // Tell listeners the backend is ready, then start playback.
    if (backendHandler != null)
    {
        backendHandler(this, true);
    }

    waveOut.Play();
}
/// <summary>
/// The provider must expose its source's WaveFormat instance unchanged.
/// </summary>
public void PassesThroughSourceWaveFormat()
{
    var source = new TestWaveProvider(new WaveFormat(44100, 16, 2));

    var volumeProvider = new VolumeWaveProvider16(source);

    Assert.AreSame(source.WaveFormat, volumeProvider.WaveFormat);
}
// Sets up stream playback: creates the DirectSound output and the buffer
// chain; audio is later fed in by calling AddSamples on the buffer.
public void StartPlaying()
{
    // Cannot build the chain without knowing the stream's format.
    if (waveFormat == null)
    {
        return;
    }

    waveOut = new NAudio.Wave.DirectSoundOut();

    // The buffered provider accepts pushed samples; the volume provider
    // sits on top of it before the data reaches DirectSound.
    out_buffer = new BufferedWaveProvider(waveFormat);
    volumeHandler = new VolumeWaveProvider16(out_buffer);
    volumeHandler.Volume = volume;

    waveOut.Init(volumeHandler);

    // Signal listeners that the backend is ready, then start playing.
    if (backendHandler != null)
    {
        backendHandler(this, true);
    }

    waveOut.Play();
}
//The StopPlaying/Recording functions which will call KillAll which kills both recording and playing streams.
//public void StopPlaying(object sender, EventArgs e)
/// <summary>
/// Stops playback and releases the decompressor, volume stage and output
/// device, then calls KillAll to tear down remaining streams.
/// </summary>
public void StopPlayer()
{
    // Signal listeners that we are shutting down.
    if (backendHandler != null)
    {
        backendHandler(this, false);
    }

    if (decompressor != null)
    {
        decompressor.Dispose();
        decompressor = null;
    }

    // The volume provider holds no disposable resources; just drop it.
    volumeHandler = null;

    if (waveOut != null)
    {
        waveOut.Stop();
        waveOut.Dispose();
        waveOut = null;
    }

    KillAll();
}
/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
/// </summary>
public void Dispose()
{
    // BUGFIX: clean each member independently. The original early-returned
    // when the device was null, which left m_WaveProvider set forever.
    if (m_Device != null)
    {
        m_Device.Stop();
        m_Device.Dispose();
        m_Device = null;
    }

    m_WaveProvider = null;
}
/// <summary>
/// A freshly constructed provider must default to full volume.
/// </summary>
public void DefaultVolumeIs1()
{
    var source = new TestWaveProvider(new WaveFormat(44100, 16, 2));

    var volumeProvider = new VolumeWaveProvider16(source);

    Assert.AreEqual(1.0f, volumeProvider.Volume);
}
/// <summary>
/// Long-running worker loop for one radio stream. While _streamShouldPlay
/// is set it: connects to the (ICY) stream URL with randomized start
/// jitter, follows one redirect if ValidateResponse supplies a new URL,
/// backs off with SmartSleep on HTTP/web errors (marking 404 feeds
/// inactive), then reads MP3 frames, creating the decompressor, the
/// 20-second buffered provider, the ProcessorWaveProvider chain and the
/// volume/output stage from the first frame. Playback is paused below
/// 0.5 s of buffered audio and resumed above 4 s; an inactive feed is
/// restarted after a long sleep. The decompressor is disposed on every
/// exit path and general exceptions trigger a stop-and-retry.
/// </summary>
private void WaveStreamProc() { string icyStreamName = string.Empty; while (_streamShouldPlay) { ResetFeedInactivityTimeout(); SetFeedActive(true, false); System.Threading.Thread.Sleep(new System.Random().Next(100, 1500)); try { Common.ConsoleHelper.ColorWriteLine(ConsoleColor.DarkCyan, "Initiating connection to {0}", _streamName); playbackState = StreamingPlaybackState.Buffering; FirePropertyChangedAction(false); bufferedWaveProvider = null; fullyDownloaded = false; webRequest = (HttpWebRequest)WebRequest.Create(Common.UrlHelper.GetCorrectedStreamURL(_streamURL)); //webRequest.Timeout = webRequest.Timeout * 20; /*test - start*/ webRequest.Headers.Clear(); webRequest.Headers.Add("Icy-MetaData", "1"); SetupWebRequestSettings(webRequest); /*test - end*/ HttpWebResponse resp; try { resp = SmartGetWebResponse(webRequest);// (HttpWebResponse)webRequest.GetResponse(); string newUrl = string.Empty; if (!ValidateResponse(resp, out newUrl, out icyStreamName)) { if (!string.IsNullOrWhiteSpace(newUrl)) { webRequest = (HttpWebRequest)WebRequest.Create(newUrl); //webRequest.Timeout = webRequest.Timeout * 20; /*test - start*/ webRequest.Headers.Clear(); webRequest.Headers.Add("Icy-MetaData", "1"); SetupWebRequestSettings(webRequest); /*test - end*/ resp = SmartGetWebResponse(webRequest);// (HttpWebResponse)webRequest.GetResponse(); PrintHttpWebResponseHeader(resp, "WaveStreamProc.Redirected", out icyStreamName); } } } catch (WebException e) { if (e.Status == WebExceptionStatus.ProtocolError) { Common.DebugHelper.WriteExceptionToLog("WaveStreamProcessor.WebRequest.GetResponse", e, false, _streamURL); try { System.Net.HttpWebResponse httpRes = e.Response as System.Net.HttpWebResponse; if (httpRes != null && httpRes.StatusCode == HttpStatusCode.NotFound) { SetFeedActive(false, false); } } catch { } } if (e.Status != WebExceptionStatus.RequestCanceled) { //ConsoleHelper.ColorWriteLine(ConsoleColor.Red, "WaveStreamProcessor Error: {0}", e.Message); 
//Common.DebugHelper.WriteExceptionToLog("WaveStreamProcessor Error", e, false); InternalStopStream(); ClearStreamTitle(); SmartSleep(30 * 1000, string.Format("WebRequest.GetResponse: {0}", e.Status)); } continue; } var buffer = new byte[16348 * 4]; IMp3FrameDecompressor decompressor = null; try { int metaInt = -1; bool bIsIcy = IsIcecastStream(resp, out metaInt); if (!bIsIcy) { metaInt = -1; } using (var responseStream = resp.GetResponseStream()) { Stream readFullyStream = null; if (!string.IsNullOrWhiteSpace(icyStreamName)) { UpdateStreamTitle(icyStreamName, false); FirePropertyChangedAction(true); } if (bIsIcy && metaInt > 0) { readFullyStream = new ReadFullyStream(new IcyStream(responseStream, metaInt, ProcessIcyMetadata)); } else { readFullyStream = new ReadFullyStream(responseStream); } do { if (IsBufferNearlyFull) { SmartSleep(500); //ConsoleHelper.ColorWriteLine(ConsoleColor.DarkCyan, "Buffer is getting full, taking a break, {0}sec...", bufferedWaveProvider.BufferedDuration.TotalSeconds); //ConsoleHelper.ColorWriteLine(ConsoleColor.DarkCyan, " {0}", _streamURL); } else { Mp3Frame frame; try { frame = Mp3Frame.LoadFromStream(readFullyStream); } catch (EndOfStreamException eose) { fullyDownloaded = true; SmartSleep(1500, "EndOfStreamException"); if (playbackState != StreamingPlaybackState.Stopped) { Common.DebugHelper.WriteExceptionToLog("Mp3Frame.LoadFromStream", eose, false, _streamURL); } continue; } catch (WebException wex) { InternalStopStream(); SmartSleep(3 * 60 * 1000, "WebException"); if (playbackState != StreamingPlaybackState.Stopped) { Common.DebugHelper.WriteExceptionToLog("Mp3Frame.LoadFromStream", wex, false, _streamURL); } continue; } if (frame != null) { KickFeedInactivityTimeout(); SetFeedActive(true, true); if (decompressor == null) { try { Common.ConsoleHelper.ColorWriteLine(ConsoleColor.DarkGray, "Creating MP3 Decompressor for {0}...", _streamURL); decompressor = CreateFrameDecompressor(frame); bufferedWaveProvider = new 
BufferedWaveProvider(decompressor.OutputFormat); bufferedWaveProvider.BufferDuration = TimeSpan.FromSeconds(20); processorWaveProvider = new ProcessorWaveProvider(_streamName, bufferedWaveProvider, _sigDelegate, _propertyChangedAction, _recordingEnabled, _recordingType, _recordingKickTime, _noiseFloor, _customNoiseFloor, _removeNoise, _decodeMDC1200, _decodeGEStar, _decodeFleetSync, _decodeP25); //volumeProvider = new VolumeWaveProvider16(bufferedWaveProvider); volumeProvider = new VolumeWaveProvider16(processorWaveProvider); volumeProvider.Volume = _initialVolume; waveOut = CreateWaveOut(); waveOut.Init(volumeProvider); FirePropertyChangedAction(false); } catch (Exception ex) { Common.ConsoleHelper.ColorWriteLine(ConsoleColor.Red, "Excpetion in stream {0}: {1}", _streamURL, ex.Message); Common.DebugHelper.WriteExceptionToLog("WaveStreamProcessor", ex, false, string.Format("Exception in stream {0}", _streamURL)); InternalStopStream(); SmartSleep(3 * 60 * 1000, "Exception:CreateFrameDecompressor"); continue; } } int decompressed = decompressor.DecompressFrame(frame, buffer, 0); bufferedWaveProvider.AddSamples(buffer, 0, decompressed); var bufferedSeconds = bufferedWaveProvider.BufferedDuration.TotalSeconds; if (bufferedSeconds < 0.5 && playbackState == StreamingPlaybackState.Playing && !fullyDownloaded) { playbackState = StreamingPlaybackState.Buffering; FirePropertyChangedAction(false); waveOut.Pause(); Common.ConsoleHelper.ColorWriteLine(ConsoleColor.DarkRed, "Stream Paused... Buffering... {0}", _streamURL); } else if (bufferedSeconds > 4 && playbackState == StreamingPlaybackState.Buffering) { waveOut.Play(); playbackState = StreamingPlaybackState.Playing; FirePropertyChangedAction(false); Common.ConsoleHelper.ColorWriteLine(ConsoleColor.DarkGreen, "Stream Playing... 
{0}", _streamURL); } } else { if(IsFeedInactive()) { try { Common.ConsoleHelper.ColorWriteLine(ConsoleColor.DarkYellow, "Restarting {0} due to inactivity...", _streamName); InternalStopStream(); } finally { LongSmartSleep(FEED_NOT_ACTIVE_SLEEP_SECS, "WaveStreamProc.IsFeedInactive"); } } } } } while (playbackState != StreamingPlaybackState.Stopped); if (decompressor != null) { try { decompressor.Dispose(); } finally { decompressor = null; } } } } finally { if (decompressor != null) { try { decompressor.Dispose(); } finally { decompressor = null; } } } } catch(Exception ex) { #if DEBUG Common.DebugHelper.WriteExceptionToLog("WaveStreamProc", ex, false, "General Catch"); #endif try { InternalStopStream(); } finally { LongSmartSleep(15, "WaveStreamProc"); } } } }
/// <summary>
/// Renders the clip's stereo PCM Data to a new byte array with panning
/// and per-clip volume (both log-scaled) plus a linear master volume
/// applied. The stereo stream is demuxed into two mono channels, each
/// run through its own VolumeWaveProvider16, then remuxed to stereo.
/// Returns the original Data unchanged if anything throws.
/// </summary>
/// <param name="masterVolume">Linear master gain applied to both channels.</param>
public byte[] Render(float masterVolume) { // due to the way NAudio works, the source files must be provided twice. // this is because all channels are kept in sync by the mux, and the unused // channel data is discarded. If we tried to use the same source for both // muxes, it would try to read 2x the data present in the buffer! // If only we had a way to create separate WaveProviders from within the // MultiplexingWaveProvider.. try { using (MemoryStream sourceLeft = new MemoryStream(Data), sourceRight = new MemoryStream(Data)) { using (RawSourceWaveStream waveLeft = new RawSourceWaveStream(new IgnoreDisposeStream(sourceLeft), Format), waveRight = new RawSourceWaveStream(new IgnoreDisposeStream(sourceRight), Format)) { // step 1: separate the stereo stream MultiplexingWaveProvider demuxLeft = new MultiplexingWaveProvider(new IWaveProvider[] { waveLeft }, 1); MultiplexingWaveProvider demuxRight = new MultiplexingWaveProvider(new IWaveProvider[] { waveRight }, 1); demuxLeft.ConnectInputToOutput(0, 0); demuxRight.ConnectInputToOutput(1, 0); // step 2: adjust the volume of a stereo stream VolumeWaveProvider16 volLeft = new VolumeWaveProvider16(demuxLeft); VolumeWaveProvider16 volRight = new VolumeWaveProvider16(demuxRight); // note: use logarithmic scale #if (true) // log scale is applied to each operation float volumeValueLeft = (float)Math.Pow(1.0f - Panning, 0.5f); float volumeValueRight = (float)Math.Pow(Panning, 0.5f); // ensure 1:1 conversion volumeValueLeft /= (float)Math.Sqrt(0.5); volumeValueRight /= (float)Math.Sqrt(0.5); // apply volume volumeValueLeft *= (float)Math.Pow(Volume, 0.5f); volumeValueRight *= (float)Math.Pow(Volume, 0.5f); // clamp volumeValueLeft = Math.Min(Math.Max(volumeValueLeft, 0.0f), 1.0f); volumeValueRight = Math.Min(Math.Max(volumeValueRight, 0.0f), 1.0f); #else // log scale is applied to the result of the operations float volumeValueLeft = (float)Math.Pow(1.0f - Panning, 0.5f); float volumeValueRight = (float)Math.Pow(Panning, 0.5f); 
// ensure 1:1 conversion volumeValueLeft /= (float)Math.Sqrt(0.5); volumeValueRight /= (float)Math.Sqrt(0.5); // apply volume volumeValueLeft *= Volume; volumeValueRight *= Volume; // apply log scale volumeValueLeft = (float)Math.Pow(volumeValueLeft, 0.5f); volumeValueRight = (float)Math.Pow(volumeValueRight, 0.5f); // clamp volumeValueLeft = Math.Min(Math.Max(volumeValueLeft, 0.0f), 1.0f); volumeValueRight = Math.Min(Math.Max(volumeValueRight, 0.0f), 1.0f); #endif // use linear scale for master volume volLeft.Volume = volumeValueLeft * masterVolume; volRight.Volume = volumeValueRight * masterVolume; // step 3: combine them again IWaveProvider[] tracks = new IWaveProvider[] { volLeft, volRight }; MultiplexingWaveProvider mux = new MultiplexingWaveProvider(tracks, 2); // step 4: export them to a byte array byte[] finalData = new byte[Data.Length]; mux.Read(finalData, 0, finalData.Length); // cleanup demuxLeft = null; demuxRight = null; volLeft = null; volRight = null; mux = null; return finalData; } } } catch { return Data; } }
/// <summary>
/// Worker loop: pulls MP3 frames from the shared stream, decompresses
/// them into a buffered provider (created from the first frame's format,
/// 20 s capacity) and starts the output device with a volume stage on the
/// first successful iteration. Sleeps while the wave buffer is nearly
/// full or the stream buffer is low; exits when IsPlaying goes false or
/// the stream ends, then disposes the device and the decompressor.
/// </summary>
private void DecompressFrames() { IMp3FrameDecompressor decompressor = null; IWavePlayer waveOut = null; try { BufferedWaveProvider bufferedWaveProvider = null; VolumeWaveProvider16 volumeProvider = null; byte[] buffer = new byte[16384 * 4]; // needs to be big enough to hold a decompressed frame bool firstLoop = true; do { //WaveBuffer getting full, taking a break if (bufferedWaveProvider != null && bufferedWaveProvider.BufferLength - bufferedWaveProvider.BufferedBytes < bufferedWaveProvider.WaveFormat.AverageBytesPerSecond / 4) { Thread.Sleep(500); } //StreamBuffer empty, taking a break else if (stream.Length < 16384 * 2) { Thread.Sleep(500); } else { Mp3Frame frame = null; try { frame = Mp3Frame.LoadFromStream(stream); } catch (EndOfStreamException) { break; } if (frame == null) continue; if (decompressor == null) { WaveFormat waveFormat = new Mp3WaveFormat(frame.SampleRate, frame.ChannelMode == ChannelMode.Mono ? 1 : 2, frame.FrameLength, frame.BitRate); decompressor = new AcmMp3FrameDecompressor(waveFormat); bufferedWaveProvider = new BufferedWaveProvider(decompressor.OutputFormat); bufferedWaveProvider.BufferDuration = TimeSpan.FromSeconds(20); // allow us to get well ahead of ourselves } int decompressed = decompressor.DecompressFrame(frame, buffer, 0); if (decompressed > 0) bufferedWaveProvider.AddSamples(buffer, 0, decompressed); if (firstLoop) { firstLoop = false; waveOut = CreateWaveOut(); volumeProvider = new VolumeWaveProvider16(bufferedWaveProvider); volumeProvider.Volume = 1f; waveOut.Init(volumeProvider); waveOut.Play(); } } } while (IsPlaying); } finally { if (waveOut != null) { waveOut.Stop(); waveOut.Dispose(); } if (decompressor != null) { decompressor.Dispose(); } } }
/// <summary>
/// Wraps a freshly created wave provider in a volume stage driven by the
/// current volume setting (expressed as a 0-100 percentage).
/// </summary>
/// <param name="provider">The raw audio provider to wrap.</param>
/// <returns>The volume-controlled provider to hand to the output device.</returns>
IWaveProvider audio_onWaveProviderCreated(IWaveProvider provider)
{
    // Divide by 100f so the percent-to-gain conversion happens in floating
    // point: if `volume` is an integral field, the original `volume / 100`
    // truncated every value below 100 to a gain of 0 (muted). For a float
    // `volume` this is behaviorally identical.
    volumeProvider = new VolumeWaveProvider16(provider) { Volume = volume / 100f };
    return volumeProvider;
}
/// <summary>
/// Handles a music request: downloads audio from SoundCloud or YouTube,
/// converts it to a 48 kHz 16-bit stereo WAV at 30% gain, queues it and
/// starts playback on a background thread.
/// </summary>
/// <param name="e">Command event whose first argument is the URL to fetch.</param>
private async Task RequestCommand(CommandEventArgs e)
{
    var urlToDownload = e.Args[0];
    var newFilename = Guid.NewGuid().ToString();
    var mp3OutputFolder = @"c:\mp3\";

    if (urlToDownload.Contains("soundcloud"))
    {
        Track track = _soundCloud.GetTrack(e.Args[0]);
        string inPath = Path.Combine(mp3OutputFolder, newFilename + ".mp3");

        if (!track.Streamable)
        {
            await e.Channel.SendMessage("\"" + track.Title + "\" is not streamable :C");
            // BUGFIX: the original fell through and attempted the download
            // anyway; bail out once we know the track cannot be streamed.
            return;
        }

        try
        {
            using (var client = new WebClient())
            {
                client.DownloadFile(track.StreamUrl + "?client_id=" + _soundCloud.ClientID, inPath);
            }
        }
        catch (Exception ex)
        {
            Console.Write(ex.ToString());
        }

        var outFile = inPath.Remove(inPath.Length - 4) + "_c" + ".wav";
        if (!TryConvertToQueueWav(inPath, outFile))
        {
            return;
        }

        await e.Channel.SendMessage("Added \"" + track.Title + "\" to the queue. It will be played soon.");
        _queue.musicQueue.Enqueue(Tuple.Create<string, string>(outFile, track.Title));
        Thread thread = new Thread(() => { _queue.PlayNextMusicToAllVoiceClients(); });
        thread.Start();
    }
    else
    {
        IEnumerable<VideoInfo> videoInfos = DownloadUrlResolver.GetDownloadUrls(e.Args[0]);
        VideoInfo video = videoInfos
            .Where(info => info.CanExtractAudio)
            .OrderByDescending(info => info.AudioBitrate)
            .First();

        if (video.RequiresDecryption)
        {
            DownloadUrlResolver.DecryptDownloadUrl(video);
        }

        string inPath = Path.Combine(mp3OutputFolder, newFilename + video.AudioExtension);
        try
        {
            var audioDownloader = new AudioDownloader(video, inPath);
            audioDownloader.Execute();
        }
        catch (Exception ex)
        {
            Console.WriteLine("Error while trying to download youtube link.");
            Console.Write(ex.ToString());
        }

        // NOTE(review): assumes the audio extension is exactly 4 characters
        // (e.g. ".mp3"); a longer extension leaves part of it in the name.
        var outFile = inPath.Remove(inPath.Length - 4) + "_c" + ".wav";
        if (!TryConvertToQueueWav(inPath, outFile))
        {
            return;
        }

        await e.Channel.SendMessage("Added \"" + video.Title + "\" to the queue. It will be played soon.");
        _queue.musicQueue.Enqueue(Tuple.Create<string, string>(outFile, video.Title));
        Thread thread = new Thread(() => { _queue.PlayNextMusicToAllVoiceClients(); });
        thread.Start();
    }
}

/// <summary>
/// Converts a downloaded audio file to a 48 kHz, 16-bit stereo WAV at
/// 30% gain and deletes the source; returns false if conversion failed.
/// </summary>
private static bool TryConvertToQueueWav(string inPath, string outFile)
{
    try
    {
        using (var reader = new MediaFoundationReader(inPath))
        {
            var outFormat = new WaveFormat(48000, 16, 2);
            using (var resampler = new MediaFoundationResampler(reader, outFormat))
            {
                resampler.ResamplerQuality = 60;
                VolumeWaveProvider16 vol = new VolumeWaveProvider16(resampler);
                vol.Volume = 0.3f;
                WaveFileWriter.CreateWaveFile(outFile, vol);
            }
        }
        File.Delete(inPath);
        return true;
    }
    catch (Exception ex)
    {
        Console.Write(ex.ToString());
        return false;
    }
}
/// <summary>
/// Initializes the specified provider.
/// </summary>
/// <param name="provider">The provider.</param>
/// <param name="sampleRate">The sample rate.</param>
/// <param name="channels">The channels.</param>
/// <param name="bitsPerSample">The bits per sample.</param>
/// <returns>True once the device has been initialized and started.</returns>
/// <exception cref="System.InvalidOperationException">Wave device already started</exception>
public bool Initialize(IAudioDataProvider provider, int sampleRate, int channels, int bitsPerSample)
{
    lock (SyncLock)
    {
        if (this.HasInitialized)
        {
            throw new InvalidOperationException("Wave device already initialized");
        }

        // Build the chain: callback source -> volume stage -> device.
        var format = new WaveFormat(sampleRate, bitsPerSample, channels);
        var source = new CallbackWaveProvider16(format, provider.RenderAudioBuffer, SyncLock);
        m_WaveProvider = new VolumeWaveProvider16(source);

        m_Device.Init(m_WaveProvider);
        m_Device.Play();
        return true;
    }
}
/// <summary>
/// Generates a WAVE file by running Open JTalk on the given text.
/// </summary>
/// <param name="textToSpeak">
/// Text to speak</param>
/// <param name="wave">
/// Path of the WAVE file to create</param>
private void CreateWave(
    string textToSpeak,
    string wave)
{
    // Build the paths to the Open JTalk executable and its data files.
    var openJTalkDir = TTSYukkuriConfig.Default.OpenJTalkSettings.OpenJTalkDirectory;
    if (string.IsNullOrWhiteSpace(openJTalkDir))
    {
        openJTalkDir = "OpenJTalk";
    }

    var openJTalk = Path.Combine(openJTalkDir, @"open_jtalk.exe");
    var dic = Path.Combine(openJTalkDir, @"dic");
    var voice = Path.Combine(openJTalkDir, @"voice\" + TTSYukkuriConfig.Default.OpenJTalkSettings.Voice);

    // Open JTalk writes into this temp file; ensure it does not pre-exist.
    var waveTemp = Path.GetTempFileName();
    if (File.Exists(waveTemp))
    {
        File.Delete(waveTemp);
    }

    // Scale the configured settings to the ranges Open JTalk expects.
    var volume = (float)TTSYukkuriConfig.Default.OpenJTalkSettings.Volume / 100f;
    var speed = (float)TTSYukkuriConfig.Default.OpenJTalkSettings.Speed / 100f;
    var pitch = (float)TTSYukkuriConfig.Default.OpenJTalkSettings.Pitch / 10f;

    // Open JTalk reads its input text from a Shift_JIS encoded file.
    var textFile = Path.GetTempFileName();
    File.WriteAllText(textFile, textToSpeak, Encoding.GetEncoding("Shift_JIS"));

    var args = new string[]
    {
        "-x " + "\"" + dic + "\"",
        "-m " + "\"" + voice + "\"",
        "-ow " + "\"" + waveTemp + "\"",
        "-g " + volume.ToString("N1"),
        "-r " + speed.ToString("N1"),
        "-fm " + pitch.ToString("N1"),
        textFile
    };

    var pi = new ProcessStartInfo()
    {
        FileName = openJTalk,
        CreateNoWindow = true,
        UseShellExecute = false,
        Arguments = string.Join(" ", args),
        RedirectStandardError = true,
        RedirectStandardOutput = true,
    };

    Debug.WriteLine(pi.FileName + " " + pi.Arguments);

    using (var p = Process.Start(pi))
    {
        // NOTE(review): reading stderr then stdout synchronously while
        // both are redirected can deadlock if the child fills the stdout
        // pipe first - consider reading one of them asynchronously.
        var stderr = p.StandardError.ReadToEnd();
        var stdout = p.StandardOutput.ReadToEnd();

        if (!string.IsNullOrWhiteSpace(stderr))
        {
            Debug.WriteLine(stderr);
        }

        if (!string.IsNullOrWhiteSpace(stdout))
        {
            Debug.WriteLine(stdout);
        }

        p.WaitForExit();
    }

    if (File.Exists(textFile))
    {
        File.Delete(textFile);
    }

    // Amplify the result only when a non-unity gain is configured;
    // otherwise just move the temp file into place.
    var gain = (float)TTSYukkuriConfig.Default.OpenJTalkSettings.Gain / 100f;
    if (gain != 1.0f)
    {
        using (var reader = new WaveFileReader(waveTemp))
        {
            var prov = new VolumeWaveProvider16(reader);
            prov.Volume = gain;

            WaveFileWriter.CreateWaveFile(
                wave,
                prov);
        }
    }
    else
    {
        File.Move(waveTemp, wave);
    }

    if (File.Exists(waveTemp))
    {
        File.Delete(waveTemp);
    }
}
/// <summary>
/// Drains the buffered provider through a 48 kHz 16-bit stereo resampler
/// at 30% volume, sending 200 ms chunks to the client until the buffer
/// runs low or playback stops, then switches back to buffering.
/// </summary>
private void StartPlay()
{
    try
    {
        var outFormat = new WaveFormat(48000, 16, 2);
        using (var resampler = new MediaFoundationResampler(bufferedWaveProvider, outFormat))
        {
            resampler.ResamplerQuality = 60;

            // One fifth of a second of audio per send.
            int blocksize = resampler.WaveFormat.AverageBytesPerSecond / 5;
            var buffer = new byte[blocksize];

            var vol = new VolumeWaveProvider16(resampler);
            vol.Volume = 0.3f;

            while (bufferedWaveProvider.BufferedBytes > bufferedWaveProvider.WaveFormat.AverageBytesPerSecond / 5
                && playbackState != StreamingPlaybackState.Stopped)
            {
                vol.Read(buffer, 0, blocksize);
                _client.Send(buffer, 0, blocksize);
                _client.Wait();
            }
        }
    }
    catch (Exception e2)
    {
        Console.Write(e2.ToString());
        return;
    }

    if (playbackState != StreamingPlaybackState.Stopped)
    {
        playbackState = StreamingPlaybackState.Buffering;
    }

    Console.WriteLine(String.Format("Gotta buffer"));
}