public async Task play()
{
    using (var ms = new MemoryStream())
    {
        await ffout.CopyToAsync(ms);
        ms.Position = 0;

        var buff = new byte[3840]; // buffer to hold the PCM data
        var br = 0;
        while ((br = ms.Read(buff, 0, buff.Length)) > 0)
        {
            if (stopped)
            {
                break;
            }

            if (br < buff.Length) // it's possible we got less than expected, let's null the remaining part of the buffer
            {
                for (var i = br; i < buff.Length; i++)
                {
                    buff[i] = 0;
                }
            }

            await _vnc.SendAsync(buff, 20); // we're sending 20ms of data
        }
    }
}
/// <summary>
/// Plays the audio.
/// </summary>
/// <param name="voiceNextCon">The voice next con.</param>
/// <param name="filename">The filename.</param>
/// <returns>Completed Task once the audio finishes playing</returns>
public async Task PlayAudio(VoiceNextConnection voiceNextCon, string filename)
{
    // wait for current playback to finish
    while (voiceNextCon.IsPlaying)
    {
        Program.Client.DebugLogger.Info("Waiting for current audio to finish");
        await voiceNextCon.WaitForPlaybackFinishAsync();
    }

    // play
    await voiceNextCon.SendSpeakingAsync(true);
    try
    {
        // decode the file to raw 48 kHz 16-bit stereo PCM on ffmpeg's stdout
        var ffmpeg_inf = new ProcessStartInfo
        {
            FileName = "ffmpeg",
            Arguments = $"-i \"{filename}\" -ac 2 -f s16le -ar 48000 pipe:1",
            UseShellExecute = false,
            RedirectStandardOutput = true,
            RedirectStandardError = true
        };
        var ffmpeg = Process.Start(ffmpeg_inf);
        var ffout = ffmpeg.StandardOutput.BaseStream;

        // let's buffer ffmpeg output
        using (var ms = new MemoryStream())
        {
            await ffout.CopyToAsync(ms);
            ms.Position = 0;

            var buff = new byte[3840]; // buffer to hold the PCM data
            var br = 0;
            while ((br = ms.Read(buff, 0, buff.Length)) > 0)
            {
                if (br < buff.Length) // it's possible we got less than expected, let's null the remaining part of the buffer
                {
                    for (var i = br; i < buff.Length; i++)
                    {
                        buff[i] = 0;
                    }
                }

                await voiceNextCon.SendAsync(buff, 20); // we're sending 20ms of data
            }
        }
    }
    catch (Exception ex)
    {
        Program.Client.DebugLogger.Info($"Exception playing audio {ex}");
        throw; // rethrow without resetting the stack trace
    }
    finally
    {
        await voiceNextCon.SendSpeakingAsync(false);
        Program.Client.DebugLogger.Info("Finished playing audio");
    }
}
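Aside: the 3840-byte buffer that recurs in these snippets is exactly one 20 ms frame of the 48 kHz, 16-bit, stereo PCM that ffmpeg is asked to emit (-f s16le -ar 48000 -ac 2), which is why it is paired with SendAsync(buff, 20). A minimal sketch of that arithmetic; FrameBytes is a hypothetical helper, not part of any of the snippets:

// Hypothetical helper: shows why 3840 bytes corresponds to one 20 ms frame
// of 48 kHz, 16-bit (2 bytes per sample), stereo (2 channels) PCM.
static int FrameBytes(int sampleRate = 48000, int channels = 2, int bytesPerSample = 2, int frameMs = 20)
{
    int samplesPerFrame = sampleRate * frameMs / 1000; // 960 samples per channel
    return samplesPerFrame * channels * bytesPerSample; // 960 * 2 * 2 = 3840
}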
public static async Task SendVoiceData(byte[] bytes, int size, VoiceNextConnection vnc)
{
    // if the number of users in the channel changed and the config asks for it, stop playback
    if (MusicBot.UsersInChannel != DUtils.GetAmountInVoice(vnc.Channel) && Config.StopPlayingWithNewPlayer)
    {
        MusicBot.UsersInChannel = DUtils.GetAmountInVoice(vnc.Channel);
        MusicBot.StopPlayingJoined = true;
    }

    if (MusicBot.StopPlayingJoined)
    {
        return;
    }

    await vnc.SendAsync(bytes, size, 16);
}
public static async Task Say(VoiceNextConnection vnc, string say)
{
    await vnc.SendSpeakingAsync(true); // send a speaking indicator

    using (MemoryStream stream = new MemoryStream())
    {
        // synthesize straight into a memory stream as 48 kHz, 16-bit, stereo PCM
        var info = new SpeechAudioFormatInfo(48000, AudioBitsPerSample.Sixteen, AudioChannel.Stereo);
        using (SpeechSynthesizer synth = new SpeechSynthesizer())
        {
            synth.SetOutputToAudioStream(stream, info);
            //var t = synth.GetInstalledVoices();
            //synth.SelectVoice(t.First().VoiceInfo.Name);
            synth.Speak(say);
            synth.SetOutputToNull();
        }

        stream.Seek(0, SeekOrigin.Begin);
        Console.WriteLine("Format: {0}", info.EncodingFormat);
        Console.WriteLine("BitRate: {0}", info.BitsPerSample);
        Console.WriteLine("Block Alignment: {0}", info.BlockAlign);
        Console.WriteLine("Samples per second: {0}", info.SamplesPerSecond);

        var buff = new byte[3840];
        var br = 0;
        while ((br = stream.Read(buff, 0, buff.Length)) > 0)
        {
            if (br < buff.Length) // not a full frame, mute the rest
            {
                for (var i = br; i < buff.Length; i++)
                {
                    buff[i] = 0;
                }
            }

            await vnc.SendAsync(buff, 20, info.BitsPerSample);
        }
    }

    await vnc.SendSpeakingAsync(false); // we're not speaking anymore
}
public async Task PlayAudio(VoiceNextConnection vnc, Stream stream)
{
    using (var readingStream = Stream.Synchronized(stream))
    {
        var ffout = await GenerateSoundStream(readingStream);

        var buff = new byte[3840];
        var br = 0;
        while ((br = ffout.Read(buff, 0, buff.Length)) > 0)
        {
            if (br < buff.Length)
            {
                for (var i = br; i < buff.Length; i++)
                {
                    buff[i] = 0;
                }
            }

            await vnc.SendAsync(buff, 20);
        }

        await vnc.SendSpeakingAsync(false);
    }
}
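The read / zero-pad / send loop above is essentially identical across these snippets. A minimal sketch of that shared pattern as a standalone helper, assuming only the SendAsync(byte[], int) and SendSpeakingAsync(bool) calls already used above (StreamPcmAsync is a hypothetical name, not a VoiceNext API):

using System;
using System.IO;
using System.Threading.Tasks;
using DSharpPlus.VoiceNext;

public static class PcmStreaming
{
    // Hypothetical helper: streams raw 48 kHz, 16-bit, stereo PCM to a VoiceNextConnection
    // in 20 ms frames, zero-padding the final partial frame, like the snippets above.
    public static async Task StreamPcmAsync(VoiceNextConnection vnc, Stream pcm)
    {
        await vnc.SendSpeakingAsync(true);
        try
        {
            var buff = new byte[3840]; // one 20 ms frame
            int br;
            while ((br = await pcm.ReadAsync(buff, 0, buff.Length)) > 0)
            {
                if (br < buff.Length)
                {
                    Array.Clear(buff, br, buff.Length - br); // mute the unread tail of the frame
                }

                await vnc.SendAsync(buff, 20);
            }
        }
        finally
        {
            await vnc.SendSpeakingAsync(false);
        }
    }
}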
public static async Task VoiceStream(VoiceNextConnection vnc, Video vid, int SpecialID)
{
    ThreadIDD = SpecialID;
    Exception exc = null;
    string filename = CreatePathFromVid(FirstPlay());

    // check if file exists
    if (!File.Exists(filename))
    {
        // file does not exist
        Utils.Log($"File `{filename}` does not exist.", LogType.Error);
        //await ctx.RespondAsync($"File `{filename}` does not exist.");
        return;
    }

    try
    {
        MediaFoundationResampler resampler;
        WaveStream mediaStream;
        int SampleRate = 48000;
        //float Volume = 1F;
        int channelCount = 2; // Get the number of AudioChannels our AudioService has been configured to use.
        WaveFormat OutFormat = new WaveFormat(SampleRate, 16, channelCount); // Create a new Output Format, using the spec that Discord will accept, and with the number of channels that our client supports.

        try
        {
            mediaStream = new WaveChannel32(new MediaFoundationReader(filename), Volume, 0F);
            using (mediaStream)
            using (resampler = new MediaFoundationResampler(mediaStream, OutFormat)) // Create a Disposable Resampler, which will convert the read MP3 data to PCM, using our Output Format
            {
                int m = (int)mediaStream.Length;
                IntPlayout = (m / 2) + (m / 35); // rough byte budget used to decide when to cut playback off
                TotalSendBytes = 0;
                resampler.ResamplerQuality = 60; // Set the quality of the resampler to 60, the highest quality
                int blockSize = OutFormat.AverageBytesPerSecond / 50; // Establish the size of our AudioBuffer
                byte[] buffer = new byte[blockSize];
                int byteCount;

                while ((byteCount = resampler.Read(buffer, 0, blockSize)) > 0) // Read audio into our buffer, and keep a loop open while data is present
                {
                    if (byteCount < blockSize)
                    {
                        // Incomplete Frame
                        for (int i = byteCount; i < blockSize; i++)
                        {
                            buffer[i] = 0;
                        }
                    }

                    TotalSendBytes += buffer.Length;
                    if (SpecialID != ThreadIDD)
                    {
                        return;
                    }

                    await vnc.SendAsync(buffer, 20); // we're sending 20ms of data

                    if (IntPlayout <= TotalSendBytes)
                    {
                        Console.WriteLine("I AM BREAKING UP MY BONES AGAIN, YOU MADE MY SYSTEM BROKE!");
                        break;
                    }
                }
            }
        }
        catch (Exception ee)
        {
            Utils.Log(ee.StackTrace + "\n" + ee.Message, LogType.Error);
        }
    }
    catch (Exception ex)
    {
        exc = ex;
    }
    finally
    {
        await vnc.SendSpeakingAsync(false);
    }

    if (exc != null)
    {
        Utils.Log($"An exception occurred during playback: `{exc.GetType()}: {exc.Message}`", LogType.Error);
    }
    //await ctx.RespondAsync($"An exception occurred during playback: `{exc.GetType()}: {exc.Message}`");
}
private static async Task PlaySong(CommandContext ctx)
{
    if (GuildMusicStatuses[ctx.Guild.Id].Playing)
    {
        return;
    }

    if (GuildQueues[ctx.Guild.Id].Count == 0)
    {
        throw new OutputException("No songs in queue! If you queued a song and this message shows, either it is still being locally queued or it silently failed to be retrieved.");
    }

    GuildMusicStatuses[ctx.Guild.Id].Playing = true;

    while (true)
    {
        VoiceNextExtension vnext = ctx.Client.GetVoiceNext();
        VoiceNextConnection vnc = vnext.GetConnection(ctx.Guild);
        if (vnc == null || !GuildQueues[ctx.Guild.Id].Any())
        {
            break;
        }

        if (GuildQueues[ctx.Guild.Id].First().File == null)
        {
            await ctx.RespondAsync("The next song is queuing, please wait...");

            // busy-wait until another task fills in the file path (or marks it as "error")
            while (GuildQueues[ctx.Guild.Id].First().File == null)
            {
            }

            if (GuildQueues[ctx.Guild.Id].First().File == "error")
            {
                await ctx.RespondAsync($"Failed to play **{GuildQueues[ctx.Guild.Id].First().Title}** by **{GuildQueues[ctx.Guild.Id].First().Artist}**, " +
                    $"queued by {GuildQueues[ctx.Guild.Id].First().Queuer.Mention}");
                GuildQueues[ctx.Guild.Id].RemoveAt(0);
                await PlaySong(ctx);
                return;
            }
        }

        DiscordEmbedBuilder nowplayingBuilder = new DiscordEmbedBuilder
        {
            Description = $"🎶 Now playing [{GuildQueues[ctx.Guild.Id].First().Title}](https://www.youtube.com/watch?v={GuildQueues[ctx.Guild.Id].First().Id}) 🎶\n\n" +
                $"[{GuildQueues[ctx.Guild.Id].First().Queuer.Mention}]{(GuildMusicStatuses[ctx.Guild.Id].Repeat == MusicStatus.RepeatType.None ? "" : " [🔁]")}"
        };
        GuildMusicStatuses[ctx.Guild.Id].Skip = false;
        await ctx.RespondAsync(null, false, nowplayingBuilder.Build());

        string songFile = GuildQueues[ctx.Guild.Id].First().File;
        ProcessStartInfo startInfo = new ProcessStartInfo
        {
            FileName = "ffmpeg",
            Arguments = $@"-i ""{songFile}"" -ac 2 -f s16le -ar 48000 pipe:1",
            UseShellExecute = false,
            RedirectStandardOutput = true
        };
        Process ffmpeg = Process.Start(startInfo);
        Stream ffout = ffmpeg.StandardOutput.BaseStream;

        await vnc.SendSpeakingAsync(); // send a speaking indicator

        byte[] buff = new byte[3840]; // buffer to hold the PCM data
        while (await ffout.ReadAsync(buff, 0, buff.Length) > 0)
        {
            if (GuildMusicStatuses[ctx.Guild.Id].Skip)
            {
                break;
            }

            await vnc.SendAsync(buff, 20); // we're sending 20ms of data
            buff = new byte[3840];
        }

        try
        {
            ffout.Flush();
            ffout.Dispose();
            ffmpeg.Dispose();

            if (GuildMusicStatuses[ctx.Guild.Id].Repeat == MusicStatus.RepeatType.None)
            {
                while (true)
                {
                    try
                    {
                        File.Delete(songFile);
                        break;
                    }
                    catch
                    {
                        // Wait for processes to release file.
                    }
                }
            }
        }
        catch
        {
            // Consume errors.
        }

        await vnc.SendSpeakingAsync(false);

        switch (GuildMusicStatuses[ctx.Guild.Id].Repeat)
        {
            case MusicStatus.RepeatType.None:
                GuildQueues[ctx.Guild.Id].RemoveAt(0);
                break;
            case MusicStatus.RepeatType.All:
                JigglySong jigglySong = GuildQueues[ctx.Guild.Id][0];
                GuildQueues[ctx.Guild.Id].Add(jigglySong);
                GuildQueues[ctx.Guild.Id].RemoveAt(0);
                break;
            case MusicStatus.RepeatType.One:
                // The song is still number one in the queue ;D
                break;
            default:
                GuildQueues[ctx.Guild.Id].RemoveAt(0);
                break;
        }

        GuildMusicStatuses[ctx.Guild.Id].Skip = false;
    }

    ctx.Client.GetVoiceNext().GetConnection(ctx.Guild)?.Disconnect();
    Directory.Delete(Path.Combine(Globals.AppPath, "Queue", ctx.Guild.Id.ToString()), true);
    GuildMusicStatuses[ctx.Guild.Id].Playing = false;
}
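In the last snippet, the ffmpeg process is disposed right after the read loop and the temporary file is then deleted inside a retry loop. A small sketch of a more defensive teardown, assuming the same ffmpeg, ffout and songFile variables are still in scope; it simply waits for the decoder to exit so the delete is less likely to hit a file lock:

// Sketch only, reusing the ffmpeg / ffout / songFile variables from the snippet above:
// wait for the decoder process to exit before deleting its input file.
ffout.Dispose();
if (!ffmpeg.HasExited)
{
    ffmpeg.WaitForExit();
}
ffmpeg.Dispose();
File.Delete(songFile); // the decoder has released the file, so this is less likely to throw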