Example #1
        public async Task OnVoiceReceivedPassthrough(VoiceReceiveEventArgs args, VoiceNextConnection voiceConnection)
        {
            // Echo the received PCM straight back into the connection's transmit stream.
            var buff = args.PcmData.ToArray();

            var transmitStream = voiceConnection.GetTransmitStream();
            await transmitStream.WriteAsync(buff, 0, buff.Length);
            await transmitStream.FlushAsync();
        }
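For context, a handler like this only runs when incoming audio is enabled and the handler is attached to the connection's VoiceReceived event. A minimal wiring sketch follows; `discord` and `channel` are placeholders, and the exact event signature depends on the DSharpPlus version (older releases pass only the event args, newer ones pass the connection plus the args):

        // Minimal wiring sketch (assumed setup; 'discord' and 'channel' are placeholders).
        var vnext = discord.UseVoiceNext(new VoiceNextConfiguration
        {
            EnableIncoming = true   // receiving voice is opt-in
        });

        var vnc = await vnext.ConnectAsync(channel);

        // Newer versions pass (connection, args); adapt the parameter order of the handler above.
        vnc.VoiceReceived += (connection, args) => OnVoiceReceivedPassthrough(args, connection);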
Example #2
        private Task OnVoiceReceived(VoiceReceiveEventArgs e)
        {
            lock (this)
            {
                // One output file per SSRC, i.e. per speaking user.
                if (!this.ssrcFilemap.ContainsKey(e.SSRC))
                {
                    this.ssrcFilemap[e.SSRC] = File.Create($"{e.SSRC}.pcm");
                }
                var fs = this.ssrcFilemap[e.SSRC];

                // Restart the 2-second "end of sentence" timer on every packet; when no
                // packet arrives for 2 seconds, OnTimedEvent processes the file.
                speakingSentence?.Dispose();
                speakingSentence = new Timer
                {
                    Interval = 2000
                };

                speakingSentence.Elapsed += (sender, args) => OnTimedEvent(sender, args, fs);
                speakingSentence.Start();

                // e.Client.DebugLogger.LogMessage(LogLevel.Debug, "VNEXT RX", $"{e.User?.Username ?? "Unknown user"} sent voice data.", DateTime.Now);
                var buff = e.Voice.ToArray();
                fs.Write(buff, 0, buff.Length);
                fs.Flush();
            }

            return Task.CompletedTask;
        }
Example #3
        private async Task OnVoiceReceived(VoiceReceiveEventArgs e)
        {
            if (!this._ssrc_filemap.ContainsKey(e.SSRC))
            {
                this._ssrc_filemap[e.SSRC] = File.Create($"{e.SSRC}.pcm");
            }
            var fs = this._ssrc_filemap[e.SSRC];

            //e.Client.DebugLogger.LogMessage(LogLevel.Debug, "VNEXT RX", $"{e.User?.Username ?? "Unknown user"} sent voice data.", DateTime.Now);
            var buff = e.Voice.ToArray();
            await fs.WriteAsync(buff, 0, buff.Length).ConfigureAwait(false);

            await fs.FlushAsync().ConfigureAwait(false);
        }
Example #4
        private async Task OnVoiceReceived(VoiceNextConnection vnc, VoiceReceiveEventArgs e)
        {
            if (!this._ssrcFilemap.ContainsKey(e.SSRC))
            {
                this._ssrcFilemap[e.SSRC] = File.Create($"{e.SSRC} ({e.AudioFormat.ChannelCount}).pcm");
            }
            var fs = this._ssrcFilemap[e.SSRC];

            // e.Client.DebugLogger.LogMessage(LogLevel.Debug, "VNEXT RX", $"{e.User?.Username ?? "Unknown user"} sent voice data. {e.AudioFormat.ChannelCount}", DateTime.Now);
            var buff = e.PcmData.ToArray();
            await fs.WriteAsync(buff, 0, buff.Length).ConfigureAwait(false);

            // await fs.FlushAsync().ConfigureAwait(false);
        }
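The file-dump handlers above never close their FileStreams, so buffered data can be lost when the bot shuts down. A minimal cleanup sketch, assuming `_ssrcFilemap` is a `Dictionary<uint, FileStream>` (the field declaration is not part of the excerpt):

        // Hypothetical cleanup, called once after leaving the voice channel.
        private void CloseDumpFiles()
        {
            foreach (var fs in this._ssrcFilemap.Values)
            {
                fs.Flush();
                fs.Dispose();
            }

            this._ssrcFilemap.Clear();
        }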
Example #5
        private async Task VoiceReceiveHandler(VoiceNextConnection connection, VoiceReceiveEventArgs args)
        {
            // Note: VoiceReceived fires for every received packet, so this starts a new
            // ffmpeg process (and writes a new output file) per packet.
            var fileName = DateTimeOffset.Now.ToUnixTimeMilliseconds();
            var ffmpeg   = Process.Start(new ProcessStartInfo
            {
                FileName              = "ffmpeg",
                Arguments             = $@"-ac 1 -f s16le -ar 48000 -i pipe:0 -ac 2 -ar 44100 Output/{fileName}.wav",
                RedirectStandardInput = true,
                UseShellExecute       = false
            });

            await ffmpeg.StandardInput.BaseStream.WriteAsync(args.PcmData);

            // Close stdin so ffmpeg sees EOF and finalizes the WAV before the process handle is released.
            ffmpeg.StandardInput.Close();
            ffmpeg.Dispose();
        }
Example #6
        private Task Connection_VoiceReceived(VoiceReceiveEventArgs e)
        {
            if (!waveProviders.ContainsKey(e.SSRC))
            {
                // One buffered provider per SSRC, converted to float and fed into the shared mixer.
                BufferedWaveProvider provider = new BufferedWaveProvider(format)
                {
                    DiscardOnBufferOverflow = true,
                    BufferDuration          = TimeSpan.FromMilliseconds(1000)
                };
                mixer.AddInputStream(new Wave16ToFloatProvider(provider));
                waveProviders[e.SSRC] = provider;
            }

            waveProviders[e.SSRC].AddSamples(e.Voice.ToArray(), 0, e.Voice.Count);
            return Task.CompletedTask;
        }
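Example #6 depends on fields that are not shown. A plausible set of declarations, assuming NAudio and the 48 kHz 16-bit PCM that VoiceNext delivers (the field names come from the snippet; everything else is an assumption):

        // Assumed declarations for the fields used above (not part of the original snippet).
        private readonly WaveFormat format = new WaveFormat(48000, 16, 2);
        private readonly MixingWaveProvider32 mixer = new MixingWaveProvider32();
        private readonly Dictionary<uint, BufferedWaveProvider> waveProviders =
            new Dictionary<uint, BufferedWaveProvider>();

        // The mixed output could then be played back, e.g.:
        // var output = new WaveOutEvent();
        // output.Init(mixer);
        // output.Play();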
Example #7
        private async Task VoiceReceiveHandler(VoiceNextConnection connection, VoiceReceiveEventArgs args)
        {
            var guild = connection.TargetChannel.Guild;
            var user  = args.User;

            if (guild == null || user == null)
            {
                return;
            }

            var configuration = await DataBase.GetServerConfiguration(guild.Id);

            UserChokeData userData = await stash[guild.Id].GetOrCreateUserData(user.Id);

            if (ProcessSoundData(configuration, userData, args.PcmData.ToArray()))
            {
                var member = await guild.GetMemberAsync(user.Id);

                await member.SetMuteAsync(true, $"Being louder than {configuration.MaxLoudness:0.##}% for {configuration.Interval} ms.");

                userData.Count = 0;
                userData.Time  = DateTime.Now;
                userData.SessionChokes++;
                userData.LastMute = DateTime.Now;
                await DataBase.SetUserChokes(user.Id, ++userData.Chokes);

                if (configuration.MuteTime != -1)
                {
                    var timer = new Timer()
                    {
                        AutoReset = false,
                        Interval  = configuration.MuteTime
                    };
                    timer.Elapsed += async (s, e) =>
                    {
                        await member.SetMuteAsync(false, $"{configuration.MuteTime} ms expired.");

                        timer.Dispose();
                    };
                    timer.Start();
                }
            }
        }
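ProcessSoundData is not shown in this excerpt. Purely as an illustration of the kind of check it might perform on the 16-bit little-endian PCM, a hypothetical peak-loudness test could look like this (not the author's implementation):

        // Hypothetical illustration only: true when the loudest 16-bit sample in the packet
        // exceeds a percentage of full scale. Assumes a little-endian host.
        private static bool IsLouderThan(byte[] pcm, double maxLoudnessPercent)
        {
            var peak = 0;
            for (var i = 0; i + 1 < pcm.Length; i += 2)
            {
                var sample = Math.Abs((int)BitConverter.ToInt16(pcm, i));
                if (sample > peak)
                {
                    peak = sample;
                }
            }

            return peak / 327.68 > maxLoudnessPercent;   // 32768 == 100 % of full scale
        }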
Example #8
        public async Task OnVoiceReceived(VoiceReceiveEventArgs ea)
        {
            if (!this.ffmpegs.ContainsKey(ea.SSRC))
            {
                var psi = new ProcessStartInfo {
                    FileName              = "ffmpeg",
                    Arguments             = $@"-ac 2 -f s16le -ar 48000 -i pipe:0 -ac 2 -ar 44100 {ea.SSRC}.wav",
                    RedirectStandardInput = true
                };

                this.ffmpegs.TryAdd(ea.SSRC, Process.Start(psi));
            }

            var buff = ea.Voice.ToArray();

            var ffmpeg = this.ffmpegs[ea.SSRC];
            await ffmpeg.StandardInput.BaseStream.WriteAsync(buff, 0, buff.Length);

            await ffmpeg.StandardInput.BaseStream.FlushAsync();
        }
Example #9
        private Task OnVoiceReceived(VoiceReceiveEventArgs e)
        {
            // test
            if (e.User.Username == "neosknight")
            {
                // Dump the raw PCM bytes to the console and append them to a file.
                ReadOnlyMemory<byte> m = e.PcmData;
                var buff = m.ToArray();
                foreach (var b in buff)
                {
                    Console.Write(b);
                }
                Console.WriteLine();

                using (var stream = new FileStream("kek.txt", FileMode.Append))
                {
                    stream.Write(buff, 0, buff.Length);
                }
            }

            return Task.CompletedTask;
        }
Example #10
        public async Task OnVoiceReceived(VoiceReceiveEventArgs args)
        {
            if (args.User != null)
            {
                var audio = args.Client.GetCommandsNext().Services.GetService<IProvideAudioState>();

                var user = true;
                if (!audio.SpeechFromUser.ContainsKey(args.User.Id))
                {
                    user = audio.SpeechFromUser.TryAdd(args.User.Id, new ConcurrentQueue<byte>());
                }

                if (user)
                {
                    var buff = args.PcmData.ToArray();
                    foreach (var b in buff)
                    {
                        audio.SpeechFromUser[args.User.Id].Enqueue(b);
                    }
                }
            }
        }
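IProvideAudioState is an application-defined service resolved from the CommandsNext DI container; its definition is not shown. Judging from the calls above, it roughly amounts to the following (an assumption, not a library API):

        // Assumed shape of the injected service used above.
        public interface IProvideAudioState
        {
            // Per-user queues of raw PCM bytes, keyed by Discord user id.
            ConcurrentDictionary<ulong, ConcurrentQueue<byte>> SpeechFromUser { get; }
        }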
Example #11
        public async Task OnVoiceReceived(VoiceReceiveEventArgs ea)
        {
            if (!ffmpegs.ContainsKey(ea.SSRC))
            {
                Console.WriteLine($"create input : [{ea.User.Username}]");
                var psi = new ProcessStartInfo
                {
                    FileName              = "ffmpeg",
                    Arguments             = $@"-ac 2 -f s16le -ar 48000 -i pipe:0 -ac 2 -ar 44100 {ea.SSRC}.wav",
                    RedirectStandardInput = true
                };

                ffmpegs.TryAdd(ea.SSRC, Process.Start(psi));
            }
            Console.WriteLine($"Current works... [{ea.User.Username}]");

            var buff = ea.Voice.ToArray();

            var ffmpeg = ffmpegs[ea.SSRC];
            await ffmpeg.StandardInput.BaseStream.WriteAsync(buff, 0, buff.Length);

            await ffmpeg.StandardInput.BaseStream.FlushAsync();
        }
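Examples #8 and #11 never close the ffmpeg processes they start, so the WAV files are only finalized once ffmpeg happens to exit. A minimal shutdown sketch, assuming `ffmpegs` is a `ConcurrentDictionary<uint, Process>` (consistent with the TryAdd calls above):

        // Hypothetical shutdown, called after disconnecting from voice. Closing stdin signals
        // EOF, which lets ffmpeg write the WAV header and exit cleanly.
        public void FinishRecordings()
        {
            foreach (var ffmpeg in this.ffmpegs.Values)
            {
                ffmpeg.StandardInput.Close();
                ffmpeg.WaitForExit();
                ffmpeg.Dispose();
            }

            this.ffmpegs.Clear();
        }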