/// <summary>
/// Audio output: fills the AudioGraph frame input node with PCM data read from the Discord voice stream.
/// </summary>
/// <param name="sender"></param>
/// <param name="args"></param>
private async void FrameInputNode_QuantumStarted(AudioFrameInputNode sender, FrameInputNodeQuantumStartedEventArgs args)
{
    if (AudioInStream == null)
    {
        return;
        //throw new Exception("not connected to discord audio channel.");
    }

    if (AudioInStream.AvailableFrames == 0)
    {
        return;
    }

    uint numSamplesNeeded = (uint)args.RequiredSamples;
    if (numSamplesNeeded == 0)
    {
        return;
    }

    // The size of audioData should really match the buffer size of the frames held in AudioInStream.
    var sampleNeededBytes = numSamplesNeeded * OpusConvertConstants.SampleBytes * OpusConvertConstants.Channels;

    // Note: should this buffer be kept in a static field instead of reallocating it every quantum?
    var audioData = new byte[sampleNeededBytes];
    var result = await AudioInStream.ReadAsync(audioData, 0, (int)sampleNeededBytes);

    AudioFrame audioFrame = GenerateAudioData(audioData, (uint)result);
    sender.AddFrame(audioFrame);
}
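GenerateAudioData is not shown above. A minimal sketch of how such a helper is commonly written against the Windows.Media.Audio APIs, assuming the frame input node was created with a PCM encoding that matches the Discord stream; the IMemoryBufferByteAccess interop interface and the byte-copy loop follow the standard AudioGraph pattern and are not code from the original project:

using System.Runtime.InteropServices;
using Windows.Foundation;
using Windows.Media;

[ComImport]
[Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
unsafe interface IMemoryBufferByteAccess
{
    void GetBuffer(out byte* buffer, out uint capacity);
}

private static unsafe AudioFrame GenerateAudioData(byte[] pcm, uint byteCount)
{
    // Allocate a frame large enough for the bytes actually read from AudioInStream.
    var frame = new AudioFrame(byteCount);

    using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
    using (IMemoryBufferReference reference = buffer.CreateReference())
    {
        byte* dst;
        uint capacity;
        ((IMemoryBufferByteAccess)reference).GetBuffer(out dst, out capacity);

        // Copy the raw PCM bytes straight into the frame's buffer.
        for (uint i = 0; i < byteCount && i < capacity; i++)
        {
            dst[i] = pcm[i];
        }
    }

    return frame;
}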
internal static async Task StartListenService(ulong User, AudioInStream In)
{
    StopListenService(User);
    var Source = new CancellationTokenSource();

    if (Cancel.TryAdd(User, Source))
    {
        var Queue = new Queue<RTPFrame>();

        // After 125 ms with no new frame, flush the buffered frames to the voice processor.
        var Timer = new Timer(e =>
        {
            if (!Source.IsCancellationRequested)
            {
                ProcessVoiceAsync(User, Queue.ToArray()).ConfigureAwait(false);
            }
            Queue.Clear();
        }, null, Timeout.Infinite, Timeout.Infinite);

        while (!Source.IsCancellationRequested)
        {
            try
            {
                Queue.Enqueue(await In.ReadFrameAsync(Source.Token));
                Timer.Change(125, 0);
            }
            catch (OperationCanceledException) { }
        }
    }
}
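StopListenService and ProcessVoiceAsync are referenced above but not shown. A plausible sketch of the stop side, assuming Cancel is a ConcurrentDictionary<ulong, CancellationTokenSource> shared between the two methods (this is an assumption, not the original implementation):

internal static void StopListenService(ulong User)
{
    // Cancel and drop any listener previously registered for this user.
    if (Cancel.TryRemove(User, out CancellationTokenSource Source))
    {
        Source.Cancel();
        Source.Dispose();
    }
}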
private async Task StreamCreated(ulong userId, AudioInStream audio)
{
    //var channels = this.discordClient.Guilds.SelectMany(g => g.Channels);
    //var voiceChannels = channels.Where(x => x.Users.Where(z => z.Id == userId).Any()).Select(z => z as SocketVoiceChannel).Where(y => y != null);

#pragma warning disable CS4014
    Task.Factory.StartNew(async () =>
#pragma warning restore CS4014
    {
        SemaphoreSlim semaphore;
        Stream stream;

        if (!semaphores.TryGetValue(userId, out semaphore))
        {
            semaphores[userId] = new SemaphoreSlim(1);
            semaphore = semaphores[userId];
        }

        var user = this.discordClient.GetUser(userId);

        do
        {
            try
            {
                // Wait for a frame to show up on the audio channel
                if (audio.AvailableFrames > 0)
                {
                    try
                    {
                        // Wait on the semaphore synchronization
                        await semaphore.WaitAsync();
                        RTPFrame frame = await audio.ReadFrameAsync(new CancellationToken());

                        if (!streams.TryGetValue(userId, out stream))
                        {
                            streams[userId] = new MemoryStream();
                            stream = streams[userId];
                        }

                        // Write the payload to the memory stream
                        stream.Write(frame.Payload, 0, frame.Payload.Length);
                        // Console.WriteLine($"Frame received for user {user.Username} - {stream.Length}");
                    }
                    finally
                    {
                        semaphore.Release();
                    }
                }
                else
                {
                    await Task.Delay(500);
                }
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
            }
        } while (audio.CanRead);
    });
}
private Task _audioClient_StreamCreated(ulong userID, AudioInStream arg2)
{
    // Triggered when a user's incoming voice stream is created (i.e. the user joined the channel)
    Logging.Log($"Stream created {userID}", LogLevel.LogLevel_DEBUG);

    using (IEffectPlayback joinSound = new Mp3SoundEffect(_audioJoin))
    {
        joinSound.LoadStream();
        joinSound.Play();
    }

    return Task.Run(() =>
    {
        ListenUserAsync(arg2, userID);
    });
}
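Handlers like the one above are attached to Discord.Net's audio client, whose StreamCreated event fires once for each remote user's incoming stream. A minimal wiring sketch, assuming the bot connects with IVoiceChannel.ConnectAsync and keeps the client in a field (variable names here are illustrative):

// Join the voice channel and subscribe to per-user incoming audio streams.
IAudioClient _audioClient = await voiceChannel.ConnectAsync();
_audioClient.StreamCreated += _audioClient_StreamCreated; // handler shown above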
/// <summary>
/// Creates a new stream when a user joins the voice channel and
/// repeats all of their audio back.
/// </summary>
/// <param name="arg1"></param>
/// <param name="arg2"></param>
/// <returns></returns>
private async Task StreamCreated(ulong arg1, AudioInStream arg2)
{
    try
    {
        using (var stream = AudioClient.CreatePCMStream(AudioApplication.Mixed))
        {
            if (Program.DEBUG)
            {
                Console.WriteLine(arg1); // User ID
                //await arg2.CopyToAsync(stream);
            }
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
    }
}
private async void ListenUserAsync(AudioInStream stream, ulong userID)
{
    // Do not play back our own audio data
    if (userID == OwnUserID)
    {
        return;
    }

    var user = await _socketClient.Rest.GetGuildUserAsync(_voiceChannel.GuildId, userID);

    // If the user has no nickname set, fall back to the username
    var soundsrv = new SoundService(userID, user.Nickname ?? user.Username);
    _soundServices.Add(soundsrv);

    try
    {
        await Task.Run(async () =>
        {
            // 3840 bytes = 20 ms of 48 kHz, 16-bit, stereo PCM (one voice frame)
            var buffer = new byte[3840];
            soundsrv.StartPlayback();

            while (await stream.ReadAsync(buffer, 0, buffer.Length) > 0)
            {
                if (!Deaf)
                {
                    soundsrv.AddSamples(buffer);
                }
            }
        });
    }
    catch (Exception ex)
    {
        Logging.Log(ex);
    }
    finally
    {
        _soundServices.Remove(soundsrv);
        soundsrv.Dispose();
    }
}
private async Task<byte[]> BufferIncomingStream(AudioInStream e, int time = 3)
{
    ConcurrentQueue<byte> voiceInQueue = new ConcurrentQueue<byte>();
    SemaphoreSlim queueLock = new SemaphoreSlim(1, 1);

    return await Task.Run(async () =>
    {
        DateTime nowTime = DateTime.Now;

        // Buffer everything that arrives during the requested time window (seconds).
        while (DateTime.Now.Subtract(nowTime).TotalSeconds <= time)
        {
            if (e.AvailableFrames > 0)
            {
                queueLock.Wait();
                RTPFrame frame = await e.ReadFrameAsync(CancellationToken.None);

                for (int i = 0; i < frame.Payload.Length; i++)
                {
                    voiceInQueue.Enqueue(frame.Payload[i]);
                }

                queueLock.Release();
            }
        }

        return voiceInQueue.ToArray();
    });
}
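A hypothetical caller for the buffering helper above: capture the default three seconds of a user's voice and pass the raw payload bytes on for further processing (ProcessCapturedVoiceAsync is illustrative only, not part of the original code):

private async Task OnStreamCreated(ulong userId, AudioInStream audio)
{
    // Buffer three seconds of incoming frames for this user.
    byte[] captured = await BufferIncomingStream(audio);
    await ProcessCapturedVoiceAsync(userId, captured); // illustrative downstream step
}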
public void AddInStream(ulong id, AudioInStream stream)
{
    UserVoiceConnections.TryAdd(id, stream);
}
/// <summary>
/// Initializes a new instance of the <see cref="AudioPcmEventArgs"/> class.
/// </summary>
/// <param name="user">The guild user the audio stream belongs to.</param>
/// <param name="stream">The user's incoming audio stream.</param>
public AudioPcmEventArgs(SocketGuildUser user, AudioInStream stream)
{
    User = user;
    Stream = stream;
}
private async Task VoiceChannelAudioStreamCreated(ulong arg1, AudioInStream stream)
{
    AudioManager.StartAudioOutput(stream);
    await Task.Delay(0);
}
private static async Task UserJoinVoice(ulong s, AudioInStream e)
{
    Speech.RestartListenService(s, e);
}