/// <summary>
/// Raised when a complete remote video frame has been reassembled from RTP
/// packets; forwards the raw payload to any subscribers.
/// </summary>
/// <param name="frame">The reassembled RTP video frame.</param>
private void VideoFrameReady(RTPFrame frame)
{
    // Snapshot the delegate so a subscriber unsubscribing between the null
    // check and the invocation cannot cause a NullReferenceException.
    var handler = OnRemoteVideoSampleReady;
    if (handler != null)
    {
        var payload = frame.GetFramePayload();
        handler(payload, payload.Length);
    }
}
/// <summary>
/// Raised when a complete remote audio frame has been reassembled from RTP
/// packets; forwards the raw payload to any subscribers.
/// </summary>
/// <param name="frame">The reassembled RTP audio frame.</param>
private void AudioFrameReady(RTPFrame frame)
{
    // Snapshot the delegate so a subscriber unsubscribing between the null
    // check and the invocation cannot cause a NullReferenceException.
    var handler = OnRemoteAudioSampleReady;
    if (handler != null)
    {
        var payload = frame.GetFramePayload();
        handler(payload, payload.Length);
    }
}
/// <summary>
/// Starts a fire-and-forget background loop that drains RTP audio frames for
/// <paramref name="userId"/> into a per-user MemoryStream.
/// NOTE(review): the method is declared async but never awaits anything; the
/// CS4014 pragma suppresses the "call is not awaited" warning for the
/// deliberately unawaited Task.Factory.StartNew. StartNew with an async lambda
/// yields a Task&lt;Task&gt;, so the outer task completes as soon as the loop
/// starts — presumably intentional here, but worth confirming.
/// </summary>
/// <param name="userId">Discord user id whose audio stream was created.</param>
/// <param name="audio">The incoming audio stream; the loop runs while it is readable.</param>
private async Task StreamCreated(ulong userId, AudioInStream audio)
{
    //var channels = this.discordClient.Guilds.SelectMany(g => g.Channels);
    //var voiceChannels = channels.Where(x => x.Users.Where(z => z.Id == userId).Any()).Select(z => z as SocketVoiceChannel).Where(y => y != null);
#pragma warning disable CS4014
    Task.Factory.StartNew(async() =>
#pragma warning restore CS4014
    {
        SemaphoreSlim semaphore;
        Stream stream;
        // Lazily create the per-user semaphore used to guard stream writes.
        // NOTE(review): this check-then-insert is not atomic — if `semaphores`
        // is a plain Dictionary and two streams for the same user start
        // concurrently, this could race; confirm the collection type.
        if (!semaphores.TryGetValue(userId, out semaphore))
        {
            semaphores[userId] = new SemaphoreSlim(1);
            semaphore = semaphores[userId];
        }
        var user = this.discordClient.GetUser(userId);
        do
        {
            try
            {
                // Wait for a frame to show up on the audio channel
                if (audio.AvailableFrames > 0)
                {
                    try
                    {
                        await semaphore.WaitAsync();
                        RTPFrame frame = await audio.ReadFrameAsync(new CancellationToken());
                        // Wait on the semaphore synchronization
                        // Lazily create the per-user buffer on first frame.
                        if (!streams.TryGetValue(userId, out stream))
                        {
                            streams[userId] = new MemoryStream();
                            stream = streams[userId];
                        }
                        // Write the payload to the memory stream
                        stream.Write(frame.Payload, 0, frame.Payload.Length);
                        // Console.WriteLine($"Frame received for user {user.Username} - {stream.Length}");
                    }
                    finally
                    {
                        // Always release, even if the read or write threw.
                        semaphore.Release();
                    }
                }
                else
                {
                    // No frames queued yet — back off before polling again.
                    await Task.Delay(500);
                }
            }
            catch (Exception e)
            {
                // Best-effort loop: log and keep draining while the stream is readable.
                Console.WriteLine(e);
            }
        } while (audio.CanRead);
    });
}
/// <summary>
/// Non-blocking attempt to take the next buffered RTP frame.
/// </summary>
/// <param name="cancelToken">Checked once on entry; throws if cancellation was requested.</param>
/// <param name="frame">The dequeued frame, or the default value when none was available.</param>
/// <returns>True when the signal indicated a queued frame; false otherwise.</returns>
public override bool TryReadFrame(CancellationToken cancelToken, out RTPFrame frame)
{
    cancelToken.ThrowIfCancellationRequested();

    // A zero-timeout wait on the signal tells us whether anything is queued
    // without blocking the caller.
    if (!_signal.Wait(0))
    {
        frame = default(RTPFrame);
        return false;
    }

    // NOTE(review): the dequeue result is deliberately ignored, matching the
    // original behavior — the signal count is assumed to track the queue count.
    _frames.TryDequeue(out frame);
    return true;
}
/// <summary>
/// Handler invoked when a complete RTP frame is available; feeds the PS
/// payload into the H.264 demux path.
/// </summary>
/// <param name="rtpFrame">The reassembled RTP frame.</param>
void Form1_OnStreamReady(RTPFrame rtpFrame)
{
    // (Removed dead commented-out file-dump debugging code.)
    PsToH264(rtpFrame.GetFramePayload());
}
/// <summary>
/// Forwards a completed RTP frame's payload to DataAvailable subscribers,
/// unless delivery is currently paused, and accumulates the running byte count.
/// </summary>
/// <param name="frame">The reassembled RTP frame.</param>
private void RtpChannel_OnFrameReady(RTPFrame frame)
{
    if (paused)
    {
        return;
    }

    byte[] data = frame.GetFramePayload();
    received += data.LongLength;

    DataAvailable?.Invoke(this, new DataEventArgs(data, data.Length));
}
/// <summary>
/// Reads the next RTP frame and copies its payload into the supplied buffer.
/// </summary>
/// <param name="buffer">Destination buffer for the payload.</param>
/// <param name="offset">Offset into <paramref name="buffer"/> to start writing.</param>
/// <param name="count">Maximum number of bytes the caller can accept.</param>
/// <param name="cancelToken">Checked on entry and passed through to the frame read.</param>
/// <returns>The number of payload bytes copied.</returns>
/// <exception cref="InvalidOperationException">The frame payload exceeds <paramref name="count"/>.</exception>
public override async Task <int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancelToken)
{
    cancelToken.ThrowIfCancellationRequested();

    RTPFrame frame = await ReadFrameAsync(cancelToken).ConfigureAwait(false);

    byte[] payload = frame.Payload;
    if (payload.Length > count)
    {
        throw new InvalidOperationException("Buffer is too small.");
    }

    Buffer.BlockCopy(payload, 0, buffer, offset, payload.Length);
    return payload.Length;
}
/// <summary>
/// Raises OnFrameReady for a completed frame, invoking each subscriber
/// individually so one faulting handler cannot prevent the rest from running.
/// </summary>
/// <param name="frame">The frame to deliver to subscribers.</param>
private void OnFrameComplete(RTPFrame frame)
{
    // Snapshot the delegate so concurrent unsubscription cannot race the invocation.
    var subscribers = OnFrameReady;
    if (subscribers is null)
    {
        return;
    }

    foreach (var d in subscribers.GetInvocationList())
    {
        try
        {
            ((Action <RTPFrame>)d)(frame);
        }
        catch
        {
            // Deliberate best-effort delivery: a throwing subscriber is skipped.
        }
    }
}
/// <summary>
/// Event handler for receiving an RTP frame from the remote end of the VoIP call.
/// Decodes the mu-law encoded payload to 16-bit little-endian PCM and queues it
/// for playback.
/// </summary>
/// <param name="rtpFrame">The RTP frame received.</param>
private void RTPChannelSampleReceived(RTPFrame rtpFrame)
{
    if (rtpFrame == null)
    {
        return;
    }

    var framePayload = rtpFrame.GetFramePayload();
    if (framePayload == null)
    {
        return;
    }

    // Decode the whole frame into one PCM buffer instead of allocating a
    // 2-byte array per sample; a single AddSamples call delivers the same
    // bytes in the same order as the original per-sample loop.
    byte[] pcmBuffer = new byte[framePayload.Length * 2];
    for (int index = 0; index < framePayload.Length; index++)
    {
        short pcm = MuLawDecoder.MuLawToLinearSample(framePayload[index]);
        pcmBuffer[2 * index] = (byte)(pcm & 0xFF);     // low byte first (little-endian PCM)
        pcmBuffer[2 * index + 1] = (byte)(pcm >> 8);
    }

    m_waveProvider.AddSamples(pcmBuffer, 0, pcmBuffer.Length);
}
/// <summary>
/// Handler for a completed RTP frame: notifies OnStreamReady subscribers and
/// feeds the PS payload into the H.264 demux path.
/// </summary>
/// <param name="frame">The reassembled RTP frame.</param>
private void _rtpChannel_OnFrameReady(RTPFrame frame)
{
    // Snapshot the delegate so unsubscription between the null check and the
    // call cannot cause a NullReferenceException.
    var handler = OnStreamReady;
    if (handler != null)
    {
        handler(frame);
    }

    // (Removed dead commented-out sequence-logging and file-dump debug code.)
    byte[] buffer = frame.GetFramePayload();
    PsToH264(buffer);
}
/// <summary>
/// Buffers incoming audio frames from the stream for a fixed wall-clock window
/// and returns the concatenated payload bytes in arrival order.
/// </summary>
/// <param name="e">The audio stream to drain frames from.</param>
/// <param name="time">Length of the buffering window, in seconds (default 3).</param>
/// <returns>All payload bytes received during the window.</returns>
private async Task <byte[]> BufferIncomingStream(AudioInStream e, int time = 3)
{
    ConcurrentQueue <byte> voiceInQueue = new ConcurrentQueue <byte>();
    // Dispose the semaphore once buffering completes (it was leaked before).
    using (SemaphoreSlim queueLock = new SemaphoreSlim(1, 1))
    {
        return await Task.Run(async() =>
        {
            // UtcNow: a DST transition or clock adjustment to local time must
            // not distort the measured window.
            DateTime startTime = DateTime.UtcNow;
            while (DateTime.UtcNow.Subtract(startTime).TotalSeconds <= time)
            {
                if (e.AvailableFrames > 0)
                {
                    // Await the lock instead of blocking the thread, and
                    // release in finally so a faulted read cannot leave it held.
                    await queueLock.WaitAsync();
                    try
                    {
                        RTPFrame frame = await e.ReadFrameAsync(CancellationToken.None);
                        foreach (byte b in frame.Payload)
                        {
                            voiceInQueue.Enqueue(b);
                        }
                    }
                    finally
                    {
                        queueLock.Release();
                    }
                }
                else
                {
                    // Yield briefly instead of busy-spinning while no frames are queued.
                    await Task.Delay(10);
                }
            }
            return voiceInQueue.ToArray();
        });
    }
}
/// <summary>
/// RTP packet callback handler: extracts the reassembled frame's payload and
/// writes it to the output.
/// </summary>
/// <param name="frame">The reassembled RTP frame.</param>
private void _rtpChannel_OnFrameReady(RTPFrame frame)
{
    Write(frame.GetFramePayload());
}