/// <summary>
/// Sends two separate RTP streams to an application like ffplay.
///
/// ffplay -protocol_whitelist "file,rtp,udp" -i ffplay_av.sdp -loglevel debug
///
/// The SDP that describes the streams is:
///
/// v=0
/// o=- 1129870806 2 IN IP4 127.0.0.1
/// s=-
/// c=IN IP4 192.168.11.50
/// t=0 0
/// m=audio 4040 RTP/AVP 0
/// a=rtpmap:0 PCMU/8000
/// m=video 4042 RTP/AVP 100
/// a=rtpmap:100 VP8/90000
/// </summary>
private void SendSamplesAsRtp(IPEndPoint dstBaseEndPoint)
{
    try
    {
        Socket videoSrcRtpSocket = null;
        Socket videoSrcControlSocket = null;
        Socket audioSrcRtpSocket = null;
        Socket audioSrcControlSocket = null;

        // WebRTC multiplexes all the RTP and RTCP sessions onto a single UDP connection.
        // The approach needed for ffplay is the original way, where each media type has its
        // own UDP connection and the RTCP reports also require a separate UDP connection on
        // the RTP port + 1.
        IPAddress localIPAddress = IPAddress.Any;
        IPEndPoint audioRtpEP = dstBaseEndPoint;
        IPEndPoint audioRtcpEP = new IPEndPoint(dstBaseEndPoint.Address, dstBaseEndPoint.Port + 1);
        IPEndPoint videoRtpEP = new IPEndPoint(dstBaseEndPoint.Address, dstBaseEndPoint.Port + 2);
        IPEndPoint videoRtcpEP = new IPEndPoint(dstBaseEndPoint.Address, dstBaseEndPoint.Port + 3);

        RTPSession audioRtpSession = new RTPSession((int)RTPPayloadTypesEnum.PCMU, null, null);
        RTPSession videoRtpSession = new RTPSession(VP8_PAYLOAD_TYPE_ID, null, null);

        DateTime lastRtcpSenderReportSentAt = DateTime.Now;

        NetServices.CreateRtpSocket(localIPAddress, RAW_RTP_START_PORT_RANGE, RAW_RTP_END_PORT_RANGE, true, out audioSrcRtpSocket, out audioSrcControlSocket);
        NetServices.CreateRtpSocket(localIPAddress, ((IPEndPoint)audioSrcRtpSocket.LocalEndPoint).Port, RAW_RTP_END_PORT_RANGE, true, out videoSrcRtpSocket, out videoSrcControlSocket);

        OnMediaSampleReady += (mediaType, timestamp, sample) =>
        {
            if (mediaType == MediaSampleTypeEnum.VP8)
            {
                videoRtpSession.SendVp8Frame(videoSrcRtpSocket, videoRtpEP, timestamp, sample);
            }
            else
            {
                audioRtpSession.SendAudioFrame(audioSrcRtpSocket, audioRtpEP, timestamp, sample);
            }

            // Deliver periodic RTCP sender reports. This helps the receiver sync the audio and
            // video stream timestamps. If there are gaps in the media, silence suppression etc.
            // then the sender reports shouldn't be triggered from the media samples. In this case
            // the samples are from an mp4 file which provides a constant uninterrupted stream.
            if (DateTime.Now.Subtract(lastRtcpSenderReportSentAt).TotalSeconds >= RTCP_SR_PERIOD_SECONDS)
            {
                videoRtpSession.SendRtcpSenderReport(videoSrcControlSocket, videoRtcpEP, _vp8Timestamp);
                audioRtpSession.SendRtcpSenderReport(audioSrcControlSocket, audioRtcpEP, _mulawTimestamp);
                lastRtcpSenderReportSentAt = DateTime.Now;
            }
        };
    }
    catch (Exception excp)
    {
        logger.Error("Exception SendSamplesAsRtp. " + excp);
    }
}
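For reference, here is one way the method could be exercised: write the SDP from the comment above to a file and point the sender at the base port it advertises. This is a minimal sketch, assuming the machine running ffplay is at 192.168.11.50 and the file name ffplay_av.sdp matches the ffplay command line.

// Minimal usage sketch (assumptions: ffplay host at 192.168.11.50, audio base port
// 4040 so that video lands on 4042, matching the SDP in the summary comment above).
string sdp =
@"v=0
o=- 1129870806 2 IN IP4 127.0.0.1
s=-
c=IN IP4 192.168.11.50
t=0 0
m=audio 4040 RTP/AVP 0
a=rtpmap:0 PCMU/8000
m=video 4042 RTP/AVP 100
a=rtpmap:100 VP8/90000";

File.WriteAllText("ffplay_av.sdp", sdp);    // Requires System.IO.
SendSamplesAsRtp(new IPEndPoint(IPAddress.Parse("192.168.11.50"), 4040));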
/// <summary>
/// Sends RTCP sender reports for the audio and video RTP sessions. Since WebRTC
/// multiplexes RTP and RTCP onto a single UDP connection, both reports go out over
/// the connected ICE candidate's RTP socket.
/// </summary>
public void SendRtcpSenderReports(uint audioTimestamp, uint videoTimestamp)
{
    // The candidate with a remote RTP end point set is the one the ICE connectivity
    // checks succeeded on.
    var connectedIceCandidate = Peer.LocalIceCandidates.FirstOrDefault(y => y.RemoteRtpEndPoint != null);

    if (connectedIceCandidate != null)
    {
        var srcRtpSocket = connectedIceCandidate.LocalRtpSocket;
        var dstRtpEndPoint = connectedIceCandidate.RemoteRtpEndPoint;

        _audioRtpSession.SendRtcpSenderReport(srcRtpSocket, dstRtpEndPoint, audioTimestamp);
        _videoRtpSession.SendRtcpSenderReport(srcRtpSocket, dstRtpEndPoint, videoTimestamp);
    }
}
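This method needs to be invoked periodically. A minimal sketch of one way to drive it is a System.Threading.Timer, assuming the RTCP_SR_PERIOD_SECONDS constant and the _mulawTimestamp and _vp8Timestamp fields from the raw RTP example above are available on the enclosing class:

// Hypothetical driver: send the RTCP reports on a fixed period with the most recent
// RTP timestamps. Assumes RTCP_SR_PERIOD_SECONDS, _mulawTimestamp and _vp8Timestamp
// exist on the enclosing class, as in SendSamplesAsRtp above.
private System.Threading.Timer _rtcpTimer;

private void StartRtcpSenderReports()
{
    var period = TimeSpan.FromSeconds(RTCP_SR_PERIOD_SECONDS);
    _rtcpTimer = new System.Threading.Timer(
        _ => SendRtcpSenderReports(_mulawTimestamp, _vp8Timestamp), null, period, period);
}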