/// <summary>
/// Gets the media stream status for the specified media announcement.
/// </summary>
/// <param name="mediaType">The type of media (audio, video etc) to get the status for.</param>
/// <param name="announcementIndex">The index of the announcement to get the status for.</param>
/// <returns>The media stream status set on the announcement or if there is none the session. If
/// there is also no status set on the session then the default value of sendrecv is returned.</returns>
public MediaStreamStatusEnum GetMediaStreamStatus(SDPMediaTypesEnum mediaType, int announcementIndex)
{
    // ToList never returns null, so only the index bound needs checking (the original
    // also used the Count() extension method where the List Count property suffices).
    var announcements = Media.Where(x => x.Media == mediaType).ToList();

    if (announcementIndex >= announcements.Count)
    {
        return MediaStreamStatusEnum.None;
    }

    var announcement = announcements[announcementIndex];

    if (announcement.MediaStreamStatus != MediaStreamStatusEnum.None)
    {
        // The announcement level status takes precedence.
        return announcement.MediaStreamStatus;
    }
    else if (SessionMediaStreamStatus != MediaStreamStatusEnum.None)
    {
        // Fall back to the session level status.
        return SessionMediaStreamStatus;
    }
    else
    {
        // Neither set; RFC 4566 default is sendrecv.
        return MediaStreamStatusEnum.SendRecv;
    }
}
/// <summary>
/// Default constructor.
/// </summary>
/// <param name="mediaType">The media type this reporting session will be measuring.</param>
/// <param name="ssrc">The SSRC of the RTP stream being sent.</param>
public RTCPSession(SDPMediaTypesEnum mediaType, uint ssrc)
{
    // A random CNAME identifies this source in SDES reports.
    Cname = Guid.NewGuid().ToString();
    CreatedAt = DateTime.Now;

    MediaType = mediaType;
    Ssrc = ssrc;
}
/// <summary>
/// Gets the port the remote party is listening on for the specified media type.
/// </summary>
/// <param name="sdpStr">The raw SDP payload to search.</param>
/// <param name="mediaType">The media type (audio, video etc.) to get the receive port for.</param>
/// <returns>The port from the first matching media announcement, or 0 if no match was found
/// (or if the port field could not be parsed).</returns>
public int GetReceivePort(string sdpStr, SDPMediaTypesEnum mediaType)
{
    string[] sdpLines = sdpStr.Split('\n');

    foreach (var line in sdpLines)
    {
        string trimmedLine = line.Trim();

        if (trimmedLine.StartsWith("m="))
        {
            Match mediaMatch = Regex.Match(trimmedLine.Substring(2).Trim(),
                @"(?<type>\w+)\s+(?<port>\d+)\s+(?<transport>\S+)\s+(?<formats>.*)$");

            // Check the media type FIRST so non-matching announcements are skipped without
            // the cost of constructing an announcement and parsing its transport/formats
            // (the original parsed everything and filtered afterwards).
            if (mediaMatch.Success &&
                SDPMediaTypes.GetSDPMediaType(mediaMatch.Result("${type}")) == mediaType)
            {
                // TryParse leaves port as 0 on failure, matching the original's behaviour.
                Int32.TryParse(mediaMatch.Result("${port}"), out int port);
                return port;
            }
        }
    }

    return 0;
}
/// <summary>
/// Forwards media from the SIP session to the WebRTC session.
/// </summary>
/// <param name="mediaType">The type of media.</param>
/// <param name="rtpPacket">The RTP packet received on the SIP session.</param>
private static void ForwardMedia(SDPMediaTypesEnum mediaType, RTPPacket rtpPacket)
{
    // No-op until the peer connection has been established.
    _peerConnection?.SendMedia(mediaType, (uint)rtpPacket.Payload.Length, rtpPacket.Payload);
}
/// <summary>
/// Creates a lightweight class to track a media stream track within an RTP session.
/// When supporting RFC3550 (the standard RTP specification) the relationship between
/// an RTP stream and session is 1:1. For WebRTC and RFC8101 there can be multiple
/// streams per session.
/// </summary>
/// <param name="kind">The type of media for this stream. There can only be one
/// stream per media type.</param>
/// <param name="isRemote">True if this track corresponds to a media announcement from the
/// remote party.</param>
/// <param name="capabilities">The capabilities for the track being added. Where the same media
/// type is supported locally and remotely only the mutual capabilities can be used.</param>
/// <param name="streamStatus">The initial stream status for the media track. Defaults to
/// send receive.</param>
/// <param name="ssrcAttributes">Optional. SSRC attributes from the remote SDP announcement
/// this track was created from.</param>
public MediaStreamTrack(
    SDPMediaTypesEnum kind,
    bool isRemote,
    List<SDPAudioVideoMediaFormat> capabilities,
    MediaStreamStatusEnum streamStatus = MediaStreamStatusEnum.SendRecv,
    List<SDPSsrcAttribute> ssrcAttributes = null)
{
    Kind = kind;
    IsRemote = isRemote;
    Capabilities = capabilities;
    DefaultStreamStatus = streamStatus;
    StreamStatus = streamStatus;

    // Local tracks get randomised RTP identifiers; remote tracks have theirs
    // supplied by the remote party's packets.
    if (isRemote == false)
    {
        Ssrc = Convert.ToUInt32(Crypto.GetRandomInt(0, Int32.MaxValue));
        SeqNum = Convert.ToUInt16(Crypto.GetRandomInt(0, UInt16.MaxValue));
    }

    // Record the remote source attributes to help match RTP SSRC and RTCP CNAME values
    // against RTP and RTCP packets received from the remote party.
    if (ssrcAttributes != null)
    {
        foreach (var ssrcAttr in ssrcAttributes)
        {
            SdpSsrc.Add(ssrcAttr.SSRC, ssrcAttr);
        }
    }
}
/// <summary>
/// Creates a lightweight class to track an RTP stream within an RTP session. When
/// supporting RFC3550 (the standard RTP specification) the relationship between
/// an RTP stream and session is 1:1. For WebRTC and RFC8101 there can be multiple
/// streams per session.
/// </summary>
/// <param name="mediaType">The type of media for this stream. There can only be one
/// stream per media type.</param>
/// <param name="payloadTypeID">The payload type ID set in RTP packets sent by us.</param>
/// <param name="remotePayloadIDs">The list of potential payload ID's that the
/// remote party may use in RTP packets sent to us. Must be mutually exclusive across
/// all streams in the same session.</param>
public RTPSessionStream(SDPMediaTypesEnum mediaType, int payloadTypeID, List<int> remotePayloadIDs)
{
    MediaType = mediaType;
    PayloadTypeID = payloadTypeID;
    RemotePayloadIDs = remotePayloadIDs;

    // Randomise the starting SSRC and sequence number for the outbound stream.
    Ssrc = Convert.ToUInt32(Crypto.GetRandomInt(0, Int32.MaxValue));
    SeqNum = Convert.ToUInt16(Crypto.GetRandomInt(0, UInt16.MaxValue));
}
/// <summary>
/// Creates a media announcement for a message media format with its own connection details.
/// </summary>
/// <param name="mediaType">The media type for the announcement.</param>
/// <param name="connection">The connection information for the announcement.</param>
/// <param name="port">The port the media is being sent/received on.</param>
/// <param name="messageMediaFormat">The message media format for the announcement.</param>
public SDPMediaAnnouncement(SDPMediaTypesEnum mediaType, SDPConnectionInformation connection, int port, SDPMessageMediaFormat messageMediaFormat)
{
    Media = mediaType;
    Connection = connection;
    Port = port;
    MessageMediaFormat = messageMediaFormat;
}
/// <summary>
/// Forwards media from the SIP session to the WebRTC session.
/// </summary>
/// <param name="remote">The remote endpoint the RTP packet was received from.</param>
/// <param name="mediaType">The type of media.</param>
/// <param name="rtpPacket">The RTP packet received on the SIP session.</param>
private static void ForwardAudioToPeerConnection(IPEndPoint remote, SDPMediaTypesEnum mediaType, RTPPacket rtpPacket)
{
    // Only audio packets are forwarded, and only once the peer connection is up.
    if (mediaType != SDPMediaTypesEnum.audio)
    {
        return;
    }

    if (_peerConnection != null && _peerConnection.connectionState == RTCPeerConnectionState.connected)
    {
        _peerConnection.SendAudio((uint)rtpPacket.Payload.Length, rtpPacket.Payload);
    }
}
/// <summary>
/// Event handler for receiving RTP packets from a remote party.
/// </summary>
/// <param name="mediaType">The media type of the packets.</param>
/// <param name="rtpPacket">The RTP packet with the media sample.</param>
private void RtpPacketReceived(SDPMediaTypesEnum mediaType, RTPPacket rtpPacket)
{
    // Only audio packets are rendered; all other media types are ignored.
    switch (mediaType)
    {
        case SDPMediaTypesEnum.audio:
            RenderAudio(rtpPacket);
            break;
    }
}
/// <summary>
/// Sends a media sample to the WebRTC session, recording the timestamp of the
/// most recent video sample.
/// </summary>
/// <param name="mediaType">Whether the sample is audio or video.</param>
/// <param name="timestamp">The RTP timestamp for the sample.</param>
/// <param name="sample">The sample payload.</param>
public void SendMedia(SDPMediaTypesEnum mediaType, uint timestamp, byte[] sample)
{
    switch (mediaType)
    {
        case SDPMediaTypesEnum.video:
            LastVideoTimeStamp = timestamp;
            break;
    }

    WebRtcSession.SendMedia(mediaType, timestamp, sample);
}
/// <summary>
/// Decodes a mu-law encoded RTP audio payload to 16 bit little endian PCM and queues
/// it on the default system audio output device's buffer.
/// </summary>
/// <param name="mediaType">The media type of the RTP packet (expected to be audio).</param>
/// <param name="rtpPacket">The RTP packet whose payload holds the mu-law encoded sample.</param>
private static void PlaySample(SDPMediaTypesEnum mediaType, RTPPacket rtpPacket)
{
    var sample = rtpPacket.Payload;

    // Decode the whole payload into a single buffer so the audio provider is called once
    // per packet instead of allocating a 2 byte array and calling AddSamples per sample.
    byte[] pcmBuffer = new byte[sample.Length * 2];

    for (int index = 0; index < sample.Length; index++)
    {
        short pcm = NAudio.Codecs.MuLawDecoder.MuLawToLinearSample(sample[index]);

        // 16 bit PCM, little endian byte order.
        pcmBuffer[index * 2] = (byte)(pcm & 0xFF);
        pcmBuffer[index * 2 + 1] = (byte)(pcm >> 8);
    }

    m_audioOutProvider.AddSamples(pcmBuffer, 0, pcmBuffer.Length);
}
/// <summary>
/// Sends an audio sample to the remote party using the first audio format from the
/// local session description.
/// </summary>
/// <param name="mediaType">The media type of the sample. Only audio is supported.</param>
/// <param name="samplePeriod">The RTP timestamp duration for the sample.</param>
/// <param name="sample">The sample payload.</param>
public void SendMedia(SDPMediaTypesEnum mediaType, uint samplePeriod, byte[] sample)
{
    if (mediaType != SDPMediaTypesEnum.audio)
    {
        // Only audio sending is currently supported.
        throw new NotImplementedException();
    }

    var audioAnnouncement = localDescription.sdp.Media.First(x => x.Media == SDPMediaTypesEnum.audio);
    int payloadID = Convert.ToInt32(audioAnnouncement.MediaFormats.First().FormatID);
    base.SendAudioFrame(samplePeriod, payloadID, sample);
}
/// <summary>
/// Diagnostic handler to print out our RTCP reports from the remote WebRTC peer.
/// </summary>
/// <param name="mediaType">The media type the report relates to.</param>
/// <param name="recvRtcpReport">The RTCP compound packet received from the remote peer.</param>
private static void RtpSession_OnReceiveReport(SDPMediaTypesEnum mediaType, RTCPCompoundPacket recvRtcpReport)
{
    // A compound packet is not guaranteed to contain a receiver report (it may carry
    // only a sender report), so use null-conditional access to avoid a null reference
    // in a purely diagnostic handler.
    var rr = recvRtcpReport.ReceiverReport?.ReceptionReports?.FirstOrDefault();

    if (rr != null)
    {
        logger.LogDebug($"RTCP {mediaType} Receiver Report: SSRC {rr.SSRC}, pkts lost {rr.PacketsLost}, delay since SR {rr.DelaySinceLastSenderReport}.");
    }
    else
    {
        logger.LogDebug($"RTCP {mediaType} Receiver Report: empty.");
    }
}
/// <summary>
/// Creates a media announcement for a list of application media formats.
/// </summary>
/// <param name="mediaType">The media type for the announcement.</param>
/// <param name="port">The port the media is being sent/received on.</param>
/// <param name="appMediaFormats">The application media formats for the announcement.</param>
public SDPMediaAnnouncement(SDPMediaTypesEnum mediaType, int port, List<SDPApplicationMediaFormat> appMediaFormats)
{
    Media = mediaType;
    Port = port;

    // Guard against a null format list, consistent with the audio/video media format
    // constructor overload which performs the same check.
    if (appMediaFormats != null)
    {
        foreach (var fmt in appMediaFormats)
        {
            // First format with a given ID wins; duplicates are ignored.
            if (!ApplicationMediaFormats.ContainsKey(fmt.ID))
            {
                ApplicationMediaFormats.Add(fmt.ID, fmt);
            }
        }
    }
}
/// <summary>
/// Diagnostic handler to print out our RTCP sender/receiver reports.
/// </summary>
/// <param name="mediaType">The media type the report relates to.</param>
/// <param name="sentRtcpReport">The RTCP compound packet that was sent.</param>
private static void RtpSession_OnSendReport(SDPMediaTypesEnum mediaType, RTCPCompoundPacket sentRtcpReport)
{
    if (sentRtcpReport.SenderReport != null)
    {
        var sr = sentRtcpReport.SenderReport;
        Console.WriteLine($"RTCP sent SR {mediaType}, ssrc {sr.SSRC}, pkts {sr.PacketCount}, bytes {sr.OctetCount}.");
    }
    else
    {
        // First() throws InvalidOperationException on an empty reception report list;
        // use null-conditional access with FirstOrDefault so a diagnostic handler can
        // never crash the session.
        var rrSample = sentRtcpReport.ReceiverReport?.ReceptionReports?.FirstOrDefault();

        if (rrSample != null)
        {
            Console.WriteLine($"RTCP sent RR {mediaType}, ssrc {rrSample.SSRC}, seqnum {rrSample.ExtendedHighestSequenceNumber}.");
        }
    }
}
/// <summary>
/// Diagnostic handler to print out our RTCP reports from the remote WebRTC peer.
/// </summary>
/// <param name="remoteEndPoint">The remote endpoint the report was received from.</param>
/// <param name="mediaType">The media type the report relates to.</param>
/// <param name="recvRtcpReport">The RTCP compound packet received from the remote peer.</param>
private static void RtpSession_OnReceiveReport(IPEndPoint remoteEndPoint, SDPMediaTypesEnum mediaType, RTCPCompoundPacket recvRtcpReport)
{
    // A compound packet may hold only a sender report, in which case ReceiverReport is
    // null; use null-conditional access to avoid crashing a diagnostic handler.
    var rr = recvRtcpReport.ReceiverReport?.ReceptionReports?.FirstOrDefault();

    if (rr != null)
    {
        Console.WriteLine($"RTCP {mediaType} Receiver Report: SSRC {rr.SSRC}, pkts lost {rr.PacketsLost}, delay since SR {rr.DelaySinceLastSenderReport}.");
    }
    else
    {
        Console.WriteLine($"RTCP {mediaType} Receiver Report: empty.");
    }
}
/// <summary>
/// Event handler for receiving RTP packets from a remote party.
/// </summary>
/// <param name="remoteEP">The remote endpoint the packet was received from.</param>
/// <param name="mediaType">The media type of the packets.</param>
/// <param name="rtpPacket">The RTP packet with the media sample.</param>
private void RtpPacketReceived(IPEndPoint remoteEP, SDPMediaTypesEnum mediaType, RTPPacket rtpPacket)
{
    // Dispatch the sample to the matching renderer; other media types are ignored.
    switch (mediaType)
    {
        case SDPMediaTypesEnum.audio:
            RenderAudio(rtpPacket);
            break;
        case SDPMediaTypesEnum.video:
            RenderVideo(rtpPacket);
            break;
    }
}
/// <summary>
/// Creates a media announcement for a list of audio/video media formats.
/// </summary>
/// <param name="mediaType">The media type for the announcement.</param>
/// <param name="port">The port the media is being sent/received on.</param>
/// <param name="mediaFormats">The audio/video media formats for the announcement.</param>
public SDPMediaAnnouncement(SDPMediaTypesEnum mediaType, int port, List<SDPAudioVideoMediaFormat> mediaFormats)
{
    Media = mediaType;
    Port = port;

    // Guard against a null format list, consistent with the overload of this constructor
    // elsewhere in the library that performs the same check.
    if (mediaFormats != null)
    {
        foreach (var fmt in mediaFormats)
        {
            // First format with a given ID wins; duplicates are ignored.
            if (!MediaFormats.ContainsKey(fmt.ID))
            {
                MediaFormats.Add(fmt.ID, fmt);
            }
        }
    }
}
/// <summary>
/// Gets the media stream status for the specified media announcement.
/// </summary>
/// <param name="mediaType">The type of media (audio, video etc) to get the status for.</param>
/// <param name="announcementIndex">The index of the announcement to get the status for.</param>
/// <returns>The media stream status set on the announcement or if there is none the session. If
/// there is also no status set on the session then the default value of sendrecv is returned.</returns>
public MediaStreamStatusEnum GetMediaStreamStatus(SDPMediaTypesEnum mediaType, int announcementIndex)
{
    // ToList never returns null, so only the index bound needs checking (the original
    // also used the Count() extension method where the List Count property suffices).
    var announcements = Media.Where(x => x.Media == mediaType).ToList();

    if (announcementIndex >= announcements.Count)
    {
        return DEFAULT_STREAM_STATUS;
    }

    // Null-coalesce on the nullable announcement status instead of a HasValue ternary.
    return announcements[announcementIndex].MediaStreamStatus ?? DEFAULT_STREAM_STATUS;
}
/// <summary>
/// Sends a media sample to the remote party once the connection is ready.
/// </summary>
/// <param name="mediaType">Whether the sample is audio or video.</param>
/// <param name="sampleTimestamp">The RTP timestamp for the sample.</param>
/// <param name="sample">The sample payload.</param>
public void SendMedia(SDPMediaTypesEnum mediaType, uint sampleTimestamp, byte[] sample)
{
    // Drop samples until the remote endpoint is known and DTLS has completed.
    if (RemoteEndPoint == null || !IsDtlsNegotiationComplete)
    {
        return;
    }

    switch (mediaType)
    {
        case SDPMediaTypesEnum.video:
            RtpSession.SendVp8Frame(sampleTimestamp, sample);
            break;
        case SDPMediaTypesEnum.audio:
            RtpSession.SendAudioFrame(sampleTimestamp, sample);
            break;
    }
}
/// <summary>
/// Filters a list of media formats against the configured audio or video codec filters.
/// </summary>
/// <param name="mediaType">The media type the formats belong to.</param>
/// <param name="formats">The candidate media formats.</param>
/// <returns>The formats whose codec appears in the relevant filter list. If the filter
/// list for the media type is empty, or the media type has no filter, the original list
/// is returned unchanged.</returns>
private static List<SDPMediaFormat> FilterCodecs(SDPMediaTypesEnum mediaType, List<SDPMediaFormat> formats)
{
    // The two duplicated manual filtering loops in the original are collapsed into
    // equivalent LINQ queries.
    if (mediaType == SDPMediaTypesEnum.audio)
    {
        return AudioFormatsFilter.Count == 0
            ? formats
            : formats.Where(f => AudioFormatsFilter.Contains(f.FormatCodec)).ToList();
    }
    else if (mediaType == SDPMediaTypesEnum.video)
    {
        return VideoFormatsFilter.Count == 0
            ? formats
            : formats.Where(f => VideoFormatsFilter.Contains(f.FormatCodec)).ToList();
    }
    else
    {
        // No filter applies to other media types.
        return formats;
    }
}
/// <summary>
/// Diagnostic handler to print out our RTCP reports from the remote WebRTC peer.
/// </summary>
/// <param name="remoteEP">The remote endpoint the report was received from.</param>
/// <param name="mediaType">The media type the report relates to.</param>
/// <param name="recvRtcpReport">The RTCP compound packet received from the remote peer.</param>
private static void RtpSession_OnReceiveReport(IPEndPoint remoteEP, SDPMediaTypesEnum mediaType, RTCPCompoundPacket recvRtcpReport)
{
    // An RTCP compound packet is not guaranteed to carry an SDES report; guard so a
    // purely diagnostic handler cannot throw a null reference.
    if (recvRtcpReport.SDesReport != null)
    {
        Log.LogDebug($"RTCP receive {mediaType} from {remoteEP} CNAME {recvRtcpReport.SDesReport.CNAME} SSRC {recvRtcpReport.SDesReport.SSRC}.");
    }
}
/// <summary>
/// Send a media sample to the remote party.
/// </summary>
/// <param name="mediaType">Whether the sample is audio or video.</param>
/// <param name="sampleTimestamp">The RTP timestamp for the sample.</param>
/// <param name="sample">The sample payload.</param>
public void SendMedia(SDPMediaTypesEnum mediaType, uint sampleTimestamp, byte[] sample)
{
    // Samples are dropped until the remote endpoint is known and DTLS has completed.
    if (RemoteEndPoint == null || !IsDtlsNegotiationComplete)
    {
        return;
    }

    if (mediaType == SDPMediaTypesEnum.video)
    {
        var vp8Format = VideoLocalTrack.Capabilties.Single(x => x.FormatCodec == SDPMediaFormatsEnum.VP8);
        SendVp8Frame(sampleTimestamp, Convert.ToInt32(vp8Format.FormatID), sample);
    }
    else if (mediaType == SDPMediaTypesEnum.audio)
    {
        var pcmuFormat = AudioLocalTrack.Capabilties.Single(x => x.FormatCodec == SDPMediaFormatsEnum.PCMU);
        SendAudioFrame(sampleTimestamp, Convert.ToInt32(pcmuFormat.FormatID), sample);
    }
}
/// <summary>
/// Adds a new audio or video track to the session. Tracks are sent from one peer to another.
/// </summary>
/// <param name="mediaType">The track's media type (audio or video).</param>
/// <param name="capabilities">The codecs we support.</param>
/// <returns>Returns the media stream track that was added or updated.</returns>
public MediaStreamTrack addTrack(SDPMediaTypesEnum mediaType, List<SDPMediaFormat> capabilities)
{
    var existingTrack = (mediaType == SDPMediaTypesEnum.audio) ? RtpSession.AudioTrack : RtpSession.VideoTrack;

    // Reuse the media ID of an existing track of the same type, otherwise allocate
    // the next available one.
    string mid = (existingTrack != null) ? existingTrack.Transceiver.MID : RtpSession.GetNextMediaID();

    return RtpSession.AddTrack(mid, mediaType, false, capabilities);
}
/// <summary>
/// Creates a media announcement for a list of audio/video media formats.
/// </summary>
/// <param name="mediaType">The media type for the announcement.</param>
/// <param name="port">The port the media is being sent/received on.</param>
/// <param name="mediaFormats">The audio/video media formats for the announcement. Can be null.</param>
public SDPMediaAnnouncement(SDPMediaTypesEnum mediaType, int port, List<SDPAudioVideoMediaFormat> mediaFormats)
{
    Media = mediaType;
    Port = port;
    MediaStreamStatus = DEFAULT_STREAM_STATUS;

    if (mediaFormats != null)
    {
        // First format with a given ID wins; duplicates are ignored.
        mediaFormats.ForEach(fmt =>
        {
            if (!MediaFormats.ContainsKey(fmt.ID))
            {
                MediaFormats.Add(fmt.ID, fmt);
            }
        });
    }
}
/// <summary>
/// Creates a new RTP ICE channel (which manages the UDP socket sending and receiving RTP
/// packets) for use with this session.
/// </summary>
/// <param name="mediaType">The type of media the RTP channel is for. Must be audio or video.</param>
/// <returns>A new RTPChannel instance.</returns>
protected override RTPChannel CreateRtpChannel(SDPMediaTypesEnum mediaType)
{
    // Fall back to the default transport policy when no configuration was supplied.
    var transportPolicy = (_configuration != null) ? _configuration.iceTransportPolicy : RTCIceTransportPolicy.all;

    var rtpIceChannel = new RtpIceChannel(
        _configuration?.X_BindAddress,
        RTCIceComponent.rtp,
        _configuration?.iceServers,
        transportPolicy);

    m_rtpChannels.Add(mediaType, rtpIceChannel);

    rtpIceChannel.OnRTPDataReceived += OnRTPDataReceived;

    // Start the RTP, and if required the Control, socket receivers and the RTCP session.
    rtpIceChannel.Start();

    return rtpIceChannel;
}
/// <summary>
/// Creates a new SDP media format for a dynamic media type. Dynamic media types are those that use
/// ID's between 96 and 127 inclusive and require an rtpmap attribute and optionally an fmtp attribute.
/// </summary>
/// <param name="kind">The media type (audio or video) the format is for.</param>
/// <param name="id">The format ID. Must be between 0 and the dynamic ID maximum.</param>
/// <param name="rtpmap">The rtpmap attribute value. Mandatory for dynamic formats.</param>
/// <param name="fmtp">Optional fmtp attribute value.</param>
public SDPAudioVideoMediaFormat(SDPMediaTypesEnum kind, int id, string rtpmap, string fmtp = null)
{
    // Guard clauses; the exception types are preserved for caller compatibility.
    if (id < 0 || id > DYNAMIC_ID_MAX)
    {
        throw new ApplicationException($"SDP media format IDs must be between 0 and {DYNAMIC_ID_MAX}.");
    }

    if (string.IsNullOrWhiteSpace(rtpmap))
    {
        throw new ArgumentNullException("rtpmap", "The rtpmap parameter cannot be empty for a dynamic SDPMediaFormat.");
    }

    Kind = kind;
    ID = id;
    Rtpmap = rtpmap;
    Fmtp = fmtp;

    // Mark the struct as populated.
    _isEmpty = false;
}
/// <summary>
/// Event handler for RTP packets received from the remote party. Tracks packet loss
/// statistics, decodes the audio payload and queues it for playback.
/// </summary>
/// <param name="arg1">The media type of the packet (unused by this handler).</param>
/// <param name="arg2">The received RTP packet.</param>
private void Session_OnRtpPacketReceived(SDPMediaTypesEnum arg1, RTPPacket arg2)
{
    RTPPacket packet = arg2;

    // A payload type of 0 is treated as the kill signal for the session; once set,
    // killsession latches and all subsequent packets are dropped.
    // NOTE(review): payload type 0 is also the static ID for PCMU — confirm the remote
    // party never legitimately sends payload type 0 audio here.
    if (packet.Header.PayloadType == 0 || killsession)
    {
        killsession = true;
        return;
    }

    rxpacketcount++;

    // Count a lost packet whenever the sequence number is not the direct successor of
    // the previous one. lastseq == 0 is used as the "no packet seen yet" sentinel, so
    // a genuine wrap through 0 is not treated as loss.
    if (packet.Header.SequenceNumber != lastseq + 1 && lastseq != 0)
    {
        lostpacketcount++;
    }

    lastseq = packet.Header.SequenceNumber;

    int len = 0;

    try
    {
        // Only decode while the playback buffer holds less than 250ms of audio;
        // beyond that, packets are dropped to cap latency.
        if (playBuffer.BufferedDuration.TotalMilliseconds < 250)
        {
            byte[] decoded = new byte[] { 0 };
            // Decoder.processBuffer replaces the placeholder array with the decoded PCM.
            Decoder.processBuffer(packet.Payload, out decoded);
            len = decoded.Length;
            //decoded = decoder.Decode(encoded, encoded.Length, out len);

            // Measure the received audio level then apply the receive gain before queuing.
            rxAudioLevel = AudioLevelDB(decoded);
            decoded = AdjustAudioLevelDB(decoded, rxGain);
            playBuffer.AddSamples(decoded, 0, len);
        }
    }
    catch (Exception ex)
    {
        // Deliberate best-effort: a decode/playback failure for one packet is swallowed
        // so the audio stream keeps flowing. NOTE(review): consider logging ex.
    }

    // Start playback once at least 20ms of audio is buffered (jitter protection).
    if ((int)playBuffer.BufferedDuration.TotalMilliseconds > 20)
    {
        waveOut.Play();
    }
    else
    {
        //waveOut.Pause();
    }
}
/// <summary>
/// Media announcements can be placed in SDP in any order BUT the orders must match
/// up in offer/answer pairs. This method can be used to get the index for a specific
/// media type. It is useful for obtaining the index of a particular media type when
/// constructing an SDP answer.
/// </summary>
/// <param name="mediaType">The media type to get the index for.</param>
/// <returns>A tuple of the announcement index and its media ID, or the not-present
/// sentinels when no announcement of the requested type exists.</returns>
public (int, string) GetIndexForMediaType(SDPMediaTypesEnum mediaType)
{
    int index = 0;

    foreach (var ann in Media)
    {
        if (ann.Media == mediaType)
        {
            return (index, ann.MediaID);
        }

        index++;
    }

    return (MEDIA_INDEX_NOT_PRESENT, MEDIA_INDEX_TAG_NOT_PRESENT);
}
/// <summary>
/// Media announcements can be placed in SDP in any order BUT the orders must match
/// up in offer/answer pairs. This method can be used to get the index for a specific
/// media type. It is useful for obtaining the index of a particular media type when
/// constructing an SDP answer.
/// </summary>
/// <param name="mediaType">The media type to get the index for.</param>
/// <returns>The zero based index of the first announcement of the requested type, or
/// the not-present sentinel when no such announcement exists.</returns>
public int GetIndexForMediaType(SDPMediaTypesEnum mediaType)
{
    int index = 0;

    foreach (var ann in Media)
    {
        if (ann.Media == mediaType)
        {
            return index;
        }

        index++;
    }

    return MEDIA_INDEX_NOT_PRESENT;
}
/// <summary>
/// Creates a media announcement for a pre-built list of media formats.
/// </summary>
/// <param name="mediaType">The media type for the announcement.</param>
/// <param name="port">The port the media is being sent/received on.</param>
/// <param name="mediaFormats">The media formats for the announcement.</param>
public SDPMediaAnnouncement(SDPMediaTypesEnum mediaType, int port, List<SDPMediaFormat> mediaFormats)
{
    Media = mediaType;
    MediaFormats = mediaFormats;
    Port = port;
}