/// <summary>
/// Handles a message received on the web socket signalling channel. The first message is
/// expected to be the remote SDP offer, a subsequent message the remote answer (when we
/// made the offer) and any further messages ICE candidates.
/// </summary>
private static async Task WebSocketMessageReceived(WebSocketContext context, RTCPeerConnection pc, string message)
{
    try
    {
        if (pc.localDescription == null)
        {
            //logger.LogDebug("Offer SDP: " + message);
            logger.LogDebug("Offer SDP received.");

            // Add local media tracks depending on what was offered. Also add local tracks with the same media ID as
            // the remote tracks so that the media announcement in the SDP answer are in the same order.
            SDP remoteSdp = SDP.ParseSDPDescription(message);

            foreach (var announcement in remoteSdp.Media)
            {
                var filteredCapabilities = FilterCodecs(announcement.Media, announcement.MediaFormats);
                pc.addTrack(new MediaStreamTrack(announcement.Media, false, filteredCapabilities, MediaStreamStatusEnum.RecvOnly));
            }

            pc.setRemoteDescription(new RTCSessionDescriptionInit { sdp = message, type = RTCSdpType.offer });

            var answer = pc.createAnswer(null);
            await pc.setLocalDescription(answer);

            Console.WriteLine(answer.sdp);
            context.WebSocket.Send(answer.sdp);
        }
        else if (pc.remoteDescription == null)
        {
            logger.LogDebug("Answer SDP: " + message);
            pc.setRemoteDescription(new RTCSessionDescriptionInit { sdp = message, type = RTCSdpType.answer });
        }
        else
        {
            logger.LogDebug("ICE Candidate: " + message);

            // An empty payload or the end-of-candidates attribute signals ICE gathering is complete.
            if (string.IsNullOrWhiteSpace(message) || message.Trim().ToLower() == SDP.END_ICE_CANDIDATES_ATTRIBUTE)
            {
                logger.LogDebug("End of candidates message received.");
            }
            else
            {
                var candidateInit = Newtonsoft.Json.JsonConvert.DeserializeObject<RTCIceCandidateInit>(message);
                pc.addIceCandidate(candidateInit);
            }
        }
    }
    catch (Exception excp)
    {
        logger.LogError("Exception WebSocketMessageReceived. " + excp.Message);
    }
}
/// <summary>
/// Tests that a WebRTC style SDP offer (BUNDLE group, ICE credentials, DTLS fingerprint and a
/// bundle-only video announcement) is parsed correctly and that serialising the result and
/// re-parsing it (round trip) preserves the key fields.
/// </summary>
public void ParseWebRtcSDPUnitTest()
{
    logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
    logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

    string sdpStr = @"v=0
o=- 1090343221 2 IN IP4 127.0.0.1
s=-
t=0 0
a=group:BUNDLE audio video
m=audio 11158 RTP/SAVP 0
c=IN IP4 127.0.0.1
a=candidate:1988909849 1 udp 1124657401 192.168.11.50 11158 typ host generation 0
a=candidate:1846148317 1 udp 2094219785 127.0.0.1 11158 typ host generation 0
a=candidate:2012632329 1 udp 2122820711 172.30.224.1 11158 typ host generation 0
a=end-of-candidates
a=ice-ufrag:UWWAVCUMPZHPCLNIMZYA
a=ice-pwd:IEUVYLWMXMQZKCMLTXQHZZVWXRCBLPPNUYFPCABK
a=fingerprint:sha-256 C6:ED:8C:9D:06:50:77:23:0A:4A:D8:42:68:29:D0:70:2F:BB:C7:72:EC:98:5C:62:07:1B:0C:5D:CB:CE:BE:CD
a=setup:actpass
a=sendonly
a=rtcp-mux
a=mid:audio
a=rtpmap:0 PCMU/8000
m=video 0 RTP/SAVP 100
c=IN IP4 127.0.0.1
a=ice-ufrag:UWWAVCUMPZHPCLNIMZYA
a=ice-pwd:IEUVYLWMXMQZKCMLTXQHZZVWXRCBLPPNUYFPCABK
a=fingerprint:sha-256 C6:ED:8C:9D:06:50:77:23:0A:4A:D8:42:68:29:D0:70:2F:BB:C7:72:EC:98:5C:62:07:1B:0C:5D:CB:CE:BE:CD
a=bundle-only
a=setup:actpass
a=sendonly
a=rtcp-mux
a=mid:video
a=rtpmap:100 VP8/90000";

    SDP sdp = SDP.ParseSDPDescription(sdpStr);

    logger.LogDebug(sdp.ToString());

    // Serialise the parsed SDP and parse it again to confirm nothing is lost in the round trip.
    SDP rndTripSdp = SDP.ParseSDPDescription(sdp.ToString());

    Assert.Equal("BUNDLE audio video", sdp.Group);
    Assert.Equal("BUNDLE audio video", rndTripSdp.Group);
    Assert.Equal("UWWAVCUMPZHPCLNIMZYA", sdp.Media[0].IceUfrag);
    Assert.Equal("UWWAVCUMPZHPCLNIMZYA", rndTripSdp.Media[0].IceUfrag);
    Assert.Equal("IEUVYLWMXMQZKCMLTXQHZZVWXRCBLPPNUYFPCABK", sdp.Media[0].IcePwd);
    Assert.Equal("IEUVYLWMXMQZKCMLTXQHZZVWXRCBLPPNUYFPCABK", rndTripSdp.Media[0].IcePwd);
    // Three host candidates appear in the audio announcement (end-of-candidates is not a candidate).
    Assert.Equal(3, sdp.Media[0].IceCandidates.Count());
    Assert.Equal(3, rndTripSdp.Media[0].IceCandidates.Count());
    Assert.Equal("sha-256 C6:ED:8C:9D:06:50:77:23:0A:4A:D8:42:68:29:D0:70:2F:BB:C7:72:EC:98:5C:62:07:1B:0C:5D:CB:CE:BE:CD", sdp.Media[0].DtlsFingerprint);
    Assert.Equal("sha-256 C6:ED:8C:9D:06:50:77:23:0A:4A:D8:42:68:29:D0:70:2F:BB:C7:72:EC:98:5C:62:07:1B:0C:5D:CB:CE:BE:CD", rndTripSdp.Media[0].DtlsFingerprint);
    // The media IDs (a=mid) must survive the round trip for BUNDLE to keep working.
    Assert.Equal("audio", sdp.Media[0].MediaID);
    Assert.Equal("audio", rndTripSdp.Media[0].MediaID);
    Assert.Equal("video", sdp.Media[1].MediaID);
    Assert.Equal("video", rndTripSdp.Media[1].MediaID);
}
/// <summary>
/// RFC 5118 (SIP torture test messages for IPv6) section 4.9: an INVITE carrying
/// IPv4-mapped IPv6 addresses ([::ffff:a.b.c.d]) in the Via, Contact and SDP must parse,
/// with each address recognised as an IPv6 address. Note the request URI host
/// (example.com) is intentionally NOT an IP literal and must fail IP parsing.
/// </summary>
public void RFC5118_4_9()
{
    logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
    logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

    string sipMsg =
        "INVITE sip:[email protected] SIP/2.0" + CRLF +
        "To: sip:[email protected]" + CRLF +
        "From: sip:[email protected];tag=81x2" + CRLF +
        "Via: SIP/2.0/UDP [::ffff:192.0.2.10]:19823;branch=z9hG4bKbh19" + CRLF +
        "Via: SIP/2.0/UDP [::ffff:192.0.2.2];branch=z9hG4bKas3-111" + CRLF +
        "Call-ID: SSG9559905523997077@hlau_4100" + CRLF +
        "Contact: \"T. desk phone\" <sip:ted@[::ffff:192.0.2.2]>" + CRLF +
        "CSeq: 612 INVITE" + CRLF +
        "Max-Forwards: 70" + CRLF +
        "Content-Type: application/sdp" + CRLF +
        "Content-Length: 236" + CRLF +
        CRLF +
        "v=0" + CRLF +
        "o=assistant 971731711378798081 0 IN IP6 ::ffff:192.0.2.2" + CRLF +
        "s=Call me soon, please!" + CRLF +
        "c=IN IP6 ::ffff:192.0.2.2" + CRLF +
        "t=3338481189 3370017201" + CRLF +
        "m=audio 6000 RTP/AVP 2" + CRLF +
        "a=rtpmap:2 G726-32/8000" + CRLF +
        "m=video 6024 RTP/AVP 107" + CRLF +
        "a=rtpmap:107 H263-1998/90000";

    SIPMessageBuffer sipMessageBuffer = SIPMessageBuffer.ParseSIPMessage(Encoding.UTF8.GetBytes(sipMsg), null, null);
    Assert.True(sipMessageBuffer != null, "The SIP message not parsed correctly.");
    SIPRequest sipRequest = SIPRequest.ParseSIPRequest(sipMessageBuffer);

    Assert.Equal(SIPMethodsEnum.INVITE, sipRequest.Method);

    IPAddress ip6;

    // Both Via headers carry IPv4-mapped IPv6 hosts; the top one also carries an explicit port.
    Assert.NotEmpty(sipRequest.Header.Vias.Via);
    Assert.True(IPAddress.TryParse(sipRequest.Header.Vias.TopViaHeader.Host, out ip6));
    Assert.Equal(AddressFamily.InterNetworkV6, ip6.AddressFamily);
    Assert.Equal(19823, sipRequest.Header.Vias.TopViaHeader.Port);
    Assert.True(IPAddress.TryParse(sipRequest.Header.Vias.BottomViaHeader.ReceivedFromAddress, out ip6));
    Assert.Equal(AddressFamily.InterNetworkV6, ip6.AddressFamily);

    // The Contact URI host is an IPv6 literal.
    Assert.NotEmpty(sipRequest.Header.Contact);
    Assert.True(IPAddress.TryParse(sipRequest.Header.Contact[0].ContactURI.HostAddress, out ip6));
    Assert.Equal(AddressFamily.InterNetworkV6, ip6.AddressFamily);

    // The request URI host is a domain name, so parsing it as an IP address must fail.
    Assert.False(IPAddress.TryParse(sipRequest.URI.HostAddress, out ip6));

    // The SDP connection address is also an IPv4-mapped IPv6 address.
    Assert.False(string.IsNullOrWhiteSpace(sipRequest.Body));
    SDP sdp = SDP.ParseSDPDescription(sipRequest.Body);
    Assert.NotNull(sdp);
    Assert.NotNull(sdp.Connection);
    Assert.True(IPAddress.TryParse(sdp.Connection.ConnectionAddress, out ip6));
    Assert.Equal(AddressFamily.InterNetworkV6, ip6.AddressFamily);
    Assert.NotEmpty(sdp.Media);

    logger.LogDebug("-----------------------------------------");
}
/// <summary>
/// Creates a new RTP media session object based on a remote Session Description
/// Protocol (SDP) offer.
/// </summary>
/// <param name="offerSdp">The SDP offer from the remote party.</param>
/// <returns>A new RTP media session object.</returns>
public virtual RTPMediaSession Create(string offerSdp)
{
    var remoteSDP = SDP.ParseSDPDescription(offerSdp);

    // Match the session's address family (IPv4 vs IPv6) to the remote RTP end point.
    var remoteRtpEndPoint = remoteSDP.GetSDPRTPEndPoint();
    RTPMediaSession = Create(remoteRtpEndPoint.Address.AddressFamily);

    return RTPMediaSession;
}
/// <summary>
/// Tests that when answering a remote offer that lists video before audio (the opposite of our
/// default ordering) the answer reuses the remote party's payload IDs for matching codecs
/// (OPUS stays 111, VP8 stays 100).
/// </summary>
public void MediaOrderMatchesRemoteOfferUnitTest()
{
    logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
    logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

    // By default offers made by us always put audio first. Create a remote SDP offer
    // with the video first.
    string remoteSdp = @"v=0
o=- 1986548327 0 IN IP4 127.0.0.1
s=-
c=IN IP4 127.0.0.1
t=0 0
m=video 60638 RTP/AVP 100
a=rtpmap:100 VP8/90000
a=sendrecv
m=audio 60640 RTP/AVP 0 111
a=rtpmap:0 PCMU/8000
a=rtpmap:111 OPUS/48000/2
a=sendrecv";

    // Create a local session and add the video track first.
    RTPSession rtpSession = new RTPSession(false, false, false);

    // Our local OPUS uses ID 110 and VP8 uses 96, deliberately different from the remote offer's
    // 111 and 100, to prove the answer adopts the remote IDs.
    MediaStreamTrack localAudioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List<SDPAudioVideoMediaFormat>
    {
        new SDPAudioVideoMediaFormat(SDPWellKnownMediaFormatsEnum.PCMU),
        new SDPAudioVideoMediaFormat(SDPMediaTypesEnum.audio, 110, "OPUS/48000/2")
    });
    rtpSession.addTrack(localAudioTrack);
    MediaStreamTrack localVideoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List<SDPAudioVideoMediaFormat>
    {
        new SDPAudioVideoMediaFormat(SDPMediaTypesEnum.video, 96, "VP8", 90000)
    });
    rtpSession.addTrack(localVideoTrack);

    var offer = SDP.ParseSDPDescription(remoteSdp);

    logger.LogDebug($"Remote offer: {offer}");

    var result = rtpSession.SetRemoteDescription(SIP.App.SdpType.offer, offer);

    logger.LogDebug($"Set remote description on local session result {result}.");

    Assert.Equal(SetDescriptionResultEnum.OK, result);

    var answer = rtpSession.CreateAnswer(null);

    logger.LogDebug($"Local answer: {answer}");

    // The local capabilities should have been remapped to the remote offer's payload IDs.
    Assert.Equal(111, rtpSession.AudioLocalTrack.Capabilities.Single(x => x.Name() == "OPUS").ID);
    Assert.Equal(100, rtpSession.VideoLocalTrack.Capabilities.Single(x => x.Name() == "VP8").ID);

    //Assert.True(SDPAudioVideoMediaFormat.AreMatch(offer.Media.Single(x => x.Media == SDPMediaTypesEnum.audio)., answer.Media.First().Media));
    //Assert.Equal(offer.Media.Last().Media, answer.Media.Last().Media);

    rtpSession.Close("normal");
}
/// <summary>
/// Handles a message received on the web socket signalling channel. The first message is
/// expected to be the remote SDP offer, a subsequent message the remote answer (when we made
/// the offer) and any further messages ICE candidates.
/// </summary>
private static async Task WebSocketMessageReceived(WebSocketContext context, RTCPeerConnection peerConnection, string message)
{
    try
    {
        if (peerConnection.localDescription == null)
        {
            logger.LogDebug("Offer SDP: " + message);

            // Add local media tracks depending on what was offered. Also add local tracks with the same media ID as
            // the remote tracks so that the media announcement in the SDP answer are in the same order.
            SDP remoteSdp = SDP.ParseSDPDescription(message);
            var audioAnnouncement = remoteSdp.Media.FirstOrDefault(x => x.Media == SDPMediaTypesEnum.audio);
            var videoAnnouncement = remoteSdp.Media.FirstOrDefault(x => x.Media == SDPMediaTypesEnum.video);

            if (audioAnnouncement != null)
            {
                var audioTrack = new MediaStreamTrack(audioAnnouncement.MediaID, SDPMediaTypesEnum.audio, false,
                    new List<SDPMediaFormat> { new SDPMediaFormat(SDPMediaFormatsEnum.PCMU) }, MediaStreamStatusEnum.RecvOnly);
                peerConnection.addTrack(audioTrack);
            }

            if (videoAnnouncement != null)
            {
                var videoTrack = new MediaStreamTrack(videoAnnouncement.MediaID, SDPMediaTypesEnum.video, false,
                    new List<SDPMediaFormat> { new SDPMediaFormat(SDPMediaFormatsEnum.VP8) }, MediaStreamStatusEnum.RecvOnly);
                peerConnection.addTrack(videoTrack);
            }

            // After local media tracks have been added the remote description can be set.
            await peerConnection.setRemoteDescription(new RTCSessionDescriptionInit { sdp = message, type = RTCSdpType.offer });

            var answer = await peerConnection.createAnswer(null);
            await peerConnection.setLocalDescription(answer);

            context.WebSocket.Send(answer.sdp);
        }
        else if (peerConnection.remoteDescription == null)
        {
            logger.LogDebug("Answer SDP: " + message);
            await peerConnection.setRemoteDescription(new RTCSessionDescriptionInit { sdp = message, type = RTCSdpType.answer });
        }
        else
        {
            logger.LogDebug("ICE Candidate: " + message);
            await peerConnection.addIceCandidate(new RTCIceCandidateInit { candidate = message });
        }
    }
    catch (Exception excp)
    {
        logger.LogError("Exception WebSocketMessageReceived. " + excp.Message);
    }
}
/// <summary>
/// Parses and applies the remote party's SDP and then checks whether the remote party has
/// placed the call on hold.
/// </summary>
/// <param name="remoteSDP">The raw SDP payload received from the remote party.</param>
private void SetRemoteSDP(string remoteSDP)
{
    var parsedSdp = SDP.ParseSDPDescription(remoteSDP);

    SetRemoteSDP(parsedSdp);
    CheckRemotePartyHoldCondition(parsedSdp);

    logger.LogDebug($"Remote RTP socket {DestinationEndPoint}.");
}
/// <summary>
/// Places an outgoing test call to the Asterisk server for the specified endpoint. Sets up
/// the RTP channel, builds a minimal PCMU audio offer and initiates the SIP call.
/// </summary>
/// <param name="endpoint">The endpoint (user part) to dial at the Asterisk server.</param>
public void Start(string endpoint)
{
    this.endpoint = endpoint;

    var callerUsername = "1003";
    var callerPassword = passwords[0];

    // Reserve a free UDP port for RTP and wire up the incoming frame handler.
    var rtpPort = FreePort.FindNextAvailableUDPPort(15090);
    rtpChannel = new RTPChannel
    {
        DontTimeout = true,
        RemoteEndPoint = new IPEndPoint(IPAddress.Parse(asterisk), rtpPort)
    };
    rtpChannel.SetFrameType(FrameTypesEnum.Audio);
    rtpChannel.ReservePorts(15000, 15090);
    rtpChannel.OnFrameReady += RtpChannel_OnFrameReady;

    uac = new SIPClientUserAgent(transport, null, null, null, null);

    var destinationUri = SIPURI.ParseSIPURIRelaxed($"{ endpoint }@{ asterisk }");
    var fromHeader = (new SIPFromHeader(callerUsername, new SIPURI(callerUsername, asterisk, null), null)).ToString();
    var sessionId = Crypto.GetRandomInt(5).ToString();

    // Minimal SDP offer advertising a single PCMU audio stream on our reserved RTP port.
    var offerSdp = new SDP
    {
        Version = 2,
        Username = "******",
        SessionId = sessionId,
        Address = localIPEndPoint.Address.ToString(),
        SessionName = "redfox_" + sessionId,
        Timing = "0 0",
        Connection = new SDPConnectionInformation(publicIPAddress.ToString())
    };
    offerSdp.Media.Add(new SDPMediaAnnouncement
    {
        Media = SDPMediaTypesEnum.audio,
        MediaFormats = new List<SDPMediaFormat>() { new SDPMediaFormat((int)SDPMediaFormatsEnum.PCMU, "PCMU", 8000) },
        Port = rtpChannel.RTPPort
    });

    var callDescriptor = new SIPCallDescriptor(callerUsername, callerPassword, destinationUri.ToString(), fromHeader,
        null, null, null, null, SIPCallDirection.Out, SDP.SDP_MIME_CONTENTTYPE, offerSdp.ToString(), null);

    // Subscribe to call progress events before initiating the call.
    uac.CallTrying += Uac_CallTrying;
    uac.CallRinging += Uac_CallRinging;
    uac.CallAnswered += Uac_CallAnswered;
    uac.CallFailed += Uac_CallFailed;

    uac.Call(callDescriptor);
}
/// <summary>
/// Answers an incoming SIP call.
/// </summary>
/// <returns>True if the call was answered, false if there was no pending call to answer.</returns>
public async Task<bool> Answer()
{
    if (m_pendingIncomingCall == null)
    {
        StatusMessage(this, $"There was no pending call available to answer.");
        return false;
    }

    var inviteRequest = m_pendingIncomingCall.ClientTransaction.TransactionRequest;
    SDP remoteOffer = SDP.ParseSDPDescription(inviteRequest.Body);

    // A media type is only considered offered if its stream isn't marked inactive.
    bool offerHasAudio = remoteOffer.Media.Any(x => x.Media == SDPMediaTypesEnum.audio && x.MediaStreamStatus != MediaStreamStatusEnum.Inactive);
    bool offerHasVideo = remoteOffer.Media.Any(x => x.Media == SDPMediaTypesEnum.video && x.MediaStreamStatus != MediaStreamStatusEnum.Inactive);

    var audioOpts = offerHasAudio
        ? new AudioOptions
        {
            AudioSource = AudioSourcesEnum.CaptureDevice,
            OutputDeviceIndex = m_audioOutDeviceIndex,
            AudioCodecs = new List<SDPMediaFormatsEnum> { SDPMediaFormatsEnum.PCMU, SDPMediaFormatsEnum.PCMA }
        }
        : new AudioOptions { AudioSource = AudioSourcesEnum.None };

    var videoOpts = offerHasVideo
        ? new VideoOptions
        {
            VideoSource = VideoSourcesEnum.TestPattern,
            SourceFile = RtpAVSession.VIDEO_TESTPATTERN,
            SourceFramesPerSecond = VIDEO_LIVE_FRAMES_PER_SECOND
        }
        : new VideoOptions { VideoSource = VideoSourcesEnum.None };

    MediaSession = new RtpAVSession(audioOpts, videoOpts);

    m_userAgent.RemotePutOnHold += OnRemotePutOnHold;
    m_userAgent.RemoteTookOffHold += OnRemoteTookOffHold;

    bool answerResult = await m_userAgent.Answer(m_pendingIncomingCall, MediaSession);
    m_pendingIncomingCall = null;

    return answerResult;
}
/// <summary>
/// Web socket handler for the signalling channel. The message is expected to be the remote
/// peer's SDP offer; an answer is generated and sent back over the same web socket, then the
/// DTLS handshake is attempted. NOTE(review): async void appears required by the web socket
/// callback signature — exceptions thrown here are unobservable.
/// </summary>
private static async void MessageReceived(WebSocketContext context, string msg)
{
    //Console.WriteLine($"websocket recv: {msg}");
    var remoteOffer = SDP.ParseSDPDescription(msg);

    Console.WriteLine($"offer sdp: {remoteOffer}");

    var session = new WebRtcSession(
        AddressFamily.InterNetwork,
        DTLS_CERTIFICATE_FINGERPRINT,
        null,
        null);

    session.setRemoteDescription(SdpType.offer, remoteOffer);

    session.RtpSession.OnReceiveReport += RtpSession_OnReceiveReport;
    session.RtpSession.OnSendReport += RtpSession_OnSendReport;
    session.RtpSession.OnRtpPacketReceived += RtpSession_OnRtpPacketReceived;
    session.OnClose += (reason) =>
    {
        Console.WriteLine($"webrtc session closed: {reason}");
        _webRtcSessions.Remove(session);
    };

    // Add local recvonly tracks. This ensures that the SDP answer includes only
    // the codecs we support.
    var videoTrack = session.addTrack(SDPMediaTypesEnum.video, new List<SDPMediaFormat> { new SDPMediaFormat(SDPMediaFormatsEnum.VP8) });
    videoTrack.Transceiver.SetStreamStatus(MediaStreamStatusEnum.RecvOnly);
    var audioTrack = session.addTrack(SDPMediaTypesEnum.audio, new List<SDPMediaFormat> { new SDPMediaFormat(SDPMediaFormatsEnum.PCMU) });
    audioTrack.Transceiver.SetStreamStatus(MediaStreamStatusEnum.RecvOnly);

    var answerSdp = await session.createAnswer();
    session.setLocalDescription(answerSdp);

    Console.WriteLine($"answer sdp: {answerSdp}");

    context.WebSocket.Send(answerSdp.ToString());

    // Media can only flow once the DTLS handshake completes; discard the session if it fails.
    if (DoDtlsHandshake(session))
    {
        _webRtcSessions.Add(session);
    }
    else
    {
        session.Close("dtls handshake failed.");
    }
}
/// <summary>
/// Builds an SDP packet for an XMPP/Jingle session from the supplied address, ICE credentials
/// and payload descriptions.
/// </summary>
/// <param name="ipAddress">The address advertised in the SDP and in the host candidates.</param>
/// <param name="port">The RTP port; port + 1 is advertised for the second (RTCP) candidate.</param>
/// <param name="username">The ICE username fragment; a random one is generated if blank.</param>
/// <param name="password">The ICE password; a random one is generated if blank.</param>
/// <param name="payloads">Payload-type XML elements carrying id, name and optional clockrate attributes.</param>
/// <returns>The populated SDP object.</returns>
public static SDP GetSDP(string ipAddress, int port, string username, string password, List<XElement> payloads)
{
    string iceUsername = username.IsNullOrBlank() ? Crypto.GetRandomString(6) : username;
    string icePwd = password.IsNullOrBlank() ? Crypto.GetRandomString(6) : password;

    SDP sdp = new SDP()
    {
        Address = ipAddress,
        Username = "******",
        SessionId = Crypto.GetRandomString(5),
        AnnouncementVersion = Crypto.GetRandomInt(5),
        Connection = new SDPConnectionInformation(ipAddress),
        Timing = "0 0",
        IceUfrag = iceUsername,
        IcePwd = icePwd,
        Media = new List<SDPMediaAnnouncement>()
        {
            new SDPMediaAnnouncement(port)
            {
                //BandwidthAttributes = new List<string>(){"RS:0", "RR:0"} // Indicate that RTCP is not being used.
            }
        }
    };

    // Advertise host candidates for components 1 (RTP) and 2 (RTCP).
    sdp.ExtraAttributes.Add("a=candidate:1 1 UDP " + Crypto.GetRandomString(10) + " " + ipAddress + " " + port + " typ host");
    sdp.ExtraAttributes.Add("a=candidate:1 2 UDP " + Crypto.GetRandomString(10) + " " + ipAddress + " " + (port + 1) + " typ host");

    foreach (XElement payload in payloads)
    {
        int formatID;
        Int32.TryParse(payload.Attribute("id").Value, out formatID);
        string name = payload.Attribute("name").Value;

        int clockRate = 0;
        if (payload.Attribute("clockrate") != null)
        {
            Int32.TryParse(payload.Attribute("clockrate").Value, out clockRate);
        }

        // A zero/absent clock rate uses the two-argument format constructor.
        sdp.Media[0].MediaFormats.Add(clockRate == 0
            ? new SDPMediaFormat(formatID, name)
            : new SDPMediaFormat(formatID, name, clockRate));
    }

    //Console.WriteLine("SDPToJingle SDP=> " + sdp.ToString());

    return sdp;
}
/// <summary>
/// Tests that when answering a remote offer that lists video before audio (the opposite of
/// our default ordering) the media announcements in the answer appear in the same order as
/// the offer.
/// </summary>
public void MediaOrderMatchesRemoteOfferUnitTest()
{
    logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
    logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

    // By default offers made by us always put audio first. Create a remote SDP offer
    // with the video first.
    string remoteSdp = @"v=0
o=- 1986548327 0 IN IP4 127.0.0.1
s=-
c=IN IP4 127.0.0.1
t=0 0
m=video 60638 RTP/AVP 100
a=rtpmap:100 VP8/90000
a=sendrecv
m=audio 60640 RTP/AVP 0
a=rtpmap:0 PCMU/8000
a=sendrecv";

    // Create a local session and add the video track first.
    RTPSession localSession = new RTPSession(false, false, false);
    MediaStreamTrack localAudioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List<SDPMediaFormat>
    {
        new SDPMediaFormat(SDPMediaFormatsEnum.PCMU)
    });
    localSession.addTrack(localAudioTrack);
    MediaStreamTrack localVideoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List<SDPMediaFormat>
    {
        new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
    });
    localSession.addTrack(localVideoTrack);

    var offer = SDP.ParseSDPDescription(remoteSdp);

    logger.LogDebug($"Remote offer: {offer}");

    var result = localSession.SetRemoteDescription(SIP.App.SdpType.offer, offer);

    logger.LogDebug($"Set remote description on local session result {result}.");

    Assert.Equal(SetDescriptionResultEnum.OK, result);

    var answer = localSession.CreateAnswer(null);

    logger.LogDebug($"Local answer: {answer}");

    // The answer's announcement order must mirror the offer (video first, audio last).
    Assert.Equal(offer.Media.First().Media, answer.Media.First().Media);
    Assert.Equal(offer.Media.Last().Media, answer.Media.Last().Media);

    localSession.Close("normal");
}
/// <summary>
/// Accepts an incoming call by sending a 200 OK with the supplied SDP body from every user
/// agent whose Call-ID matches the incoming message.
/// </summary>
/// <param name="sdp">The SDP answer to include in the 200 OK response body.</param>
/// <param name="incomingCall">The incoming call message whose Call-ID identifies the dialog.</param>
internal void AcceptCall(SDP sdp, Message incomingCall)
{
    // Snapshot the collection so concurrent modification during iteration is safe.
    foreach (UserAgent agent in Useragents.ToArray())
    {
        if (agent.CallID != incomingCall.First("Call-ID").Value.ToString())
        {
            continue;
        }

        Message okResponse = agent.CreateResponse(200, "OK");
        okResponse.InsertHeader(new Header("application/sdp", "Content-Type"));
        okResponse.Body = sdp.ToString();
        agent.SendResponse(okResponse);
    }
}
/// <summary>
/// Handler for the remote peer's SDP answer arriving on the signalling channel.
/// </summary>
private static void SDPAnswerReceived(WebRtcSession webRtcSession, string sdpAnswer)
{
    try
    {
        logger.LogDebug("Answer SDP: " + sdpAnswer);

        var parsedAnswer = SDP.ParseSDPDescription(sdpAnswer);
        webRtcSession.OnSdpAnswer(parsedAnswer);
    }
    catch (Exception excp)
    {
        logger.LogError("Exception SDPAnswerReceived. " + excp.Message);
    }
}
/// <summary>
/// Answers an incoming SIP call.
/// </summary>
/// <param name="mediaManager">The media manager used to set up the call's media streams.</param>
public void Answer(MediaManager mediaManager)
{
    _mediaManager = mediaManager;
    _mediaManager.NewCall();

    // Apply the caller's offer and generate our answer SDP.
    SDP remoteOffer = SDP.ParseSDPDescription(m_uas.CallRequest.Body);
    _mediaManager.SetRemoteSDP(remoteOffer);
    SDP answerSdp = _mediaManager.GetSDP(false);

    m_uas.Answer(_sdpMimeContentType, answerSdp.ToString(), null, SIPDialogueTransferModesEnum.NotAllowed);
}
/// <summary>
/// Deletes the SDP record with the specified ID.
/// </summary>
/// <param name="id">The ID of the SDP record to delete.</param>
/// <returns>404 Not Found for a negative or unknown ID, otherwise 200 OK containing the deleted record.</returns>
public HttpResponseMessage Delete(int id)
{
    if (id < 0)
    {
        return Request.CreateResponse(HttpStatusCode.NotFound);
    }

    SDP s = uow.SDPs.Get(id);

    // Guard against a non-negative ID with no matching record. Previously the null entity
    // was passed straight to Delete and echoed back in a 200 response.
    if (s == null)
    {
        return Request.CreateResponse(HttpStatusCode.NotFound);
    }

    uow.SDPs.Delete(s);
    return Request.CreateResponse(HttpStatusCode.OK, s);
}
/// <summary>
/// Tests that an SDP payload from the CounterPath Bria softphone is parsed correctly.
/// </summary>
public void ParseBriaSDPUnitTest()
{
    Console.WriteLine(System.Reflection.MethodBase.GetCurrentMethod().Name);

    string rawSdp = "v=0\r\no=- 5 2 IN IP4 10.1.1.2\r\ns=CounterPath Bria\r\nc=IN IP4 144.137.16.240\r\nt=0 0\r\nm=audio 34640 RTP/AVP 0 8 101\r\na=sendrecv\r\na=rtpmap:101 telephone-event/8000\r\na=fmtp:101 0-15\r\na=alt:1 1 : STu/ZtOu 7hiLQmUp 10.1.1.2 34640\r\n";

    SDP parsedSdp = SDP.ParseSDPDescription(rawSdp);

    Debug.WriteLine(parsedSdp.ToString());

    Assert.IsTrue(parsedSdp.Connection.ConnectionAddress == "144.137.16.240", "The connection address was not parsed correctly.");
    Assert.IsTrue(parsedSdp.Media[0].Port == 34640, "The connection port was not parsed correctly.");
    // Payload 0 is listed first in the m= line so PCMU should be the highest priority format.
    Assert.IsTrue(parsedSdp.Media[0].MediaFormats[0].Name == "PCMU", "The highest priority media format name was incorrect.");
}
/// <summary>
/// Tests that an SDP payload with stray leading whitespace on some lines (as produced by a
/// misbehaving Bria client) is still parsed correctly.
/// </summary>
public void ParseBadFormatBriaSDPUnitTest()
{
    logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
    logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

    string rawSdp = " v=0\r\no=- 5 2 IN IP4 10.1.1.2\r\n s=CounterPath Bria\r\nc=IN IP4 144.137.16.240\r\nt=0 0\r\n m=audio 34640 RTP/AVP 0 8 101\r\na=sendrecv\r\na=rtpmap:101 telephone-event/8000\r\na=fmtp:101 0-15\r\na=alt:1 1 : STu/ZtOu 7hiLQmUp 10.1.1.2 34640\r\n";

    SDP parsedSdp = SDP.ParseSDPDescription(rawSdp);

    Debug.WriteLine(parsedSdp.ToString());

    Assert.True(parsedSdp.Connection.ConnectionAddress == "144.137.16.240", "The connection address was not parsed correctly.");
    Assert.True(parsedSdp.SessionName == "CounterPath Bria", "The SessionName was not parsed correctly.");
}
/// <summary>
/// Creates a new SDP record from the request body.
/// </summary>
/// <param name="value">The SDP record to create.</param>
/// <returns>201 Created with the record on success, 503 Service Unavailable on failure.</returns>
public HttpResponseMessage Post([FromBody] SDP value)
{
    try
    {
        uow.SDPs.Create(value);
        return Request.CreateResponse(HttpStatusCode.Created, value);
    }
    catch (Exception)
    {
        // Persistence failed; report the service as unavailable rather than leaking details.
        return Request.CreateErrorResponse(HttpStatusCode.ServiceUnavailable, "Server error");
    }
}
/// <summary>
/// Handler for the XMPP call leg being answered. Sends the initial STUN binding request on
/// the media socket and then answers the SIP leg.
/// </summary>
/// <param name="xmppSDP">The SDP received from the XMPP gateway.</param>
private void Answered(SDP xmppSDP)
{
    m_xmppServerEndPoint = SDP.GetSDPRTPEndPoint(xmppSDP.ToString());
    logger.Debug("Sending STUN binding request to " + m_xmppServerEndPoint + ".");

    // The binding request's username is the remote ICE ufrag concatenated with our local one.
    STUNMessage bindingRequest = new STUNMessage(STUNMessageTypesEnum.BindingRequest);
    bindingRequest.AddUsernameAttribute(xmppSDP.IceUfrag + m_localSTUNUFrag);
    byte[] bindingRequestBytes = bindingRequest.ToByteBuffer();
    m_xmppMediaSocket.Send(bindingRequestBytes, bindingRequestBytes.Length, m_xmppServerEndPoint);

    m_uas.Answer("application/sdp", GetSDPForSIPResponse().ToString(), null, SIPDialogueTransferModesEnum.NotAllowed);
}
/// <summary>
/// Updates the SDP record with the specified ID using the values from the request body.
/// </summary>
/// <param name="id">The ID of the SDP record to update.</param>
/// <param name="value">The new values for the record.</param>
/// <returns>404 Not Found for a negative ID, otherwise 200 OK with the persisted record.</returns>
public HttpResponseMessage Put(int id, [FromBody] SDP value)
{
    if (id < 0)
    {
        return Request.CreateResponse(HttpStatusCode.NotFound);
    }

    // Force the route ID onto the entity so the body can't update a different record.
    value.Id = id;
    uow.SDPs.Update(value);

    // Return the freshly loaded entity so the caller sees the persisted state.
    return Request.CreateResponse(HttpStatusCode.OK, uow.SDPs.Get(id));
}
/// <summary>
/// Gets the a basic Session Description Protocol object that describes this RTP session.
/// </summary>
/// <param name="localAddress">The RTP socket we will be sending from. Note this can't be IPAddress.Any as
/// it's getting sent to the callee. An IP address of 0.0.0.0 or [::0] will typically be interpreted as
/// "don't send me any RTP".</param>
/// <returns>An Session Description Protocol object that can be sent to a remote callee.</returns>
public SDP GetSDP(IPAddress localAddress)
{
    return new SDP(localAddress)
    {
        SessionId = Crypto.GetRandomInt(5).ToString(),
        SessionName = SDP_SESSION_NAME,
        Timing = "0 0",
        Connection = new SDPConnectionInformation(localAddress),
        Media = MediaAnnouncements,
    };
}
// private static ILogger logger = Log.Logger;

/// <summary>
/// Replaces a private IP address in the connection (c=) line of an SDP payload with the
/// specified public IP address, so a remote party outside the private network can route media.
/// </summary>
/// <param name="sdpBody">The SDP payload to mangle.</param>
/// <param name="publicIPAddress">The public IP address to substitute into the connection line.</param>
/// <param name="wasMangled">Set to true if the SDP body was modified.</param>
/// <returns>The mangled SDP when a substitution was made, otherwise the original SDP body.</returns>
public static string MangleSDP(string sdpBody, string publicIPAddress, out bool wasMangled)
{
    wasMangled = false;

    try
    {
        if (sdpBody != null && publicIPAddress != null)
        {
            IPAddress addr = SDP.GetSDPRTPEndPoint(sdpBody).Address;
            //rj2: need to consider publicAddress and IPv6 for mangling
            IPAddress pubaddr = IPAddress.Parse(publicIPAddress);
            string sdpAddress = addr.ToString();

            // Only mangle if there is something to change. For example the server could be on the same private subnet in which case it can't help.
            if (IPSocket.IsPrivateAddress(sdpAddress) && publicIPAddress != sdpAddress &&
                pubaddr.AddressFamily == AddressFamily.InterNetworkV6 && addr.AddressFamily == AddressFamily.InterNetworkV6)
            {
                // BUG FIX: the replacement was missing the space between "IP6" and the address
                // (previously "c=IN IP6" + address), producing an invalid connection line.
                string mangledSDP = Regex.Replace(sdpBody, @"c=IN IP6 (?<ipaddress>([:a-fA-F0-9]+))", "c=IN IP6 " + publicIPAddress, RegexOptions.Singleline);
                wasMangled = true;

                return mangledSDP;
            }
            else if (IPSocket.IsPrivateAddress(sdpAddress) && publicIPAddress != sdpAddress &&
                     pubaddr.AddressFamily == AddressFamily.InterNetwork && addr.AddressFamily == AddressFamily.InterNetwork)
            {
                //logger.LogDebug("MangleSDP replacing private " + sdpAddress + " with " + publicIPAddress + ".");
                string mangledSDP = Regex.Replace(sdpBody, @"c=IN IP4 (?<ipaddress>(\d+\.){3}\d+)", "c=IN IP4 " + publicIPAddress, RegexOptions.Singleline);
                wasMangled = true;

                return mangledSDP;
            }
        }
        else
        {
            Logger.Logger.Warn("Mangle SDP was called with an empty body or public IP address.");
        }

        return sdpBody;
    }
    catch (Exception excp)
    {
        Logger.Logger.Error("Exception MangleSDP. ->" + excp.Message);
        return sdpBody;
    }
}
/// <summary>
/// An outgoing call was successfully answered.
/// </summary>
/// <param name="uac">The local SIP user agent client that initiated the call.</param>
/// <param name="sipResponse">The SIP answer response received from the remote party.</param>
private void CallAnswered(ISIPClientUserAgent uac, SIPResponse sipResponse)
{
    StatusMessage("Call answered: " + sipResponse.StatusCode + " " + sipResponse.ReasonPhrase + ".");

    bool isSuccessResponse = sipResponse.StatusCode >= 200 && sipResponse.StatusCode <= 299;

    if (!isSuccessResponse)
    {
        CallFinished();
        return;
    }

    // Point the audio channel at the RTP end point advertised in the answer SDP.
    IPEndPoint remoteSDPEndPoint = SDP.GetSDPRTPEndPoint(sipResponse.Body);
    _audioChannel.SetRemoteRTPEndPoint(remoteSDPEndPoint);
    CallAnswer();
}
/// <summary>
/// Handles responses to our re-INVITE requests.
/// </summary>
/// <param name="localSIPEndPoint">The local end point the response was received on.</param>
/// <param name="remoteEndPoint">The remote end point the response came from.</param>
/// <param name="sipTransaction">The UAS transaction the response is part of.</param>
/// <param name="sipResponse">The SIP response.</param>
private Task<SocketError> ReinviteRequestFinalResponseReceived(SIPEndPoint localSIPEndPoint, SIPEndPoint remoteEndPoint, SIPTransaction sipTransaction, SIPResponse sipResponse)
{
    if (sipResponse.Status != SIPResponseStatusCodesEnum.Ok)
    {
        logger.LogWarning($"Re-INVITE request failed with response {sipResponse.ShortDescription}.");
    }
    else
    {
        // Update the remote party's SDP.
        Dialogue.RemoteSDP = sipResponse.Body;
        MediaSession.SetRemoteDescription(SDP.ParseSDPDescription(sipResponse.Body));
    }

    return Task.FromResult(SocketError.Success);
}
/// <summary>
/// Handler for the remote peer's SDP answer arriving on the signalling channel. Applies the
/// answer as the session's remote description.
/// </summary>
private static void SDPAnswerReceived(WebRtcSession webRtcSession, string sdpAnswer)
{
    try
    {
        logger.LogDebug("Answer SDP: " + sdpAnswer);

        var parsedAnswer = SDP.ParseSDPDescription(sdpAnswer);
        var remoteDescription = new RTCSessionDescription { sdp = parsedAnswer, type = RTCSdpType.answer };
        webRtcSession.setRemoteDescription(remoteDescription);
    }
    catch (Exception excp)
    {
        logger.LogError("Exception SDPAnswerReceived. " + excp.Message);
    }
}
/// <summary>
/// Sends a SIP INVITE carrying the supplied SDP body to the specified URI.
/// </summary>
/// <param name="uri">The destination URI to invite.</param>
/// <param name="sdp">The SDP offer to include in the request body.</param>
public void SendInvite(string uri, SDP sdp)
{
    uri = checkURI(uri);

    UserAgent userAgent = new UserAgent(Stack)
    {
        LocalParty = PublicServiceIdentity,
        RemoteParty = new Address(uri)
    };
    Useragents.Add(userAgent);

    Message inviteRequest = userAgent.CreateRequest("INVITE");
    inviteRequest.InsertHeader(new Header("application/sdp", "Content-Type"));
    inviteRequest.Body = sdp.ToString();
    userAgent.SendRequest(inviteRequest);
}
/// <summary>
/// Handler for the XMPP client call being answered. Builds a 200 OK response and a dialogue
/// for the SIP leg, then fires the call answered event.
/// </summary>
/// <param name="sdp">The SDP received from the XMPP answer.</param>
private void Answered(SDP sdp)
{
    Console.WriteLine("XMPP client call answered.");

    IsUACAnswered = true;

    SIPResponse answerResponse = new SIPResponse(SIPResponseStatusCodesEnum.Ok, "Ok", new SIPEndPoint(new IPEndPoint(IPAddress.Loopback, 0)));
    answerResponse.Header.ContentType = SDP.SDP_MIME_CONTENTTYPE;
    answerResponse.Body = sdp.ToString();

    SIPDialogue = new SIPDialogue(null, null, null, null, -1, null, null, null, Guid.NewGuid(), Owner, AdminMemberId, null, sdp.ToString());
    SIPDialogue.CallDurationLimit = CallDescriptor.CallDurationLimit;

    CallAnswered(this, answerResponse);
}
/// <summary>
/// Event handler for an answer on an outgoing Google Voice call.
/// </summary>
/// <param name="xmppSDP">The SDP packet received from the Google Voice gateway.</param>
private void XMPPAnswered(SDP xmppSDP)
{
    StatusMessage("Google Voice call answered.");

    IPEndPoint remoteRtpEndPoint = SDP.GetSDPRTPEndPoint(xmppSDP.ToString());
    _audioChannel.SetRemoteRTPEndPoint(remoteRtpEndPoint);

    // Google Voice require that a STUN exchange occurs on the RTP socket before the RTP packet can flow.
    // This code block sends a STUN binding request to the Google Voice gateway.
    STUNMessage bindingRequest = new STUNMessage(STUNMessageTypesEnum.BindingRequest);
    bindingRequest.AddUsernameAttribute(xmppSDP.IceUfrag + m_localSTUNUFrag);
    byte[] bindingRequestBytes = bindingRequest.ToByteBuffer();
    //_audioChannel.SendRTPRaw(bindingRequestBytes, bindingRequestBytes.Length);
}
/// <summary>
/// Answers an incoming SIP call.
/// </summary>
public async Task Answer()
{
    if (m_pendingIncomingCall == null)
    {
        StatusMessage(this, $"There was no pending call available to answer.");
        return;
    }

    var inviteRequest = m_pendingIncomingCall.ClientTransaction.TransactionRequest;
    SDP remoteOffer = SDP.ParseSDPDescription(inviteRequest.Body);
    bool offerHasAudio = remoteOffer.Media.Any(x => x.Media == SDPMediaTypesEnum.audio);
    bool offerHasVideo = remoteOffer.Media.Any(x => x.Media == SDPMediaTypesEnum.video);

    var audioOpts = offerHasAudio
        ? new AudioOptions { AudioSource = AudioSourcesEnum.Microphone }
        : new AudioOptions { AudioSource = AudioSourcesEnum.None };

    var videoOpts = offerHasVideo
        ? new VideoOptions
        {
            VideoSource = VideoSourcesEnum.TestPattern,
            SourceFile = RtpAVSession.VIDEO_TESTPATTERN,
            SourceFramesPerSecond = VIDEO_LIVE_FRAMES_PER_SECOND
        }
        : new VideoOptions { VideoSource = VideoSourcesEnum.None };

    // Match the media session's address family to the source of the incoming request.
    MediaSession = new RtpAVSession(inviteRequest.RemoteSIPEndPoint.Address.AddressFamily, audioOpts, videoOpts);

    m_userAgent.RemotePutOnHold += OnRemotePutOnHold;
    m_userAgent.RemoteTookOffHold += OnRemoteTookOffHold;

    await m_userAgent.Answer(m_pendingIncomingCall, MediaSession);
    m_pendingIncomingCall = null;
}
/// <summary>
/// Starts the RTSP media session for this device: negotiates OPTIONS / DESCRIBE / SETUP / PLAY
/// with the RTSP server, then creates the audio and video channels and begins receiving.
/// NOTE(review): the request order is significant — SETUP requires the SDP from DESCRIBE and
/// PLAY requires the session ID from the first SETUP.
/// </summary>
public void Start()
{
    RTSPChannelParameters videoParameters = new RTSPChannelParameters { };
    RTSPChannelParameters audioParameters = new RTSPChannelParameters { };

    rtspSession = RTSPSession.Open(this.MediaDevice.ONVIF.GetCurrentMediaProfileRtspStreamUri().AbsoluteUri);
    //rtspSession.RTSPServerResponse += new RTSPSession.RTSPResponseHandler(rtsp_RTSPServerResponse);

    // OPTIONS returns the commands supported by the server:
    // OPTIONS, DESCRIBE, SETUP, PLAY, PAUSE, GET_PARAMETER, TEARDOWN, SET_PARAMETER
    RTSPResponse respons = rtspSession.Options();

    // DESCRIBE returns the SDP payload describing the available streams.
    respons = rtspSession.Describe();
    string ContentBase = respons.ContentBase;

    // Parse the SDP packet to extract codec, sample rate and control URI per stream.
    sdp = SDP.Parse(respons.Body);

    videoParameters.Codec = sdp.GetCodec(MediaType: "video");
    videoParameters.SampleRate = sdp.GetSampleRate(MediaType: "video");
    string VideoControl = sdp.GetControl(MediaType: "video");

    audioParameters.Codec = sdp.GetCodec(MediaType: "audio");
    audioParameters.SampleRate = sdp.GetSampleRate(MediaType: "audio");
    string AudioControl = sdp.GetControl(MediaType: "audio");

    // The per-stream SETUP URIs are the content base with the stream's control path appended.
    string VideoSetupUri = String.Format("{0}{1}", ContentBase, VideoControl);
    string AudioSetupUri = String.Format("{0}{1}", ContentBase, AudioControl);

    // Reserve two RTP/RTCP port pairs: video first, then audio.
    int[] ports = GetPortRange(4);
    videoParameters.RTPPort = ports[0];
    videoParameters.RTCPPort = ports[1];
    audioParameters.RTPPort = ports[2];
    audioParameters.RTCPPort = ports[3];

    // SETUP the video stream; the server assigns the session identifier used from here on.
    respons = rtspSession.Setup(VideoSetupUri, videoParameters.RTPPort, videoParameters.RTCPPort);
    rtspSession.Parameters.Session = respons.Session;
    videoParameters.SSRT = respons.SSRT;

    // SETUP the audio stream within the same RTSP session.
    respons = rtspSession.Setup(AudioSetupUri, audioParameters.RTPPort, audioParameters.RTCPPort, rtspSession.Parameters.Session);
    audioParameters.SSRT = respons.SSRT;

    // PLAY starts the media flowing from the server.
    respons = rtspSession.Play(rtspSession.Parameters.Session);

    videoChannel = new RTSPChannel(videoParameters);
    audioChannel = new RTSPChannel(audioParameters);
    //audioChannel.DataRecieved += MediaDevice.AVProcessor.AudioDataRecieved;
    //videoChannel.DataRecieved += MediaDevice.AVProcessor.VideoDataRecieved;
    audioChannel.DataRecieved += MediaDevice.Decoder.AudioDataRecieved;
    videoChannel.DataRecieved += MediaDevice.Decoder.VideoDataRecieved;

    videoChannel.StartRecieving();
    audioChannel.StartRecieving();
}