/// <summary>
/// Hands the socket handle to the DTLS context and waits for the handshake to complete.
/// </summary>
/// <param name="peerConnection">The WebRTC session to perform the DTLS handshake on.</param>
/// <param name="dtls">The DTLS context that carries out the handshake.</param>
/// <returns>True if the handshake completes successfully. False if not.</returns>
private static async Task<bool> DoDtlsHandshake(RTCPeerConnection peerConnection, DtlsHandshake dtls)
{
    Console.WriteLine("DoDtlsHandshake started.");

    // The native DTLS layer operates directly on the audio RTP socket handle.
    ulong rtpSocketHandle = (ulong)peerConnection.GetRtpChannel(SDPMediaTypesEnum.audio).RtpSocket.Handle;
    int handshakeResult = dtls.DoHandshakeAsServer(rtpSocketHandle);

    Console.WriteLine("DtlsContext initialisation result=" + handshakeResult);

    if (!dtls.IsHandshakeComplete())
    {
        return false;
    }

    Console.WriteLine("DTLS negotiation complete.");

    // TODO fix race condition!!! First RTP packet is not getting decrypted.
    var sendSrtp = new Srtp(dtls, false);
    var receiveSrtp = new Srtp(dtls, true);

    // Wire the SRTP protect/unprotect delegates into the session.
    peerConnection.SetSecurityContext(
        sendSrtp.ProtectRTP,
        receiveSrtp.UnprotectRTP,
        sendSrtp.ProtectRTCP,
        receiveSrtp.UnprotectRTCP);

    await peerConnection.Start();

    return true;
}
/// <summary>
/// Builds a peer connection that sends a test pattern video stream and a music audio
/// stream, both offered as send/receive tracks.
/// </summary>
/// <returns>A task that resolves to the configured peer connection.</returns>
private static Task<RTCPeerConnection> CreatePeerConnection()
{
    var rtcConfig = new RTCConfiguration
    {
        iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } }
    };
    var peerConnection = new RTCPeerConnection(rtcConfig);

    var testPatternSource = new VideoTestPatternSource();
    var videoEncoderEndPoint = new VideoEncoderEndPoint();
    var audioSource = new AudioExtrasSource(
        new AudioEncoder(),
        new AudioSourceOptions { AudioSource = AudioSourcesEnum.Music });

    // Offer both media types to the remote peer.
    peerConnection.addTrack(new MediaStreamTrack(videoEncoderEndPoint.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv));
    peerConnection.addTrack(new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv));

    // Raw test pattern frames are routed through the encoder and then sent to the peer.
    testPatternSource.OnVideoSourceRawSample += videoEncoderEndPoint.ExternalVideoSourceRawSample;
    videoEncoderEndPoint.OnVideoSourceEncodedSample += peerConnection.SendVideo;
    audioSource.OnAudioSourceEncodedSample += peerConnection.SendAudio;
    peerConnection.OnVideoFormatsNegotiated += (formats) => videoEncoderEndPoint.SetVideoSourceFormat(formats.First());

    peerConnection.onconnectionstatechange += async (state) =>
    {
        logger.LogDebug($"Peer connection state change to {state}.");

        switch (state)
        {
            case RTCPeerConnectionState.connected:
                await audioSource.StartAudio();
                await testPatternSource.StartVideo();
                break;
            case RTCPeerConnectionState.failed:
                peerConnection.Close("ice disconnection");
                break;
            case RTCPeerConnectionState.closed:
                await testPatternSource.CloseVideo();
                await audioSource.CloseAudio();
                break;
        }
    };

    // Diagnostics.
    peerConnection.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
    peerConnection.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
    peerConnection.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
    peerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

    return Task.FromResult(peerConnection);
}
/// <summary>
/// Hands the socket handle to the DTLS context and waits for the handshake to complete.
/// </summary>
/// <param name="peerConnection">The WebRTC session to perform the DTLS handshake on.</param>
/// <returns>True if the handshake completed successfully or false otherwise.</returns>
private static bool DoDtlsHandshake(RTCPeerConnection peerConnection)
{
    logger.LogDebug("DoDtlsHandshake started.");

    var dtls = new DtlsHandshake(DTLS_CERTIFICATE_PATH, DTLS_KEY_PATH);

    // The handshake runs directly over the audio RTP socket.
    ulong rtpSocketHandle = (ulong)peerConnection.GetRtpChannel(SDPMediaTypesEnum.audio).RtpSocket.Handle;
    int handshakeResult = dtls.DoHandshakeAsServer(rtpSocketHandle);

    logger.LogDebug("DtlsContext initialisation result=" + handshakeResult);

    bool succeeded = dtls.IsHandshakeComplete();

    if (succeeded)
    {
        logger.LogDebug("DTLS negotiation complete.");

        // Derive the SRTP protect/unprotect contexts from the DTLS keying material.
        var sendSrtp = new Srtp(dtls, false);
        var receiveSrtp = new Srtp(dtls, true);

        peerConnection.SetSecurityContext(
            sendSrtp.ProtectRTP,
            receiveSrtp.UnprotectRTP,
            sendSrtp.ProtectRTCP,
            receiveSrtp.UnprotectRTCP);
    }

    // The DTLS context is shut down on both the success and failure paths, exactly
    // as the original did in each branch.
    dtls.Shutdown();

    return succeeded;
}
/// <summary>
/// Creates a peer connection carrying a single data channel, with diagnostic
/// logging wired up for RTCP, STUN and ICE events.
/// </summary>
/// <returns>A task that resolves to the new peer connection.</returns>
private static Task<RTCPeerConnection> CreatePeerConnection()
{
    var rtcConfig = new RTCConfiguration
    {
        iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } }
    };
    var peerConnection = new RTCPeerConnection(rtcConfig);

    // The channel itself is not kept; the remote peer's view of it is what matters here.
    peerConnection.createDataChannel("test", null);

    peerConnection.onconnectionstatechange += async (state) =>
    {
        logger.LogDebug($"Peer connection state change to {state}.");

        if (state == RTCPeerConnectionState.failed)
        {
            peerConnection.Close("ice disconnection");
        }
    };

    // Diagnostics.
    peerConnection.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
    peerConnection.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
    peerConnection.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
    peerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

    return Task.FromResult(peerConnection);
}
/// <summary>
/// Hands the socket handle to the DTLS context and waits for the handshake to complete.
/// </summary>
/// <param name="peerConnection">The WebRTC session to perform the DTLS handshake on.</param>
/// <param name="dtls">The DTLS context used to carry out the handshake.</param>
/// <returns>True if the handshake completed successfully or false otherwise.</returns>
private static async Task<bool> DoDtlsHandshake(RTCPeerConnection peerConnection, DtlsHandshake dtls)
{
    logger.LogDebug("DoDtlsHandshake started.");

    ulong rtpSocketHandle = (ulong)peerConnection.GetRtpChannel(SDPMediaTypesEnum.audio).RtpSocket.Handle;
    int handshakeResult = dtls.DoHandshakeAsServer(rtpSocketHandle);

    logger.LogDebug("DtlsContext initialisation result=" + handshakeResult);

    if (!dtls.IsHandshakeComplete())
    {
        return false;
    }

    logger.LogDebug("DTLS negotiation complete.");

    // Build the SRTP protect/unprotect contexts from the negotiated keying material.
    var sendSrtp = new Srtp(dtls, false);
    var receiveSrtp = new Srtp(dtls, true);

    peerConnection.SetSecurityContext(
        sendSrtp.ProtectRTP,
        receiveSrtp.UnprotectRTP,
        sendSrtp.ProtectRTCP,
        receiveSrtp.UnprotectRTCP);

    await peerConnection.Start();

    // Kick off the media sampling loop the first time a handshake succeeds.
    if (!_isSampling)
    {
        _ = Task.Run(StartMedia);
    }

    return true;
}
/// <summary>
/// Creates a peer connection that streams a test pattern, encoded by the Windows
/// video end point, over a send-only video track.
/// </summary>
/// <returns>The configured peer connection.</returns>
private static RTCPeerConnection CreatePeerConnection()
{
    var rtcConfig = new RTCConfiguration
    {
        iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } }
    };
    var peerConnection = new RTCPeerConnection(rtcConfig);

    var testPatternSource = new VideoTestPatternSource();
    var windowsVideoEndPoint = new WindowsVideoEndPoint(true);

    var videoTrack = new MediaStreamTrack(windowsVideoEndPoint.GetVideoSourceFormats(), MediaStreamStatusEnum.SendOnly);
    peerConnection.addTrack(videoTrack);

    // Raw frames from the test pattern are encoded and then forwarded to the peer.
    testPatternSource.OnVideoSourceRawSample += windowsVideoEndPoint.ExternalVideoSourceRawSample;
    windowsVideoEndPoint.OnVideoSourceEncodedSample += peerConnection.SendVideo;
    peerConnection.OnVideoFormatsNegotiated += (sdpFormat) =>
        windowsVideoEndPoint.SetVideoSourceFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));

    peerConnection.onconnectionstatechange += async (state) =>
    {
        logger.LogDebug($"Peer connection state change to {state}.");

        switch (state)
        {
            case RTCPeerConnectionState.failed:
                peerConnection.Close("ice disconnection");
                break;
            case RTCPeerConnectionState.closed:
                await testPatternSource.CloseVideo();
                await windowsVideoEndPoint.CloseVideo();
                break;
            case RTCPeerConnectionState.connected:
                await windowsVideoEndPoint.StartVideo();
                await testPatternSource.StartVideo();
                break;
        }
    };

    // Diagnostics.
    peerConnection.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
    peerConnection.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
    peerConnection.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
    peerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

    return peerConnection;
}
/// <summary>
/// Creates a peer connection with diagnostic logging and, once the connection reaches
/// the connected state, forwards any received RTP packets into a separate RTP session
/// (created for ffplay, per the log message below).
/// </summary>
/// <param name="context">The web socket context of the signalling connection.
/// NOTE(review): only referenced from the commented-out onicecandidate handler — confirm it is still needed.</param>
/// <returns>The configured peer connection, also stored in <c>_activePeerConnection</c>.</returns>
private static RTCPeerConnection Createpc(WebSocketContext context)
{
    var pc = new RTCPeerConnection(null);

    pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) =>
    {
        // Log whether the binding request carries the use-candidate attribute (ICE nomination).
        bool hasUseCandidate = msg.Attributes.Any(x => x.AttributeType == STUNAttributeTypesEnum.UseCandidate);
        Console.WriteLine($"STUN {msg.Header.MessageType} received from {ep}, use candidate {hasUseCandidate}.");
    };
    pc.onicecandidateerror += (candidate, error) => logger.LogWarning($"Error adding remote ICE candidate. {error} {candidate}");
    pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");
    //pc.OnReceiveReport += (type, rtcp) => logger.LogDebug($"RTCP {type} report received.");
    pc.OnRtcpBye += (reason) => logger.LogDebug($"RTCP BYE receive, reason: {(string.IsNullOrWhiteSpace(reason) ? "<none>" : reason)}.");
    //pc.onicecandidate += (candidate) =>
    //{
    //    if (pc.signalingState == RTCSignalingState.have_local_offer ||
    //        pc.signalingState == RTCSignalingState.have_remote_offer)
    //    {
    //        context.WebSocket.Send($"candidate:{candidate}");
    //    }
    //};

    pc.onconnectionstatechange += (state) =>
    {
        logger.LogDebug($"Peer connection state changed to {state}.");
        if (state == RTCPeerConnectionState.connected)
        {
            logger.LogDebug("Creating RTP session for ffplay.");

            // Relay session is built from whatever local track capabilities were negotiated.
            var rtpSession = CreateRtpSession(pc.AudioLocalTrack?.Capabilities, pc.VideoLocalTrack?.Capabilities);

            // Forward each inbound RTP packet to the relay session, preserving the
            // timestamp, marker bit and payload type from the original header.
            // NOTE(review): subscribing here means a reconnect would add a second handler — confirm intended.
            pc.OnRtpPacketReceived += (rep, media, rtpPkt) =>
            {
                if (media == SDPMediaTypesEnum.audio && rtpSession.AudioDestinationEndPoint != null)
                {
                    //logger.LogDebug($"Forwarding {media} RTP packet to ffplay timestamp {rtpPkt.Header.Timestamp}.");
                    rtpSession.SendRtpRaw(media, rtpPkt.Payload, rtpPkt.Header.Timestamp, rtpPkt.Header.MarkerBit, rtpPkt.Header.PayloadType);
                }
                else if (media == SDPMediaTypesEnum.video && rtpSession.VideoDestinationEndPoint != null)
                {
                    //logger.LogDebug($"Forwarding {media} RTP packet to ffplay timestamp {rtpPkt.Header.Timestamp}.");
                    rtpSession.SendRtpRaw(media, rtpPkt.Payload, rtpPkt.Header.Timestamp, rtpPkt.Header.MarkerBit, rtpPkt.Header.PayloadType);
                }
            };

            // Tear the relay session down when the WebRTC RTP transport closes.
            pc.OnRtpClosed += (reason) => rtpSession.Close(reason);
        }
    };

    _activePeerConnection = pc;

    return pc;
}
/// <summary>
/// Creates a peer connection that transmits a sine wave audio stream on a
/// send-only track, with diagnostic logging attached.
/// </summary>
/// <returns>A task that resolves to the configured peer connection.</returns>
private static Task<RTCPeerConnection> CreatePeerConnection()
{
    var rtcConfig = new RTCConfiguration
    {
        iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } }
    };
    var peerConnection = new RTCPeerConnection(rtcConfig);

    var sineWaveSource = new AudioExtrasSource(
        new AudioEncoder(),
        new AudioSourceOptions { AudioSource = AudioSourcesEnum.SineWave });
    sineWaveSource.OnAudioSourceEncodedSample += peerConnection.SendAudio;

    var audioTrack = new MediaStreamTrack(sineWaveSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendOnly);
    peerConnection.addTrack(audioTrack);

    // Once the answer arrives, lock the source to the first negotiated codec.
    peerConnection.OnAudioFormatsNegotiated += (sdpFormat) =>
        sineWaveSource.SetAudioSourceFormat(SDPMediaFormatInfo.GetAudioCodecForSdpFormat(sdpFormat.First().FormatCodec));

    peerConnection.onconnectionstatechange += async (state) =>
    {
        logger.LogDebug($"Peer connection state change to {state}.");

        switch (state)
        {
            case RTCPeerConnectionState.connected:
                await sineWaveSource.StartAudio();
                break;
            case RTCPeerConnectionState.failed:
                peerConnection.Close("ice disconnection");
                break;
            case RTCPeerConnectionState.closed:
                await sineWaveSource.CloseAudio();
                break;
        }
    };

    // Diagnostics.
    peerConnection.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
    peerConnection.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
    peerConnection.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
    peerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

    return Task.FromResult(peerConnection);
}
/// <summary>
/// Hands the socket handle to the DTLS context and waits for the handshake to complete.
/// </summary>
/// <param name="peerConnection">The WebRTC session to perform the DTLS handshake on.</param>
/// <returns>True if the handshake completed successfully, false otherwise.</returns>
/// <exception cref="ApplicationException">Thrown if the DTLS certificate or key file is missing.</exception>
private static bool DoDtlsHandshake(RTCPeerConnection peerConnection)
{
    Log.LogDebug("DoDtlsHandshake started.");

    if (!File.Exists(DTLS_CERTIFICATE_PATH))
    {
        throw new ApplicationException($"The DTLS certificate file could not be found at {DTLS_CERTIFICATE_PATH}.");
    }
    else if (!File.Exists(DTLS_KEY_PATH))
    {
        throw new ApplicationException($"The DTLS key file could not be found at {DTLS_KEY_PATH}.");
    }

    var dtls = new DtlsHandshake(DTLS_CERTIFICATE_PATH, DTLS_KEY_PATH);

    // Single-shot shutdown so the native context is released exactly once, whether
    // triggered by a failed handshake or by the peer connection closing later on.
    bool isShutdown = false;
    Action shutdownDtls = () =>
    {
        if (!isShutdown)
        {
            isShutdown = true;
            dtls.Shutdown();
        }
    };

    peerConnection.onconnectionstatechange += (state) =>
    {
        if (state == RTCPeerConnectionState.closed)
        {
            shutdownDtls();
        }
    };

    byte[] clientFingerprint = null;
    int res = dtls.DoHandshakeAsServer((ulong)peerConnection.GetRtpChannel(SDPMediaTypesEnum.audio).RtpSocket.Handle, ref clientFingerprint);

    Log.LogDebug("DtlsContext initialisation result=" + res);

    if (dtls.IsHandshakeComplete())
    {
        // TODO: Check client fingerprint matches one supplied in the SDP.
        Log.LogDebug("DTLS negotiation complete.");

        var srtpSendContext = new Srtp(dtls, false);
        var srtpReceiveContext = new Srtp(dtls, true);

        peerConnection.SetSecurityContext(
            srtpSendContext.ProtectRTP,
            srtpReceiveContext.UnprotectRTP,
            srtpSendContext.ProtectRTCP,
            srtpReceiveContext.UnprotectRTCP);

        return true;
    }
    else
    {
        // BUG FIX: previously a failed handshake left the native DTLS context alive
        // unless the connection later reached the closed state (which a failed
        // handshake generally prevents), leaking the context. Shut it down now,
        // matching the behaviour of the other handshake variants in this codebase.
        Log.LogWarning("DTLS handshake failed.");
        shutdownDtls();
        return false;
    }
}
/// <summary>
/// Hands the socket handle to the DTLS context and waits for the handshake to complete.
/// </summary>
/// <param name="peerConnection">The WebRTC session to perform the DTLS handshake on.</param>
/// <param name="dtls">The DTLS context that carries out the handshake (owned by the caller).</param>
/// <returns>True if the handshake completed successfully, false otherwise.</returns>
/// <exception cref="ApplicationException">Thrown if the DTLS certificate or key file is missing.</exception>
private static async Task<bool> DoDtlsHandshake(RTCPeerConnection peerConnection, DtlsHandshake dtls)
{
    logger.LogDebug("DoDtlsHandshake started.");

    if (!File.Exists(DTLS_CERTIFICATE_PATH))
    {
        throw new ApplicationException($"The DTLS certificate file could not be found at {DTLS_CERTIFICATE_PATH}.");
    }
    else if (!File.Exists(DTLS_KEY_PATH))
    {
        throw new ApplicationException($"The DTLS key file could not be found at {DTLS_KEY_PATH}.");
    }

    byte[] remoteFingerprint = null;
    ulong rtpSocketHandle = (ulong)peerConnection.GetRtpChannel(SDPMediaTypesEnum.audio).RtpSocket.Handle;
    int handshakeResult = dtls.DoHandshakeAsServer(rtpSocketHandle, ref remoteFingerprint);

    logger.LogDebug("DtlsContext initialisation result=" + handshakeResult);

    if (!dtls.IsHandshakeComplete())
    {
        return false;
    }

    logger.LogDebug("DTLS negotiation complete.");

    // TODO: Check client fingerprint matches one supplied in the SDP.
    var sendSrtp = new Srtp(dtls, false);
    var receiveSrtp = new Srtp(dtls, true);

    peerConnection.SetSecurityContext(
        sendSrtp.ProtectRTP,
        receiveSrtp.UnprotectRTP,
        sendSrtp.ProtectRTCP,
        receiveSrtp.UnprotectRTCP);

    await peerConnection.Start();

    //dtls.Shutdown();

    // Start the test pattern feed the first time a peer completes the handshake.
    if (_sendTestPatternTimer == null)
    {
        _sendTestPatternTimer = new Timer(SendTestPattern, null, 0, TEST_PATTERN_SPACING_MILLISECONDS);
    }

    return true;
}
/// <summary>
/// Hands the socket handle to the DTLS context and waits for the handshake to complete.
/// </summary>
/// <param name="peerConnection">The WebRTC session to perform the DTLS handshake on.</param>
/// <returns>True if the handshake completed successfully, false otherwise.</returns>
/// <exception cref="ApplicationException">Thrown if the DTLS certificate or key file is missing.</exception>
private static async Task<bool> DoDtlsHandshake(RTCPeerConnection peerConnection)
{
    logger.LogDebug("DoDtlsHandshake started.");

    if (!File.Exists(DTLS_CERTIFICATE_PATH))
    {
        throw new ApplicationException($"The DTLS certificate file could not be found at {DTLS_CERTIFICATE_PATH}.");
    }
    else if (!File.Exists(DTLS_KEY_PATH))
    {
        throw new ApplicationException($"The DTLS key file could not be found at {DTLS_KEY_PATH}.");
    }

    var dtls = new DtlsHandshake(DTLS_CERTIFICATE_PATH, DTLS_KEY_PATH);

    // The native handshake blocks, so run it on the thread pool to keep the caller responsive.
    ulong rtpSocketHandle = (ulong)peerConnection.GetRtpChannel(SDPMediaTypesEnum.audio).RtpSocket.Handle;
    var handshakeResult = await Task.Run(() => dtls.DoHandshakeAsServer(rtpSocketHandle));

    logger.LogDebug("DtlsContext initialisation result=" + handshakeResult);

    if (!dtls.IsHandshakeComplete())
    {
        logger.LogWarning("DTLS handshake failed.");
        dtls.Shutdown();
        return false;
    }

    logger.LogDebug("DTLS handshake succeeded.");

    var sendSrtp = new Srtp(dtls, false);
    var receiveSrtp = new Srtp(dtls, true);

    peerConnection.SetSecurityContext(
        sendSrtp.ProtectRTP,
        receiveSrtp.UnprotectRTP,
        sendSrtp.ProtectRTCP,
        receiveSrtp.UnprotectRTCP);

    return true;
}
/// <summary>
/// Hands the socket handle to the DTLS context and waits for the handshake to complete.
/// </summary>
/// <param name="peerConnection">The WebRTC session to perform the DTLS handshake on.</param>
/// <param name="dtls">The DTLS context that carries out the handshake (owned by the caller).</param>
/// <returns>True if the handshake completed successfully, false otherwise.</returns>
/// <exception cref="ApplicationException">Thrown if the DTLS certificate or key file is missing.</exception>
private static bool DoDtlsHandshake(RTCPeerConnection peerConnection, DtlsHandshake dtls)
{
    Log.LogDebug("DoDtlsHandshake started.");

    if (!File.Exists(DTLS_CERTIFICATE_PATH))
    {
        throw new ApplicationException($"The DTLS certificate file could not be found at {DTLS_CERTIFICATE_PATH}.");
    }
    else if (!File.Exists(DTLS_KEY_PATH))
    {
        throw new ApplicationException($"The DTLS key file could not be found at {DTLS_KEY_PATH}.");
    }

    // The remote party's certificate fingerprint is returned by the native layer
    // but is not validated here (it is logged/checked by other variants).
    byte[] remoteFingerprint = null;
    ulong rtpSocketHandle = (ulong)peerConnection.GetRtpChannel(SDPMediaTypesEnum.audio).RtpSocket.Handle;
    var handshakeResult = dtls.DoHandshakeAsServer(rtpSocketHandle, ref remoteFingerprint);

    Log.LogDebug($"DtlsContext initialisation result {handshakeResult}.");

    if (!dtls.IsHandshakeComplete())
    {
        Log.LogWarning("DTLS handshake failed.");
        dtls.Shutdown();
        return false;
    }

    Log.LogDebug("DTLS negotiation complete.");

    var sendSrtp = new Srtp(dtls, false);
    var receiveSrtp = new Srtp(dtls, true);

    peerConnection.SetSecurityContext(
        sendSrtp.ProtectRTP,
        receiveSrtp.UnprotectRTP,
        sendSrtp.ProtectRTCP,
        receiveSrtp.UnprotectRTCP);

    return true;
}
/// <summary>
/// Replaces any existing peer connection with a new one configured with the supplied STUN
/// server and ICE transport policy, adds a send-only music audio track and wires up
/// extensive diagnostic logging.
/// </summary>
/// <param name="context">The web socket context of the signalling connection.
/// NOTE(review): only referenced from commented-out code — confirm it is still required.</param>
/// <param name="stunServer">Optional STUN/TURN server to include in the ICE configuration.</param>
/// <param name="relayOnly">True to restrict ICE to relay (TURN) candidates only.</param>
/// <returns>A task that resolves to the new peer connection (also stored in <c>_peerConnection</c>).</returns>
private static Task<RTCPeerConnection> Createpc(WebSocketContext context, RTCIceServer stunServer, bool relayOnly)
{
    // Only one active connection is supported; close the previous one first.
    if (_peerConnection != null)
    {
        _peerConnection.Close("normal");
    }

    // Load a fixed localhost certificate if present. NOTE: currently unused — the
    // certificates line in the configuration below is commented out.
    List<RTCCertificate> presetCertificates = null;
    if (File.Exists(LOCALHOST_CERTIFICATE_PATH))
    {
        var localhostCert = new X509Certificate2(LOCALHOST_CERTIFICATE_PATH, (string)null, X509KeyStorageFlags.Exportable);
        presetCertificates = new List<RTCCertificate> { new RTCCertificate { Certificate = localhostCert } };
    }

    RTCConfiguration pcConfiguration = new RTCConfiguration
    {
        //certificates = presetCertificates,
        //X_RemoteSignallingAddress = (context != null) ? context.UserEndPoint.Address : null,
        iceServers = stunServer != null ? new List<RTCIceServer> { stunServer } : null,
        iceTransportPolicy = relayOnly ? RTCIceTransportPolicy.relay : RTCIceTransportPolicy.all,
        //X_BindAddress = IPAddress.Any, // NOTE: Not reqd. Using this to filter out IPv6 addresses so can test with Pion.
    };

    _peerConnection = new RTCPeerConnection(pcConfiguration);

    //_peerConnection.GetRtpChannel().MdnsResolve = (hostname) => Task.FromResult(NetServices.InternetDefaultAddress);
    _peerConnection.GetRtpChannel().MdnsResolve = MdnsResolve;
    //_peerConnection.GetRtpChannel().OnStunMessageReceived += (msg, ep, isrelay) => logger.LogDebug($"STUN message received from {ep}, message type {msg.Header.MessageType}.");

    //var dc = _peerConnection.createDataChannel(DATA_CHANNEL_LABEL, null);
    //dc.onmessage += (msg) => logger.LogDebug($"data channel receive ({dc.label}-{dc.id}): {msg}");

    // Add a send-only audio track (this doesn't require any native libraries for encoding so is good for x-platform testing).
    AudioExtrasSource audioSource = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions { AudioSource = AudioSourcesEnum.Music });
    audioSource.OnAudioSourceEncodedSample += _peerConnection.SendAudio;

    MediaStreamTrack audioTrack = new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendOnly);
    _peerConnection.addTrack(audioTrack);

    // Lock the audio source to the first negotiated format once the answer arrives.
    _peerConnection.OnAudioFormatsNegotiated += (formats) => audioSource.SetAudioSourceFormat(formats.First());

    _peerConnection.onicecandidateerror += (candidate, error) => logger.LogWarning($"Error adding remote ICE candidate. {error} {candidate}");
    _peerConnection.onconnectionstatechange += async (state) =>
    {
        logger.LogDebug($"Peer connection state changed to {state}.");
        if (state == RTCPeerConnectionState.disconnected || state == RTCPeerConnectionState.failed)
        {
            _peerConnection.Close("remote disconnection");
        }
        // Audio starts on connect and is released on close.
        if (state == RTCPeerConnectionState.connected)
        {
            await audioSource.StartAudio();
        }
        else if (state == RTCPeerConnectionState.closed)
        {
            await audioSource.CloseAudio();
        }
    };
    _peerConnection.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
    _peerConnection.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
    _peerConnection.OnRtcpBye += (reason) => logger.LogDebug($"RTCP BYE receive, reason: {(string.IsNullOrWhiteSpace(reason) ? "<none>" : reason)}.");

    // Peer ICE connection state changes are for ICE events such as the STUN checks completing.
    _peerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

    // Log remotely-initiated data channels and any messages received on them.
    _peerConnection.ondatachannel += (dc) =>
    {
        logger.LogDebug($"Data channel opened by remote peer, label {dc.label}, stream ID {dc.id}.");
        dc.onmessage += (msg) =>
        {
            logger.LogDebug($"data channel ({dc.label}:{dc.id}): {msg}.");
        };
    };

    // Dump the relevant SDP whenever an offer is set locally or remotely.
    _peerConnection.onsignalingstatechange += () =>
    {
        if (_peerConnection.signalingState == RTCSignalingState.have_remote_offer)
        {
            logger.LogDebug("Remote SDP:");
            logger.LogDebug(_peerConnection.remoteDescription.sdp.ToString());
        }
        else if (_peerConnection.signalingState == RTCSignalingState.have_local_offer)
        {
            logger.LogDebug("Local SDP:");
            logger.LogDebug(_peerConnection.localDescription.sdp.ToString());
        }
    };

    return Task.FromResult(_peerConnection);
}
/// <summary>
/// Creates a peer connection for an incoming web socket signalling connection. Received
/// video frames are decoded by the Windows video end point and rendered into the form's
/// picture box; audio and video are both negotiated as receive-only.
/// </summary>
/// <param name="context">The web socket context of the signalling connection.
/// NOTE(review): not referenced in this body — confirm it is still required.</param>
/// <returns>The configured peer connection.</returns>
private static RTCPeerConnection WebSocketOpened(WebSocketContext context)
{
    WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint();
    winVideoEP.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
    {
        // Marshal to the UI thread before touching the picture box.
        _form.BeginInvoke(new Action(() =>
        {
            unsafe
            {
                // Pin the managed sample buffer so the Bitmap can wrap it without a copy.
                // NOTE(review): the row stride is computed as bmp.Length / height rather
                // than using the supplied stride parameter — confirm this is intentional.
                fixed (byte* s = bmp)
                {
                    System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap((int)width, (int)height, (int)(bmp.Length / height), System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s);
                    _picBox.Image = bmpImage;
                }
            }
        }));
    };

    var peerConnection = new RTCPeerConnection(null);

    // Add local recvonly tracks. This ensures that the SDP answer includes only
    // the codecs we support.
    MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List<SDPMediaFormat> { new SDPMediaFormat(SDPMediaFormatsEnum.PCMU) }, MediaStreamStatusEnum.RecvOnly);
    peerConnection.addTrack(audioTrack);
    MediaStreamTrack videoTrack = new MediaStreamTrack(winVideoEP.GetVideoSinkFormats(), MediaStreamStatusEnum.RecvOnly);
    peerConnection.addTrack(videoTrack);

    peerConnection.OnReceiveReport += RtpSession_OnReceiveReport;
    peerConnection.OnSendReport += RtpSession_OnSendReport;
    peerConnection.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) =>
    {
        // Log whether the binding request carries the use-candidate attribute (ICE nomination).
        bool hasUseCandidate = msg.Attributes.Any(x => x.AttributeType == STUNAttributeTypesEnum.UseCandidate);
        Console.WriteLine($"STUN {msg.Header.MessageType} received from {ep}, use candidate {hasUseCandidate}.");
    };
    peerConnection.oniceconnectionstatechange += (state) => Console.WriteLine($"ICE connection state changed to {state}.");
    peerConnection.onconnectionstatechange += async (state) =>
    {
        Console.WriteLine($"Peer connection state changed to {state}.");
        if (state == RTCPeerConnectionState.closed)
        {
            await winVideoEP.CloseVideo();
        }
    };

    //peerConnection.OnRtpPacketReceived += (IPEndPoint rep, SDPMediaTypesEnum media, RTPPacket rtpPkt) =>
    //{
    //    //logger.LogDebug($"RTP {media} pkt received, SSRC {rtpPkt.Header.SyncSource}.");
    //    if (media == SDPMediaTypesEnum.video)
    //    {
    //        winVideoEP.GotVideoRtp(rep, rtpPkt.Header.SyncSource, rtpPkt.Header.SequenceNumber, rtpPkt.Header.Timestamp, rtpPkt.Header.PayloadType, rtpPkt.Header.MarkerBit == 1, rtpPkt.Payload);
    //    }
    //};

    // Fully reassembled video frames are handed to the decoder end point.
    peerConnection.OnVideoFrameReceived += winVideoEP.GotVideoFrame;

    return peerConnection;
}
/// <summary>
/// Creates a receive-only peer connection whose incoming video frames are decoded by the
/// VP8.Net end point and rendered into the form's picture box. The audio path and a raw
/// frame capture stream are present but commented out.
/// </summary>
/// <returns>A task that resolves to the configured peer connection.</returns>
private static Task<RTCPeerConnection> CreatePeerConnection()
{
    var peerConnection = new RTCPeerConnection();

    //FileStream captureStream = new FileStream("capture.stm", FileMode.Create, FileAccess.ReadWrite);

    var videoEP = new Vp8NetVideoEncoderEndPoint();
    videoEP.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
    {
        // Only paint while the form is active; marshal to the UI thread first.
        if (_isFormActivated)
        {
            _form?.BeginInvoke(new Action(() =>
            {
                unsafe
                {
                    // Pin the decoded sample so the Bitmap can wrap the buffer directly.
                    // NOTE(review): row stride is derived as bmp.Length / height instead of
                    // the supplied stride parameter — confirm this is intentional.
                    fixed (byte* s = bmp)
                    {
                        Bitmap bmpImage = new Bitmap((int)width, (int)height, (int)(bmp.Length / height), PixelFormat.Format24bppRgb, (IntPtr)s);
                        _picBox.Image = bmpImage;
                    }
                }
            }));
        }
    };

    // Sink (speaker) only audio end point.
    //WindowsAudioEndPoint windowsAudioEP = new WindowsAudioEndPoint(new AudioEncoder(), -1, -1, true, false);
    //MediaStreamTrack audioTrack = new MediaStreamTrack(windowsAudioEP.GetAudioSinkFormats(), MediaStreamStatusEnum.RecvOnly);
    //peerConnection.addTrack(audioTrack);
    MediaStreamTrack videoTrack = new MediaStreamTrack(videoEP.GetVideoSinkFormats(), MediaStreamStatusEnum.RecvOnly);
    peerConnection.addTrack(videoTrack);

    // Each reassembled video frame is logged and handed to the decoder.
    peerConnection.OnVideoFrameReceived += (rep, ts, frame, pixelFmt) =>
    {
        Console.WriteLine($"Video frame received {frame.Length} bytes.");
        //Console.WriteLine(frame.HexStr());
        //captureStream.Write(Encoding.ASCII.GetBytes($"{frame.Length},"));
        //captureStream.Write(frame);
        //captureStream.Flush();
        videoEP.GotVideoFrame(rep, ts, frame, pixelFmt);
    };
    peerConnection.OnVideoFormatsNegotiated += (formats) => videoEP.SetVideoSinkFormat(formats.First());
    //peerConnection.OnAudioFormatsNegotiated += (formats) =>
    //    windowsAudioEP.SetAudioSinkFormat(formats.First());

    peerConnection.OnTimeout += (mediaType) => logger.LogDebug($"Timeout on media {mediaType}.");
    peerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state changed to {state}.");
    peerConnection.onconnectionstatechange += (state) =>
    {
        logger.LogDebug($"Peer connection connected changed to {state}.");
        if (state == RTCPeerConnectionState.closed || state == RTCPeerConnectionState.failed || state == RTCPeerConnectionState.disconnected)
        {
            //captureStream.Close();
        }
    };

    peerConnection.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) =>
    {
        // Log whether the binding request carries the use-candidate attribute (ICE nomination).
        bool hasUseCandidate = msg.Attributes.Any(x => x.AttributeType == STUNAttributeTypesEnum.UseCandidate);
        Console.WriteLine($"STUN {msg.Header.MessageType} received from {ep}, use candidate {hasUseCandidate}.");
    };

    // Handler body is entirely commented out; kept as a hook for the audio path above.
    peerConnection.OnRtpPacketReceived += (IPEndPoint rep, SDPMediaTypesEnum media, RTPPacket rtpPkt) =>
    {
        //logger.LogDebug($"RTP {media} pkt received, SSRC {rtpPkt.Header.SyncSource}.");
        //if (media == SDPMediaTypesEnum.audio)
        //{
        //    windowsAudioEP.GotAudioRtp(rep, rtpPkt.Header.SyncSource, rtpPkt.Header.SequenceNumber, rtpPkt.Header.Timestamp, rtpPkt.Header.PayloadType, rtpPkt.Header.MarkerBit == 1, rtpPkt.Payload);
        //}
    };

    return Task.FromResult(peerConnection);
}
/// <summary>
/// Replaces any existing peer connection with a new one configured with the supplied STUN
/// server, creates a local data channel and wires up diagnostic logging plus ICE candidate
/// forwarding over the signalling web socket.
/// </summary>
/// <param name="context">The web socket context used to send local ICE candidates to the remote party (may be null).</param>
/// <param name="stunServer">Optional STUN/TURN server to include in the ICE configuration.</param>
/// <returns>The new peer connection (also stored in <c>_peerConnection</c>).</returns>
private static RTCPeerConnection Createpc(WebSocketContext context, RTCIceServer stunServer)
{
    // Only one active connection is supported; close the previous one first.
    if (_peerConnection != null)
    {
        _peerConnection.Close("normal");
    }

    // Use a fixed localhost certificate if one is available on disk.
    List<RTCCertificate> presetCertificates = null;
    if (File.Exists(LOCALHOST_CERTIFICATE_PATH))
    {
        var localhostCert = new X509Certificate2(LOCALHOST_CERTIFICATE_PATH, (string)null, X509KeyStorageFlags.Exportable);
        presetCertificates = new List<RTCCertificate> { new RTCCertificate { Certificate = localhostCert } };
    }

    RTCConfiguration pcConfiguration = new RTCConfiguration
    {
        certificates = presetCertificates,
        X_RemoteSignallingAddress = (context != null) ? context.UserEndPoint.Address : null,
        iceServers = stunServer != null ? new List<RTCIceServer> { stunServer } : null,
        iceTransportPolicy = RTCIceTransportPolicy.all,
        //X_BindAddress = IPAddress.Any, // NOTE: Not reqd. Using this to filter out IPv6 addresses so can test with Pion.
    };

    _peerConnection = new RTCPeerConnection(pcConfiguration);

    //_peerConnection.GetRtpChannel().MdnsResolve = (hostname) => Task.FromResult(NetServices.InternetDefaultAddress);
    _peerConnection.GetRtpChannel().MdnsResolve = MdnsResolve;
    _peerConnection.GetRtpChannel().OnStunMessageReceived += (msg, ep, isrelay) => logger.LogDebug($"STUN message received from {ep}, message class {msg.Header.MessageClass}.");

    // NOTE(review): the lambda parameter in ondatachannel below shadows this local 'dc' —
    // confirm the compiler/language version in use permits this.
    var dc = _peerConnection.createDataChannel(DATA_CHANNEL_LABEL, null);
    dc.onmessage += (msg) => logger.LogDebug($"data channel receive ({dc.label}-{dc.id}): {msg}");

    // Add inactive audio and video tracks.
    //MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List<SDPMediaFormat> { new SDPMediaFormat(SDPMediaFormatsEnum.PCMU) }, MediaStreamStatusEnum.RecvOnly);
    //pc.addTrack(audioTrack);
    //MediaStreamTrack videoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List<SDPMediaFormat> { new SDPMediaFormat(SDPMediaFormatsEnum.VP8) }, MediaStreamStatusEnum.Inactive);
    //pc.addTrack(videoTrack);

    _peerConnection.onicecandidateerror += (candidate, error) => logger.LogWarning($"Error adding remote ICE candidate. {error} {candidate}");
    _peerConnection.onconnectionstatechange += (state) =>
    {
        logger.LogDebug($"Peer connection state changed to {state}.");
        if (state == RTCPeerConnectionState.disconnected || state == RTCPeerConnectionState.failed)
        {
            _peerConnection.Close("remote disconnection");
        }
    };
    _peerConnection.OnReceiveReport += (ep, type, rtcp) => logger.LogDebug($"RTCP {type} report received.");
    _peerConnection.OnRtcpBye += (reason) => logger.LogDebug($"RTCP BYE receive, reason: {(string.IsNullOrWhiteSpace(reason) ? "<none>" : reason)}.");

    // Forward locally gathered ICE candidates to the remote party while an offer is pending.
    _peerConnection.onicecandidate += (candidate) =>
    {
        if (_peerConnection.signalingState == RTCSignalingState.have_local_offer ||
            _peerConnection.signalingState == RTCSignalingState.have_remote_offer)
        {
            if (context != null)
            {
                context.WebSocket.Send($"candidate:{candidate}");
            }
        }
    };

    // Peer ICE connection state changes are for ICE events such as the STUN checks completing.
    _peerConnection.oniceconnectionstatechange += (state) =>
    {
        logger.LogDebug($"ICE connection state change to {state}.");
    };

    // Log remotely-initiated data channels and any messages received on them.
    _peerConnection.ondatachannel += (dc) =>
    {
        logger.LogDebug($"Data channel opened by remote peer, label {dc.label}, stream ID {dc.id}.");
        dc.onmessage += (msg) =>
        {
            logger.LogDebug($"data channel ({dc.label}:{dc.id}): {msg}.");
        };
    };

    return _peerConnection;
}
/// <summary>
/// Hands the RTP socket handle to the native DTLS context, waits for the handshake to
/// complete, verifies the remote certificate fingerprint against the one advertised in
/// the SDP and, on success, installs the SRTP protect/unprotect contexts on the session.
/// </summary>
/// <param name="pc">The WebRTC peer connection to perform the DTLS handshake on.</param>
/// <param name="dtls">The native DTLS handshake wrapper.</param>
/// <param name="isClient">True to act as the DTLS client (we initiate the handshake),
/// false to act as the DTLS server.</param>
/// <param name="sdpFingerprint">The certificate fingerprint the remote party supplied in
/// its SDP, compared against the fingerprint observed during the handshake.</param>
/// <returns>True if the handshake completed and the fingerprint matched. False otherwise.</returns>
/// <exception cref="ApplicationException">Thrown if the DTLS certificate or key file is missing.</exception>
private static bool DoDtlsHandshake(RTCPeerConnection pc, SIPSorceryMedia.DtlsHandshake dtls, bool isClient, byte[] sdpFingerprint)
{
    logger.LogDebug("DoDtlsHandshake started.");

    if (!File.Exists(DTLS_CERTIFICATE_PATH))
    {
        throw new ApplicationException($"The DTLS certificate file could not be found at {DTLS_CERTIFICATE_PATH}.");
    }
    else if (!File.Exists(DTLS_KEY_PATH))
    {
        throw new ApplicationException($"The DTLS key file could not be found at {DTLS_KEY_PATH}.");
    }

    int res = 0;
    // Stays false when the native layer returns no fingerprint OR it differs from the SDP one.
    bool fingerprintMatch = false;

    if (isClient)
    {
        logger.LogDebug($"DTLS client handshake starting to {pc.AudioDestinationEndPoint}.");

        // For the DTLS handshake to work connect must be called on the socket so openssl knows where to send.
        var rtpSocket = pc.GetRtpChannel(SDPMediaTypesEnum.audio).RtpSocket;
        rtpSocket.Connect(pc.AudioDestinationEndPoint);

        byte[] fingerprint = null;
        var peerEP = pc.AudioDestinationEndPoint;
        // NOTE(review): GetHashCode() here relies on the enum's hash equalling its numeric
        // AF_INET/AF_INET6 value — works, but a direct cast would be clearer. Confirm before changing.
        res = dtls.DoHandshakeAsClient((ulong)rtpSocket.Handle, (short)peerEP.AddressFamily.GetHashCode(), peerEP.Address.GetAddressBytes(), (ushort)peerEP.Port, ref fingerprint);

        if (fingerprint != null)
        {
            logger.LogDebug($"DTLS server fingerprint {ByteBufferInfo.HexStr(fingerprint)}.");
            fingerprintMatch = sdpFingerprint.SequenceEqual(fingerprint);
        }
    }
    else
    {
        byte[] fingerprint = null;
        res = dtls.DoHandshakeAsServer((ulong)pc.GetRtpChannel(SDPMediaTypesEnum.audio).RtpSocket.Handle, ref fingerprint);

        if (fingerprint != null)
        {
            logger.LogDebug($"DTLS client fingerprint {ByteBufferInfo.HexStr(fingerprint)}.");
            fingerprintMatch = sdpFingerprint.SequenceEqual(fingerprint);
        }
    }

    logger.LogDebug("DtlsContext initialisation result=" + res);

    if (dtls.IsHandshakeComplete())
    {
        logger.LogDebug("DTLS negotiation complete.");

        if (!fingerprintMatch)
        {
            logger.LogWarning("DTLS fingerprint mismatch.");
            return (false);
        }
        else
        {
            // Derive SRTP keying material from the DTLS session: one context per direction.
            var srtpSendContext = new SIPSorceryMedia.Srtp(dtls, isClient);
            var srtpReceiveContext = new SIPSorceryMedia.Srtp(dtls, !isClient);

            pc.SetSecurityContext(
                srtpSendContext.ProtectRTP,
                srtpReceiveContext.UnprotectRTP,
                srtpSendContext.ProtectRTCP,
                srtpReceiveContext.UnprotectRTCP);

            return (true);
        }
    }
    else
    {
        return (false);
    }
}
/// <summary>
/// Handles an SDP offer from a remote peer: creates a new peer connection with PCMU audio
/// and VP8 video tracks that echo any received RTP straight back to the sender, applies
/// the remote offer and produces the answer.
/// </summary>
/// <param name="offer">The remote peer's SDP offer.</param>
/// <returns>The SDP answer to return to the remote peer, or null if the offer could not be
/// applied.</returns>
public async Task<RTCSessionDescriptionInit> GotOffer(RTCSessionDescriptionInit offer)
{
    _logger.LogDebug($"SDP offer received.");
    _logger.LogTrace($"Offer SDP:\n{offer.sdp}");

    var pc = new RTCPeerConnection();

    // Advertise explicit host candidates for the configured public addresses (e.g. when
    // running behind 1:1 NAT where local interfaces don't carry the public IP).
    if (_publicIPv4 != null)
    {
        var rtpPort = pc.GetRtpChannel().RTPPort;
        var publicIPv4Candidate = new RTCIceCandidate(RTCIceProtocol.udp, _publicIPv4, (ushort)rtpPort, RTCIceCandidateType.host);
        pc.addLocalIceCandidate(publicIPv4Candidate);
    }
    if (_publicIPv6 != null)
    {
        var rtpPort = pc.GetRtpChannel().RTPPort;
        var publicIPv6Candidate = new RTCIceCandidate(RTCIceProtocol.udp, _publicIPv6, (ushort)rtpPort, RTCIceCandidateType.host);
        pc.addLocalIceCandidate(publicIPv6Candidate);
    }

    MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List<SDPAudioVideoMediaFormat> { new SDPAudioVideoMediaFormat(SDPWellKnownMediaFormatsEnum.PCMU) }, MediaStreamStatusEnum.SendRecv);
    pc.addTrack(audioTrack);
    MediaStreamTrack videoTrack = new MediaStreamTrack(new VideoFormat(VideoCodecsEnum.VP8, VP8_PAYLOAD_ID), MediaStreamStatusEnum.SendRecv);
    pc.addTrack(videoTrack);

    // Echo every received RTP packet back to the sender unchanged.
    pc.OnRtpPacketReceived += (IPEndPoint rep, SDPMediaTypesEnum media, RTPPacket rtpPkt) =>
    {
        pc.SendRtpRaw(media, rtpPkt.Payload, rtpPkt.Header.Timestamp, rtpPkt.Header.MarkerBit, rtpPkt.Header.PayloadType);
        //_logger.LogDebug($"RTP {media} pkt received, SSRC {rtpPkt.Header.SyncSource}, SeqNum {rtpPkt.Header.SequenceNumber}.");
    };

    //peerConnection.OnReceiveReport += RtpSession_OnReceiveReport;
    //peerConnection.OnSendReport += RtpSession_OnSendReport;
    pc.OnTimeout += (mediaType) => _logger.LogWarning($"Timeout for {mediaType}.");
    pc.onconnectionstatechange += (state) =>
    {
        _logger.LogDebug($"Peer connection state changed to {state}.");
        if (state == RTCPeerConnectionState.failed)
        {
            pc.Close("ice failure");
        }
    };

    var setResult = pc.setRemoteDescription(offer);
    if (setResult == SetDescriptionResultEnum.OK)
    {
        // FIX: previously a local OFFER was generated and applied via setLocalDescription
        // before the answer — invalid once a remote offer has been set (signaling state is
        // have-remote-offer). Only the answer becomes the local description.
        var answer = pc.createAnswer(null);
        await pc.setLocalDescription(answer);

        _logger.LogTrace($"Answer SDP:\n{answer.sdp}");
        return (answer);
    }
    else
    {
        return (null);
    }
}
/// <summary>
/// Handles an SDP offer from a remote peer: creates a new peer connection with PCMU audio
/// and VP8 video tracks that echo any received RTP straight back, applies the remote offer
/// and produces the answer.
/// </summary>
/// <param name="offer">The remote peer's SDP offer.</param>
/// <returns>The SDP answer to return to the remote peer, or null if setting the remote
/// description failed.</returns>
public async Task<RTCSessionDescriptionInit> GotOffer(RTCSessionDescriptionInit offer)
{
    logger.LogTrace($"SDP offer received.");
    logger.LogTrace(offer.sdp);

    var pc = new RTCPeerConnection();

    // Advertise explicit host candidates for any pre-configured addresses.
    if (_presetIceAddresses != null)
    {
        foreach (var addr in _presetIceAddresses)
        {
            var rtpPort = pc.GetRtpChannel().RTPPort;
            var publicIPv4Candidate = new RTCIceCandidate(RTCIceProtocol.udp, addr, (ushort)rtpPort, RTCIceCandidateType.host);
            pc.addLocalIceCandidate(publicIPv4Candidate);
        }
    }

    MediaStreamTrack audioTrack = new MediaStreamTrack(SDPWellKnownMediaFormatsEnum.PCMU);
    pc.addTrack(audioTrack);
    MediaStreamTrack videoTrack = new MediaStreamTrack(new VideoFormat(VideoCodecsEnum.VP8, VP8_PAYLOAD_ID));
    pc.addTrack(videoTrack);

    // Echo every received RTP packet back to the sender unchanged.
    pc.OnRtpPacketReceived += (IPEndPoint rep, SDPMediaTypesEnum media, RTPPacket rtpPkt) =>
    {
        pc.SendRtpRaw(media, rtpPkt.Payload, rtpPkt.Header.Timestamp, rtpPkt.Header.MarkerBit, rtpPkt.Header.PayloadType);
    };
    pc.OnTimeout += (mediaType) => logger.LogWarning($"Timeout for {mediaType}.");
    pc.oniceconnectionstatechange += (state) => logger.LogInformation($"ICE connection state changed to {state}.");
    pc.onsignalingstatechange += () => logger.LogInformation($"Signaling state changed to {pc.signalingState}.");
    pc.onconnectionstatechange += (state) =>
    {
        logger.LogInformation($"Peer connection state changed to {state}.");
        if (state == RTCPeerConnectionState.failed)
        {
            pc.Close("ice failure");
        }
    };

    var setResult = pc.setRemoteDescription(offer);
    if (setResult == SetDescriptionResultEnum.OK)
    {
        // FIX: previously a local OFFER was generated and applied (invalid once a remote
        // offer is set) and the answer was returned WITHOUT ever being applied via
        // setLocalDescription, so the session never used the SDP actually sent to the peer.
        var answer = pc.createAnswer(null);
        await pc.setLocalDescription(answer);

        logger.LogTrace($"SDP answer created.");
        logger.LogTrace(answer.sdp);
        return (answer);
    }
    else
    {
        logger.LogWarning($"Failed to set remote description {setResult}.");
        return (null);
    }
}
/// <summary>
/// Creates a WebRTC peer connection that sends the default Windows webcam video and a
/// music audio source, starting the media once the connection is established.
/// </summary>
/// <returns>The configured peer connection (not yet negotiated).</returns>
private static RTCPeerConnection CreatePeerConnection()
{
    RTCConfiguration config = new RTCConfiguration
    {
        iceServers = new List <RTCIceServer> { new RTCIceServer { urls = STUN_URL } }
    };
    var pc = new RTCPeerConnection(config);

    WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint();
    //WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint(false, 640, 480, 30);
    //WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint(false, 1920, 1080, 30);
    //await winVideoEP.InitialiseVideoSourceDevice();
    var audioSource = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions { AudioSource = AudioSourcesEnum.Music });

    // Offer both media types as send/receive.
    MediaStreamTrack videoTrack = new MediaStreamTrack(winVideoEP.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);
    pc.addTrack(videoTrack);
    MediaStreamTrack audioTrack = new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv);
    pc.addTrack(audioTrack);

    // Forward encoded samples from the capture sources into the RTP session.
    winVideoEP.OnVideoSourceEncodedSample += pc.SendVideo;
    audioSource.OnAudioSourceEncodedSample += pc.SendAudio;
    pc.OnVideoFormatsNegotiated += (sdpFormat) => winVideoEP.SetVideoSourceFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));

    // Start the media sources once connected; stop them when the session closes.
    pc.onconnectionstatechange += async (state) =>
    {
        logger.LogDebug($"Peer connection state change to {state}.");
        if (state == RTCPeerConnectionState.connected)
        {
            await audioSource.StartAudio();
            await winVideoEP.StartVideo();
        }
        else if (state == RTCPeerConnectionState.failed)
        {
            pc.Close("ice disconnection");
        }
        else if (state == RTCPeerConnectionState.closed)
        {
            await winVideoEP.CloseVideo();
            await audioSource.CloseAudio();
        }
    };

    // Diagnostics.
    pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
    pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
    pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
    pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

    return (pc);
}
/// <summary>
/// Creates a peer connection for a data-channel peer: pre-creates the locally offered data
/// channel, registers it, and wires up ICE, connection state and remote data channel handlers.
/// </summary>
/// <returns>The configured peer connection (not yet negotiated).</returns>
private RTCPeerConnection Createpc()
{
    // Use a pre-generated localhost certificate for DTLS if it's available on disk.
    List <RTCCertificate> presetCertificates = null;
    if (File.Exists(LOCALHOST_CERTIFICATE_PATH))
    {
        var localhostCert = new X509Certificate2(LOCALHOST_CERTIFICATE_PATH, (string)null, X509KeyStorageFlags.Exportable);
        presetCertificates = new List <RTCCertificate> { new RTCCertificate { Certificate = localhostCert } };
    }

    RTCConfiguration pcConfiguration = new RTCConfiguration
    {
        certificates = presetCertificates,
    };

    var pc = new RTCPeerConnection(pcConfiguration);
    pc.GetRtpChannel().MdnsResolve = MdnsResolve;
    //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isrelay) => logger.LogDebug($"{_peerName}: STUN message received from {ep}, message class {msg.Header.MessageClass}.");

    // Create the locally offered data channel. The unsubscribe-then-subscribe pair keeps
    // the handler registration idempotent if this method runs more than once.
    var dataChannel = pc.createDataChannel(_dataChannelLabel, null);
    dataChannel.onDatamessage -= DataChannel_onDatamessage;
    dataChannel.onDatamessage += DataChannel_onDatamessage;
    // NOTE(review): Dictionary.Add throws on a duplicate label — confirm Createpc is only
    // invoked once per _dataChannelLabel.
    _dataChannels.Add(_dataChannelLabel, dataChannel);

    pc.onicecandidateerror += (candidate, error) => logger.LogWarning($"{_peerName}: Error adding remote ICE candidate. {error} {candidate}");
    pc.oniceconnectionstatechange += (state) => logger.LogDebug($"{_peerName}: ICE connection state change to {state}.");
    pc.onconnectionstatechange += (state) =>
    {
        logger.LogDebug($"{_peerName}: Peer connection state changed to {state}.");
        if (state == RTCPeerConnectionState.disconnected || state == RTCPeerConnectionState.failed)
        {
            pc.Close("remote disconnection");
        }
    };

    // Surface locally gathered candidates to the signalling layer once an offer/answer
    // exchange is under way.
    pc.onicecandidate += (candidate) =>
    {
        if (pc.signalingState == RTCSignalingState.have_local_offer ||
            pc.signalingState == RTCSignalingState.have_remote_offer)
        {
            OnIceCandidateAvailable?.Invoke(new RTCIceCandidateInit()
            {
                candidate = candidate.ToString(),
                sdpMid = candidate.sdpMid,
                sdpMLineIndex = candidate.sdpMLineIndex
            });
        }
    };

    // Also track data channels opened by the remote peer, keyed by their label.
    pc.ondatachannel += (dc) =>
    {
        dc.onopen += () => logger.LogDebug($"{_peerName}: Data channel now open label {dc.label}, stream ID {dc.id}.");
        dc.onDatamessage -= DataChannel_onDatamessage;
        dc.onDatamessage += DataChannel_onDatamessage;
        logger.LogDebug($"{_peerName}: Data channel created by remote peer, label {dc.label}, stream ID {dc.id}.");
        _dataChannels.Add(dc.label, dc);
    };

    return (pc);
}
/// <summary>
/// Creates a WebRTC peer connection that streams a named Windows webcam (VP8 encoded) and
/// a music audio source, starting the media once the connection is established.
/// </summary>
/// <returns>The configured peer connection (not yet negotiated).</returns>
/// <exception cref="ApplicationException">Thrown if the webcam capture device cannot be initialised.</exception>
private static async Task <RTCPeerConnection> CreatePeerConnection()
{
    RTCConfiguration config = new RTCConfiguration
    {
        iceServers = new List <RTCIceServer> { new RTCIceServer { urls = STUN_URL } }
    };
    var pc = new RTCPeerConnection(config);

    WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint(new VpxVideoEncoder(), WEBCAM_NAME);
    //WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint(new FFmpegVideoEncoder(), WEBCAM_NAME);
    //WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint(WEBCAM_NAME, 1920, 1080, 30);
    //winVideoEP.RestrictFormats(x => x.Codec == SIPSorceryMedia.Abstractions.V1.VideoCodecsEnum.H264);

    // Fail fast if the capture device can't be opened.
    bool initResult = await winVideoEP.InitialiseVideoSourceDevice();
    if (!initResult)
    {
        throw new ApplicationException("Could not initialise video capture device.");
    }
    var audioSource = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions { AudioSource = AudioSourcesEnum.Music });

    MediaStreamTrack videoTrack = new MediaStreamTrack(winVideoEP.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);
    pc.addTrack(videoTrack);
    MediaStreamTrack audioTrack = new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv);
    pc.addTrack(audioTrack);

    // Pipe encoded samples from the sources into the RTP session.
    winVideoEP.OnVideoSourceEncodedSample += pc.SendVideo;
    audioSource.OnAudioSourceEncodedSample += pc.SendAudio;

    // Lock the sources onto the formats agreed during SDP negotiation.
    pc.OnVideoFormatsNegotiated += (videoFormats) => winVideoEP.SetVideoSourceFormat(videoFormats.First());
    pc.OnAudioFormatsNegotiated += (audioFormats) => audioSource.SetAudioSourceFormat(audioFormats.First());

    // Start the media sources once connected; stop them when the session closes.
    pc.onconnectionstatechange += async (state) =>
    {
        logger.LogDebug($"Peer connection state change to {state}.");
        if (state == RTCPeerConnectionState.connected)
        {
            await audioSource.StartAudio();
            await winVideoEP.StartVideo();
        }
        else if (state == RTCPeerConnectionState.failed)
        {
            pc.Close("ice disconnection");
        }
        else if (state == RTCPeerConnectionState.closed)
        {
            await winVideoEP.CloseVideo();
            await audioSource.CloseAudio();
        }
    };

    // Diagnostics.
    pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
    pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
    pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
    pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

    return (pc);
}
/// <summary>
/// Creates a WebRTC peer connection that, once connected, forwards all received RTP into a
/// separate local RTP session which an ffplay/VLC instance renders via a pre-generated SDP
/// file. Also fires an immediate PLI so the remote peer sends a key frame.
/// </summary>
/// <param name="context">The web socket signalling context (currently unused — candidate
/// relaying is commented out).</param>
/// <returns>The configured peer connection; also stored in _activePeerConnection.</returns>
private static RTCPeerConnection Createpc(WebSocketContext context)
{
    var pc = new RTCPeerConnection(null);

    pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) =>
    {
        bool hasUseCandidate = msg.Attributes.Any(x => x.AttributeType == STUNAttributeTypesEnum.UseCandidate);
        Console.WriteLine($"STUN {msg.Header.MessageType} received from {ep}, use candidate {hasUseCandidate}.");
    };

    pc.onicecandidateerror += (candidate, error) => logger.LogWarning($"Error adding remote ICE candidate. {error} {candidate}");
    pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");
    //pc.OnReceiveReport += (type, rtcp) => logger.LogDebug($"RTCP {type} report received.");
    pc.OnRtcpBye += (reason) => logger.LogDebug($"RTCP BYE receive, reason: {(string.IsNullOrWhiteSpace(reason) ? "<none>" : reason)}.");
    //pc.onicecandidate += (candidate) =>
    //{
    //    if (pc.signalingState == RTCSignalingState.have_local_offer ||
    //        pc.signalingState == RTCSignalingState.have_remote_offer)
    //    {
    //        context.WebSocket.Send($"candidate:{candidate}");
    //    }
    //};

    pc.onconnectionstatechange += (state) =>
    {
        logger.LogDebug($"Peer connection state changed to {state}.");
        if (state == RTCPeerConnectionState.connected)
        {
            // Set up the loopback RTP session that ffplay/VLC reads from, using the same
            // capabilities that were negotiated on the peer connection.
            logger.LogDebug("Creating RTP session for ffplay.");
            var rtpSession = CreateRtpSession(pc.AudioLocalTrack?.Capabilities, pc.VideoLocalTrack?.Capabilities);

            Core.Initialize();
            _libvlc = new LibVLC();
            _mediaPlayer = new MediaPlayer(_libvlc);
            //_mediaPlayer.Volume = 10;
            var sdpFullPath = Path.Combine(Directory.GetParent(typeof(Program).Assembly.Location).FullName, FFPLAY_DEFAULT_SDP_PATH);
            // NOTE(review): the using declaration disposes the Media at the end of this
            // if-block, while playback continues — confirm MediaPlayer keeps its own reference.
            using var media = new Media(_libvlc, new Uri(sdpFullPath));
            _mediaPlayer.Play(media);

            //if (_activePeerConnection != null)
            {
                // request key frames
                var localVideoSsrc = _activePeerConnection.VideoLocalTrack.Ssrc;
                var remoteVideoSsrc = _activePeerConnection.VideoRemoteTrack.Ssrc;
                RTCPFeedback pli = new RTCPFeedback(localVideoSsrc, remoteVideoSsrc, PSFBFeedbackTypesEnum.PLI);
                _activePeerConnection.SendRtcpFeedback(SDPMediaTypesEnum.video, pli);
            }

            // Relay each inbound RTP packet into the local session, but only once it has a
            // destination (i.e. the player has connected).
            // NOTE(review): the lambda parameter "media" re-uses the name of the Media local
            // above — confirm this compiles in the project's C# version; rename if not.
            pc.OnRtpPacketReceived += (rep, media, rtpPkt) =>
            {
                if (media == SDPMediaTypesEnum.audio && rtpSession.AudioDestinationEndPoint != null)
                {
                    //logger.LogDebug($"Forwarding {media} RTP packet to ffplay timestamp {rtpPkt.Header.Timestamp}.");
                    rtpSession.SendRtpRaw(media, rtpPkt.Payload, rtpPkt.Header.Timestamp, rtpPkt.Header.MarkerBit, rtpPkt.Header.PayloadType);
                }
                else if (media == SDPMediaTypesEnum.video && rtpSession.VideoDestinationEndPoint != null)
                {
                    //logger.LogDebug($"Forwarding {media} RTP packet to ffplay timestamp {rtpPkt.Header.Timestamp}.");
                    rtpSession.SendRtpRaw(media, rtpPkt.Payload, rtpPkt.Header.Timestamp, rtpPkt.Header.MarkerBit, rtpPkt.Header.PayloadType);
                }
            };

            pc.OnRtpClosed += (reason) => rtpSession.Close(reason);
        }
    };

    _activePeerConnection = pc;
    return (pc);
}
/// <summary>
/// Entry point: places a SIP call to an Asterisk server using a WebRTC (ICE) media session
/// that sends pink-noise audio and a test-pattern video, then waits for ctrl-c or hangup.
/// </summary>
static async Task Main()
{
    Console.WriteLine("SIPSorcery Asterisk + ICE Demo");

    AddConsoleLogger();

    // Signalled to end the program on hangup or ctrl-c.
    CancellationTokenSource exitCts = new CancellationTokenSource();

    var sipTransport = new SIPTransport();
    EnableTraceLogs(sipTransport);

    var userAgent = new SIPUserAgent(sipTransport, null);
    userAgent.ClientCallFailed += (uac, error, sipResponse) => Console.WriteLine($"Call failed {error}.");
    userAgent.OnCallHungup += async (dialog) =>
    {
        // Give time for the BYE response before exiting.
        await Task.Delay(1000);
        exitCts.Cancel();
    };

    // Media sources: pink noise for audio, VP8-encoded test pattern for video.
    var audioExtras = new AudioExtrasSource();
    audioExtras.SetSource(AudioSourcesEnum.PinkNoise);
    var testPattern = new VideoTestPatternSource(new VpxVideoEncoder());

    var pc = new RTCPeerConnection(null);

    // Lock the sources onto the formats agreed during SDP negotiation.
    pc.OnAudioFormatsNegotiated += (formats) => audioExtras.SetAudioSourceFormat(formats.First());
    pc.OnVideoFormatsNegotiated += (formats) => testPattern.SetVideoSourceFormat(formats.First());

    var audioTrack = new MediaStreamTrack(audioExtras.GetAudioSourceFormats(), MediaStreamStatusEnum.SendOnly);
    pc.addTrack(audioTrack);
    audioExtras.OnAudioSourceEncodedSample += pc.SendAudio;
    var videoTrack = new MediaStreamTrack(testPattern.GetVideoSourceFormats(), MediaStreamStatusEnum.SendOnly);
    pc.addTrack(videoTrack);
    testPattern.OnVideoSourceEncodedSample += pc.SendVideo;

    // Diagnostics.
    pc.OnReceiveReport += (re, media, rr) => Console.WriteLine($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
    pc.OnSendReport += (media, sr) => Console.WriteLine($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
    pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => Console.WriteLine($"STUN {msg.Header.MessageType} received from {ep}.");
    pc.oniceconnectionstatechange += (state) => Console.WriteLine($"ICE connection state change to {state}.");

    // ICE connection state handler.
    pc.onconnectionstatechange += (state) =>
    {
        Console.WriteLine($"Peer connection state change to {state}.");
        if (state == RTCPeerConnectionState.connected)
        {
            // NOTE(review): these return Tasks that are not awaited (the lambda is not
            // async), so start-up errors would be unobserved — confirm this is intentional.
            audioExtras.StartAudio();
            testPattern.StartVideo();
        }
        else if (state == RTCPeerConnectionState.failed)
        {
            if (userAgent.IsCallActive)
            {
                Console.WriteLine("ICE connection failed, hanging up active call.");
                userAgent.Hangup();
            }
        }
    };

    // Place the call and wait for the result.
    var callTask = userAgent.Call(DESTINATION, USERNAME, PASSWORD, pc);

    // Ctrl-c cancels an in-progress call or hangs up an established one before exiting.
    Console.CancelKeyPress += delegate(object sender, ConsoleCancelEventArgs e)
    {
        e.Cancel = true;
        if (userAgent != null)
        {
            if (userAgent.IsCalling || userAgent.IsRinging)
            {
                Console.WriteLine("Cancelling in progress call.");
                userAgent.Cancel();
            }
            else if (userAgent.IsCallActive)
            {
                Console.WriteLine("Hanging up established call.");
                userAgent.Hangup();
            }
        }
        ;
        exitCts.Cancel();
    };

    Console.WriteLine("press ctrl-c to exit...");

    bool callResult = await callTask;
    if (callResult)
    {
        Console.WriteLine($"Call to {DESTINATION} succeeded.");
        // Block until hangup or ctrl-c triggers the cancellation token.
        exitCts.Token.WaitHandle.WaitOne();
    }
    else
    {
        Console.WriteLine($"Call to {DESTINATION} failed.");
    }

    Console.WriteLine("Exiting...");

    if (userAgent?.IsHangingUp == true)
    {
        Console.WriteLine("Waiting 1s for the call hangup or cancel to complete...");
        await Task.Delay(1000);
    }

    // Clean up.
    sipTransport.Shutdown();
}
/// <summary>
/// Creates a receive-only WebRTC peer connection that renders incoming VP8 video frames
/// into a WinForms picture box and plays incoming audio through the default speaker.
/// </summary>
/// <returns>A completed task holding the configured peer connection.</returns>
private static Task <RTCPeerConnection> CreatePeerConnection()
{
    var peerConnection = new RTCPeerConnection(null);

    var videoEP = new SIPSorceryMedia.Encoders.VideoEncoderEndPoint();
    //var videoEP = new SIPSorceryMedia.Windows.WindowsEncoderEndPoint();
    //var videoEP = new FFmpegVideoEndPoint();
    videoEP.RestrictCodecs(new List <VideoCodecsEnum> { VideoCodecsEnum.VP8 });

    // Marshal each decoded frame onto the UI thread and wrap the raw buffer in a Bitmap
    // for the picture box.
    videoEP.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
    {
        _form.BeginInvoke(new Action(() =>
        {
            unsafe
            {
                fixed(byte *s = bmp)
                {
                    // NOTE(review): the stride passed to Bitmap is recomputed as
                    // bmp.Length / height and the "stride" parameter is ignored, and the
                    // pixel format is assumed 24bpp RGB — confirm against the decoder output.
                    Bitmap bmpImage = new Bitmap((int)width, (int)height, (int)(bmp.Length / height), PixelFormat.Format24bppRgb, (IntPtr)s);
                    _picBox.Image = bmpImage;
                }
            }
        }));
    };

    // Sink (speaker) only audio end point.
    WindowsAudioEndPoint windowsAudioEP = new WindowsAudioEndPoint(new AudioEncoder(), -1, -1, true, false);

    // Both tracks are receive-only: this peer renders, it does not send.
    MediaStreamTrack audioTrack = new MediaStreamTrack(windowsAudioEP.GetAudioSinkFormats(), MediaStreamStatusEnum.RecvOnly);
    peerConnection.addTrack(audioTrack);
    MediaStreamTrack videoTrack = new MediaStreamTrack(videoEP.GetVideoSinkFormats(), MediaStreamStatusEnum.RecvOnly);
    peerConnection.addTrack(videoTrack);

    peerConnection.OnVideoFrameReceived += videoEP.GotVideoFrame;
    peerConnection.OnVideoFormatsNegotiated += (sdpFormat) => videoEP.SetVideoSinkFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));
    peerConnection.OnAudioFormatsNegotiated += (sdpFormat) => windowsAudioEP.SetAudioSinkFormat(SDPMediaFormatInfo.GetAudioCodecForSdpFormat(sdpFormat.First().FormatCodec));

    peerConnection.OnTimeout += (mediaType) => logger.LogDebug($"Timeout on media {mediaType}.");
    peerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state changed to {state}.");

    // Start the speaker once connected; stop it when the session ends.
    peerConnection.onconnectionstatechange += async (state) =>
    {
        logger.LogDebug($"Peer connection connected changed to {state}.");
        if (state == RTCPeerConnectionState.connected)
        {
            await windowsAudioEP.StartAudio();
        }
        else if (state == RTCPeerConnectionState.closed || state == RTCPeerConnectionState.failed)
        {
            await windowsAudioEP.CloseAudio();
        }
    };

    peerConnection.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) =>
    {
        bool hasUseCandidate = msg.Attributes.Any(x => x.AttributeType == STUNAttributeTypesEnum.UseCandidate);
        Console.WriteLine($"STUN {msg.Header.MessageType} received from {ep}, use candidate {hasUseCandidate}.");
    };

    // Hand incoming audio RTP to the speaker end point; video arrives via OnVideoFrameReceived.
    peerConnection.OnRtpPacketReceived += (IPEndPoint rep, SDPMediaTypesEnum media, RTPPacket rtpPkt) =>
    {
        //logger.LogDebug($"RTP {media} pkt received, SSRC {rtpPkt.Header.SyncSource}.");
        if (media == SDPMediaTypesEnum.audio)
        {
            windowsAudioEP.GotAudioRtp(rep, rtpPkt.Header.SyncSource, rtpPkt.Header.SequenceNumber, rtpPkt.Header.Timestamp, rtpPkt.Header.PayloadType, rtpPkt.Header.MarkerBit == 1, rtpPkt.Payload);
        }
    };

    return (Task.FromResult(peerConnection));
}