Code example #1
File: Program.cs Project: xljiulang/sipsorcery
        private static Task <RTCPeerConnection> CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List <RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };
            var pc = new RTCPeerConnection(config);

            var testPatternSource    = new VideoTestPatternSource();
            var videoEncoderEndPoint = new VideoEncoderEndPoint();
            var audioSource          = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions {
                AudioSource = AudioSourcesEnum.Music
            });

            MediaStreamTrack videoTrack = new MediaStreamTrack(videoEncoderEndPoint.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(videoTrack);
            MediaStreamTrack audioTrack = new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(audioTrack);

            testPatternSource.OnVideoSourceRawSample        += videoEncoderEndPoint.ExternalVideoSourceRawSample;
            videoEncoderEndPoint.OnVideoSourceEncodedSample += pc.SendVideo;
            audioSource.OnAudioSourceEncodedSample          += pc.SendAudio;
            pc.OnVideoFormatsNegotiated += (formats) =>
                                           videoEncoderEndPoint.SetVideoSourceFormat(formats.First());
            pc.onconnectionstatechange += async(state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.connected)
                {
                    await audioSource.StartAudio();

                    await testPatternSource.StartVideo();
                }
                else if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await testPatternSource.CloseVideo();

                    await audioSource.CloseAudio();
                }
            };

            // Diagnostics.
            pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            pc.OnSendReport    += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return(Task.FromResult(pc));
        }
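Note: a factory like the CreatePeerConnection method above is normally handed to a signalling host that pairs each WebSocket client with its own peer connection. The following is a minimal sketch of that wiring, assuming websocket-sharp's WebSocketServer and SIPSorcery's WebRTCWebSocketPeer helper (the pattern used in the sipsorcery demo programs); the port number and the StartSignalling method name are illustrative only.

        // Sketch only. Requires: using System.Net; using SIPSorcery.Net; using WebSocketSharp.Server;
        // Assumes the CreatePeerConnection factory shown in code example #1 above.
        private const int WEBSOCKET_PORT = 8081;

        private static void StartSignalling()
        {
            var webSocketServer = new WebSocketServer(IPAddress.Any, WEBSOCKET_PORT);

            // Each connecting browser gets its own RTCPeerConnection from the factory;
            // the helper then relays the SDP offer/answer and ICE candidates over the socket.
            webSocketServer.AddWebSocketService<WebRTCWebSocketPeer>("/",
                (peer) => peer.CreatePeerConnection = CreatePeerConnection);
            webSocketServer.Start();
        }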
Code example #2
        private static RTCPeerConnection WebSocketOpened(WebSocketContext context)
        {
            RTCConfiguration pcConfiguration = new RTCConfiguration
            {
                certificates = new List <RTCCertificate>
                {
                    new RTCCertificate
                    {
                        X_CertificatePath = DTLS_CERTIFICATE_PATH,
                        X_KeyPath         = DTLS_KEY_PATH,
                        X_Fingerprint     = DTLS_CERTIFICATE_FINGERPRINT
                    }
                }
            };

            var peerConnection = new RTCPeerConnection(pcConfiguration);

            // Add local recvonly tracks. This ensures that the SDP answer includes only
            // the codecs we support.
            MediaStreamTrack audioTrack = new MediaStreamTrack("0", SDPMediaTypesEnum.audio, false, new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.PCMU)
            }, MediaStreamStatusEnum.RecvOnly);

            peerConnection.addTrack(audioTrack);
            MediaStreamTrack videoTrack = new MediaStreamTrack("1", SDPMediaTypesEnum.video, false, new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
            }, MediaStreamStatusEnum.RecvOnly);

            peerConnection.addTrack(videoTrack);

            return(peerConnection);
        }
Code example #3
File: Program.cs Project: zanzo420/sipsorcery
        private static Task <RTCPeerConnection> CreatePeerConnection()
        {
            var pc = new RTCPeerConnection(null);

            MediaStreamTrack audioTrack = new MediaStreamTrack(new List <AudioFormat> {
                new AudioFormat(SDPWellKnownMediaFormatsEnum.PCMU)
            });
            MediaStreamTrack videoTrack = new MediaStreamTrack(new List <VideoFormat> {
                new VideoFormat(VideoCodecsEnum.VP8, 100)
            });

            pc.addTrack(audioTrack);
            pc.addTrack(videoTrack);

            pc.onconnectionstatechange += (state) =>
            {
                Log.LogDebug($"Peer connection state change to {state}.");
                RequestSIPAgentKeyFrame(_rtpSession);
            };
            pc.OnRtpPacketReceived  += ForwardAudioToSIP;
            pc.OnVideoFrameReceived += ForwardVideoFrameToSIP; // ForwardVideoFrameToPeerConnection;
            _peerConnection          = pc;

            return(Task.FromResult(pc));
        }
Code example #4
File: Program.cs Project: soitgoes/sipsorcery
        private static Task <RTCPeerConnection> CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List <RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };
            var pc = new RTCPeerConnection(config);

            var mediaFileSource = new SIPSorceryMedia.FFmpeg.FFmpegFileSource(MP4_PATH, false, new AudioEncoder());

            mediaFileSource.Initialise();
            mediaFileSource.RestrictFormats(x => x.Codec == VideoCodecsEnum.VP8);
            mediaFileSource.RestrictFormats(x => x.Codec == AudioCodecsEnum.PCMU);
            mediaFileSource.OnEndOfFile += () => pc.Close("source eof");

            MediaStreamTrack videoTrack = new MediaStreamTrack(mediaFileSource.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(videoTrack);
            MediaStreamTrack audioTrack = new MediaStreamTrack(mediaFileSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(audioTrack);

            mediaFileSource.OnVideoSourceEncodedSample += pc.SendVideo;
            mediaFileSource.OnAudioSourceEncodedSample += pc.SendAudio;
            pc.OnVideoFormatsNegotiated += (videoFormats) => mediaFileSource.SetVideoSourceFormat(videoFormats.First());
            pc.OnAudioFormatsNegotiated += (audioFormats) => mediaFileSource.SetAudioSourceFormat(audioFormats.First());

            pc.onconnectionstatechange += async(state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await mediaFileSource.CloseVideo();
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    await mediaFileSource.StartVideo();
                }
            };

            // Diagnostics.
            //pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            //pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return(Task.FromResult(pc));
        }
Code example #5
File: Program.cs Project: lordrak007/sipsorcery
        private static async Task WebSocketMessageReceived(WebSocketContext context, RTCPeerConnection peerConnection, string message)
        {
            try
            {
                if (peerConnection.localDescription == null)
                {
                    logger.LogDebug("Offer SDP: " + message);

                    // Add local media tracks depending on what was offered. Also add local tracks with the same media ID as
                    // the remote tracks so that the media announcements in the SDP answer are in the same order.
                    SDP remoteSdp = SDP.ParseSDPDescription(message);

                    var remoteAudioAnn = remoteSdp.Media.Where(x => x.Media == SDPMediaTypesEnum.audio).FirstOrDefault();
                    var remoteVideoAnn = remoteSdp.Media.Where(x => x.Media == SDPMediaTypesEnum.video).FirstOrDefault();

                    if (remoteAudioAnn != null)
                    {
                        MediaStreamTrack audioTrack = new MediaStreamTrack(remoteAudioAnn.MediaID, SDPMediaTypesEnum.audio, false, new List <SDPMediaFormat> {
                            new SDPMediaFormat(SDPMediaFormatsEnum.PCMU)
                        }, MediaStreamStatusEnum.RecvOnly);
                        peerConnection.addTrack(audioTrack);
                    }

                    if (remoteVideoAnn != null)
                    {
                        MediaStreamTrack videoTrack = new MediaStreamTrack(remoteVideoAnn.MediaID, SDPMediaTypesEnum.video, false, new List <SDPMediaFormat> {
                            new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
                        }, MediaStreamStatusEnum.RecvOnly);
                        peerConnection.addTrack(videoTrack);
                    }

                    // After local media tracks have been added the remote description can be set.
                    await peerConnection.setRemoteDescription(new RTCSessionDescriptionInit { sdp = message, type = RTCSdpType.offer });

                    var answer = await peerConnection.createAnswer(null);

                    await peerConnection.setLocalDescription(answer);

                    context.WebSocket.Send(answer.sdp);
                }
                else if (peerConnection.remoteDescription == null)
                {
                    logger.LogDebug("Answer SDP: " + message);
                    await peerConnection.setRemoteDescription(new RTCSessionDescriptionInit { sdp = message, type = RTCSdpType.answer });
                }
                else
                {
                    logger.LogDebug("ICE Candidate: " + message);
                    await peerConnection.addIceCandidate(new RTCIceCandidateInit { candidate = message });
                }
            }
            catch (Exception excp)
            {
                logger.LogError("Exception WebSocketMessageReceived. " + excp.Message);
            }
        }
Code example #6
        private static async Task <RTCPeerConnection> SendSDPOffer(WebSocketContext context)
        {
            logger.LogDebug($"Web socket client connection from {context.UserEndPoint}.");

            var peerConnection = new RTCPeerConnection(null);

            MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.PCMU)
            }, MediaStreamStatusEnum.SendOnly);

            peerConnection.addTrack(audioTrack);
            MediaStreamTrack videoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
            }, MediaStreamStatusEnum.SendOnly);

            peerConnection.addTrack(videoTrack);

            peerConnection.OnReceiveReport            += RtpSession_OnReceiveReport;
            peerConnection.OnSendReport               += RtpSession_OnSendReport;
            peerConnection.OnTimeout                  += (mediaType) => peerConnection.Close("remote timeout");
            peerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state changed to {state}.");
            peerConnection.onconnectionstatechange    += (state) =>
            {
                logger.LogDebug($"Peer connection state changed to {state}.");

                if (state == RTCPeerConnectionState.closed || state == RTCPeerConnectionState.disconnected || state == RTCPeerConnectionState.failed)
                {
                    OnVideoSampleReady             -= peerConnection.SendVideo;
                    OnAudioSampleReady             -= peerConnection.SendAudio;
                    peerConnection.OnReceiveReport -= RtpSession_OnReceiveReport;
                    peerConnection.OnSendReport    -= RtpSession_OnSendReport;
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    if (!_isSampling)
                    {
                        _isSampling         = true;
                        OnVideoSampleReady += peerConnection.SendVideo;
                        OnAudioSampleReady += peerConnection.SendAudio;
                        _ = Task.Run(StartMedia);
                    }
                }
            };

            var offerInit = peerConnection.createOffer(null);
            await peerConnection.setLocalDescription(offerInit);

            logger.LogDebug($"Sending SDP offer to client {context.UserEndPoint}.");

            context.WebSocket.Send(offerInit.sdp);

            return(peerConnection);
        }
Code example #7
File: Program.cs Project: simis00873/sipsorcery
        private static Task <RTCPeerConnection> CreatePeerConnection(X509Certificate2 cert)
        {
            //RTCConfiguration config = new RTCConfiguration
            //{
            //    iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } },
            //    certificates = new List<RTCCertificate> { new RTCCertificate { Certificate = cert } }
            //};
            //var pc = new RTCPeerConnection(config);
            var pc = new RTCPeerConnection(null);

            var testPatternSource = new VideoTestPatternSource(new SIPSorceryMedia.Encoders.VideoEncoder());

            testPatternSource.SetFrameRate(60);
            //testPatternSource.SetMaxFrameRate(true);
            //var videoEndPoint = new SIPSorceryMedia.FFmpeg.FFmpegVideoEndPoint();
            //videoEndPoint.RestrictFormats(format => format.Codec == VideoCodecsEnum.H264);
            //var videoEndPoint = new SIPSorceryMedia.Windows.WindowsEncoderEndPoint();
            //var videoEndPoint = new SIPSorceryMedia.Encoders.VideoEncoderEndPoint();

            MediaStreamTrack track = new MediaStreamTrack(testPatternSource.GetVideoSourceFormats(), MediaStreamStatusEnum.SendOnly);

            pc.addTrack(track);

            //testPatternSource.OnVideoSourceRawSample += videoEndPoint.ExternalVideoSourceRawSample;
            testPatternSource.OnVideoSourceRawSample     += MesasureTestPatternSourceFrameRate;
            testPatternSource.OnVideoSourceEncodedSample += pc.SendVideo;
            pc.OnVideoFormatsNegotiated += (formats) => testPatternSource.SetVideoSourceFormat(formats.First());

            pc.onconnectionstatechange += async(state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await testPatternSource.CloseVideo();

                    testPatternSource.Dispose();
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    await testPatternSource.StartVideo();
                }
            };

            // Diagnostics.
            //pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            //pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return(Task.FromResult(pc));
        }
Code example #8
        private static RTCPeerConnection CreatePeerConnection()
        {
            var pc = new RTCPeerConnection(new RTCConfiguration {
                X_UseRtpFeedbackProfile = true
            });

            MediaStreamTrack audioTrack = new MediaStreamTrack(SDPWellKnownMediaFormatsEnum.PCMU);

            pc.addTrack(audioTrack);

            pc.onicecandidateerror        += (candidate, error) => logger.LogWarning($"Error adding remote ICE candidate. {error} {candidate}");
            pc.OnTimeout                  += (mediaType) => logger.LogWarning($"Timeout for {mediaType}.");
            pc.oniceconnectionstatechange += (state) => logger.LogInformation($"ICE connection state changed to {state}.");
            pc.onsignalingstatechange     += () => logger.LogInformation($"Signaling state changed to {pc.signalingState}.");
            pc.OnReceiveReport            += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            pc.OnSendReport               += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            pc.OnRtcpBye                  += (reason) => logger.LogDebug($"RTCP BYE received, reason: {(string.IsNullOrWhiteSpace(reason) ? "<none>" : reason)}.");

            pc.onsignalingstatechange += () =>
            {
                if (pc.signalingState == RTCSignalingState.have_remote_offer)
                {
                    logger.LogTrace("Remote SDP:");
                    logger.LogTrace(pc.remoteDescription.sdp.ToString());
                }
                else if (pc.signalingState == RTCSignalingState.have_local_offer)
                {
                    logger.LogTrace("Local SDP:");
                    logger.LogTrace(pc.localDescription.sdp.ToString());
                }
            };

            return(pc);
        }
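As a usage note, the connection returned by a factory like the one above still needs its SDP exchanged with the remote party. The sketch below is a minimal manual "copy and paste" signalling loop, assuming a console host program; it relies only on calls that already appear in this section (createOffer, setLocalDescription, toJSON, RTCSessionDescriptionInit.TryParse, setRemoteDescription).

        // Sketch only. Requires: using System; using System.Threading.Tasks; using SIPSorcery.Net;
        private static async Task ExchangeSdpManually(RTCPeerConnection pc)
        {
            var offer = pc.createOffer(null);
            await pc.setLocalDescription(offer);

            Console.WriteLine("Offer (send this to the remote peer):");
            Console.WriteLine(offer.toJSON());

            Console.WriteLine("Paste the remote peer's answer JSON and press enter:");
            var answerJson = Console.ReadLine();

            if (RTCSessionDescriptionInit.TryParse(answerJson, out var answer))
            {
                var result = pc.setRemoteDescription(answer);
                Console.WriteLine($"setRemoteDescription result: {result}.");
            }
            else
            {
                Console.WriteLine("The pasted answer could not be parsed.");
            }
        }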
Code example #9
        public void JsonRoundtripUnitTest()
        {
            RTCPeerConnection pcSrc = new RTCPeerConnection(null);
            var videoTrackSrc       = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List <SDPAudioVideoMediaFormat> {
                new SDPAudioVideoMediaFormat(SDPMediaTypesEnum.video, 96, "VP8", 90000)
            });

            pcSrc.addTrack(videoTrackSrc);

            var offer = pcSrc.createOffer(new RTCOfferOptions());

            Assert.NotNull(offer.toJSON());

            logger.LogDebug($"offer: {offer.toJSON()}");

            var parseResult = RTCSessionDescriptionInit.TryParse(offer.toJSON(), out var init);

            Assert.True(parseResult);

            Assert.Equal(RTCSdpType.offer, init.type);
            Assert.NotNull(init.sdp);

            SDP sdp = SDP.ParseSDPDescription(init.sdp);

            Assert.Equal(0, sdp.Version);
        }
Code example #10
        private static Task <RTCPeerConnection> CreatePeerConnection(IVideoSource videoSource, IVideoSink videoSink)
        {
            var pc = new RTCPeerConnection(null);

            MediaStreamTrack videoTrack = new MediaStreamTrack(videoSink.GetVideoSinkFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(videoTrack);

            videoSource.OnVideoSourceEncodedSample += pc.SendVideo;
            pc.OnVideoFrameReceived     += videoSink.GotVideoFrame;
            pc.OnVideoFormatsNegotiated += (formats) =>
            {
                videoSink.SetVideoSinkFormat(formats.First());
                videoSource.SetVideoSourceFormat(formats.First());
            };

            pc.OnTimeout += (mediaType) => logger.LogDebug($"Timeout on media {mediaType}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state changed to {state}.");
            pc.onconnectionstatechange    += async(state) =>
            {
                logger.LogDebug($"Peer connection connected changed to {state}.");

                if (state == RTCPeerConnectionState.closed || state == RTCPeerConnectionState.failed)
                {
                    await videoSource.CloseVideo().ConfigureAwait(false);
                }
            };

            return(Task.FromResult(pc));
        }
Code example #11
        public void GenerateLocalOfferWithAudioTrackUnitTest()
        {
            logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
            logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

            RTCPeerConnection pc = new RTCPeerConnection(null);

            pc.IceSession.StartGathering();
            var audioTrack = new MediaStreamTrack(null, SDPMediaTypesEnum.audio, false, new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.PCMU)
            });

            pc.addTrack(audioTrack);
            var offer = pc.createOffer(new RTCOfferOptions());

            SDP offerSDP = SDP.ParseSDPDescription(offer.sdp);

            Assert.NotNull(offer);
            Assert.NotNull(offer.sdp);
            Assert.Equal(RTCSdpType.offer, offer.type);
            Assert.Single(offerSDP.Media);
            Assert.Contains(offerSDP.Media, x => x.Media == SDPMediaTypesEnum.audio);

            logger.LogDebug(offer.sdp);
        }
Code example #12
File: Program.cs Project: sdwflmw/sipsorcery
        private static async Task <RTCPeerConnection> SendSDPOffer(WebSocketContext context)
        {
            Log.LogDebug($"Web socket client connection from {context.UserEndPoint}.");

            _peerConnection = new RTCPeerConnection(null);

            _peerConnection.OnReceiveReport += RtpSession_OnReceiveReport;
            _peerConnection.OnSendReport    += RtpSession_OnSendReport;

            Log.LogDebug($"Sending SDP offer to client {context.UserEndPoint}.");

            _peerConnection.onconnectionstatechange += (state) =>
            {
                Log.LogDebug($"WebRTC peer connection state changed to {state}.");

                if (state == RTCPeerConnectionState.closed)
                {
                    _peerConnection.OnReceiveReport -= RtpSession_OnReceiveReport;
                    _peerConnection.OnSendReport    -= RtpSession_OnSendReport;
                }
            };

            MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.PCMU)
            });

            _peerConnection.addTrack(audioTrack);

            var offerInit = _peerConnection.createOffer(null);
            await _peerConnection.setLocalDescription(offerInit);

            context.WebSocket.Send(offerInit.sdp);

            return(_peerConnection);
        }
Code example #13
        public async Task CheckPeerConnectionEstablishment()
        {
            logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
            logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

            var aliceConnected = new TaskCompletionSource <bool>(TaskCreationOptions.RunContinuationsAsynchronously);
            var bobConnected   = new TaskCompletionSource <bool>(TaskCreationOptions.RunContinuationsAsynchronously);

            var alice = new RTCPeerConnection();

            alice.onconnectionstatechange += (state) =>
            {
                if (state == RTCPeerConnectionState.connected)
                {
                    logger.LogDebug("Alice connected.");
                    aliceConnected.SetResult(true);
                }
            };
            alice.addTrack(new MediaStreamTrack(SDPWellKnownMediaFormatsEnum.PCMU));
            var aliceOffer = alice.createOffer();
            await alice.setLocalDescription(aliceOffer);

            logger.LogDebug($"alice offer: {aliceOffer.sdp}");

            var bob = new RTCPeerConnection();

            bob.onconnectionstatechange += (state) =>
            {
                if (state == RTCPeerConnectionState.connected)
                {
                    logger.LogDebug("Bob connected.");
                    bobConnected.SetResult(true);
                }
            };
            bob.addTrack(new MediaStreamTrack(SDPWellKnownMediaFormatsEnum.PCMU));

            var setOfferResult = bob.setRemoteDescription(aliceOffer);

            Assert.Equal(SetDescriptionResultEnum.OK, setOfferResult);

            var bobAnswer = bob.createAnswer();
            await bob.setLocalDescription(bobAnswer);

            var setAnswerResult = alice.setRemoteDescription(bobAnswer);

            Assert.Equal(SetDescriptionResultEnum.OK, setAnswerResult);

            logger.LogDebug($"answer: {bobAnswer.sdp}");

            await Task.WhenAny(Task.WhenAll(aliceConnected.Task, bobConnected.Task), Task.Delay(2000));

            Assert.True(aliceConnected.Task.IsCompleted);
            Assert.True(aliceConnected.Task.Result);
            Assert.True(bobConnected.Task.IsCompleted);
            Assert.True(bobConnected.Task.Result);

            bob.close();
            alice.close();
        }
Code example #14
        private static async Task WebSocketMessageReceived(WebSocketContext context, RTCPeerConnection pc, string message)
        {
            try
            {
                if (pc.localDescription == null)
                {
                    //logger.LogDebug("Offer SDP: " + message);
                    logger.LogDebug("Offer SDP received.");

                    // Add local media tracks depending on what was offered. Also add local tracks with the same media ID as
                    // the remote tracks so that the media announcements in the SDP answer are in the same order.
                    SDP remoteSdp = SDP.ParseSDPDescription(message);

                    foreach (var ann in remoteSdp.Media)
                    {
                        var capabilities       = FilterCodecs(ann.Media, ann.MediaFormats);
                        MediaStreamTrack track = new MediaStreamTrack(ann.Media, false, capabilities, MediaStreamStatusEnum.RecvOnly);
                        pc.addTrack(track);
                    }

                    pc.setRemoteDescription(new RTCSessionDescriptionInit {
                        sdp = message, type = RTCSdpType.offer
                    });

                    var answer = pc.createAnswer(null);
                    await pc.setLocalDescription(answer);

                    Console.WriteLine(answer.sdp);

                    context.WebSocket.Send(answer.sdp);
                }
                else if (pc.remoteDescription == null)
                {
                    logger.LogDebug("Answer SDP: " + message);
                    pc.setRemoteDescription(new RTCSessionDescriptionInit {
                        sdp = message, type = RTCSdpType.answer
                    });
                }
                else
                {
                    logger.LogDebug("ICE Candidate: " + message);

                    if (string.IsNullOrWhiteSpace(message) || message.Trim().ToLower() == SDP.END_ICE_CANDIDATES_ATTRIBUTE)
                    {
                        logger.LogDebug("End of candidates message received.");
                    }
                    else
                    {
                        var candInit = Newtonsoft.Json.JsonConvert.DeserializeObject <RTCIceCandidateInit>(message);
                        pc.addIceCandidate(candInit);
                    }
                }
            }
            catch (Exception excp)
            {
                logger.LogError("Exception WebSocketMessageReceived. " + excp.Message);
            }
        }
Code example #15
File: Program.cs Project: yuyixiaoxiang/sipsorcery
        static void Main()
        {
            Console.WriteLine("SIPSorcery sip.js Demo");

            Log = AddConsoleLogger();

            var sipTransport = new SIPTransport();

            EnableTraceLogs(sipTransport);

            var sipChannel = new SIPWebSocketChannel(IPAddress.Loopback, 8081);

            sipTransport.AddSIPChannel(sipChannel);

            var userAgent = new SIPUserAgent(sipTransport, null, true);

            userAgent.OnIncomingCall += async(ua, req) =>
            {
                Log.LogDebug($"Auto-answering incoming call from {req.Header.From}.");
                var uas = userAgent.AcceptCall(req);

                var peerConnection = new RTCPeerConnection(null);

                peerConnection.onconnectionstatechange += (state) =>
                {
                    Log.LogDebug($"Peer connection state change to {state}.");

                    if (state == RTCPeerConnectionState.failed)
                    {
                        peerConnection.Close("ice disconnection");
                    }
                    else if (state == RTCPeerConnectionState.connected)
                    {
                        peerConnection.OnRtpPacketReceived += OnRtpPacketReceived;
                    }
                    else if (state == RTCPeerConnectionState.closed)
                    {
                        peerConnection.OnRtpPacketReceived -= OnRtpPacketReceived;
                    }
                };

                MediaStreamTrack audioTrack = new MediaStreamTrack(new List <AudioFormat> {
                    new AudioFormat(SDPWellKnownMediaFormatsEnum.PCMU)
                }, MediaStreamStatusEnum.SendRecv);
                peerConnection.addTrack(audioTrack);
                //MediaStreamTrack videoTrack = new MediaStreamTrack("1", SDPMediaTypesEnum.video, false, new List<SDPMediaFormat> { new SDPMediaFormat(SDPMediaFormatsEnum.VP8) }, MediaStreamStatusEnum.Inactive);
                //peerConnection.addTrack(videoTrack);

                var answerResult = await userAgent.Answer(uas, peerConnection);
            };

            Console.Write("press any key to exit...");
            Console.Read();

            sipTransport.Shutdown();
        }
Code example #16
File: Program.cs Project: yuyixiaoxiang/sipsorcery
        private static async Task <RTCPeerConnection> SendSDPOffer(WebSocketContext context)
        {
            logger.LogDebug($"Web socket client connection from {context.UserEndPoint}.");

            var peerConnection = new RTCPeerConnection(null);

            // Sink (speaker) only audio end point.
            WindowsAudioEndPoint windowsAudioEP = new WindowsAudioEndPoint(new AudioEncoder(), -1, -1, true, false);

            MediaStreamTrack audioTrack = new MediaStreamTrack(windowsAudioEP.GetAudioSinkFormats(), MediaStreamStatusEnum.RecvOnly);

            peerConnection.addTrack(audioTrack);

            peerConnection.OnAudioFormatsNegotiated += (audioFormats) =>
                                                       windowsAudioEP.SetAudioSinkFormat(audioFormats.First());
            peerConnection.OnReceiveReport            += RtpSession_OnReceiveReport;
            peerConnection.OnSendReport               += RtpSession_OnSendReport;
            peerConnection.OnTimeout                  += (mediaType) => logger.LogDebug($"Timeout on media {mediaType}.");
            peerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state changed to {state}.");
            peerConnection.onconnectionstatechange    += async(state) =>
            {
                logger.LogDebug($"Peer connection connected changed to {state}.");

                if (state == RTCPeerConnectionState.connected)
                {
                    await windowsAudioEP.StartAudio();
                }
                else if (state == RTCPeerConnectionState.closed || state == RTCPeerConnectionState.failed)
                {
                    peerConnection.OnReceiveReport -= RtpSession_OnReceiveReport;
                    peerConnection.OnSendReport    -= RtpSession_OnSendReport;

                    await windowsAudioEP.CloseAudio();
                }
            };

            peerConnection.OnRtpPacketReceived += (IPEndPoint rep, SDPMediaTypesEnum media, RTPPacket rtpPkt) =>
            {
                //logger.LogDebug($"RTP {media} pkt received, SSRC {rtpPkt.Header.SyncSource}.");
                if (media == SDPMediaTypesEnum.audio)
                {
                    windowsAudioEP.GotAudioRtp(rep, rtpPkt.Header.SyncSource, rtpPkt.Header.SequenceNumber, rtpPkt.Header.Timestamp, rtpPkt.Header.PayloadType, rtpPkt.Header.MarkerBit == 1, rtpPkt.Payload);
                }
            };

            var offerSdp = peerConnection.createOffer(null);
            await peerConnection.setLocalDescription(offerSdp);

            logger.LogDebug($"Sending SDP offer to client {context.UserEndPoint}.");
            logger.LogDebug(offerSdp.sdp);

            context.WebSocket.Send(offerSdp.sdp);

            return(peerConnection);
        }
Code example #17
File: Program.cs Project: sdwflmw/sipsorcery
        private static RTCPeerConnection CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List <RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };
            var pc = new RTCPeerConnection(config);

            var testPatternSource = new VideoTestPatternSource();
            WindowsVideoEndPoint windowsVideoEndPoint = new WindowsVideoEndPoint(true);

            MediaStreamTrack track = new MediaStreamTrack(windowsVideoEndPoint.GetVideoSourceFormats(), MediaStreamStatusEnum.SendOnly);

            pc.addTrack(track);

            testPatternSource.OnVideoSourceRawSample        += windowsVideoEndPoint.ExternalVideoSourceRawSample;
            windowsVideoEndPoint.OnVideoSourceEncodedSample += pc.SendVideo;
            pc.OnVideoFormatsNegotiated += (sdpFormat) =>
                                           windowsVideoEndPoint.SetVideoSourceFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));

            pc.onconnectionstatechange += async(state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await testPatternSource.CloseVideo();

                    await windowsVideoEndPoint.CloseVideo();
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    await windowsVideoEndPoint.StartVideo();

                    await testPatternSource.StartVideo();
                }
            };

            // Diagnostics.
            pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            pc.OnSendReport    += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return(pc);
        }
Code example #18
        private static async Task <RTCPeerConnection> SendSDPOffer(WebSocketContext context)
        {
            logger.LogDebug($"Web socket client connection from {context.UserEndPoint}.");

            var pc = new RTCPeerConnection(null);

            MediaStreamTrack videoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
            }, MediaStreamStatusEnum.SendOnly);

            pc.addTrack(videoTrack);

            pc.OnReceiveReport            += RtpSession_OnReceiveReport;
            pc.OnSendReport               += RtpSession_OnSendReport;
            pc.OnTimeout                  += (mediaType) => pc.Close("remote timeout");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            pc.onconnectionstatechange += (state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.disconnected || state == RTCPeerConnectionState.failed)
                {
                    pc.Close("remote disconnection");
                }

                if (state == RTCPeerConnectionState.closed)
                {
                    OnTestPatternSampleReady -= pc.SendMedia;
                    pc.OnReceiveReport       -= RtpSession_OnReceiveReport;
                    pc.OnSendReport          -= RtpSession_OnSendReport;
                    _sendTestPatternTimer?.Dispose();
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    OnTestPatternSampleReady += pc.SendMedia;

                    if (_sendTestPatternTimer == null)
                    {
                        _sendTestPatternTimer = new Timer(SendTestPattern, null, 0, TEST_PATTERN_SPACING_MILLISECONDS);
                    }
                }
            };

            var offerSdp = pc.createOffer(null);
            await pc.setLocalDescription(offerSdp);

            logger.LogDebug($"Sending SDP offer to client {context.UserEndPoint}.");
            logger.LogDebug(offerSdp.sdp);

            context.WebSocket.Send(offerSdp.sdp);

            return(pc);
        }
Code example #19
File: Program.cs Project: sdwflmw/sipsorcery
        private static async Task <RTCPeerConnection> SendSDPOffer(WebSocketContext context)
        {
            logger.LogDebug($"Web socket client connection from {context.UserEndPoint}.");

            var pc = new RTCPeerConnection(null);

            AudioExtrasSource audioSource = new AudioExtrasSource(new AudioEncoder());

            audioSource.OnAudioSourceEncodedSample += pc.SendAudio;

            MediaStreamTrack audioTrack = new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendOnly);

            pc.addTrack(audioTrack);

            pc.OnAudioFormatsNegotiated += (sdpFormat) =>
                                           audioSource.SetAudioSourceFormat(SDPMediaFormatInfo.GetAudioCodecForSdpFormat(sdpFormat.First().FormatCodec));
            pc.OnReceiveReport            += RtpSession_OnReceiveReport;
            pc.OnSendReport               += RtpSession_OnSendReport;
            pc.OnTimeout                  += (mediaType) => pc.Close("remote timeout");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            pc.onconnectionstatechange += (state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.connected)
                {
                    audioSource.SetSource(new AudioSourceOptions {
                        AudioSource = AudioSourcesEnum.SineWave
                    });
                }
                else if (state == RTCPeerConnectionState.disconnected || state == RTCPeerConnectionState.failed)
                {
                    pc.Close("remote disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    audioSource?.CloseAudio();
                    pc.OnReceiveReport -= RtpSession_OnReceiveReport;
                    pc.OnSendReport    -= RtpSession_OnSendReport;
                }
            };

            var offerSdp = pc.createOffer(null);
            await pc.setLocalDescription(offerSdp);

            logger.LogDebug($"Sending SDP offer to client {context.UserEndPoint}.");
            logger.LogDebug(offerSdp.sdp);

            context.WebSocket.Send(offerSdp.sdp);

            return(pc);
        }
Code example #20
        private static Task <RTCPeerConnection> CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List <RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };
            var pc = new RTCPeerConnection(config);

            AudioExtrasSource audioSource = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions {
                AudioSource = AudioSourcesEnum.SineWave
            });

            audioSource.OnAudioSourceEncodedSample += pc.SendAudio;

            MediaStreamTrack audioTrack = new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendOnly);

            pc.addTrack(audioTrack);

            pc.OnAudioFormatsNegotiated += (sdpFormat) =>
                                           audioSource.SetAudioSourceFormat(SDPMediaFormatInfo.GetAudioCodecForSdpFormat(sdpFormat.First().FormatCodec));

            pc.onconnectionstatechange += async(state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.connected)
                {
                    await audioSource.StartAudio();
                }
                else if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await audioSource.CloseAudio();
                }
            };

            // Diagnostics.
            pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            pc.OnSendReport    += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return(Task.FromResult(pc));
        }
Code example #21
        private RTCPeerConnection CreatePeerConnection()
        {
            RTCConfiguration  config     = new RTCConfiguration();
            RTCPeerConnection pc         = new RTCPeerConnection(config);
            MediaStreamTrack  videoTrack = new MediaStreamTrack(new List <VideoCodecsEnum>()
            {
                VideoCodecsEnum.H264
            });

            pc.addTrack(videoTrack);
            pc.OnVideoFormatsNegotiated += Pc_OnVideoFormatsNegotiated;
            ConnectedPeer = pc;
            return(pc);
        }
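The Pc_OnVideoFormatsNegotiated handler wired up above is not part of the snippet. A hypothetical handler, modelled on the SetVideoSourceFormat calls used in code examples #1 and #10, might look like the following; the _videoSource field and the List<VideoFormat> parameter type are assumptions about the surrounding class and library version.

        // Hypothetical sketch. Requires: using System.Collections.Generic; using System.Linq;
        // using SIPSorceryMedia.Abstractions. _videoSource is assumed to be the IVideoSource
        // feeding this peer connection (e.g. a VideoTestPatternSource).
        private void Pc_OnVideoFormatsNegotiated(List<VideoFormat> negotiatedFormats)
        {
            // Tell the source to encode with the first format both sides agreed on.
            _videoSource.SetVideoSourceFormat(negotiatedFormats.First());
        }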
Code example #22
    private Task <RTCPeerConnection> CreatePeerConnection()
    {
        var pc = new RTCPeerConnection();

        // Set up sources and hook up send events to peer connection.
        //AudioExtrasSource audioSrc = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions { AudioSource = AudioSourcesEnum.None });
        //audioSrc.OnAudioSourceEncodedSample += pc.SendAudio;
        //var testPatternSource = new VideoTestPatternSource();
        //testPatternSource.SetMaxFrameRate(true);
        //testPatternSource.OnVideoSourceRawSample += VideoEncoderEndPoint.ExternalVideoSourceRawSample;
        //VideoEncoderEndPoint.OnVideoSourceEncodedSample += pc.SendVideo;

        // Add tracks.
        //var audioTrack = new SIPSorcery.Net.MediaStreamTrack(audioSrc.GetAudioSourceFormats(), SIPSorcery.Net.MediaStreamStatusEnum.SendOnly);
        //pc.addTrack(audioTrack);
        var videoTrack = new MediaStreamTrack(VideoEncoderEndPoint.GetVideoSourceFormats(), MediaStreamStatusEnum.RecvOnly);

        pc.addTrack(videoTrack);

        // Handlers to set the codecs to use on the sources once the SDP negotiation is complete.
        pc.OnVideoFormatsNegotiated += (sdpFormat) => VideoEncoderEndPoint.SetVideoSourceFormat(sdpFormat.First());
        //pc.OnAudioFormatsNegotiated += (sdpFormat) => audioSrc.SetAudioSourceFormat(sdpFormat.First());
        pc.OnVideoFrameReceived += VideoEncoderEndPoint.GotVideoFrame;

        pc.OnTimeout += (mediaType) => logger.LogDebug($"Timeout on media {mediaType}.");
        pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state changed to {state}.");
        pc.onconnectionstatechange    += (state) =>
        {
            logger.LogDebug($"Peer connection connected changed to {state}.");
            if (state == RTCPeerConnectionState.connected)
            {
                //await audioSrc.StartAudio();
                //await testPatternSource.StartVideo();
            }
            else if (state == RTCPeerConnectionState.closed || state == SIPSorcery.Net.RTCPeerConnectionState.failed)
            {
                //await audioSrc.CloseAudio();
                //await testPatternSource.CloseVideo();
            }
        };

        //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) =>
        //{
        //    bool hasUseCandidate = msg.Attributes.Any(x => x.AttributeType == SIPSorcery.Net.STUNAttributeTypesEnum.UseCandidate);
        //    logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}, use candidate {hasUseCandidate}.");
        //};

        return(Task.FromResult(pc));
    }
Code example #23
        public void SendVideoRtcpFeedbackReportUnitTest()
        {
            logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
            logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

            RTCConfiguration pcConfiguration = new RTCConfiguration
            {
                certificates = new List <RTCCertificate>
                {
                    new RTCCertificate
                    {
                        Certificate = DtlsUtils.CreateSelfSignedCert()
                    }
                },
                X_UseRtpFeedbackProfile = true
            };

            RTCPeerConnection pcSrc = new RTCPeerConnection(pcConfiguration);
            var videoTrackSrc       = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
            });

            pcSrc.addTrack(videoTrackSrc);
            var offer = pcSrc.createOffer(new RTCOfferOptions());

            logger.LogDebug($"offer: {offer.sdp}");

            RTCPeerConnection pcDst = new RTCPeerConnection(pcConfiguration);
            var videoTrackDst       = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
            });

            pcDst.addTrack(videoTrackDst);

            var setOfferResult = pcDst.setRemoteDescription(offer);

            Assert.Equal(SetDescriptionResultEnum.OK, setOfferResult);

            var answer          = pcDst.createAnswer(null);
            var setAnswerResult = pcSrc.setRemoteDescription(answer);

            Assert.Equal(SetDescriptionResultEnum.OK, setAnswerResult);

            logger.LogDebug($"answer: {answer.sdp}");

            RTCPFeedback pliReport = new RTCPFeedback(pcDst.VideoLocalTrack.Ssrc, pcDst.VideoRemoteTrack.Ssrc, PSFBFeedbackTypesEnum.PLI);

            pcDst.SendRtcpFeedback(SDPMediaTypesEnum.video, pliReport);
        }
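Outside of a unit test, the same PLI feedback mechanism can be used to ask the remote peer for a video keyframe, for example after a new viewer joins or decode errors are detected. The sketch below reuses only the RTCPFeedback and SendRtcpFeedback calls shown above; the RequestKeyFrame name is illustrative and the connection is assumed to have negotiated a video stream.

        // Sketch only. Requires: using SIPSorcery.Net. Assumes a negotiated video stream
        // (VideoLocalTrack and VideoRemoteTrack are non-null once SDP negotiation completes).
        private static void RequestKeyFrame(RTCPeerConnection pc)
        {
            if (pc.VideoLocalTrack != null && pc.VideoRemoteTrack != null)
            {
                var pli = new RTCPFeedback(pc.VideoLocalTrack.Ssrc, pc.VideoRemoteTrack.Ssrc, PSFBFeedbackTypesEnum.PLI);
                pc.SendRtcpFeedback(SDPMediaTypesEnum.video, pli);
            }
        }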
Code example #24
        public void SendVideoRtcpFeedbackReportUnitTest()
        {
            logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
            logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

            RTCConfiguration pcConfiguration = new RTCConfiguration
            {
                certificates = new List <RTCCertificate>
                {
                    new RTCCertificate
                    {
                        X_Fingerprint = "sha-256 C6:ED:8C:9D:06:50:77:23:0A:4A:D8:42:68:29:D0:70:2F:BB:C7:72:EC:98:5C:62:07:1B:0C:5D:CB:CE:BE:CD"
                    }
                },
                X_UseRtpFeedbackProfile = true
            };

            RTCPeerConnection pcSrc = new RTCPeerConnection(pcConfiguration);
            var videoTrackSrc       = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
            });

            pcSrc.addTrack(videoTrackSrc);
            var offer = pcSrc.createOffer(new RTCOfferOptions());

            logger.LogDebug($"offer: {offer.sdp}");

            RTCPeerConnection pcDst = new RTCPeerConnection(pcConfiguration);
            var videoTrackDst       = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
            });

            pcDst.addTrack(videoTrackDst);

            var setOfferResult = pcDst.setRemoteDescription(offer);

            Assert.Equal(SetDescriptionResultEnum.OK, setOfferResult);

            var answer          = pcDst.createAnswer(null);
            var setAnswerResult = pcSrc.setRemoteDescription(answer);

            Assert.Equal(SetDescriptionResultEnum.OK, setAnswerResult);

            logger.LogDebug($"answer: {answer.sdp}");

            RTCPFeedback pliReport = new RTCPFeedback(pcDst.VideoLocalTrack.Ssrc, pcDst.VideoRemoteTrack.Ssrc, PSFBFeedbackTypesEnum.PLI);

            pcDst.SendRtcpFeedback(SDPMediaTypesEnum.video, pliReport);
        }
Code example #25
        private static RTCPeerConnection WebSocketOpened(WebSocketContext context)
        {
            var peerConnection = new RTCPeerConnection(null);

            // Add local recvonly tracks. This ensures that the SDP answer includes only
            // the codecs we support.
            MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.PCMU)
            }, MediaStreamStatusEnum.RecvOnly);

            peerConnection.addTrack(audioTrack);
            MediaStreamTrack videoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
            }, MediaStreamStatusEnum.RecvOnly);

            peerConnection.addTrack(videoTrack);

            peerConnection.OnReceiveReport            += RtpSession_OnReceiveReport;
            peerConnection.OnSendReport               += RtpSession_OnSendReport;
            peerConnection.oniceconnectionstatechange += (state) => Console.WriteLine($"ICE connection state changed to {state}.");
            peerConnection.onconnectionstatechange    += (state) =>
            {
                Console.WriteLine($"Peer connection state changed to {state}.");

                if (state == RTCPeerConnectionState.closed)
                {
                    peerConnection.OnRtpPacketReceived -= RtpSession_OnRtpPacketReceived;
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    peerConnection.OnRtpPacketReceived += RtpSession_OnRtpPacketReceived;
                }
            };

            return(peerConnection);
        }
Code example #26
        private static async Task <RTCPeerConnection> SendSDPOffer(WebSocketContext context)
        {
            Log.LogDebug($"Web socket client connection from {context.UserEndPoint}.");

            _peerConnection = new RTCPeerConnection(null);
            //AddressFamily.InterNetwork,
            //DTLS_CERTIFICATE_FINGERPRINT,
            //null,
            //null);

            _peerConnection.OnReceiveReport += RtpSession_OnReceiveReport;
            _peerConnection.OnSendReport    += RtpSession_OnSendReport;

            Log.LogDebug($"Sending SDP offer to client {context.UserEndPoint}.");

            _peerConnection.onconnectionstatechange += (state) =>
            {
                if (state == RTCPeerConnectionState.closed)
                {
                    Log.LogDebug($"RTC peer connection closed.");
                    _peerConnection.OnReceiveReport -= RtpSession_OnReceiveReport;
                    _peerConnection.OnSendReport    -= RtpSession_OnSendReport;
                }
            };

            MediaStreamTrack audioTrack = new MediaStreamTrack(null, SDPMediaTypesEnum.audio, false, new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.PCMU)
            });

            _peerConnection.addTrack(audioTrack);

            var offerInit = await _peerConnection.createOffer(null);

            await _peerConnection.setLocalDescription(offerInit);

            context.WebSocket.Send(offerInit.sdp);

            if (DoDtlsHandshake(_peerConnection))
            {
                Log.LogInformation("DTLS handshake completed successfully.");
            }
            else
            {
                _peerConnection.Close("dtls handshake failed.");
            }

            return(_peerConnection);
        }
Code example #27
File: Program.cs Project: zanzo420/sipsorcery
        private static Task <RTCPeerConnection> CreatePeerConnection()
        {
            var pc = new RTCPeerConnection(null);

            MediaStreamTrack track = new MediaStreamTrack(SDPMediaTypesEnum.audio, false,
                                                          new List <SDPAudioVideoMediaFormat> {
                new SDPAudioVideoMediaFormat(SDPWellKnownMediaFormatsEnum.PCMU)
            });

            pc.addTrack(track);
            pc.onconnectionstatechange += (state) => Log.LogDebug($"Peer connection state change to {state}.");
            pc.OnRtpPacketReceived     += ForwardMediaToSIP;
            _peerConnection             = pc;

            return(Task.FromResult(pc));
        }
Code example #28
        private static RTCPeerConnection Createpc(WebSocketContext context, SDPAudioVideoMediaFormat videoFormat)
        {
            var pc = new RTCPeerConnection(null);

            MediaStreamTrack videoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List <SDPAudioVideoMediaFormat> {
                videoFormat
            }, MediaStreamStatusEnum.SendOnly);

            pc.addTrack(videoTrack);

            pc.onicecandidateerror        += (candidate, error) => logger.LogWarning($"Error adding remote ICE candidate. {error} {candidate}");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");
            //pc.OnReceiveReport += (type, rtcp) => logger.LogDebug($"RTCP {type} report received.");
            pc.OnRtcpBye   += (reason) => logger.LogDebug($"RTCP BYE received, reason: {(string.IsNullOrWhiteSpace(reason) ? "<none>" : reason)}.");
            pc.OnRtpClosed += (reason) => logger.LogDebug($"Peer connection closed, reason: {(string.IsNullOrWhiteSpace(reason) ? "<none>" : reason)}.");

            pc.onicecandidate += (candidate) =>
            {
                if (pc.signalingState == RTCSignalingState.have_local_offer ||
                    pc.signalingState == RTCSignalingState.have_remote_offer)
                {
                    context.WebSocket.Send($"candidate:{candidate}");
                }
            };

            pc.onconnectionstatechange += (state) =>
            {
                logger.LogDebug($"Peer connection state changed to {state}.");

                if (state == RTCPeerConnectionState.connected)
                {
                    logger.LogDebug("Creating RTP session to receive ffmpeg stream.");

                    _ffmpegListener.OnRtpPacketReceived += (ep, media, rtpPkt) =>
                    {
                        if (media == SDPMediaTypesEnum.video && pc.VideoDestinationEndPoint != null)
                        {
                            //logger.LogDebug($"Forwarding {media} RTP packet to webrtc peer timestamp {rtpPkt.Header.Timestamp}.");
                            pc.SendRtpRaw(media, rtpPkt.Payload, rtpPkt.Header.Timestamp, rtpPkt.Header.MarkerBit, rtpPkt.Header.PayloadType);
                        }
                    };
                }
            };

            return(pc);
        }
Code example #29
        public async Task <RTCSessionDescriptionInit> GetOffer(string id)
        {
            if (string.IsNullOrWhiteSpace(id))
            {
                throw new ArgumentNullException("id", "A unique ID parameter must be supplied when creating a new peer connection.");
            }
            else if (_peerConnections.ContainsKey(id))
            {
                throw new ArgumentNullException("id", "The specified peer connection ID is already in use.");
            }
            var peerConnection = new RTCPeerConnection(null);

            MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false,
                                                               new List <SDPAudioVideoMediaFormat> {
                new SDPAudioVideoMediaFormat(SDPWellKnownMediaFormatsEnum.PCMU)
            }, MediaStreamStatusEnum.RecvOnly);

            peerConnection.addTrack(audioTrack);

            peerConnection.OnRtpPacketReceived += (IPEndPoint rep, SDPMediaTypesEnum media, RTPPacket rtpPkt) => _logger.LogDebug($"RTP {media} pkt received, SSRC {rtpPkt.Header.SyncSource}, SeqNum {rtpPkt.Header.SequenceNumber}.");
            //peerConnection.OnReceiveReport += RtpSession_OnReceiveReport;
            //peerConnection.OnSendReport += RtpSession_OnSendReport;

            peerConnection.OnTimeout += (mediaType) => _logger.LogWarning($"Timeout for {mediaType}.");
            peerConnection.onconnectionstatechange += (state) =>
            {
                _logger.LogDebug($"Peer connection {id} state changed to {state}.");

                if (state == RTCPeerConnectionState.closed || state == RTCPeerConnectionState.disconnected || state == RTCPeerConnectionState.failed)
                {
                    _peerConnections.TryRemove(id, out _);
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    _logger.LogDebug("Peer connection connected.");
                }
            };

            var offerSdp = peerConnection.createOffer(null);
            await peerConnection.setLocalDescription(offerSdp);

            _peerConnections.TryAdd(id, peerConnection);

            return(offerSdp);
        }
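A natural companion to the GetOffer method above is one that applies the remote party's SDP answer to the stored connection. The sketch below mirrors the structure of the example; the SetAnswer name is an assumption, while setRemoteDescription, RTCSessionDescriptionInit and SetDescriptionResultEnum are used exactly as in the other snippets in this section.

        // Companion sketch to GetOffer; _peerConnections and _logger are the same fields
        // used in the method above. Requires: using System; using SIPSorcery.Net;
        // using Microsoft.Extensions.Logging.
        public void SetAnswer(string id, RTCSessionDescriptionInit answer)
        {
            if (string.IsNullOrWhiteSpace(id))
            {
                throw new ArgumentNullException("id", "A unique ID parameter must be supplied when setting an answer.");
            }
            else if (!_peerConnections.TryGetValue(id, out var peerConnection))
            {
                throw new ArgumentException("No peer connection exists for the supplied ID.", "id");
            }

            var result = peerConnection.setRemoteDescription(answer);

            if (result != SetDescriptionResultEnum.OK)
            {
                _logger.LogWarning($"Failed to set remote description on peer connection {id}: {result}.");
                peerConnection.Close("failed to set remote description");
            }
        }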
Code example #30
File: Program.cs Project: wangscript007/sipsorcery
        private static async Task <RTCPeerConnection> SendSDPOffer(WebSocketContext context)
        {
            logger.LogDebug($"Web socket client connection from {context.UserEndPoint}.");

            var pc = new RTCPeerConnection(null);

            MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.PCMU)
            }, MediaStreamStatusEnum.SendOnly);

            pc.addTrack(audioTrack);

            pc.OnReceiveReport            += RtpSession_OnReceiveReport;
            pc.OnSendReport               += RtpSession_OnSendReport;
            pc.OnTimeout                  += (mediaType) => pc.Close("remote timeout");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            pc.onconnectionstatechange += (state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.disconnected || state == RTCPeerConnectionState.failed)
                {
                    pc.Close("remote disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    pc.OnReceiveReport -= RtpSession_OnReceiveReport;
                    pc.OnSendReport    -= RtpSession_OnSendReport;
                }
            };

            var offerSdp = pc.createOffer(null);
            await pc.setLocalDescription(offerSdp);

            logger.LogDebug($"Sending SDP offer to client {context.UserEndPoint}.");
            logger.LogDebug(offerSdp.sdp);

            context.WebSocket.Send(offerSdp.sdp);

            return(pc);
        }