Code example #1
File: Program.cs Project: sdwflmw/sipsorcery
        private static RTCPeerConnection WebSocketOpened(WebSocketContext context)
        {
            WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint();

            winVideoEP.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
            {
                _form.BeginInvoke(new Action(() =>
                {
                    unsafe
                    {
                        fixed(byte *s = bmp)
                        {
                            System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap((int)width, (int)height, (int)(bmp.Length / height), System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s);
                            _picBox.Image = bmpImage;
                        }
                    }
                }));
            };

            var peerConnection = new RTCPeerConnection(null);

            // Add local recvonly tracks. This ensures that the SDP answer includes only
            // the codecs we support.
            MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.PCMU)
            }, MediaStreamStatusEnum.RecvOnly);

            peerConnection.addTrack(audioTrack);
            MediaStreamTrack videoTrack = new MediaStreamTrack(winVideoEP.GetVideoSinkFormats(), MediaStreamStatusEnum.RecvOnly);

            peerConnection.addTrack(videoTrack);

            peerConnection.OnReceiveReport += RtpSession_OnReceiveReport;
            peerConnection.OnSendReport    += RtpSession_OnSendReport;
            peerConnection.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) =>
            {
                bool hasUseCandidate = msg.Attributes.Any(x => x.AttributeType == STUNAttributeTypesEnum.UseCandidate);
                Console.WriteLine($"STUN {msg.Header.MessageType} received from {ep}, use candidate {hasUseCandidate}.");
            };
            peerConnection.oniceconnectionstatechange += (state) => Console.WriteLine($"ICE connection state changed to {state}.");
            peerConnection.onconnectionstatechange    += async(state) =>
            {
                Console.WriteLine($"Peer connection state changed to {state}.");

                if (state == RTCPeerConnectionState.closed)
                {
                    await winVideoEP.CloseVideo();
                }
            };
            //peerConnection.OnRtpPacketReceived += (IPEndPoint rep, SDPMediaTypesEnum media, RTPPacket rtpPkt) =>
            //{
            //    //logger.LogDebug($"RTP {media} pkt received, SSRC {rtpPkt.Header.SyncSource}.");

            //    if (media == SDPMediaTypesEnum.video)
            //    {
            //        winVideoEP.GotVideoRtp(rep, rtpPkt.Header.SyncSource, rtpPkt.Header.SequenceNumber, rtpPkt.Header.Timestamp, rtpPkt.Header.PayloadType, rtpPkt.Header.MarkerBit == 1, rtpPkt.Payload);
            //    }
            //};
            peerConnection.OnVideoFrameReceived += winVideoEP.GotVideoFrame;

            return(peerConnection);
        }
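A caveat on the bitmap pattern above: the Bitmap wraps a pointer into the managed bmp array, which is only pinned while the fixed block is in scope, yet the image stays alive afterwards via _picBox.Image. A minimal copy-based sketch that avoids holding the pinned pointer (BuildBitmap is a hypothetical helper, not part of the sipsorcery sample; it assumes 24bpp RGB data with a source stride of bmp.Length / height, matching the arguments used above):

        private static System.Drawing.Bitmap BuildBitmap(byte[] bmp, uint width, uint height)
        {
            var image = new System.Drawing.Bitmap((int)width, (int)height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
            var bounds = new System.Drawing.Rectangle(0, 0, image.Width, image.Height);
            var data = image.LockBits(bounds, System.Drawing.Imaging.ImageLockMode.WriteOnly, image.PixelFormat);
            try
            {
                // Copy row by row so the source stride (bmp.Length / height) and the
                // Bitmap's own 4-byte aligned stride can differ safely.
                int srcStride = (int)(bmp.Length / height);
                int copyLength = System.Math.Min(srcStride, data.Stride);
                for (int row = 0; row < height; row++)
                {
                    System.Runtime.InteropServices.Marshal.Copy(bmp, row * srcStride, data.Scan0 + row * data.Stride, copyLength);
                }
            }
            finally
            {
                image.UnlockBits(data);
            }
            return image;
        }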
Code example #2
File: Program.cs Project: zanzo420/sipsorcery
        private static Task <RTCPeerConnection> CreatePeerConnection()
        {
            var peerConnection = new RTCPeerConnection();

            //FileStream captureStream = new FileStream("capture.stm", FileMode.Create, FileAccess.ReadWrite);

            var videoEP = new Vp8NetVideoEncoderEndPoint();

            videoEP.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
            {
                if (_isFormActivated)
                {
                    _form?.BeginInvoke(new Action(() =>
                    {
                        unsafe
                        {
                            fixed(byte *s = bmp)
                            {
                                Bitmap bmpImage = new Bitmap((int)width, (int)height, (int)(bmp.Length / height), PixelFormat.Format24bppRgb, (IntPtr)s);
                                _picBox.Image   = bmpImage;
                            }
                        }
                    }));
                }
            };

            // Sink (speaker) only audio end point.
            //WindowsAudioEndPoint windowsAudioEP = new WindowsAudioEndPoint(new AudioEncoder(), -1, -1, true, false);

            //MediaStreamTrack audioTrack = new MediaStreamTrack(windowsAudioEP.GetAudioSinkFormats(), MediaStreamStatusEnum.RecvOnly);
            //peerConnection.addTrack(audioTrack);
            MediaStreamTrack videoTrack = new MediaStreamTrack(videoEP.GetVideoSinkFormats(), MediaStreamStatusEnum.RecvOnly);

            peerConnection.addTrack(videoTrack);

            peerConnection.OnVideoFrameReceived += (rep, ts, frame, pixelFmt) =>
            {
                Console.WriteLine($"Video frame received {frame.Length} bytes.");
                //Console.WriteLine(frame.HexStr());
                //captureStream.Write(Encoding.ASCII.GetBytes($"{frame.Length},"));
                //captureStream.Write(frame);
                //captureStream.Flush();
                videoEP.GotVideoFrame(rep, ts, frame, pixelFmt);
            };
            peerConnection.OnVideoFormatsNegotiated += (formats) =>
                                                       videoEP.SetVideoSinkFormat(formats.First());
            //peerConnection.OnAudioFormatsNegotiated += (formats) =>
            //    windowsAudioEP.SetAudioSinkFormat(formats.First());

            peerConnection.OnTimeout += (mediaType) => logger.LogDebug($"Timeout on media {mediaType}.");
            peerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state changed to {state}.");
            peerConnection.onconnectionstatechange    += (state) =>
            {
                logger.LogDebug($"Peer connection connected changed to {state}.");

                if (state == RTCPeerConnectionState.closed || state == RTCPeerConnectionState.failed || state == RTCPeerConnectionState.disconnected)
                {
                    //captureStream.Close();
                }
            };

            peerConnection.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) =>
            {
                bool hasUseCandidate = msg.Attributes.Any(x => x.AttributeType == STUNAttributeTypesEnum.UseCandidate);
                Console.WriteLine($"STUN {msg.Header.MessageType} received from {ep}, use candidate {hasUseCandidate}.");
            };

            peerConnection.OnRtpPacketReceived += (IPEndPoint rep, SDPMediaTypesEnum media, RTPPacket rtpPkt) =>
            {
                //logger.LogDebug($"RTP {media} pkt received, SSRC {rtpPkt.Header.SyncSource}.");
                //if (media == SDPMediaTypesEnum.audio)
                //{
                //    windowsAudioEP.GotAudioRtp(rep, rtpPkt.Header.SyncSource, rtpPkt.Header.SequenceNumber, rtpPkt.Header.Timestamp, rtpPkt.Header.PayloadType, rtpPkt.Header.MarkerBit == 1, rtpPkt.Payload);
                //}
            };

            return(Task.FromResult(peerConnection));
        }
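Factories like CreatePeerConnection above are typically handed to a signaling helper rather than called directly. A minimal wiring sketch, assuming the websocket-sharp WebSocketServer and SIPSorcery's WebRTCWebSocketPeer behaviour used by the sipsorcery demo programs (WEBSOCKET_PORT is a hypothetical constant for this sketch):

        private const int WEBSOCKET_PORT = 8081;

        private static void StartSignaling()
        {
            // Hosts SIPSorcery's built-in web socket signaling behaviour; each client
            // connection gets a peer connection from the factory above.
            var webSocketServer = new WebSocketSharp.Server.WebSocketServer(System.Net.IPAddress.Any, WEBSOCKET_PORT);
            webSocketServer.AddWebSocketService<SIPSorcery.Net.WebRTCWebSocketPeer>("/", (peer) => peer.CreatePeerConnection = CreatePeerConnection);
            webSocketServer.Start();

            Console.WriteLine($"Waiting for web socket connections on ws://0.0.0.0:{WEBSOCKET_PORT}.");
        }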
Code example #3
 public IMediaSource CreateMediaSource(MediaStreamTrack track, string id)
 {
     return(track?.CreateMediaSource());
 }
Code example #4
        static private Task <RTCPeerConnection> CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List <RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };

            PeerConnection = new RTCPeerConnection(config);

            switch (VideoSourceType)
            {
            case VIDEO_SOURCE.FILE_OR_STREAM:
                // Is the same file also used for audio?
                if ((AudioSourceType == AUDIO_SOURCE.FILE_OR_STREAM) && (AudioSourceFile == VideoSourceFile))
                {
                    SIPSorceryMedia.FFmpeg.FFmpegFileSource fileSource = new SIPSorceryMedia.FFmpeg.FFmpegFileSource(VideoSourceFile, RepeatVideoFile, new AudioEncoder(), true);
                    fileSource.OnEndOfFile += () => PeerConnection.Close("source eof");

                    videoSource = fileSource as IVideoSource;
                    audioSource = fileSource as IAudioSource;
                }
                else
                {
                    SIPSorceryMedia.FFmpeg.FFmpegFileSource fileSource = new SIPSorceryMedia.FFmpeg.FFmpegFileSource(VideoSourceFile, RepeatVideoFile, new AudioEncoder(), true);
                    fileSource.OnEndOfFile += () => PeerConnection.Close("source eof");

                    videoSource = fileSource as IVideoSource;
                }
                break;

            case VIDEO_SOURCE.CAMERA:
                List <SIPSorceryMedia.FFmpeg.Camera>?cameras = SIPSorceryMedia.FFmpeg.FFmpegCameraManager.GetCameraDevices();

                SIPSorceryMedia.FFmpeg.Camera?camera = null;
                if (cameras?.Count > 0)
                {
                    // Get last one
                    camera = cameras.Last();
                }
                if (camera != null)
                {
                    videoSource = new SIPSorceryMedia.FFmpeg.FFmpegCameraSource(camera.Path);
                }
                else
                {
                    throw new NotSupportedException($"Cannot find adequate camera ...");
                }

                break;

            case VIDEO_SOURCE.SCREEN:
                List <SIPSorceryMedia.FFmpeg.Monitor>?monitors       = SIPSorceryMedia.FFmpeg.FFmpegMonitorManager.GetMonitorDevices();
                SIPSorceryMedia.FFmpeg.Monitor?       primaryMonitor = null;
                if (monitors?.Count > 0)
                {
                    foreach (SIPSorceryMedia.FFmpeg.Monitor monitor in monitors)
                    {
                        if (monitor.Primary)
                        {
                            primaryMonitor = monitor;
                            break;
                        }
                    }
                    if (primaryMonitor == null)
                    {
                        primaryMonitor = monitors[0];
                    }
                }

                if (primaryMonitor != null)
                {
                    videoSource = new SIPSorceryMedia.FFmpeg.FFmpegScreenSource(primaryMonitor.Path, primaryMonitor.Rect, 10);
                }
                else
                {
                    throw new NotSupportedException($"Cannot find adequate monitor ...");
                }
                break;
            }

            if (audioSource == null)
            {
                switch (AudioSourceType)
                {
                case AUDIO_SOURCE.FILE_OR_STREAM:
                    SIPSorceryMedia.FFmpeg.FFmpegFileSource fileSource = new SIPSorceryMedia.FFmpeg.FFmpegFileSource(AudioSourceFile, RepeatAudioFile, new AudioEncoder(), false);
                    fileSource.OnEndOfFile += () => PeerConnection.Close("source eof");

                    audioSource = fileSource as IAudioSource;
                    break;

                case AUDIO_SOURCE.MICROPHONE:
                    audioSource = new SIPSorceryMedia.FFmpeg.FFmpegMicrophoneSource(MicrophoneDevicePath, new AudioEncoder());
                    break;
                }
            }

            if (videoSource != null)
            {
                videoSource.RestrictFormats(x => x.Codec == VideoCodec);

                MediaStreamTrack videoTrack = new MediaStreamTrack(videoSource.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);
                PeerConnection.addTrack(videoTrack);


                videoSource.OnVideoSourceEncodedSample  += PeerConnection.SendVideo;
                PeerConnection.OnVideoFormatsNegotiated += (videoFormats) => videoSource.SetVideoSourceFormat(videoFormats.First());
            }

            if (audioSource != null)
            {
                audioSource.RestrictFormats(x => x.Codec == AudioCodec);

                MediaStreamTrack audioTrack = new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv);
                PeerConnection.addTrack(audioTrack);

                audioSource.OnAudioSourceEncodedSample  += AudioSource_OnAudioSourceEncodedSample;
                PeerConnection.OnAudioFormatsNegotiated += (audioFormats) => audioSource.SetAudioSourceFormat(audioFormats.First());
            }

            PeerConnection.onconnectionstatechange += async(state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.failed)
                {
                    PeerConnection.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    if (videoSource != null)
                    {
                        await videoSource.CloseVideo();
                    }

                    if (audioSink != null)
                    {
                        await audioSink.CloseAudioSink();
                    }

                    if (audioSource != null)
                    {
                        await audioSource.CloseAudio();
                    }
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    if (videoSource != null)
                    {
                        await videoSource.StartVideo();
                    }

                    if (audioSink != null)
                    {
                        await audioSink.StartAudioSink();
                    }

                    if (audioSource != null)
                    {
                        await audioSource.StartAudio();
                    }
                }
            };

            // Diagnostics.
            //pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            //pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            PeerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return(Task.FromResult(PeerConnection));
        }
Code example #5
 public virtual RTCRtpTransceiver AddTransceiver(string connectionId, MediaStreamTrack track, RTCRtpTransceiverDirection direction = RTCRtpTransceiverDirection.SendRecv)
 {
     return(m_handler.AddTransceiver(connectionId, track, direction));
 }
Code example #6
        private static Task <RTCPeerConnection> CreatePeerConnection()
        {
            var peerConnection = new RTCPeerConnection(null);

            var videoEP = new SIPSorceryMedia.Encoders.VideoEncoderEndPoint();

            //var videoEP = new SIPSorceryMedia.Windows.WindowsEncoderEndPoint();
            //var videoEP = new FFmpegVideoEndPoint();
            videoEP.RestrictFormats(format => format.Codec == VideoCodecsEnum.VP8);

            videoEP.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
            {
                _form.BeginInvoke(new Action(() =>
                {
                    unsafe
                    {
                        fixed(byte *s = bmp)
                        {
                            Bitmap bmpImage = new Bitmap((int)width, (int)height, (int)(bmp.Length / height), PixelFormat.Format24bppRgb, (IntPtr)s);
                            _picBox.Image   = bmpImage;
                        }
                    }
                }));
            };

            // Sink (speaker) only audio end point.
            WindowsAudioEndPoint windowsAudioEP = new WindowsAudioEndPoint(new AudioEncoder(), -1, -1, true, false);

            MediaStreamTrack audioTrack = new MediaStreamTrack(windowsAudioEP.GetAudioSinkFormats(), MediaStreamStatusEnum.RecvOnly);

            peerConnection.addTrack(audioTrack);
            MediaStreamTrack videoTrack = new MediaStreamTrack(videoEP.GetVideoSinkFormats(), MediaStreamStatusEnum.RecvOnly);

            peerConnection.addTrack(videoTrack);

            peerConnection.OnVideoFrameReceived     += videoEP.GotVideoFrame;
            peerConnection.OnVideoFormatsNegotiated += (formats) =>
                                                       videoEP.SetVideoSinkFormat(formats.First());
            peerConnection.OnAudioFormatsNegotiated += (formats) =>
                                                       windowsAudioEP.SetAudioSinkFormat(formats.First());

            peerConnection.OnTimeout += (mediaType) => logger.LogDebug($"Timeout on media {mediaType}.");
            peerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state changed to {state}.");
            peerConnection.onconnectionstatechange    += async(state) =>
            {
                logger.LogDebug($"Peer connection connected changed to {state}.");

                if (state == RTCPeerConnectionState.connected)
                {
                    await windowsAudioEP.StartAudio();
                }
                else if (state == RTCPeerConnectionState.closed || state == RTCPeerConnectionState.failed)
                {
                    await windowsAudioEP.CloseAudio();
                }
            };

            peerConnection.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) =>
            {
                bool hasUseCandidate = msg.Attributes.Any(x => x.AttributeType == STUNAttributeTypesEnum.UseCandidate);
                Console.WriteLine($"STUN {msg.Header.MessageType} received from {ep}, use candidate {hasUseCandidate}.");
            };

            peerConnection.OnRtpPacketReceived += (IPEndPoint rep, SDPMediaTypesEnum media, RTPPacket rtpPkt) =>
            {
                //logger.LogDebug($"RTP {media} pkt received, SSRC {rtpPkt.Header.SyncSource}.");
                if (media == SDPMediaTypesEnum.audio)
                {
                    windowsAudioEP.GotAudioRtp(rep, rtpPkt.Header.SyncSource, rtpPkt.Header.SequenceNumber, rtpPkt.Header.Timestamp, rtpPkt.Header.PayloadType, rtpPkt.Header.MarkerBit == 1, rtpPkt.Payload);
                }
            };

            return(Task.FromResult(peerConnection));
        }
Code example #7
        private static Task <RTCPeerConnection> CreatePeerConnection()
        {
            var videoEP = new SIPSorceryMedia.FFmpeg.FFmpegVideoEndPoint();

            videoEP.RestrictFormats(format => format.Codec == VideoCodecsEnum.H264);

            videoEP.OnVideoSinkDecodedSampleFaster += (RawImage rawImage) =>
            {
                _form.BeginInvoke(new Action(() =>
                {
                    unsafe
                    {
                        if (_picBox.Width != rawImage.Width || _picBox.Height != rawImage.Height)
                        {
                            logger.LogDebug($"Adjusting video display from {_picBox.Width}x{_picBox.Height} to {rawImage.Width}x{rawImage.Height}.");
                            _picBox.Width  = rawImage.Width;
                            _picBox.Height = rawImage.Height;
                        }

                        Bitmap bmpImage = new Bitmap(rawImage.Width, rawImage.Height, rawImage.Stride, PixelFormat.Format24bppRgb, rawImage.Sample);
                        _picBox.Image   = bmpImage;
                    }
                }));
            };

            RTCConfiguration config = new RTCConfiguration
            {
                //iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } }
                X_UseRtpFeedbackProfile = true
            };
            var pc = new RTCPeerConnection(config);

            // Add local receive only tracks. This ensures that the SDP answer includes only the codecs we support.
            if (!_options.NoAudio)
            {
                MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false,
                                                                   new List <SDPAudioVideoMediaFormat> {
                    new SDPAudioVideoMediaFormat(SDPWellKnownMediaFormatsEnum.PCMU)
                }, MediaStreamStatusEnum.RecvOnly);
                pc.addTrack(audioTrack);

                pc.OnRtpPacketReceived += Pc_OnRtpPacketReceived;
            }
            MediaStreamTrack videoTrack = new MediaStreamTrack(videoEP.GetVideoSinkFormats(), MediaStreamStatusEnum.RecvOnly);

            //MediaStreamTrack videoTrack = new MediaStreamTrack(new VideoFormat(96, "VP8", 90000, "x-google-max-bitrate=5000000"), MediaStreamStatusEnum.RecvOnly);
            pc.addTrack(videoTrack);

            pc.OnVideoFrameReceived     += videoEP.GotVideoFrame;
            pc.OnVideoFormatsNegotiated += (formats) => videoEP.SetVideoSinkFormat(formats.First());


            pc.onconnectionstatechange += async(state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await videoEP.CloseVideo();
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    await videoEP.StartVideoSink();
                }
            };

            // Diagnostics.
            //pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"RECV STUN {msg.Header.MessageType} (txid: {msg.Header.TransactionId.HexStr()}) from {ep}.");
            //pc.GetRtpChannel().OnStunMessageSent += (msg, ep, isRelay) => logger.LogDebug($"SEND STUN {msg.Header.MessageType} (txid: {msg.Header.TransactionId.HexStr()}) to {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return(Task.FromResult(pc));
        }
Code example #8
        public async Task HandleInvalidSdpPortOnPlaceCallUnitTest()
        {
            logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
            logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

            // This transport will act as the call receiver. It allows the test to provide a
            // tailored response to an incoming call.
            SIPTransport calleeTransport = new SIPTransport();

            // This transport will be used by the SIPUserAgent being tested to place the call.
            SIPTransport callerTransport = new SIPTransport();
            RTPSession   rtpSession      = new RTPSession(false, false, false);

            try
            {
                calleeTransport.AddSIPChannel(new SIPUDPChannel(IPAddress.Loopback, 0));
                calleeTransport.SIPTransportRequestReceived += async(lep, rep, req) =>
                {
                    if (req.Method != SIPMethodsEnum.INVITE)
                    {
                        SIPResponse notAllowedResponse = SIPResponse.GetResponse(req, SIPResponseStatusCodesEnum.MethodNotAllowed, null);
                        await calleeTransport.SendResponseAsync(notAllowedResponse);
                    }
                    else
                    {
                        UASInviteTransaction uasTransaction = new UASInviteTransaction(calleeTransport, req, null);
                        var uas = new SIPServerUserAgent(calleeTransport, null, null, null, SIPCallDirection.In, null, null, uasTransaction);
                        uas.Progress(SIPResponseStatusCodesEnum.Trying, null, null, null, null);
                        uas.Progress(SIPResponseStatusCodesEnum.Ringing, null, null, null, null);

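                        // The m=audio port below (79762) exceeds the 16-bit maximum of
                        // 65535, making the SDP invalid; the test expects the call to fail.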
                        var answerSdp = @"
v=0
o=- 1838015445 0 IN IP4 127.0.0.1
s=-
c=IN IP4 127.0.0.1
t=0 0
m=audio 79762 RTP/AVP 0
a=rtpmap:0 PCMU/8000
a=sendrecv";
                        uas.Answer(SDP.SDP_MIME_CONTENTTYPE, answerSdp, null, SIPDialogueTransferModesEnum.NotAllowed);
                    }
                };

                SIPUserAgent userAgent = new SIPUserAgent(callerTransport, null);

                MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List <SDPMediaFormat> {
                    new SDPMediaFormat(SDPMediaFormatsEnum.PCMU)
                });
                rtpSession.addTrack(audioTrack);

                SIPURI dstUri = new SIPURI(SIPSchemesEnum.sip, calleeTransport.GetSIPChannels().First().ListeningSIPEndPoint);
                var    result = await userAgent.Call(dstUri.ToString(), null, null, rtpSession);

                Assert.False(result);
            }
            finally
            {
                rtpSession?.Close("normal");
                callerTransport?.Shutdown();
                calleeTransport?.Shutdown();
            }
        }
Code example #9
File: Program.cs Project: soitgoes/sipsorcery
        private static Task <RTCPeerConnection> CreatePeerConnection()
        {
            var videoEP = new SIPSorceryMedia.Windows.WindowsVideoEndPoint();

            //var videoEP = new SIPSorceryMedia.FFmpeg.FFmpegVideoEndPoint();
            videoEP.RestrictFormats(format => format.Codec == VideoCodecsEnum.VP8);

            videoEP.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
            {
                _form.BeginInvoke(new Action(() =>
                {
                    unsafe
                    {
                        fixed(byte *s = bmp)
                        {
                            Bitmap bmpImage = new Bitmap((int)width, (int)height, (int)(bmp.Length / height), PixelFormat.Format24bppRgb, (IntPtr)s);
                            _picBox.Image   = bmpImage;
                        }
                    }
                }));
            };

            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List <RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };
            var pc = new RTCPeerConnection(config);

            // Add local receive only tracks. This ensures that the SDP answer includes only the codecs we support.
            MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false,
                                                               new List <SDPAudioVideoMediaFormat> {
                new SDPAudioVideoMediaFormat(SDPWellKnownMediaFormatsEnum.PCMU)
            }, MediaStreamStatusEnum.RecvOnly);

            pc.addTrack(audioTrack);
            MediaStreamTrack videoTrack = new MediaStreamTrack(videoEP.GetVideoSinkFormats(), MediaStreamStatusEnum.RecvOnly);

            //MediaStreamTrack videoTrack = new MediaStreamTrack(new VideoFormat(96, "VP8", 90000, "x-google-max-bitrate=5000000"), MediaStreamStatusEnum.RecvOnly);
            pc.addTrack(videoTrack);

            pc.OnVideoFrameReceived     += videoEP.GotVideoFrame;
            pc.OnVideoFormatsNegotiated += (formats) => videoEP.SetVideoSinkFormat(formats.First());

            pc.onconnectionstatechange += async(state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await videoEP.CloseVideo();
                }
            };

            // Diagnostics.
            //pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            //pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return(Task.FromResult(pc));
        }
Code example #10
        public void CheckNoAudioNegotiationUnitTest()
        {
            logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
            logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

            // The remote offer contains both an audio and a video section. The local
            // session only adds a video track, so the audio m-line in the answer is
            // expected to be negotiated as inactive.
            string remoteSdp =
                @"v=0
o=- 62533 0 IN IP4 127.0.0.1
s=-
t=0 0
a=group:BUNDLE 0 1
a=msid-semantic: WMS
m=audio 57148 UDP/TLS/RTP/SAVP 0 101
c=IN IP6 2a02:8084:6981:7880::76
a=rtcp:9 IN IP4 0.0.0.0
a=candidate:2944 1 udp 659136 192.168.11.50 57148 typ host generation 0
a=candidate:2488 1 udp 659136 192.168.0.50 57148 typ host generation 0
a=candidate:2507 1 udp 659136 fe80::54a9:d238:b2ee:ceb%24 57148 typ host generation 0
a=candidate:3159 1 udp 659136 2a02:8084:6981:7880::76 57148 typ host generation 0
a=ice-ufrag:CUTK
a=ice-pwd:QTCZWDIEBCIBGOYAGSIXRFIL
a=ice-options:ice2,trickle
a=fingerprint:sha-256 06:2F:61:85:1F:83:64:88:1B:93:93:8C:E5:FF:1C:D9:82:EA:60:97:1E:0D:DA:FA:28:11:00:FA:74:69:23:DB
a=setup:actpass
a=mid:0
a=sendrecv
a=rtcp-mux
a=rtpmap:0 PCMU/8000
a=rtpmap:101 telephone-event/8000
a=fmtp:101 0-16
m=video 9 UDP/TLS/RTP/SAVP 100
c=IN IP4 0.0.0.0
a=rtcp:9 IN IP4 0.0.0.0
a=ice-ufrag:CUTK
a=ice-pwd:QTCZWDIEBCIBGOYAGSIXRFIL
a=ice-options:ice2,trickle
a=fingerprint:sha-256 06:2F:61:85:1F:83:64:88:1B:93:93:8C:E5:FF:1C:D9:82:EA:60:97:1E:0D:DA:FA:28:11:00:FA:74:69:23:DB
a=setup:actpass
a=mid:1
a=sendrecv
a=rtcp-mux
a=rtpmap:100 VP8/90000";

            // Create a local session and add the video track first.
            RTCPeerConnection pc = new RTCPeerConnection(null);
            MediaStreamTrack  localVideoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List <SDPAudioVideoMediaFormat> {
                new SDPAudioVideoMediaFormat(SDPMediaTypesEnum.video, 96, "VP8", 90000)
            });

            pc.addTrack(localVideoTrack);

            var offer = SDP.ParseSDPDescription(remoteSdp);

            logger.LogDebug($"Remote offer: {offer}");

            var result = pc.SetRemoteDescription(SIP.App.SdpType.offer, offer);

            logger.LogDebug($"Set remote description on local session result {result}.");

            Assert.Equal(SetDescriptionResultEnum.OK, result);

            var answer = pc.CreateAnswer(null);

            logger.LogDebug($"Local answer: {answer}");

            Assert.Equal(MediaStreamStatusEnum.Inactive, pc.AudioLocalTrack.StreamStatus);
            Assert.Equal(100, pc.VideoLocalTrack.Capabilities.Single(x => x.Name() == "VP8").ID);

            pc.Close("normal");
        }
Code example #11
 public void AddTrack(MediaStreamTrack track)
 {
     tracks.Add(track);
 }
Code example #12
 public MediaStreamTrackNative(MediaStreamTrack mediaStreamTrack) : base(mediaStreamTrack)
 {
     _mediaStreamTrack = mediaStreamTrack;
 }
Code example #13
        private static async Task <RTCPeerConnection> StartPeerConnection()
        {
            RTCConfiguration pcConfiguration = new RTCConfiguration
            {
                certificates = new List <RTCCertificate>
                {
                    new RTCCertificate
                    {
                        X_CertificatePath = DTLS_CERTIFICATE_PATH,
                        X_KeyPath         = DTLS_KEY_PATH,
                        X_Fingerprint     = DTLS_CERTIFICATE_FINGERPRINT
                    }
                }
            };

            var peerConnection = new RTCPeerConnection(pcConfiguration);

            MediaStreamTrack videoTrack = new MediaStreamTrack(
                "0",
                SDPMediaTypesEnum.video,
                false,
                new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
            },
                MediaStreamStatusEnum.SendOnly);

            peerConnection.addTrack(videoTrack);

            peerConnection.OnReceiveReport += RtpSession_OnReceiveReport;
            peerConnection.OnSendReport    += RtpSession_OnSendReport;

            //peerConnection.OnRtcpBye += (reason) =>
            //{
            //    logger.LogInformation("RTCP BYE report received from remote peer.");
            //    peerConnection.Close(reason);
            //    dtls.Shutdown();
            //};

            peerConnection.OnTimeout += (mediaType) =>
            {
                peerConnection.Close("remote timeout");
            };

            peerConnection.onconnectionstatechange += async(state) =>
            {
                if (state == RTCPeerConnectionState.closed || state == RTCPeerConnectionState.disconnected || state == RTCPeerConnectionState.failed)
                {
                    OnTestPatternSampleReady       -= peerConnection.SendMedia;
                    peerConnection.OnReceiveReport -= RtpSession_OnReceiveReport;
                    peerConnection.OnSendReport    -= RtpSession_OnSendReport;
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    // The DTLS handshake completed.
                    logger.LogDebug("Peer connection connected.");
                    OnTestPatternSampleReady += peerConnection.SendMedia;

                    await peerConnection.Start();

                    if (_sendTestPatternTimer == null)
                    {
                        _sendTestPatternTimer = new Timer(SendTestPattern, null, 0, TEST_PATTERN_SPACING_MILLISECONDS);
                    }
                }
            };

            peerConnection.oniceconnectionstatechange += (state) =>
            {
                logger.LogDebug($"ICE connection state change {state}.");

                // The ICE connectivity check completed successfully.
                if (state == RTCIceConnectionState.connected)
                {
                    var remoteEP = peerConnection.IceSession.ConnectedRemoteEndPoint;
                    peerConnection.SetDestination(SDPMediaTypesEnum.audio, remoteEP, remoteEP);
                }
            };

            var offerSdp = await peerConnection.createOffer(null);

            await peerConnection.setLocalDescription(offerSdp);

            logger.LogDebug(offerSdp.sdp);

            return(peerConnection);
        }
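The offer created above still has to reach the remote peer over the application's signaling channel. In the web socket based demos that is a single send of the SDP text, along the lines of the sketch below (context here stands for the hypothetical web socket connection, as in code example #15):

            context.WebSocket.Send(offerSdp.sdp);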
Code example #14
        private static Task <RTCPeerConnection> Createpc(WebSocketContext context, RTCIceServer stunServer, bool relayOnly)
        {
            if (_peerConnection != null)
            {
                _peerConnection.Close("normal");
            }

            List <RTCCertificate> presetCertificates = null;

            if (File.Exists(LOCALHOST_CERTIFICATE_PATH))
            {
                var localhostCert = new X509Certificate2(LOCALHOST_CERTIFICATE_PATH, (string)null, X509KeyStorageFlags.Exportable);
                presetCertificates = new List <RTCCertificate> {
                    new RTCCertificate {
                        Certificate = localhostCert
                    }
                };
            }

            RTCConfiguration pcConfiguration = new RTCConfiguration
            {
                //certificates = presetCertificates,
                //X_RemoteSignallingAddress = (context != null) ? context.UserEndPoint.Address : null,
                iceServers = stunServer != null ? new List <RTCIceServer> {
                    stunServer
                } : null,
                iceTransportPolicy = relayOnly ? RTCIceTransportPolicy.relay : RTCIceTransportPolicy.all,
                //X_BindAddress = IPAddress.Any, // NOTE: Not reqd. Using this to filter out IPv6 addresses so can test with Pion.
            };

            _peerConnection = new RTCPeerConnection(pcConfiguration);

            //_peerConnection.GetRtpChannel().MdnsResolve = (hostname) => Task.FromResult(NetServices.InternetDefaultAddress);
            _peerConnection.GetRtpChannel().MdnsResolve = MdnsResolve;
            //_peerConnection.GetRtpChannel().OnStunMessageReceived += (msg, ep, isrelay) => logger.LogDebug($"STUN message received from {ep}, message type {msg.Header.MessageType}.");

            //var dc = _peerConnection.createDataChannel(DATA_CHANNEL_LABEL, null);
            //dc.onmessage += (msg) => logger.LogDebug($"data channel receive ({dc.label}-{dc.id}): {msg}");

            // Add a send-only audio track (this doesn't require any native libraries for encoding so is good for x-platform testing).
            AudioExtrasSource audioSource = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions {
                AudioSource = AudioSourcesEnum.Music
            });

            audioSource.OnAudioSourceEncodedSample += _peerConnection.SendAudio;

            MediaStreamTrack audioTrack = new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendOnly);

            _peerConnection.addTrack(audioTrack);

            _peerConnection.OnAudioFormatsNegotiated += (formats) =>
                                                        audioSource.SetAudioSourceFormat(formats.First());

            _peerConnection.onicecandidateerror     += (candidate, error) => logger.LogWarning($"Error adding remote ICE candidate. {error} {candidate}");
            _peerConnection.onconnectionstatechange += async(state) =>
            {
                logger.LogDebug($"Peer connection state changed to {state}.");

                if (state == RTCPeerConnectionState.disconnected || state == RTCPeerConnectionState.failed)
                {
                    _peerConnection.Close("remote disconnection");
                }

                if (state == RTCPeerConnectionState.connected)
                {
                    await audioSource.StartAudio();
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await audioSource.CloseAudio();
                }
            };
            _peerConnection.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            _peerConnection.OnSendReport    += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            _peerConnection.OnRtcpBye       += (reason) => logger.LogDebug($"RTCP BYE received, reason: {(string.IsNullOrWhiteSpace(reason) ? "<none>" : reason)}.");

            // Peer ICE connection state changes are for ICE events such as the STUN checks completing.
            _peerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            _peerConnection.ondatachannel += (dc) =>
            {
                logger.LogDebug($"Data channel opened by remote peer, label {dc.label}, stream ID {dc.id}.");
                dc.onmessage += (msg) =>
                {
                    logger.LogDebug($"data channel ({dc.label}:{dc.id}): {msg}.");
                };
            };

            return(Task.FromResult(_peerConnection));
        }
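To respond on a data channel opened by the remote peer, the ondatachannel handler above can be extended. A hedged sketch, assuming the RTCDataChannel.send string overload available in SIPSorcery:

            _peerConnection.ondatachannel += (dc) =>
            {
                dc.onmessage += (msg) =>
                {
                    logger.LogDebug($"data channel ({dc.label}:{dc.id}): {msg}.");
                    // Echo the message back to the remote peer.
                    dc.send($"echo: {msg}");
                };
            };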
Code example #15
File: Program.cs Project: spFly/sipsorcery
        private static async Task <RTCPeerConnection> SendSDPOffer(WebSocketContext context)
        {
            logger.LogDebug($"Web socket client connection from {context.UserEndPoint}.");

            RTCConfiguration pcConfiguration = new RTCConfiguration
            {
                certificates = new List <RTCCertificate>
                {
                    new RTCCertificate
                    {
                        X_CertificatePath = DTLS_CERTIFICATE_PATH,
                        X_KeyPath         = DTLS_KEY_PATH,
                        X_Fingerprint     = DTLS_CERTIFICATE_FINGERPRINT
                    }
                }
            };

            var peerConnection = new RTCPeerConnection(pcConfiguration);
            var dtls           = new DtlsHandshake(DTLS_CERTIFICATE_PATH, DTLS_KEY_PATH);

            MediaStreamTrack audioTrack = new MediaStreamTrack("0", SDPMediaTypesEnum.audio, false, new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.PCMU)
            }, MediaStreamStatusEnum.RecvOnly);

            peerConnection.addTrack(audioTrack);

            peerConnection.OnReceiveReport += RtpSession_OnReceiveReport;
            peerConnection.OnSendReport    += RtpSession_OnSendReport;

            peerConnection.OnTimeout += (mediaType) =>
            {
                peerConnection.Close("remote timeout");
                dtls.Shutdown();
            };

            peerConnection.onconnectionstatechange += (state) =>
            {
                if (state == RTCPeerConnectionState.closed || state == RTCPeerConnectionState.disconnected || state == RTCPeerConnectionState.failed)
                {
                    peerConnection.OnReceiveReport -= RtpSession_OnReceiveReport;
                    peerConnection.OnSendReport    -= RtpSession_OnSendReport;
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    logger.LogDebug("Peer connection connected.");
                    peerConnection.OnRtpPacketReceived += (SDPMediaTypesEnum media, RTPPacket rtpPkt) => logger.LogDebug($"RTP {media} pkt received, SSRC {rtpPkt.Header.SyncSource}.");
                }
            };

            peerConnection.oniceconnectionstatechange += (state) =>
            {
                if (state == RTCIceConnectionState.connected)
                {
                    logger.LogDebug("Starting DTLS handshake task.");

                    bool dtlsResult = false;
                    Task.Run(async() => dtlsResult = await DoDtlsHandshake(peerConnection, dtls))
                    .ContinueWith((t) =>
                    {
                        logger.LogDebug($"dtls handshake result {dtlsResult}.");

                        if (dtlsResult)
                        {
                            var remoteEP = peerConnection.IceSession.ConnectedRemoteEndPoint;
                            peerConnection.SetDestination(SDPMediaTypesEnum.audio, remoteEP, remoteEP);
                        }
                        else
                        {
                            dtls.Shutdown();
                            peerConnection.Close("dtls handshake failed.");
                        }
                    });
                }
            };

            var offerSdp = await peerConnection.createOffer(null);

            await peerConnection.setLocalDescription(offerSdp);

            logger.LogDebug($"Sending SDP offer to client {context.UserEndPoint}.");
            logger.LogDebug(offerSdp.sdp);

            context.WebSocket.Send(offerSdp.sdp);

            return(peerConnection);
        }
Code example #16
        /// <summary>
        ///
        /// </summary>
        /// <param name="connectionId"></param>
        /// <param name="track"></param>
        public void RemoveSenderTrack(string connectionId, MediaStreamTrack track)
        {
            var sender = GetSenders(connectionId).First(s => s.Track == track);

            _mapConnectionIdAndPeer[connectionId].peer.RemoveTrack(sender);
        }
Code example #17
File: Program.cs Project: helgesson/sipsorcery
        private static Task <RTCPeerConnection> CreatePeerConnection(X509Certificate2 cert)
        {
            //RTCConfiguration config = new RTCConfiguration
            //{
            //    iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } },
            //    certificates = new List<RTCCertificate> { new RTCCertificate { Certificate = cert } }
            //};
            //var pc = new RTCPeerConnection(config);
            var pc = new RTCPeerConnection(null);

            //var testPatternSource = new VideoTestPatternSource(new SIPSorceryMedia.Encoders.VideoEncoder());
            var testPatternSource = new VideoTestPatternSource(new FFmpegVideoEncoder());

            testPatternSource.SetFrameRate(TEST_PATTERN_FRAMES_PER_SECOND);
            //testPatternSource.SetMaxFrameRate(true);
            //var videoEndPoint = new SIPSorceryMedia.FFmpeg.FFmpegVideoEndPoint();
            //videoEndPoint.RestrictFormats(format => format.Codec == VideoCodecsEnum.H264);
            //testPatternSource.RestrictFormats(format => format.Codec == VideoCodecsEnum.H264);
            //var videoEndPoint = new SIPSorceryMedia.Windows.WindowsEncoderEndPoint();
            //var videoEndPoint = new SIPSorceryMedia.Encoders.VideoEncoderEndPoint();

            MediaStreamTrack track = new MediaStreamTrack(testPatternSource.GetVideoSourceFormats(), MediaStreamStatusEnum.SendOnly);

            pc.addTrack(track);

            //testPatternSource.OnVideoSourceRawSample += videoEndPoint.ExternalVideoSourceRawSample;
            testPatternSource.OnVideoSourceRawSample     += MesasureTestPatternSourceFrameRate;
            testPatternSource.OnVideoSourceEncodedSample += pc.SendVideo;
            pc.OnVideoFormatsNegotiated += (formats) => testPatternSource.SetVideoSourceFormat(formats.First());

            pc.onconnectionstatechange += async(state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await testPatternSource.CloseVideo();

                    testPatternSource.Dispose();
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    await testPatternSource.StartVideo();
                }
            };

            // Diagnostics.
            //pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            //pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");
            pc.onsignalingstatechange     += () =>
            {
                if (pc.signalingState == RTCSignalingState.have_local_offer)
                {
                    logger.LogDebug($"Local SDP set, type {pc.localDescription.type}.");
                    logger.LogDebug(pc.localDescription.sdp.ToString());
                }
                else if (pc.signalingState == RTCSignalingState.have_remote_offer)
                {
                    logger.LogDebug($"Remote SDP set, type {pc.remoteDescription.type}.");
                    logger.LogDebug(pc.remoteDescription.sdp.ToString());
                }
            };

            return(Task.FromResult(pc));
        }
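The MesasureTestPatternSourceFrameRate handler wired up above is defined elsewhere in the demo program. A hypothetical sketch of such a handler, assuming the raw video sample delegate carries (durationMilliseconds, width, height, sample, pixelFormat):

        private static int _frameCount;
        private static DateTime _startTime = DateTime.Now;

        // Hypothetical sketch; the real method lives elsewhere in the demo program.
        private static void MesasureTestPatternSourceFrameRate(uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat)
        {
            _frameCount++;

            double elapsedSeconds = DateTime.Now.Subtract(_startTime).TotalSeconds;
            if (elapsedSeconds > 5)
            {
                logger.LogDebug($"Test pattern frame rate {_frameCount / elapsedSeconds:0.##}fps at {width}x{height}.");
                _frameCount = 0;
                _startTime  = DateTime.Now;
            }
        }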
Code example #18
 /// <summary>
 ///
 /// </summary>
 /// <param name="connectionId"></param>
 /// <param name="track"></param>
 /// <returns></returns>
 public virtual RTCRtpTransceiver AddTrack(string connectionId, MediaStreamTrack track)
 {
     return(m_handler.AddTrack(connectionId, track));
 }
Code example #19
        public async Task <RTCSessionDescriptionInit> GetOffer(string id)
        {
            if (string.IsNullOrWhiteSpace(id))
            {
                throw new ArgumentNullException("id", "A unique ID parameter must be supplied when creating a new peer connection.");
            }
            else if (_peerConnections.ContainsKey(id))
            {
                throw new ArgumentException("The specified peer connection ID is already in use.", "id");
            }

            RTCConfiguration pcConfiguration = new RTCConfiguration
            {
                certificates = new List <RTCCertificate>
                {
                    new RTCCertificate
                    {
                        X_CertificatePath = DTLS_CERTIFICATE_PATH,
                        X_KeyPath         = DTLS_KEY_PATH,
                        X_Fingerprint     = DTLS_CERTIFICATE_FINGERPRINT
                    }
                }
            };

            var peerConnection = new RTCPeerConnection(pcConfiguration);
            var dtls           = new DtlsHandshake(DTLS_CERTIFICATE_PATH, DTLS_KEY_PATH);

            MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.PCMU)
            }, MediaStreamStatusEnum.RecvOnly);

            peerConnection.addTrack(audioTrack);

            //peerConnection.OnRtpPacketReceived += (SDPMediaTypesEnum media, RTPPacket rtpPkt) => _logger.LogDebug($"RTP {media} pkt received, SSRC {rtpPkt.Header.SyncSource}, SeqNum {rtpPkt.Header.SequenceNumber}.");
            //peerConnection.OnReceiveReport += RtpSession_OnReceiveReport;
            //peerConnection.OnSendReport += RtpSession_OnSendReport;

            peerConnection.OnTimeout += (mediaType) =>
            {
                peerConnection.Close("remote timeout");
                dtls.Shutdown();
            };

            peerConnection.onconnectionstatechange += (state) =>
            {
                if (state == RTCPeerConnectionState.closed || state == RTCPeerConnectionState.disconnected || state == RTCPeerConnectionState.failed)
                {
                    //peerConnection.OnReceiveReport -= RtpSession_OnReceiveReport;
                    //peerConnection.OnSendReport -= RtpSession_OnSendReport;
                    _logger.LogDebug($"Peer connection {id} closed.");
                    _peerConnections.TryRemove(id, out _);
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    _logger.LogDebug("Peer connection connected.");
                }
            };

            _ = Task <bool> .Run(() => Task.FromResult(DoDtlsHandshake(peerConnection, dtls)))
                .ContinueWith((t) =>
            {
                _logger.LogDebug($"dtls handshake result {t.Result}.");

                if (t.Result)
                {
                    //var remoteEP = peerConnection.IceSession.ConnectedRemoteEndPoint;
                    //peerConnection.SetDestination(SDPMediaTypesEnum.audio, remoteEP, remoteEP);
                }
                else
                {
                    dtls.Shutdown();
                    peerConnection.Close("dtls handshake failed.");
                }
            });

            var offerSdp = peerConnection.createOffer(null);
            await peerConnection.setLocalDescription(offerSdp);

            _peerConnections.TryAdd(id, peerConnection);

            return(offerSdp);
        }
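A hypothetical companion to GetOffer for applying the browser's SDP answer to the stored connection (SetAnswer is not part of the sample; setRemoteDescription is the SIPSorcery call used elsewhere in these examples):

        public void SetAnswer(string id, RTCSessionDescriptionInit answerSdp)
        {
            if (_peerConnections.TryGetValue(id, out var peerConnection))
            {
                var result = peerConnection.setRemoteDescription(answerSdp);
                _logger.LogDebug($"Set remote description for peer connection {id} result {result}.");
            }
        }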
Code example #20
 /// <summary>
 ///
 /// </summary>
 /// <param name="connectionId"></param>
 /// <param name="track"></param>
 public virtual void RemoveTrack(string connectionId, MediaStreamTrack track)
 {
     m_handler.RemoveTrack(connectionId, track);
 }
Code example #21
        private static RTCPeerConnection CreatePeerConnection(WebSocketContext context)
        {
            RTCConfiguration pcConfiguration = new RTCConfiguration
            {
                certificates = new List <RTCCertificate>
                {
                    new RTCCertificate
                    {
                        X_CertificatePath = DTLS_CERTIFICATE_PATH,
                        X_KeyPath         = DTLS_KEY_PATH,
                        X_Fingerprint     = DTLS_CERTIFICATE_FINGERPRINT
                    }
                },
                X_RemoteSignallingAddress = context.UserEndPoint.Address,
                iceServers = new List <RTCIceServer> {
                    new RTCIceServer {
                        urls = SIPSORCERY_STUN_SERVER
                    }
                }
            };

            var peerConnection = new RTCPeerConnection(pcConfiguration);

            // Add inactive audio and video tracks.
            MediaStreamTrack audioTrack = new MediaStreamTrack("0", SDPMediaTypesEnum.audio, false, new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.PCMU)
            }, MediaStreamStatusEnum.Inactive);

            peerConnection.addTrack(audioTrack);
            MediaStreamTrack videoTrack = new MediaStreamTrack("1", SDPMediaTypesEnum.video, false, new List <SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
            }, MediaStreamStatusEnum.Inactive);

            peerConnection.addTrack(videoTrack);

            peerConnection.onicecandidate += (candidate) =>
            {
                logger.LogDebug($"ICE candidate discovered: {candidate}.");

                // Host candidates are included in the SDP we send.
                if (candidate.type != RTCIceCandidateType.host)
                {
                    context.WebSocket.Send($"candidate:{candidate}");
                }
            };

            peerConnection.onconnectionstatechange += (state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");
            };

            peerConnection.oniceconnectionstatechange += (state) =>
            {
                logger.LogDebug($"ICE connection state change to {state}.");

                if (state == RTCIceConnectionState.connected)
                {
                    var remoteEndPoint = peerConnection.IceSession.NominatedCandidate.DestinationEndPoint;
                    logger.LogInformation($"ICE connected to remote end point {remoteEndPoint}.");
                }
            };

            peerConnection.IceSession.StartGathering();

            return(peerConnection);
        }
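The receive side of the trickle ICE exchange above would parse "candidate:" messages arriving on the web socket and hand them to the peer connection. A hedged sketch using SIPSorcery's addIceCandidate (message is a hypothetical string received from the web socket):

            if (message.StartsWith("candidate:"))
            {
                var candidateInit = new RTCIceCandidateInit
                {
                    candidate = message.Substring("candidate:".Length)
                };
                peerConnection.addIceCandidate(candidateInit);
            }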
Code example #22
File: Program.cs Project: csuffyy/sipsorcery
        private static RTCPeerConnection CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List <RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };
            var pc = new RTCPeerConnection(config);

            var testPatternSource = new VideoTestPatternSource();
            WindowsVideoEndPoint windowsVideoEndPoint = new WindowsVideoEndPoint(true);
            var audioSource = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions {
                AudioSource = AudioSourcesEnum.Music
            });

            MediaStreamTrack videoTrack = new MediaStreamTrack(windowsVideoEndPoint.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(videoTrack);
            MediaStreamTrack audioTrack = new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(audioTrack);

            testPatternSource.OnVideoSourceRawSample        += windowsVideoEndPoint.ExternalVideoSourceRawSample;
            windowsVideoEndPoint.OnVideoSourceEncodedSample += pc.SendVideo;
            audioSource.OnAudioSourceEncodedSample          += pc.SendAudio;
            pc.OnVideoFormatsNegotiated += (sdpFormat) =>
                                           windowsVideoEndPoint.SetVideoSourceFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));
            pc.onconnectionstatechange += async(state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.connected)
                {
                    await audioSource.StartAudio();

                    await windowsVideoEndPoint.StartVideo();

                    await testPatternSource.StartVideo();
                }
                else if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await testPatternSource.CloseVideo();

                    await windowsVideoEndPoint.CloseVideo();

                    await audioSource.CloseAudio();
                }
            };

            // Diagnostics.
            pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            pc.OnSendReport    += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state changed to {state}.");

            return(pc);
        }
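The CreatePeerConnection above wires the test pattern through the Windows video encoder, but a caller still has to run the offer/answer handshake. Below is a minimal sketch of driving that handshake from the offerer side, inside an async method; SendToSignalingChannel and ReceiveFromSignalingChannel are hypothetical transport helpers, not part of the example.

        var pc = CreatePeerConnection();

        // Create and apply the local offer, then hand the SDP to the
        // signaling transport (the two helpers below are hypothetical).
        var offer = pc.createOffer(null);
        await pc.setLocalDescription(offer);
        SendToSignalingChannel(offer.sdp);

        // Apply the remote answer once the peer responds.
        string answerSdp = await ReceiveFromSignalingChannel();
        pc.setRemoteDescription(new RTCSessionDescriptionInit { type = RTCSdpType.answer, sdp = answerSdp });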
Code Example #23
        private static async Task <RTCPeerConnection> CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List <RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };
            var pc = new RTCPeerConnection(config);

            WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint(WEBCAM_NAME);
            bool initResult = await winVideoEP.InitialiseVideoSourceDevice();

            if (!initResult)
            {
                throw new ApplicationException("Could not initialise video capture device.");
            }

            //WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint(false, 640, 480, 30);
            //WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint(false, 1920, 1080, 30);
            //await winVideoEP.InitialiseVideoSourceDevice();
            var audioSource = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions {
                AudioSource = AudioSourcesEnum.Music
            });

            MediaStreamTrack videoTrack = new MediaStreamTrack(winVideoEP.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(videoTrack);
            MediaStreamTrack audioTrack = new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(audioTrack);

            winVideoEP.OnVideoSourceEncodedSample  += pc.SendVideo;
            audioSource.OnAudioSourceEncodedSample += pc.SendAudio;
            pc.OnVideoFormatsNegotiated            += (videoFormats) =>
                                                      winVideoEP.SetVideoSourceFormat(videoFormats.First());
            pc.OnAudioFormatsNegotiated += (audioFormats) =>
                                           audioSource.SetAudioSourceFormat(audioFormats.First());

            pc.onconnectionstatechange += async(state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.connected)
                {
                    await audioSource.StartAudio();

                    await winVideoEP.StartVideo();
                }
                else if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await winVideoEP.CloseVideo();

                    await audioSource.CloseAudio();
                }
            };

            // Diagnostics.
            pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            pc.OnSendReport    += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state changed to {state}.");

            return(pc);
        }
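This variant is async because InitialiseVideoSourceDevice must open the capture device before the peer connection is returned, and it throws when the webcam cannot be opened. A minimal calling sketch follows, with placeholder values for the WEBCAM_NAME and STUN_URL constants the example assumes; both values are illustrative, not taken from the project.

        // Placeholder constants; substitute a real device name and STUN server.
        private const string WEBCAM_NAME = "HD Pro Webcam C920";
        private const string STUN_URL = "stun:stun.l.google.com:19302";

        private static async Task Main()
        {
            try
            {
                var pc = await CreatePeerConnection();
                // ... drive the offer/answer exchange as in the earlier sketch.
            }
            catch (ApplicationException excp)
            {
                // Thrown above when the video capture device cannot be initialised.
                Console.WriteLine($"Failed to create peer connection: {excp.Message}");
            }
        }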
Code Example #24
        // Adds a transceiver for the given track to the peer connection at
        // indexPeer and returns the resulting transceiver.
        public RTCRtpTransceiver AddTransceiver(int indexPeer, MediaStreamTrack track)
        {
            return(peers[indexPeer].AddTransceiver(track));
        }
Code Example #25
        private static RTCPeerConnection CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List <RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };
            var pc = new RTCPeerConnection(config);

            var mediaFileSource = new SIPSorceryMedia.FFmpeg.FFmpegFileSource(MP4_PATH, new AudioEncoder());

            mediaFileSource.Initialise();
            mediaFileSource.RestrictCodecs(new List <VideoCodecsEnum> {
                VideoCodecsEnum.VP8
            });
            mediaFileSource.RestrictCodecs(new List <AudioCodecsEnum> {
                AudioCodecsEnum.PCMU
            });
            mediaFileSource.OnEndOfFile += () => pc.Close("source eof");

            MediaStreamTrack videoTrack = new MediaStreamTrack(mediaFileSource.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(videoTrack);
            MediaStreamTrack audioTrack = new MediaStreamTrack(mediaFileSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(audioTrack);

            mediaFileSource.OnVideoSourceEncodedSample += pc.SendVideo;
            mediaFileSource.OnAudioSourceEncodedSample += pc.SendAudio;
            pc.OnVideoFormatsNegotiated += (sdpFormat) => mediaFileSource.SetVideoSourceFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));
            pc.OnAudioFormatsNegotiated += (sdpFormat) => mediaFileSource.SetAudioSourceFormat(SDPMediaFormatInfo.GetAudioCodecForSdpFormat(sdpFormat.First().FormatCodec));

            pc.onconnectionstatechange += async(state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await mediaFileSource.CloseVideo();
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    await mediaFileSource.StartVideo();
                }
            };

            // Diagnostics.
            //pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            //pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state changed to {state}.");

            return(pc);
        }
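FFmpegFileSource streams a media file via FFmpeg, so the native FFmpeg libraries must be loaded before CreatePeerConnection runs, and MP4_PATH must point at a real file. Below is a minimal sketch of the setup this example assumes; the path and log level are placeholders, and the initialisation call assumes the FFmpegInit helper from the SIPSorceryMedia.FFmpeg package.

        // Placeholder path; point this at a real media file.
        private const string MP4_PATH = "media/big_buck_bunny.mp4";

        private static void InitialiseFFmpeg()
        {
            // Load the native FFmpeg libraries before any FFmpegFileSource is created.
            SIPSorceryMedia.FFmpeg.FFmpegInit.Initialise(SIPSorceryMedia.FFmpeg.FfmpegLogLevelEnum.AV_LOG_VERBOSE);
        }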