Example #1
        private static Task<RTCPeerConnection> CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List<RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };
            // Use the STUN configuration defined above.
            var pc = peer = new RTCPeerConnection(config);

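            // Video source: a synthetic test pattern at 60 fps, encoded by an FFmpeg endpoint
            // that is restricted to H264 for the format negotiation.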
            var testPatternSource = new VideoTestPatternSource();

            testPatternSource.SetFrameRate(60);
            testPatternSource.SetMaxFrameRate(true);
            var videoEndPoint = new SIPSorceryMedia.FFmpeg.FFmpegVideoEndPoint();

            videoEndPoint.RestrictFormats(format => format.Codec == VideoCodecsEnum.H264);
            //var videoEndPoint = new SIPSorceryMedia.Windows.WindowsVideoEndPoint(true);
            //var videoEndPoint = new SIPSorceryMedia.Windows.WindowsEncoderEndPoint();
            //var videoEndPoint = new SIPSorceryMedia.Encoders.VideoEncoderEndPoint();

            MediaStreamTrack track = new MediaStreamTrack(videoEndPoint.GetVideoSourceFormats(), MediaStreamStatusEnum.SendOnly);

            pc.addTrack(track);

            testPatternSource.OnVideoSourceRawSample += TestPatternSource_OnVideoSourceRawSample;

            pc.OnVideoFormatsNegotiated += (formats) =>
            {
                videoEndPoint.SetVideoSourceFormat(formats.First());
            };


            pc.onconnectionstatechange += async (state) =>
            {
                if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await testPatternSource.CloseVideo();

                    await videoEndPoint.CloseVideo();
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    await videoEndPoint.StartVideo();

                    await testPatternSource.StartVideo();
                }
            };

            return Task.FromResult(pc);
        }
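
The factory above matches the Func<Task<RTCPeerConnection>> delegate that SIPSorcery's websocket signalling helper expects, so it can be plugged straight into a websocket server. A minimal hosting sketch, assuming the websocket-sharp server package and SIPSorcery's WebRTCWebSocketPeer class are available; the port is an arbitrary choice for this sketch:

        // Hosting sketch only: WEBSOCKET_PORT is an arbitrary value chosen for illustration.
        private const int WEBSOCKET_PORT = 8081;

        private static void Main()
        {
            var webSocketServer = new WebSocketSharp.Server.WebSocketServer(System.Net.IPAddress.Any, WEBSOCKET_PORT);

            // WebRTCWebSocketPeer exchanges the SDP offer/answer over the websocket and
            // calls the supplied factory to create each peer connection.
            webSocketServer.AddWebSocketService<WebRTCWebSocketPeer>("/", (peer) => peer.CreatePeerConnection = CreatePeerConnection);
            webSocketServer.Start();

            Console.WriteLine("Waiting for browser peers, press any key to exit.");
            Console.ReadKey();

            webSocketServer.Stop();
        }
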
Example #2
        private static RTCPeerConnection CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List<RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };
            var pc = new RTCPeerConnection(config);

            var testPatternSource = new VideoTestPatternSource();
            var videoEndPoint     = new SIPSorceryMedia.FFmpeg.FFmpegVideoEndPoint();
            //var videoEndPoint = new SIPSorceryMedia.Windows.WindowsVideoEndPoint(true);

            MediaStreamTrack track = new MediaStreamTrack(videoEndPoint.GetVideoSourceFormats(), MediaStreamStatusEnum.SendOnly);

            pc.addTrack(track);

            testPatternSource.OnVideoSourceRawSample += videoEndPoint.ExternalVideoSourceRawSample;
            videoEndPoint.OnVideoSourceEncodedSample += pc.SendVideo;
            pc.OnVideoFormatsNegotiated += (sdpFormat) => videoEndPoint.SetVideoSourceFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));

            pc.onconnectionstatechange += async (state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await testPatternSource.CloseVideo();

                    await videoEndPoint.CloseVideo();
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    await videoEndPoint.StartVideo();

                    await testPatternSource.StartVideo();
                }
            };

            // Diagnostics.
            //pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            //pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return pc;
        }
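
This variant returns the RTCPeerConnection directly, so the caller has to drive the SDP offer/answer exchange itself. A minimal sketch of the initiating side, assuming the SDP strings travel over a signalling channel of your own; SendToSignalling and WaitForAnswerSdp are hypothetical placeholders for that channel:

        private static async Task StartCall()
        {
            var pc = CreatePeerConnection();

            // Create the local offer; setLocalDescription also starts ICE gathering.
            var offer = pc.createOffer(null);
            await pc.setLocalDescription(offer);

            // Send the offer SDP to the remote peer (hypothetical signalling helper).
            SendToSignalling(offer.sdp);

            // Apply the answer SDP received back from the remote peer (hypothetical signalling helper).
            string answerSdp = await WaitForAnswerSdp();
            pc.setRemoteDescription(new RTCSessionDescriptionInit { type = RTCSdpType.answer, sdp = answerSdp });
        }
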
Example #3
        private static Task<RTCPeerConnection> CreatePeerConnection()
        {
            //var videoEP = new SIPSorceryMedia.Windows.WindowsVideoEndPoint();
            var videoEP = new SIPSorceryMedia.FFmpeg.FFmpegVideoEndPoint();

            videoEP.RestrictFormats(format => format.Codec == VideoCodecsEnum.VP8);

            videoEP.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
            {
                _form.BeginInvoke(new Action(() =>
                {
                    unsafe
                    {
                        fixed (byte* s = bmp)
                        {
                            Bitmap bmpImage = new Bitmap((int)width, (int)height, (int)(bmp.Length / height), PixelFormat.Format24bppRgb, (IntPtr)s);
                            _picBox.Image   = bmpImage;
                        }
                    }
                }));
            };

            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List<RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };
            var pc = new RTCPeerConnection(config);

            // Add local receive only tracks. This ensures that the SDP answer includes only the codecs we support.
            MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false,
                new List<SDPAudioVideoMediaFormat> { new SDPAudioVideoMediaFormat(SDPWellKnownMediaFormatsEnum.PCMU) },
                MediaStreamStatusEnum.RecvOnly);

            pc.addTrack(audioTrack);
            MediaStreamTrack videoTrack = new MediaStreamTrack(videoEP.GetVideoSinkFormats(), MediaStreamStatusEnum.RecvOnly);

            pc.addTrack(videoTrack);

            pc.OnVideoFrameReceived     += videoEP.GotVideoFrame;
            pc.OnVideoFormatsNegotiated += (formats) => videoEP.SetVideoSinkFormat(formats.First());

            pc.onconnectionstatechange += async (state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await videoEP.CloseVideo();
                }
            };

            // Diagnostics.
            //pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            //pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return Task.FromResult(pc);
        }
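
On the receive side the peer connection answers an incoming offer rather than creating one. A minimal answering sketch, assuming the browser's offer arrives as an SDP string over your own signalling channel and the returned answer SDP is sent back the same way:

        private static async Task<string> AnswerOffer(string offerSdp)
        {
            var pc = await CreatePeerConnection();

            // Apply the remote offer before generating an answer.
            var offerInit = new RTCSessionDescriptionInit { type = RTCSdpType.offer, sdp = offerSdp };
            var result = pc.setRemoteDescription(offerInit);
            if (result != SetDescriptionResultEnum.OK)
            {
                throw new ApplicationException($"Failed to set remote description: {result}.");
            }

            // Create and apply the local answer; the caller returns it over the signalling channel.
            var answer = pc.createAnswer(null);
            await pc.setLocalDescription(answer);

            return answer.sdp;
        }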