Example #1
        private static RTCPeerConnection CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List<RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };
            var pc = new RTCPeerConnection(config);

            var testPatternSource = new VideoTestPatternSource();
            WindowsVideoEndPoint windowsVideoEndPoint = new WindowsVideoEndPoint(true);

            MediaStreamTrack track = new MediaStreamTrack(windowsVideoEndPoint.GetVideoSourceFormats(), MediaStreamStatusEnum.SendOnly);

            pc.addTrack(track);

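            // Pipe the test pattern's raw frames into the Windows video encoder and forward the encoded frames to the peer connection.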
            testPatternSource.OnVideoSourceRawSample        += windowsVideoEndPoint.ExternalVideoSourceRawSample;
            windowsVideoEndPoint.OnVideoSourceEncodedSample += pc.SendVideo;
            pc.OnVideoFormatsNegotiated += (sdpFormat) =>
                                           windowsVideoEndPoint.SetVideoSourceFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));

            pc.onconnectionstatechange += async (state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await testPatternSource.CloseVideo();

                    await windowsVideoEndPoint.CloseVideo();
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    await windowsVideoEndPoint.StartVideo();

                    await testPatternSource.StartVideo();
                }
            };

            // Diagnostics.
            pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            pc.OnSendReport    += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return pc;
        }
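A factory like the one above only builds the RTCPeerConnection; the surrounding sample still has to run an SDP exchange. Below is a minimal offer-side sketch, assuming the standard SIPSorcery signalling methods (createOffer, setLocalDescription, setRemoteDescription); SendOfferToRemotePeer and WaitForRemoteAnswer are hypothetical placeholders for whatever signalling transport the application uses.

        private static async Task NegotiateSketch()
        {
            var pc = CreatePeerConnection();

            // Create the local offer and apply it before handing the SDP to the remote peer.
            var offer = pc.createOffer(null);
            await pc.setLocalDescription(offer);
            SendOfferToRemotePeer(offer.sdp); // Hypothetical signalling call.

            // Apply the remote answer once it arrives to complete the negotiation.
            string answerSdp = await WaitForRemoteAnswer(); // Hypothetical signalling call.
            var result = pc.setRemoteDescription(new RTCSessionDescriptionInit { type = RTCSdpType.answer, sdp = answerSdp });
            logger.LogDebug($"setRemoteDescription result {result}.");
        }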
Example #2
        private static RTCPeerConnection CreatePeerConnection()
        {
            var peerConnection = new RTCPeerConnection(null);

            var testPatternSource = new VideoTestPatternSource();
            WindowsVideoEndPoint windowsVideoEndPoint = new WindowsVideoEndPoint(new VpxVideoEncoder());

            MediaStreamTrack track = new MediaStreamTrack(windowsVideoEndPoint.GetVideoSourceFormats(), MediaStreamStatusEnum.SendOnly);

            peerConnection.addTrack(track);

            testPatternSource.OnVideoSourceRawSample        += windowsVideoEndPoint.ExternalVideoSourceRawSample;
            windowsVideoEndPoint.OnVideoSourceEncodedSample += peerConnection.SendVideo;

            peerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");
            peerConnection.OnReceiveReport            += RtpSession_OnReceiveReport;
            peerConnection.OnSendReport            += RtpSession_OnSendReport;
            peerConnection.OnTimeout               += (mediaType) => logger.LogWarning($"Timeout on {mediaType}.");
            peerConnection.onconnectionstatechange += async (state) =>
            {
                logger.LogDebug($"Peer connection state changed to {state}.");

                if (state == RTCPeerConnectionState.closed)
                {
                    peerConnection.OnReceiveReport -= RtpSession_OnReceiveReport;
                    peerConnection.OnSendReport    -= RtpSession_OnSendReport;

                    await windowsVideoEndPoint.CloseVideo();

                    await testPatternSource.CloseVideo();
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    await testPatternSource.StartVideo();

                    await windowsVideoEndPoint.StartVideo();
                }
            };

            return peerConnection;
        }
Example #3
        private static async Task <RTCPeerConnection> CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List<RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };
            var pc = new RTCPeerConnection(config);

            WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint(new VpxVideoEncoder(), WEBCAM_NAME);
            //WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint(new FFmpegVideoEncoder(), WEBCAM_NAME);
            //WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint(WEBCAM_NAME, 1920, 1080, 30);
            //winVideoEP.RestrictFormats(x => x.Codec == SIPSorceryMedia.Abstractions.V1.VideoCodecsEnum.H264);
            bool initResult = await winVideoEP.InitialiseVideoSourceDevice();

            if (!initResult)
            {
                throw new ApplicationException("Could not initialise video capture device.");
            }
            var audioSource = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions {
                AudioSource = AudioSourcesEnum.Music
            });

            MediaStreamTrack videoTrack = new MediaStreamTrack(winVideoEP.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(videoTrack);
            MediaStreamTrack audioTrack = new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(audioTrack);

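            // Forward the encoded webcam video and audio to the peer connection and apply the negotiated formats to each source.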
            winVideoEP.OnVideoSourceEncodedSample  += pc.SendVideo;
            audioSource.OnAudioSourceEncodedSample += pc.SendAudio;
            pc.OnVideoFormatsNegotiated            += (videoFormats) =>
                                                      winVideoEP.SetVideoSourceFormat(videoFormats.First());
            pc.OnAudioFormatsNegotiated += (audioFormats) =>
                                           audioSource.SetAudioSourceFormat(audioFormats.First());

            pc.onconnectionstatechange += async (state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.connected)
                {
                    await audioSource.StartAudio();

                    await winVideoEP.StartVideo();
                }
                else if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await winVideoEP.CloseVideo();

                    await audioSource.CloseAudio();
                }
            };

            // Diagnostics.
            pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            pc.OnSendReport    += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return pc;
        }
Example #4
        private static RTCPeerConnection WebSocketOpened(WebSocketContext context)
        {
            WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint();

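            // Render each decoded video frame into the WinForms picture box on the UI thread.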
            winVideoEP.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
            {
                _form.BeginInvoke(new Action(() =>
                {
                    unsafe
                    {
                        fixed (byte* s = bmp)
                        {
                            System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap((int)width, (int)height, (int)(bmp.Length / height), System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s);
                            _picBox.Image = bmpImage;
                        }
                    }
                }));
            };

            var peerConnection = new RTCPeerConnection(null);

            // Add local recvonly tracks. This ensures that the SDP answer includes only
            // the codecs we support.
            MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, new List<SDPMediaFormat> {
                new SDPMediaFormat(SDPMediaFormatsEnum.PCMU)
            }, MediaStreamStatusEnum.RecvOnly);

            peerConnection.addTrack(audioTrack);
            MediaStreamTrack videoTrack = new MediaStreamTrack(winVideoEP.GetVideoSinkFormats(), MediaStreamStatusEnum.RecvOnly);

            peerConnection.addTrack(videoTrack);

            peerConnection.OnReceiveReport += RtpSession_OnReceiveReport;
            peerConnection.OnSendReport    += RtpSession_OnSendReport;
            peerConnection.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) =>
            {
                bool hasUseCandidate = msg.Attributes.Any(x => x.AttributeType == STUNAttributeTypesEnum.UseCandidate);
                Console.WriteLine($"STUN {msg.Header.MessageType} received from {ep}, use candidate {hasUseCandidate}.");
            };
            peerConnection.oniceconnectionstatechange += (state) => Console.WriteLine($"ICE connection state changed to {state}.");
            peerConnection.onconnectionstatechange    += async (state) =>
            {
                Console.WriteLine($"Peer connection state changed to {state}.");

                if (state == RTCPeerConnectionState.closed)
                {
                    await winVideoEP.CloseVideo();
                }
            };
            //peerConnection.OnRtpPacketReceived += (IPEndPoint rep, SDPMediaTypesEnum media, RTPPacket rtpPkt) =>
            //{
            //    //logger.LogDebug($"RTP {media} pkt received, SSRC {rtpPkt.Header.SyncSource}.");

            //    if (media == SDPMediaTypesEnum.video)
            //    {
            //        winVideoEP.GotVideoRtp(rep, rtpPkt.Header.SyncSource, rtpPkt.Header.SequenceNumber, rtpPkt.Header.Timestamp, rtpPkt.Header.PayloadType, rtpPkt.Header.MarkerBit == 1, rtpPkt.Payload);
            //    }
            //};
            peerConnection.OnVideoFrameReceived += winVideoEP.GotVideoFrame;

            return peerConnection;
        }
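A rough sketch of the answer side this handler pairs with, assuming the offer SDP has already been read from the web socket; the ApplyOfferAndReply method, the offerSdp argument and SendAnswerOverWebSocket are hypothetical, while setRemoteDescription, createAnswer and setLocalDescription are the standard SIPSorcery calls.

        private static async Task ApplyOfferAndReply(WebSocketContext context, string offerSdp)
        {
            var pc = WebSocketOpened(context);

            // Apply the remote offer before generating the local answer.
            var setResult = pc.setRemoteDescription(new RTCSessionDescriptionInit { type = RTCSdpType.offer, sdp = offerSdp });

            if (setResult == SetDescriptionResultEnum.OK)
            {
                var answer = pc.createAnswer(null);
                await pc.setLocalDescription(answer);
                SendAnswerOverWebSocket(context, answer.sdp); // Hypothetical: reply over the same web socket.
            }
            else
            {
                Console.WriteLine($"Failed to set remote description: {setResult}.");
            }
        }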
Example #5
        private static RTCPeerConnection CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List<RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };
            var pc = new RTCPeerConnection(config);

            WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint();
            //WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint(false, 640, 480, 30);
            //WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint(false, 1920, 1080, 30);
            //await winVideoEP.InitialiseVideoSourceDevice();
            var audioSource = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions {
                AudioSource = AudioSourcesEnum.Music
            });

            MediaStreamTrack videoTrack = new MediaStreamTrack(winVideoEP.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(videoTrack);
            MediaStreamTrack audioTrack = new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(audioTrack);

            winVideoEP.OnVideoSourceEncodedSample  += pc.SendVideo;
            audioSource.OnAudioSourceEncodedSample += pc.SendAudio;
            pc.OnVideoFormatsNegotiated            += (sdpFormat) =>
                                                      winVideoEP.SetVideoSourceFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));
            pc.onconnectionstatechange += async (state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.connected)
                {
                    await audioSource.StartAudio();

                    await winVideoEP.StartVideo();
                }
                else if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await winVideoEP.CloseVideo();

                    await audioSource.CloseAudio();
                }
            };

            // Diagnostics.
            pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            pc.OnSendReport    += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return pc;
        }