Example #1
        private static Task<RTCPeerConnection> CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } }
            };
            var pc = new RTCPeerConnection(config);

            var testPatternSource    = new VideoTestPatternSource();
            var videoEncoderEndPoint = new VideoEncoderEndPoint();
            var audioSource          = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions {
                AudioSource = AudioSourcesEnum.Music
            });

            MediaStreamTrack videoTrack = new MediaStreamTrack(videoEncoderEndPoint.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(videoTrack);
            MediaStreamTrack audioTrack = new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(audioTrack);

            testPatternSource.OnVideoSourceRawSample        += videoEncoderEndPoint.ExternalVideoSourceRawSample;
            videoEncoderEndPoint.OnVideoSourceEncodedSample += pc.SendVideo;
            audioSource.OnAudioSourceEncodedSample          += pc.SendAudio;
            pc.OnVideoFormatsNegotiated += (sdpFormat) =>
                videoEncoderEndPoint.SetVideoSourceFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));
            pc.onconnectionstatechange += async (state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.connected)
                {
                    await audioSource.StartAudio();

                    await testPatternSource.StartVideo();
                }
                else if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await testPatternSource.CloseVideo();

                    await audioSource.CloseAudio();
                }
            };

            // Diagnostics.
            pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            pc.OnSendReport    += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return Task.FromResult(pc);
        }
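In the SIPSorcery demo programs a factory like the one above is normally handed to the library's WebSocket signalling helper. A minimal sketch of that wiring, assuming using directives for System.Net, SIPSorcery.Net and WebSocketSharp.Server plus a WEBSOCKET_PORT constant (none of which are shown in the example), might look like this:

        // Sketch only: attach the CreatePeerConnection factory to SIPSorcery's WebSocket
        // signalling helper. Assumes the websocket-sharp package and a WEBSOCKET_PORT constant.
        private static void StartSignalling()
        {
            var webSocketServer = new WebSocketServer(IPAddress.Any, WEBSOCKET_PORT);
            webSocketServer.AddWebSocketService<WebRTCWebSocketPeer>("/", (peer) => peer.CreatePeerConnection = CreatePeerConnection);
            webSocketServer.Start();
            Console.WriteLine($"Waiting for a browser WebSocket connection on ws://*:{WEBSOCKET_PORT}...");
        }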
Example #2
        private static Task<RTCPeerConnection> CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } }
            };
            // NOTE: the STUN configuration above is deliberately bypassed in this example;
            // pass config instead of null to re-enable the STUN server.
            //var pc = new RTCPeerConnection(config);
            var pc = new RTCPeerConnection(null);

            var testPatternSource = new VideoTestPatternSource();

            //testPatternSource.SetFrameRate(60);
            testPatternSource.SetMaxFrameRate(true);
            var videoEndPoint = new SIPSorceryMedia.FFmpeg.FFmpegVideoEndPoint();

            videoEndPoint.RestrictCodecs(new List<VideoCodecsEnum> { VideoCodecsEnum.H264 });
            //var videoEndPoint = new SIPSorceryMedia.Windows.WindowsVideoEndPoint(true);
            //var videoEndPoint = new SIPSorceryMedia.Windows.WindowsEncoderEndPoint();
            //var videoEndPoint = new SIPSorceryMedia.Encoders.VideoEncoderEndPoint();

            MediaStreamTrack track = new MediaStreamTrack(videoEndPoint.GetVideoSourceFormats(), MediaStreamStatusEnum.SendOnly);

            pc.addTrack(track);

            testPatternSource.OnVideoSourceRawSample += videoEndPoint.ExternalVideoSourceRawSample;
            testPatternSource.OnVideoSourceRawSample += TestPatternSource_OnVideoSourceRawSample;
            videoEndPoint.OnVideoSourceEncodedSample += pc.SendVideo;
            pc.OnVideoFormatsNegotiated += (sdpFormat) => videoEndPoint.SetVideoSourceFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));

            pc.onconnectionstatechange += async (state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await testPatternSource.CloseVideo();

                    await videoEndPoint.CloseVideo();
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    await videoEndPoint.StartVideo();

                    await testPatternSource.StartVideo();
                }
            };

            // Diagnostics.
            //pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            //pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return Task.FromResult(pc);
        }
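Without a signalling helper, the factory's return value can drive the SDP exchange directly. A rough sketch of the offerer side, where GetAnswerSdpFromRemotePeer is a hypothetical stand-in for whatever signalling transport the application uses:

        // Sketch only: offerer-side SDP exchange. GetAnswerSdpFromRemotePeer is hypothetical.
        private static async Task ConnectAsOfferer()
        {
            var pc = await CreatePeerConnection();

            var offer = pc.createOffer(null);
            await pc.setLocalDescription(offer);

            // offer.sdp must reach the remote peer over the application's signalling channel.
            string answerSdp = await GetAnswerSdpFromRemotePeer(offer.sdp);

            pc.setRemoteDescription(new RTCSessionDescriptionInit
            {
                type = RTCSdpType.answer,
                sdp = answerSdp
            });
        }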
Example #3
        private Task<RTCPeerConnection> CreatePeerConnection(string url)
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } }
            };
            var pc = new RTCPeerConnection(config);

            //mediaFileSource.OnEndOfFile += () => pc.Close("source eof");

            MediaStreamTrack videoTrack = new MediaStreamTrack(new List<VideoCodecsEnum> { VIDEO_CODEC }, MediaStreamStatusEnum.SendOnly);

            pc.addTrack(videoTrack);
            MediaStreamTrack audioTrack = new MediaStreamTrack(new List<AudioCodecsEnum> { AUDIO_CODEC }, MediaStreamStatusEnum.SendOnly);

            pc.addTrack(audioTrack);

            IVideoSource videoSource = null;
            IAudioSource audioSource = null;

            if (url == MAX_URL)
            {
                videoSource = _maxSource;
                audioSource = _maxSource;
            }
            else
            {
                videoSource = _testPatternEncoder;
                audioSource = _musicSource;
            }

            pc.OnVideoFormatsNegotiated            += (sdpFormat) => videoSource.SetVideoSourceFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));
            pc.OnAudioFormatsNegotiated            += (sdpFormat) => audioSource.SetAudioSourceFormat(SDPMediaFormatInfo.GetAudioCodecForSdpFormat(sdpFormat.First().FormatCodec));
            videoSource.OnVideoSourceEncodedSample += pc.SendVideo;
            audioSource.OnAudioSourceEncodedSample += pc.SendAudio;

            pc.onconnectionstatechange += async (state) =>
            {
                _logger.LogInformation($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    videoSource.OnVideoSourceEncodedSample -= pc.SendVideo;
                    audioSource.OnAudioSourceEncodedSample -= pc.SendAudio;
                    await CheckForSourceSubscribers();
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    await StartSource(url);
                }
            };

            // Diagnostics.
            //pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            //pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => _logger.LogInformation($"ICE connection state change to {state}.");

            return Task.FromResult(pc);
        }
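The method above is an instance member, so a matching answerer-side sketch would live on the same class. Assuming the remote peer's offer SDP arrives via the application's signalling layer:

        // Sketch only: answer an incoming offer using the factory above and return the answer SDP.
        private async Task<string> AnswerIncomingOffer(string url, string offerSdp)
        {
            var pc = await CreatePeerConnection(url);

            var setResult = pc.setRemoteDescription(new RTCSessionDescriptionInit
            {
                type = RTCSdpType.offer,
                sdp = offerSdp
            });

            if (setResult != SetDescriptionResultEnum.OK)
            {
                throw new ApplicationException($"Failed to set the remote offer: {setResult}.");
            }

            var answer = pc.createAnswer(null);
            await pc.setLocalDescription(answer);

            // The answer SDP travels back to the remote peer over the same signalling channel.
            return answer.sdp;
        }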
Example #4
        private static Task<RTCPeerConnection> CreatePeerConnection()
        {
            //var videoEP = new SIPSorceryMedia.Windows.WindowsVideoEndPoint();
            var videoEP = new SIPSorceryMedia.FFmpeg.FFmpegVideoEndPoint();

            videoEP.RestrictCodecs(new List<VideoCodecsEnum> { VideoCodecsEnum.VP8 });

            videoEP.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
            {
                _form.BeginInvoke(new Action(() =>
                {
                    unsafe
                    {
                        fixed (byte* s = bmp)
                        {
                            Bitmap bmpImage = new Bitmap((int)width, (int)height, (int)(bmp.Length / height), PixelFormat.Format24bppRgb, (IntPtr)s);
                            _picBox.Image   = bmpImage;
                        }
                    }
                }));
            };

            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } }
            };
            var pc = new RTCPeerConnection(config);

            // Add local receive only tracks. This ensures that the SDP answer includes only the codecs we support.
            MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false,
                new List<SDPMediaFormat> { new SDPMediaFormat(SDPMediaFormatsEnum.PCMU) }, MediaStreamStatusEnum.RecvOnly);

            pc.addTrack(audioTrack);
            MediaStreamTrack videoTrack = new MediaStreamTrack(videoEP.GetVideoSinkFormats(), MediaStreamStatusEnum.RecvOnly);

            pc.addTrack(videoTrack);

            pc.OnVideoFrameReceived     += videoEP.GotVideoFrame;
            pc.OnVideoFormatsNegotiated += (sdpFormat) => videoEP.SetVideoSinkFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));

            pc.onconnectionstatechange += async (state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await videoEP.CloseVideo();
                }
            };

            // Diagnostics.
            //pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            //pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return Task.FromResult(pc);
        }
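The decoded-sample handler above relies on _form and _picBox fields that are not shown. A minimal sketch of the assumed Windows Forms plumbing (field names match the handler; sizes are arbitrary):

        // Sketch only: the Windows Forms fields assumed by the OnVideoSinkDecodedSample handler above.
        private static Form _form;
        private static PictureBox _picBox;

        private static void InitialiseUI()
        {
            _picBox = new PictureBox { Dock = DockStyle.Fill, SizeMode = PictureBoxSizeMode.StretchImage };
            _form = new Form { Text = "WebRTC Video Receiver", Width = 640, Height = 480 };
            _form.Controls.Add(_picBox);
            // The form must be running on a UI thread (e.g. Application.Run(_form)) before
            // BeginInvoke is called from the decoded-sample handler.
        }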
Example #5
        private static Task<RTCPeerConnection> CreatePeerConnection()
        {
            var peerConnection = new RTCPeerConnection(null);

            var videoEP = new SIPSorceryMedia.Encoders.VideoEncoderEndPoint();

            //var videoEP = new SIPSorceryMedia.Windows.WindowsEncoderEndPoint();
            //var videoEP = new FFmpegVideoEndPoint();
            videoEP.RestrictCodecs(new List<VideoCodecsEnum> { VideoCodecsEnum.VP8 });

            videoEP.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
            {
                _form.BeginInvoke(new Action(() =>
                {
                    unsafe
                    {
                        fixed (byte* s = bmp)
                        {
                            Bitmap bmpImage = new Bitmap((int)width, (int)height, (int)(bmp.Length / height), PixelFormat.Format24bppRgb, (IntPtr)s);
                            _picBox.Image   = bmpImage;
                        }
                    }
                }));
            };

            // Sink (speaker) only audio end point.
            WindowsAudioEndPoint windowsAudioEP = new WindowsAudioEndPoint(new AudioEncoder(), -1, -1, true, false);

            MediaStreamTrack audioTrack = new MediaStreamTrack(windowsAudioEP.GetAudioSinkFormats(), MediaStreamStatusEnum.RecvOnly);

            peerConnection.addTrack(audioTrack);
            MediaStreamTrack videoTrack = new MediaStreamTrack(videoEP.GetVideoSinkFormats(), MediaStreamStatusEnum.RecvOnly);

            peerConnection.addTrack(videoTrack);

            peerConnection.OnVideoFrameReceived     += videoEP.GotVideoFrame;
            peerConnection.OnVideoFormatsNegotiated += (sdpFormat) => videoEP.SetVideoSinkFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));
            peerConnection.OnAudioFormatsNegotiated += (sdpFormat) =>
                windowsAudioEP.SetAudioSinkFormat(SDPMediaFormatInfo.GetAudioCodecForSdpFormat(sdpFormat.First().FormatCodec));

            peerConnection.OnTimeout += (mediaType) => logger.LogDebug($"Timeout on media {mediaType}.");
            peerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state changed to {state}.");
            peerConnection.onconnectionstatechange    += async (state) =>
            {
                logger.LogDebug($"Peer connection connected changed to {state}.");

                if (state == RTCPeerConnectionState.connected)
                {
                    await windowsAudioEP.StartAudio();
                }
                else if (state == RTCPeerConnectionState.closed || state == RTCPeerConnectionState.failed)
                {
                    await windowsAudioEP.CloseAudio();
                }
            };

            peerConnection.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) =>
            {
                bool hasUseCandidate = msg.Attributes.Any(x => x.AttributeType == STUNAttributeTypesEnum.UseCandidate);
                Console.WriteLine($"STUN {msg.Header.MessageType} received from {ep}, use candidate {hasUseCandidate}.");
            };

            peerConnection.OnRtpPacketReceived += (IPEndPoint rep, SDPMediaTypesEnum media, RTPPacket rtpPkt) =>
            {
                //logger.LogDebug($"RTP {media} pkt received, SSRC {rtpPkt.Header.SyncSource}.");
                if (media == SDPMediaTypesEnum.audio)
                {
                    windowsAudioEP.GotAudioRtp(rep, rtpPkt.Header.SyncSource, rtpPkt.Header.SequenceNumber, rtpPkt.Header.Timestamp, rtpPkt.Header.PayloadType, rtpPkt.Header.MarkerBit == 1, rtpPkt.Payload);
                }
            };

            return Task.FromResult(peerConnection);
        }
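Because this example constructs the peer connection with a null configuration, no STUN server is used and ICE candidates still have to be exchanged over signalling. A hedged sketch of that plumbing, where SendCandidateToRemotePeer is a hypothetical signalling call:

        // Sketch only: trickle-ICE plumbing for the peer connection returned above.
        private static void WireUpIceCandidates(RTCPeerConnection peerConnection)
        {
            // Outgoing: forward locally gathered candidates to the remote peer.
            peerConnection.onicecandidate += (candidate) =>
            {
                if (candidate != null)
                {
                    SendCandidateToRemotePeer(candidate.ToString());   // hypothetical signalling call
                }
            };
        }

        // Incoming: apply a candidate received from the remote peer.
        private static void OnRemoteIceCandidate(RTCPeerConnection peerConnection, string candidate, string sdpMid, ushort sdpMLineIndex)
        {
            peerConnection.addIceCandidate(new RTCIceCandidateInit
            {
                candidate = candidate,
                sdpMid = sdpMid,
                sdpMLineIndex = sdpMLineIndex
            });
        }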
Example #6
        private static Task<RTCPeerConnection> CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } }
            };
            var pc = new RTCPeerConnection(config);

            var mediaFileSource = new SIPSorceryMedia.FFmpeg.FFmpegFileSource(MP4_PATH, false, new AudioEncoder());

            mediaFileSource.Initialise();
            mediaFileSource.RestrictCodecs(new List<VideoCodecsEnum> { VideoCodecsEnum.VP8 });
            mediaFileSource.RestrictCodecs(new List<AudioCodecsEnum> { AudioCodecsEnum.PCMU });
            mediaFileSource.OnEndOfFile += () => pc.Close("source eof");

            MediaStreamTrack videoTrack = new MediaStreamTrack(mediaFileSource.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(videoTrack);
            MediaStreamTrack audioTrack = new MediaStreamTrack(mediaFileSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(audioTrack);

            mediaFileSource.OnVideoSourceEncodedSample += pc.SendVideo;
            mediaFileSource.OnAudioSourceEncodedSample += pc.SendAudio;
            pc.OnVideoFormatsNegotiated += (sdpFormat) => mediaFileSource.SetVideoSourceFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));
            pc.OnAudioFormatsNegotiated += (sdpFormat) => mediaFileSource.SetAudioSourceFormat(SDPMediaFormatInfo.GetAudioCodecForSdpFormat(sdpFormat.First().FormatCodec));

            pc.onconnectionstatechange += async (state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await mediaFileSource.CloseVideo();
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    await mediaFileSource.StartVideo();
                }
            };

            // Diagnostics.
            //pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            //pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return Task.FromResult(pc);
        }
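The MP4_PATH constant is assumed to point at an existing media file. A small guard before the FFmpegFileSource is created avoids a less obvious failure later:

        // Sketch only: guard the assumed MP4_PATH constant before the FFmpegFileSource is created.
        // Requires a using directive for System.IO.
        private static void EnsureMediaFileExists()
        {
            if (!File.Exists(MP4_PATH))
            {
                throw new ApplicationException($"The media file at {MP4_PATH} could not be found.");
            }
        }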
Example #7
        private static async Task<RTCPeerConnection> CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } }
            };
            var pc = new RTCPeerConnection(config);

            WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint(WEBCAM_NAME);
            bool initResult = await winVideoEP.InitialiseVideoSourceDevice();

            if (!initResult)
            {
                throw new ApplicationException("Could not initialise video capture device.");
            }

            //WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint(false, 640, 480, 30);
            //WindowsVideoEndPoint winVideoEP = new WindowsVideoEndPoint(false, 1920, 1080, 30);
            //await winVideoEP.InitialiseVideoSourceDevice();
            var audioSource = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions {
                AudioSource = AudioSourcesEnum.Music
            });

            MediaStreamTrack videoTrack = new MediaStreamTrack(winVideoEP.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(videoTrack);
            MediaStreamTrack audioTrack = new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv);

            pc.addTrack(audioTrack);

            winVideoEP.OnVideoSourceEncodedSample  += pc.SendVideo;
            audioSource.OnAudioSourceEncodedSample += pc.SendAudio;
            pc.OnVideoFormatsNegotiated += (sdpFormat) =>
                winVideoEP.SetVideoSourceFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));
            pc.onconnectionstatechange += async (state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.connected)
                {
                    await audioSource.StartAudio();

                    await winVideoEP.StartVideo();
                }
                else if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await winVideoEP.CloseVideo();

                    await audioSource.CloseAudio();
                }
            };

            // Diagnostics.
            pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            pc.OnSendReport    += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return pc;
        }
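Because this factory is async and throws when the capture device cannot be initialised, a caller may want to surface that failure instead of letting it tear down the signalling loop. A minimal sketch:

        // Sketch only: consume the async factory above and surface webcam initialisation failures.
        private static async Task<RTCPeerConnection> TryCreatePeerConnection()
        {
            try
            {
                return await CreatePeerConnection();
            }
            catch (ApplicationException excp)
            {
                logger.LogError($"Failed to create the peer connection: {excp.Message}");
                return null;
            }
        }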