Example #1
        private static Task<RTCPeerConnection> CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List<RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };
            var pc = new RTCPeerConnection(config);

            // FFmpeg based source that decodes the MP4 file and supplies encoded audio and video samples.
            var mediaFileSource = new SIPSorceryMedia.FFmpeg.FFmpegFileSource(MP4_PATH, false, new AudioEncoder());

            mediaFileSource.Initialise();
            // Only offer VP8 video and PCMU audio to the remote peer.
            mediaFileSource.RestrictFormats(x => x.Codec == VideoCodecsEnum.VP8);
            mediaFileSource.RestrictFormats(x => x.Codec == AudioCodecsEnum.PCMU);
            mediaFileSource.OnEndOfFile += () => pc.Close("source eof");

            MediaStreamTrack videoTrack = new MediaStreamTrack(mediaFileSource.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);
            pc.addTrack(videoTrack);

            MediaStreamTrack audioTrack = new MediaStreamTrack(mediaFileSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv);
            pc.addTrack(audioTrack);

            mediaFileSource.OnVideoSourceEncodedSample += pc.SendVideo;
            mediaFileSource.OnAudioSourceEncodedSample += pc.SendAudio;
            pc.OnVideoFormatsNegotiated += (videoFormats) => mediaFileSource.SetVideoSourceFormat(videoFormats.First());
            pc.OnAudioFormatsNegotiated += (audioFormats) => mediaFileSource.SetAudioSourceFormat(audioFormats.First());

            pc.onconnectionstatechange += async (state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.failed)
                {
                    pc.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    await mediaFileSource.CloseVideo();
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    await mediaFileSource.StartVideo();
                }
            };

            // Diagnostics.
            //pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            //pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return Task.FromResult(pc);
        }
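Example #1 only shows the peer connection factory; the surrounding program still has to supply the STUN_URL, MP4_PATH and logger members and hook the factory into a signalling channel. A minimal hosting sketch, assuming the websocket-sharp based WebRTCWebSocketPeer helper from SIPSorcery.Net is used for signalling and WEBSOCKET_PORT is a hypothetical constant:

        // Requires: using System.Net; using SIPSorcery.Net; using WebSocketSharp.Server;
        // Assumption: WEBSOCKET_PORT is a hypothetical constant for the signalling endpoint.
        static void Main()
        {
            // Each browser that connects over the web socket gets its own peer connection
            // from the factory above; SDP and ICE messages are exchanged over the socket.
            var webSocketServer = new WebSocketServer(IPAddress.Any, WEBSOCKET_PORT);
            webSocketServer.AddWebSocketService<WebRTCWebSocketPeer>("/", (peer) => peer.CreatePeerConnection = CreatePeerConnection);
            webSocketServer.Start();

            Console.WriteLine($"Waiting for web socket connections on ws://*:{WEBSOCKET_PORT}...");
            Console.ReadLine();
        }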
Example #2
        private static Task<RTCPeerConnection> CreatePeerConnection()
        {
            RTCConfiguration config = new RTCConfiguration
            {
                iceServers = new List<RTCIceServer> {
                    new RTCIceServer {
                        urls = STUN_URL
                    }
                }
            };

            PeerConnection = new RTCPeerConnection(config);

            switch (VideoSourceType)
            {
            case VIDEO_SOURCE.FILE_OR_STREAM:
                // If the audio source points at the same file, reuse the one FFmpeg file source for both streams.
                bool useSameFileForAudio = (AudioSourceType == AUDIO_SOURCE.FILE_OR_STREAM) && (AudioSourceFile == VideoSourceFile);

                SIPSorceryMedia.FFmpeg.FFmpegFileSource fileSource = new SIPSorceryMedia.FFmpeg.FFmpegFileSource(VideoSourceFile, RepeatVideoFile, new AudioEncoder(), true);
                fileSource.OnEndOfFile += () => PeerConnection.Close("source eof");

                videoSource = fileSource as IVideoSource;
                if (useSameFileForAudio)
                {
                    audioSource = fileSource as IAudioSource;
                }
                break;

            case VIDEO_SOURCE.CAMERA:
                List<SIPSorceryMedia.FFmpeg.Camera>? cameras = SIPSorceryMedia.FFmpeg.FFmpegCameraManager.GetCameraDevices();

                SIPSorceryMedia.FFmpeg.Camera? camera = null;
                if (cameras?.Count > 0)
                {
                    // Get last one
                    camera = cameras.Last();
                }
                if (camera != null)
                {
                    videoSource = new SIPSorceryMedia.FFmpeg.FFmpegCameraSource(camera.Path);
                }
                else
                {
                    throw new NotSupportedException($"Cannot find adequate camera ...");
                }

                break;

            case VIDEO_SOURCE.SCREEN:
                List<SIPSorceryMedia.FFmpeg.Monitor>? monitors = SIPSorceryMedia.FFmpeg.FFmpegMonitorManager.GetMonitorDevices();
                SIPSorceryMedia.FFmpeg.Monitor? primaryMonitor = null;
                if (monitors?.Count > 0)
                {
                    foreach (SIPSorceryMedia.FFmpeg.Monitor monitor in monitors)
                    {
                        if (monitor.Primary)
                        {
                            primaryMonitor = monitor;
                            break;
                        }
                    }
                    if (primaryMonitor == null)
                    {
                        primaryMonitor = monitors[0];
                    }
                }

                if (primaryMonitor != null)
                {
                    videoSource = new SIPSorceryMedia.FFmpeg.FFmpegScreenSource(primaryMonitor.Path, primaryMonitor.Rect, 10);
                }
                else
                {
                    throw new NotSupportedException($"Cannot find adequate monitor ...");
                }
                break;
            }

            if (audioSource == null)
            {
                switch (AudioSourceType)
                {
                case AUDIO_SOURCE.FILE_OR_STREAM:
                    SIPSorceryMedia.FFmpeg.FFmpegFileSource fileSource = new SIPSorceryMedia.FFmpeg.FFmpegFileSource(AudioSourceFile, RepeatAudioFile, new AudioEncoder(), false);
                    fileSource.OnEndOfFile += () => PeerConnection.Close("source eof");

                    audioSource = fileSource as IAudioSource;
                    break;

                case AUDIO_SOURCE.MICROPHONE:
                    audioSource = new SIPSorceryMedia.FFmpeg.FFmpegMicrophoneSource(MicrophoneDevicePath, new AudioEncoder());
                    break;
                }
            }

            if (videoSource != null)
            {
                videoSource.RestrictFormats(x => x.Codec == VideoCodec);

                MediaStreamTrack videoTrack = new MediaStreamTrack(videoSource.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);
                PeerConnection.addTrack(videoTrack);


                videoSource.OnVideoSourceEncodedSample  += PeerConnection.SendVideo;
                PeerConnection.OnVideoFormatsNegotiated += (videoFormats) => videoSource.SetVideoSourceFormat(videoFormats.First());
            }

            if (audioSource != null)
            {
                audioSource.RestrictFormats(x => x.Codec == AudioCodec);

                MediaStreamTrack audioTrack = new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv);
                PeerConnection.addTrack(audioTrack);

                audioSource.OnAudioSourceEncodedSample  += AudioSource_OnAudioSourceEncodedSample;
                PeerConnection.OnAudioFormatsNegotiated += (audioFormats) => audioSource.SetAudioSourceFormat(audioFormats.First());
            }

            PeerConnection.onconnectionstatechange += async (state) =>
            {
                logger.LogDebug($"Peer connection state change to {state}.");

                if (state == RTCPeerConnectionState.failed)
                {
                    PeerConnection.Close("ice disconnection");
                }
                else if (state == RTCPeerConnectionState.closed)
                {
                    if (videoSource != null)
                    {
                        await videoSource.CloseVideo();
                    }

                    if (audioSink != null)
                    {
                        await audioSink.CloseAudioSink();
                    }

                    if (audioSource != null)
                    {
                        await audioSource.CloseAudio();
                    }
                }
                else if (state == RTCPeerConnectionState.connected)
                {
                    if (videoSource != null)
                    {
                        await videoSource.StartVideo();
                    }

                    if (audioSink != null)
                    {
                        await audioSink.StartAudioSink();
                    }

                    if (audioSource != null)
                    {
                        await audioSource.StartAudio();
                    }
                }
            };

            // Diagnostics.
            //PeerConnection.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
            //PeerConnection.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
            //PeerConnection.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
            PeerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

            return Task.FromResult(PeerConnection);
        }
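Example #2 likewise relies on members of the surrounding class that the listing does not show: the source/sink fields, the codec and device settings, and the AudioSource_OnAudioSourceEncodedSample handler. A rough sketch of what it assumes, with the names taken from the snippet; the types are inferred from usage and every concrete value below is a placeholder, not part of the original sample:

        // Sketch of the members Example #2 references (placeholder values only).
        private enum VIDEO_SOURCE { FILE_OR_STREAM, CAMERA, SCREEN }
        private enum AUDIO_SOURCE { FILE_OR_STREAM, MICROPHONE }

        private const string STUN_URL = "stun:stun.sipsorcery.com";          // placeholder STUN server

        private static VIDEO_SOURCE VideoSourceType = VIDEO_SOURCE.FILE_OR_STREAM;
        private static AUDIO_SOURCE AudioSourceType = AUDIO_SOURCE.FILE_OR_STREAM;
        private static string VideoSourceFile = "media/sample.mp4";          // placeholder path
        private static string AudioSourceFile = "media/sample.mp4";          // placeholder path
        private static bool RepeatVideoFile = true;
        private static bool RepeatAudioFile = true;
        private static string MicrophoneDevicePath = "";                     // empty = default capture device
        private static VideoCodecsEnum VideoCodec = VideoCodecsEnum.VP8;
        private static AudioCodecsEnum AudioCodec = AudioCodecsEnum.PCMU;

        private static RTCPeerConnection PeerConnection;
        private static IVideoSource videoSource;
        private static IAudioSource audioSource;
        private static IAudioSink audioSink;
        private static Microsoft.Extensions.Logging.ILogger logger = Microsoft.Extensions.Logging.Abstractions.NullLogger.Instance;

        // Forwards encoded audio samples from the source to the peer connection,
        // mirroring what Example #1 does by subscribing pc.SendAudio directly.
        private static void AudioSource_OnAudioSourceEncodedSample(uint durationRtpUnits, byte[] sample)
        {
            PeerConnection?.SendAudio(durationRtpUnits, sample);
        }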