private static Task<RTCPeerConnection> CreatePeerConnection(IVideoSource videoSource, IVideoSink videoSink)
{
    var pc = new RTCPeerConnection(null);

    MediaStreamTrack videoTrack = new MediaStreamTrack(videoSink.GetVideoSinkFormats(), MediaStreamStatusEnum.SendRecv);
    pc.addTrack(videoTrack);

    // Wire the encoded samples from the source into the peer connection and the
    // received frames out to the sink.
    videoSource.OnVideoSourceEncodedSample += pc.SendVideo;
    pc.OnVideoFrameReceived += videoSink.GotVideoFrame;
    pc.OnVideoFormatsNegotiated += (formats) =>
    {
        videoSink.SetVideoSinkFormat(formats.First());
        videoSource.SetVideoSourceFormat(formats.First());
    };

    pc.OnTimeout += (mediaType) => logger.LogDebug($"Timeout on media {mediaType}.");
    pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state changed to {state}.");
    pc.onconnectionstatechange += async (state) =>
    {
        logger.LogDebug($"Peer connection state changed to {state}.");
        if (state == RTCPeerConnectionState.closed || state == RTCPeerConnectionState.failed)
        {
            await videoSource.CloseVideo().ConfigureAwait(false);
        }
    };

    return Task.FromResult(pc);
}
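// A minimal usage sketch for the factory above, assuming an out-of-band signaling
// channel. SendOfferAndAwaitAnswer is a hypothetical signaling call, not part of
// the original sample; the rest is the standard SIPSorcery offer/answer flow.
private static async Task ConnectAsync(IVideoSource videoSource, IVideoSink videoSink)
{
    var pc = await CreatePeerConnection(videoSource, videoSink);

    // Create and apply the local offer, then hand the SDP to the signaling layer.
    var offer = pc.createOffer(null);
    await pc.setLocalDescription(offer);
    string answerSdp = await SendOfferAndAwaitAnswer(offer.sdp); // Hypothetical signaling call.

    // Apply the remote answer; media flows once ICE and DTLS complete.
    var result = pc.setRemoteDescription(new RTCSessionDescriptionInit { type = RTCSdpType.answer, sdp = answerSdp });
    if (result != SetDescriptionResultEnum.OK)
    {
        pc.Close("failed to set remote description");
    }
}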
private Task<RTCPeerConnection> CreatePeerConnection(string url)
{
    RTCConfiguration config = new RTCConfiguration
    {
        iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } }
    };
    var pc = new RTCPeerConnection(config);
    //mediaFileSource.OnEndOfFile += () => pc.Close("source eof");

    MediaStreamTrack videoTrack = new MediaStreamTrack(new List<VideoCodecsEnum> { VIDEO_CODEC }, MediaStreamStatusEnum.SendOnly);
    pc.addTrack(videoTrack);
    MediaStreamTrack audioTrack = new MediaStreamTrack(new List<AudioCodecsEnum> { AUDIO_CODEC }, MediaStreamStatusEnum.SendOnly);
    pc.addTrack(audioTrack);

    // Choose the shared sources based on the requested URL.
    IVideoSource videoSource;
    IAudioSource audioSource;
    if (url == MAX_URL)
    {
        videoSource = _maxSource;
        audioSource = _maxSource;
    }
    else
    {
        videoSource = _testPatternEncoder;
        audioSource = _musicSource;
    }

    pc.OnVideoFormatsNegotiated += (sdpFormat) =>
        videoSource.SetVideoSourceFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));
    pc.OnAudioFormatsNegotiated += (sdpFormat) =>
        audioSource.SetAudioSourceFormat(SDPMediaFormatInfo.GetAudioCodecForSdpFormat(sdpFormat.First().FormatCodec));

    videoSource.OnVideoSourceEncodedSample += pc.SendVideo;
    audioSource.OnAudioSourceEncodedSample += pc.SendAudio;

    pc.onconnectionstatechange += async (state) =>
    {
        _logger.LogInformation($"Peer connection state changed to {state}.");
        if (state == RTCPeerConnectionState.failed)
        {
            pc.Close("ice disconnection");
        }
        else if (state == RTCPeerConnectionState.closed)
        {
            videoSource.OnVideoSourceEncodedSample -= pc.SendVideo;
            audioSource.OnAudioSourceEncodedSample -= pc.SendAudio;
            await CheckForSourceSubscribers();
        }
        else if (state == RTCPeerConnectionState.connected)
        {
            await StartSource(url);
        }
    };

    // Diagnostics.
    //pc.OnReceiveReport += (re, media, rr) => _logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
    //pc.OnSendReport += (media, sr) => _logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
    //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => _logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");

    pc.oniceconnectionstatechange += (state) => _logger.LogInformation($"ICE connection state changed to {state}.");

    return Task.FromResult(pc);
}
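// StartSource and CheckForSourceSubscribers are referenced above but not shown in
// this sample. A sketch of one plausible implementation, assuming a hypothetical
// _connectionCount field so the shared sources only run while at least one peer
// is connected. PauseVideo/PauseAudio are part of the IVideoSource/IAudioSource
// interfaces; the exact start/pause policy here is an assumption.
private int _connectionCount = 0;

private async Task StartSource(string url)
{
    Interlocked.Increment(ref _connectionCount);
    if (url == MAX_URL)
    {
        await _maxSource.StartVideo();
        await _maxSource.StartAudio();
    }
    else
    {
        await _testPatternEncoder.StartVideo();
        await _musicSource.StartAudio();
    }
}

private async Task CheckForSourceSubscribers()
{
    // Pause the sources once the last peer connection has closed.
    if (Interlocked.Decrement(ref _connectionCount) <= 0)
    {
        await _maxSource.PauseVideo();
        await _maxSource.PauseAudio();
        await _testPatternEncoder.PauseVideo();
        await _musicSource.PauseAudio();
    }
}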
static private Task<RTCPeerConnection> CreatePeerConnection()
{
    RTCConfiguration config = new RTCConfiguration
    {
        iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } }
    };
    PeerConnection = new RTCPeerConnection(config);

    switch (VideoSourceType)
    {
        case VIDEO_SOURCE.FILE_OR_STREAM:
            // Use the same file source for audio if both point at the same file.
            if (AudioSourceType == AUDIO_SOURCE.FILE_OR_STREAM && AudioSourceFile == VideoSourceFile)
            {
                SIPSorceryMedia.FFmpeg.FFmpegFileSource fileSource = new SIPSorceryMedia.FFmpeg.FFmpegFileSource(VideoSourceFile, RepeatVideoFile, new AudioEncoder(), true);
                fileSource.OnEndOfFile += () => PeerConnection.Close("source eof");
                videoSource = fileSource;
                audioSource = fileSource;
            }
            else
            {
                SIPSorceryMedia.FFmpeg.FFmpegFileSource fileSource = new SIPSorceryMedia.FFmpeg.FFmpegFileSource(VideoSourceFile, RepeatVideoFile, new AudioEncoder(), true);
                fileSource.OnEndOfFile += () => PeerConnection.Close("source eof");
                videoSource = fileSource;
            }
            break;

        case VIDEO_SOURCE.CAMERA:
            List<SIPSorceryMedia.FFmpeg.Camera>? cameras = SIPSorceryMedia.FFmpeg.FFmpegCameraManager.GetCameraDevices();
            SIPSorceryMedia.FFmpeg.Camera? camera = null;
            if (cameras?.Count > 0)
            {
                // Use the last camera in the list.
                camera = cameras.Last();
            }

            if (camera != null)
            {
                videoSource = new SIPSorceryMedia.FFmpeg.FFmpegCameraSource(camera.Path);
            }
            else
            {
                throw new NotSupportedException("Cannot find an adequate camera.");
            }
            break;

        case VIDEO_SOURCE.SCREEN:
            List<SIPSorceryMedia.FFmpeg.Monitor>? monitors = SIPSorceryMedia.FFmpeg.FFmpegMonitorManager.GetMonitorDevices();
            SIPSorceryMedia.FFmpeg.Monitor? primaryMonitor = null;
            if (monitors?.Count > 0)
            {
                // Prefer the primary monitor, falling back to the first one found.
                foreach (SIPSorceryMedia.FFmpeg.Monitor monitor in monitors)
                {
                    if (monitor.Primary)
                    {
                        primaryMonitor = monitor;
                        break;
                    }
                }

                if (primaryMonitor == null)
                {
                    primaryMonitor = monitors[0];
                }
            }

            if (primaryMonitor != null)
            {
                videoSource = new SIPSorceryMedia.FFmpeg.FFmpegScreenSource(primaryMonitor.Path, primaryMonitor.Rect, 10);
            }
            else
            {
                throw new NotSupportedException("Cannot find an adequate monitor.");
            }
            break;
    }

    if (audioSource == null)
    {
        switch (AudioSourceType)
        {
            case AUDIO_SOURCE.FILE_OR_STREAM:
                SIPSorceryMedia.FFmpeg.FFmpegFileSource fileSource = new SIPSorceryMedia.FFmpeg.FFmpegFileSource(AudioSourceFile, RepeatAudioFile, new AudioEncoder(), false);
                fileSource.OnEndOfFile += () => PeerConnection.Close("source eof");
                audioSource = fileSource;
                break;

            case AUDIO_SOURCE.MICROPHONE:
                audioSource = new SIPSorceryMedia.FFmpeg.FFmpegMicrophoneSource(MicrophoneDevicePath, new AudioEncoder());
                break;
        }
    }

    if (videoSource != null)
    {
        videoSource.RestrictFormats(x => x.Codec == VideoCodec);
        MediaStreamTrack videoTrack = new MediaStreamTrack(videoSource.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);
        PeerConnection.addTrack(videoTrack);
        videoSource.OnVideoSourceEncodedSample += PeerConnection.SendVideo;
        PeerConnection.OnVideoFormatsNegotiated += (videoFormats) => videoSource.SetVideoSourceFormat(videoFormats.First());
    }

    if (audioSource != null)
    {
        audioSource.RestrictFormats(x => x.Codec == AudioCodec);
        MediaStreamTrack audioTrack = new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv);
        PeerConnection.addTrack(audioTrack);
        audioSource.OnAudioSourceEncodedSample += AudioSource_OnAudioSourceEncodedSample;
        PeerConnection.OnAudioFormatsNegotiated += (audioFormats) => audioSource.SetAudioSourceFormat(audioFormats.First());
    }

    PeerConnection.onconnectionstatechange += async (state) =>
    {
        logger.LogDebug($"Peer connection state changed to {state}.");
        if (state == RTCPeerConnectionState.failed)
        {
            PeerConnection.Close("ice disconnection");
        }
        else if (state == RTCPeerConnectionState.closed)
        {
            if (videoSource != null) { await videoSource.CloseVideo(); }
            if (audioSink != null) { await audioSink.CloseAudioSink(); }
            if (audioSource != null) { await audioSource.CloseAudio(); }
        }
        else if (state == RTCPeerConnectionState.connected)
        {
            if (videoSource != null) { await videoSource.StartVideo(); }
            if (audioSink != null) { await audioSink.StartAudioSink(); }
            if (audioSource != null) { await audioSource.StartAudio(); }
        }
    };

    // Diagnostics.
    //PeerConnection.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
    //PeerConnection.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
    //PeerConnection.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");

    PeerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state changed to {state}.");

    return Task.FromResult(PeerConnection);
}