private static Task<RTCPeerConnection> CreatePeerConnection()
{
    RTCConfiguration config = new RTCConfiguration
    {
        iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } }
    };
    var pc = new RTCPeerConnection(config);

    var testPatternSource = new VideoTestPatternSource();
    var videoEncoderEndPoint = new VideoEncoderEndPoint();
    var audioSource = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions { AudioSource = AudioSourcesEnum.Music });

    MediaStreamTrack videoTrack = new MediaStreamTrack(videoEncoderEndPoint.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);
    pc.addTrack(videoTrack);
    MediaStreamTrack audioTrack = new MediaStreamTrack(audioSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv);
    pc.addTrack(audioTrack);

    testPatternSource.OnVideoSourceRawSample += videoEncoderEndPoint.ExternalVideoSourceRawSample;
    videoEncoderEndPoint.OnVideoSourceEncodedSample += pc.SendVideo;
    audioSource.OnAudioSourceEncodedSample += pc.SendAudio;
    pc.OnVideoFormatsNegotiated += (formats) => videoEncoderEndPoint.SetVideoSourceFormat(formats.First());

    pc.onconnectionstatechange += async (state) =>
    {
        logger.LogDebug($"Peer connection state change to {state}.");

        if (state == RTCPeerConnectionState.connected)
        {
            await audioSource.StartAudio();
            await testPatternSource.StartVideo();
        }
        else if (state == RTCPeerConnectionState.failed)
        {
            pc.Close("ice disconnection");
        }
        else if (state == RTCPeerConnectionState.closed)
        {
            await testPatternSource.CloseVideo();
            await audioSource.CloseAudio();
        }
    };

    // Diagnostics.
    pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
    pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
    pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
    pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

    return Task.FromResult(pc);
}
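A minimal hosting sketch for a factory method like the one above, following the same web socket signaling pattern used in the ExecuteAsync example later in this section. The WEBSOCKET_PORT constant is assumed to be defined by the surrounding program.

// Hand the peer connection factory to the WebRTCWebSocketPeer helper, which
// invokes it for each web socket client and relays the SDP offer/answer and
// ICE candidates over the socket.
WebSocketServer webSocketServer = new WebSocketServer(IPAddress.Any, WEBSOCKET_PORT);
webSocketServer.AddWebSocketService<WebRTCWebSocketPeer>("/", (peer) => peer.CreatePeerConnection = () => CreatePeerConnection());
webSocketServer.Start();
Console.WriteLine($"Waiting for web socket connections on {webSocketServer.Address}:{webSocketServer.Port}...");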
private static Task<RTCPeerConnection> CreatePeerConnection(X509Certificate2 cert)
{
    //RTCConfiguration config = new RTCConfiguration
    //{
    //    iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } },
    //    certificates = new List<RTCCertificate> { new RTCCertificate { Certificate = cert } }
    //};
    //var pc = new RTCPeerConnection(config);
    var pc = new RTCPeerConnection(null);

    var testPatternSource = new VideoTestPatternSource(new SIPSorceryMedia.Encoders.VideoEncoder());
    testPatternSource.SetFrameRate(60);
    //testPatternSource.SetMaxFrameRate(true);
    //var videoEndPoint = new SIPSorceryMedia.FFmpeg.FFmpegVideoEndPoint();
    //videoEndPoint.RestrictFormats(format => format.Codec == VideoCodecsEnum.H264);
    //var videoEndPoint = new SIPSorceryMedia.Windows.WindowsEncoderEndPoint();
    //var videoEndPoint = new SIPSorceryMedia.Encoders.VideoEncoderEndPoint();

    MediaStreamTrack track = new MediaStreamTrack(testPatternSource.GetVideoSourceFormats(), MediaStreamStatusEnum.SendOnly);
    pc.addTrack(track);

    //testPatternSource.OnVideoSourceRawSample += videoEndPoint.ExternalVideoSourceRawSample;
    testPatternSource.OnVideoSourceRawSample += MesasureTestPatternSourceFrameRate;
    testPatternSource.OnVideoSourceEncodedSample += pc.SendVideo;
    pc.OnVideoFormatsNegotiated += (formats) => testPatternSource.SetVideoSourceFormat(formats.First());

    pc.onconnectionstatechange += async (state) =>
    {
        logger.LogDebug($"Peer connection state change to {state}.");

        if (state == RTCPeerConnectionState.failed)
        {
            pc.Close("ice disconnection");
        }
        else if (state == RTCPeerConnectionState.closed)
        {
            await testPatternSource.CloseVideo();
            testPatternSource.Dispose();
        }
        else if (state == RTCPeerConnectionState.connected)
        {
            await testPatternSource.StartVideo();
        }
    };

    // Diagnostics.
    //pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
    //pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
    //pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
    pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

    return Task.FromResult(pc);
}
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
    // Set up media sources.
    _maxSource = new FFmpegFileSource(MP4_PATH, true, new AudioEncoder());
    _maxSource.Initialise();
    _testPatternSource = new VideoTestPatternSource();
    _testPatternEncoder = new FFmpegVideoEndPoint();
    _testPatternSource.OnVideoSourceRawSample += _testPatternEncoder.ExternalVideoSourceRawSample;
    _musicSource = new AudioExtrasSource(new AudioEncoder(), new AudioSourceOptions { AudioSource = AudioSourcesEnum.Music });

    // The same sources are used for all connected peers (broadcast) so the codecs need
    // to be restricted to a single well supported option.
    _musicSource.RestrictCodecs(new List<AudioCodecsEnum> { AudioCodecsEnum.PCMU });
    _maxSource.RestrictCodecs(new List<VideoCodecsEnum> { VIDEO_CODEC });
    _testPatternEncoder.RestrictCodecs(new List<VideoCodecsEnum> { VIDEO_CODEC });

    // Start web socket.
    _logger.LogInformation("Starting web socket server...");
    WebSocketServer webSocketServer = null;

    if (!string.IsNullOrWhiteSpace(_certificatePath))
    {
        if (!File.Exists(_certificatePath))
        {
            throw new ApplicationException($"Certificate path could not be found {_certificatePath}.");
        }

        webSocketServer = new WebSocketServer(IPAddress.Any, WEBSOCKET_PORT, true);
        webSocketServer.SslConfiguration.ServerCertificate = new X509Certificate2(_certificatePath);
        webSocketServer.SslConfiguration.CheckCertificateRevocation = false;
        webSocketServer.SslConfiguration.EnabledSslProtocols = System.Security.Authentication.SslProtocols.Tls12;
    }
    else
    {
        webSocketServer = new WebSocketServer(IPAddress.Any, WEBSOCKET_PORT);
    }

    webSocketServer.AddWebSocketService<WebRTCWebSocketPeer>("/", (peer) => peer.CreatePeerConnection = () => CreatePeerConnection(null));
    webSocketServer.AddWebSocketService<WebRTCWebSocketPeer>($"/{MAX_URL}", (peer) => peer.CreatePeerConnection = () => CreatePeerConnection(MAX_URL));
    webSocketServer.Start();

    _logger.LogInformation($"Waiting for web socket connections on {webSocketServer.Address}:{webSocketServer.Port}...");

    await Task.Delay(Timeout.Infinite, stoppingToken);
}
private static Task<RTCPeerConnection> CreatePeerConnection()
{
    RTCConfiguration config = new RTCConfiguration
    {
        iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } }
    };
    //var pc = new RTCPeerConnection(config);
    var pc = peer = new RTCPeerConnection(null);

    var testPatternSource = new VideoTestPatternSource();
    testPatternSource.SetFrameRate(60);
    testPatternSource.SetMaxFrameRate(true);

    var videoEndPoint = new SIPSorceryMedia.FFmpeg.FFmpegVideoEndPoint();
    videoEndPoint.RestrictFormats(format => format.Codec == VideoCodecsEnum.H264);
    //var videoEndPoint = new SIPSorceryMedia.Windows.WindowsVideoEndPoint(true);
    //var videoEndPoint = new SIPSorceryMedia.Windows.WindowsEncoderEndPoint();
    //var videoEndPoint = new SIPSorceryMedia.Encoders.VideoEncoderEndPoint();

    MediaStreamTrack track = new MediaStreamTrack(videoEndPoint.GetVideoSourceFormats(), MediaStreamStatusEnum.SendOnly);
    pc.addTrack(track);

    testPatternSource.OnVideoSourceRawSample += TestPatternSource_OnVideoSourceRawSample;
    pc.OnVideoFormatsNegotiated += (formats) => videoEndPoint.SetVideoSourceFormat(formats.First());

    pc.onconnectionstatechange += async (state) =>
    {
        if (state == RTCPeerConnectionState.failed)
        {
            pc.Close("ice disconnection");
        }
        else if (state == RTCPeerConnectionState.closed)
        {
            await testPatternSource.CloseVideo();
            await videoEndPoint.CloseVideo();
        }
        else if (state == RTCPeerConnectionState.connected)
        {
            await videoEndPoint.StartVideo();
            await testPatternSource.StartVideo();
        }
    };

    return Task.FromResult(pc);
}
public void CanInstantiateUnitTest()
{
    logger.LogDebug("--> " + System.Reflection.MethodBase.GetCurrentMethod().Name);
    logger.BeginScope(System.Reflection.MethodBase.GetCurrentMethod().Name);

    VideoTestPatternSource testPatternSource = new VideoTestPatternSource();

    Assert.NotNull(testPatternSource);
}
private static RTCPeerConnection CreatePeerConnection()
{
    RTCConfiguration config = new RTCConfiguration
    {
        iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } }
    };
    var pc = new RTCPeerConnection(config);

    var testPatternSource = new VideoTestPatternSource();
    WindowsVideoEndPoint windowsVideoEndPoint = new WindowsVideoEndPoint(true);

    MediaStreamTrack track = new MediaStreamTrack(windowsVideoEndPoint.GetVideoSourceFormats(), MediaStreamStatusEnum.SendOnly);
    pc.addTrack(track);

    testPatternSource.OnVideoSourceRawSample += windowsVideoEndPoint.ExternalVideoSourceRawSample;
    windowsVideoEndPoint.OnVideoSourceEncodedSample += pc.SendVideo;
    pc.OnVideoFormatsNegotiated += (sdpFormat) =>
        windowsVideoEndPoint.SetVideoSourceFormat(SDPMediaFormatInfo.GetVideoCodecForSdpFormat(sdpFormat.First().FormatCodec));

    pc.onconnectionstatechange += async (state) =>
    {
        logger.LogDebug($"Peer connection state change to {state}.");

        if (state == RTCPeerConnectionState.failed)
        {
            pc.Close("ice disconnection");
        }
        else if (state == RTCPeerConnectionState.closed)
        {
            await testPatternSource.CloseVideo();
            await windowsVideoEndPoint.CloseVideo();
        }
        else if (state == RTCPeerConnectionState.connected)
        {
            await windowsVideoEndPoint.StartVideo();
            await testPatternSource.StartVideo();
        }
    };

    // Diagnostics.
    pc.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
    pc.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
    pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
    pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

    return pc;
}
private static RTCPeerConnection CreatePeerConnection()
{
    var peerConnection = new RTCPeerConnection(null);

    var testPatternSource = new VideoTestPatternSource();
    WindowsVideoEndPoint windowsVideoEndPoint = new WindowsVideoEndPoint(new VpxVideoEncoder());

    MediaStreamTrack track = new MediaStreamTrack(windowsVideoEndPoint.GetVideoSourceFormats(), MediaStreamStatusEnum.SendOnly);
    peerConnection.addTrack(track);

    testPatternSource.OnVideoSourceRawSample += windowsVideoEndPoint.ExternalVideoSourceRawSample;
    windowsVideoEndPoint.OnVideoSourceEncodedSample += peerConnection.SendVideo;

    peerConnection.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change {state}.");
    peerConnection.OnReceiveReport += RtpSession_OnReceiveReport;
    peerConnection.OnSendReport += RtpSession_OnSendReport;
    peerConnection.OnTimeout += (mediaType) => logger.LogWarning($"Timeout on {mediaType}.");
    peerConnection.onconnectionstatechange += async (state) =>
    {
        logger.LogDebug($"Peer connection state changed to {state}.");

        if (state == RTCPeerConnectionState.closed)
        {
            peerConnection.OnReceiveReport -= RtpSession_OnReceiveReport;
            peerConnection.OnSendReport -= RtpSession_OnSendReport;
            await windowsVideoEndPoint.CloseVideo();
            await testPatternSource.CloseVideo();
        }
        else if (state == RTCPeerConnectionState.connected)
        {
            await testPatternSource.StartVideo();
            await windowsVideoEndPoint.StartVideo();
        }
    };

    return peerConnection;
}
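A minimal sketch of how the peer connection returned by a factory like this one could drive a manual SDP offer/answer exchange, using the same createOffer/setLocalDescription/setRemoteDescription calls as the echo test client further down. SendToRemotePeer and WaitForRemoteAnswer are hypothetical placeholders for whatever signaling channel is available, and the snippet assumes it runs inside an async method.

var pc = CreatePeerConnection();

var offer = pc.createOffer(null);
await pc.setLocalDescription(offer);

// Hypothetical signaling: send the offer and receive the answer as JSON strings.
SendToRemotePeer(offer.toJSON());
string answerJson = WaitForRemoteAnswer();

if (RTCSessionDescriptionInit.TryParse(answerJson, out var answerInit))
{
    var result = pc.setRemoteDescription(answerInit);
    if (result != SetDescriptionResultEnum.OK)
    {
        logger.LogWarning($"Failed to set remote description: {result}.");
    }
}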
/// <summary>
/// Creates the media session to use with the SIP call. The audio source options
/// can be adjusted after the call is answered to do things like play on-hold
/// music etc.
/// </summary>
/// <returns>A new media session object.</returns>
private VoIPMediaSession CreateMediaSession()
{
    var windowsAudioEndPoint = new WindowsAudioEndPoint(new AudioEncoder(), m_audioOutDeviceIndex);
    var windowsVideoEndPoint = new WindowsVideoEndPoint();

    MediaEndPoints mediaEndPoints = new MediaEndPoints
    {
        AudioSink = windowsAudioEndPoint,
        AudioSource = windowsAudioEndPoint,
        VideoSink = windowsVideoEndPoint,
        VideoSource = windowsVideoEndPoint,
    };

    // Fallback video source if a Windows webcam cannot be accessed.
    var testPatternSource = new VideoTestPatternSource(new VideoEncoder());

    var voipMediaSession = new VoIPMediaSession(mediaEndPoints, testPatternSource);
    //voipMediaSession.OnRtpPacketReceived += OnRtpPacketReceived;

    return voipMediaSession;
}
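A minimal usage sketch for the factory above, assuming the surrounding class exposes a _sipTransport field and a DESTINATION constant like the other examples in this section.

var userAgent = new SIPUserAgent(_sipTransport, null, true);
var mediaSession = CreateMediaSession();

// Place the call; the awaited result indicates whether the call was answered.
bool callResult = await userAgent.Call(DESTINATION, null, null, mediaSession);
Console.WriteLine($"Call attempt {(callResult ? "succeeded" : "failed")}.");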
static async Task Main()
{
    Console.WriteLine("SIPSorcery Getting Started Video Call Demo");
    Console.WriteLine("Press ctrl-c to exit.");

    Log = AddConsoleLogger();
    ManualResetEvent exitMRE = new ManualResetEvent(false);

    _sipTransport = new SIPTransport();
    EnableTraceLogs(_sipTransport);

    // Open a window to display the video feed from the remote SIP party.
    _form = new Form();
    _form.AutoSize = true;
    _form.BackgroundImageLayout = ImageLayout.Center;
    _localVideoPicBox = new PictureBox
    {
        Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
        Location = new Point(0, 0),
        Visible = true
    };
    _remoteVideoPicBox = new PictureBox
    {
        Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
        Location = new Point(0, VIDEO_FRAME_HEIGHT),
        Visible = true
    };
    _form.Controls.Add(_localVideoPicBox);
    _form.Controls.Add(_remoteVideoPicBox);

    Application.EnableVisualStyles();
    ThreadPool.QueueUserWorkItem(delegate { Application.Run(_form); });

    ManualResetEvent formMre = new ManualResetEvent(false);
    _form.Activated += (object sender, EventArgs e) => formMre.Set();
    Console.WriteLine("Waiting for form activation.");
    formMre.WaitOne();

    _sipTransport.SIPTransportRequestReceived += OnSIPTransportRequestReceived;

    string executableDir = Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location);

    var userAgent = new SIPUserAgent(_sipTransport, null, true);
    userAgent.OnCallHungup += (dialog) => exitMRE.Set();

    var windowsAudioEndPoint = new WindowsAudioEndPoint(new AudioEncoder());
    windowsAudioEndPoint.RestrictFormats(format => format.Codec == AudioCodecsEnum.PCMU);
    var windowsVideoEndPoint = new WindowsVideoEndPoint(new VpxVideoEncoder());

    // Fallback to a test pattern source if accessing the Windows webcam fails.
    var testPattern = new VideoTestPatternSource(new VpxVideoEncoder());

    MediaEndPoints mediaEndPoints = new MediaEndPoints
    {
        AudioSink = windowsAudioEndPoint,
        AudioSource = windowsAudioEndPoint,
        VideoSink = windowsVideoEndPoint,
        VideoSource = windowsVideoEndPoint,
    };

    var voipMediaSession = new VoIPMediaSession(mediaEndPoints, testPattern);
    voipMediaSession.AcceptRtpFromAny = true;

    windowsVideoEndPoint.OnVideoSourceRawSample += (uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat) =>
    {
        _form?.BeginInvoke(new Action(() =>
        {
            unsafe
            {
                fixed (byte* s = sample)
                {
                    System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap(width, height, width * 3, System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s);
                    _localVideoPicBox.Image = bmpImage;
                }
            }
        }));
    };

    Console.WriteLine("Starting local video source...");
    await windowsVideoEndPoint.StartVideo().ConfigureAwait(false);

    // Place the call and wait for the result.
    Task<bool> callTask = userAgent.Call(DESTINATION, null, null, voipMediaSession);
    callTask.Wait(CALL_TIMEOUT_SECONDS * 1000);

    if (callTask.Result)
    {
        Log.LogInformation("Call attempt successful.");

        windowsVideoEndPoint.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
        {
            _form?.BeginInvoke(new Action(() =>
            {
                unsafe
                {
                    fixed (byte* s = bmp)
                    {
                        System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap((int)width, (int)height, stride, System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s);
                        _remoteVideoPicBox.Image = bmpImage;
                    }
                }
            }));
        };

        windowsAudioEndPoint.PauseAudio().Wait();
        voipMediaSession.AudioExtrasSource.SetSource(AudioSourcesEnum.Music);
    }
    else
    {
        Log.LogWarning("Call attempt failed.");
        Console.WriteLine("Press ctrl-c to exit.");
    }

    Console.CancelKeyPress += delegate (object sender, ConsoleCancelEventArgs e)
    {
        e.Cancel = true;
        Log.LogInformation("Exiting...");
        exitMRE.Set();
    };

    exitMRE.WaitOne();

    if (userAgent.IsCallActive)
    {
        Log.LogInformation("Hanging up.");
        userAgent.Hangup();
        Task.Delay(1000).Wait();
    }

    // Clean up.
    _form.BeginInvoke(new Action(() => _form.Close()));
    _sipTransport.Shutdown();
}
static async Task Main(string[] args)
{
    Console.WriteLine("SIPSorcery Video Phone Command Line Demo");
    Console.WriteLine("Press ctrl-c to exit.");

    Log = AddConsoleLogger();
    ManualResetEvent exitMRE = new ManualResetEvent(false);
    ManualResetEvent waitForCallMre = new ManualResetEvent(false);

    var parseResult = Parser.Default.ParseArguments<Options>(args);
    _options = (parseResult as Parsed<Options>)?.Value;

    if (parseResult.Tag != ParserResultType.NotParsed)
    {
        if (_options.ListCameras)
        {
            #region List webcams.

            var webcams = await WindowsVideoEndPoint.GetVideoCatpureDevices();
            if (webcams == null || webcams.Count == 0)
            {
                Console.WriteLine("No webcams were found.");
            }
            else
            {
                var index = 0;
                foreach (var webcam in webcams)
                {
                    Console.WriteLine($"{index}: \"{webcam.Name}\", use --cam={index}.");
                    index++;
                }
            }

            #endregion
        }
        else if (_options.ListFormats != null)
        {
            #region List webcam formats.

            var webcams = await WindowsVideoEndPoint.GetVideoCatpureDevices();
            if (webcams == null || webcams.Count == 0)
            {
                Console.WriteLine("No webcams were found.");
            }
            else if (_options.ListFormats >= webcams.Count)
            {
                Console.WriteLine($"No webcam available for index {_options.ListFormats}.");
            }
            else
            {
                string webcamName = webcams[_options.ListFormats.Value].Name;
                var formats = await WindowsVideoEndPoint.GetDeviceFrameFormats(webcamName);
                Console.WriteLine($"Video frame formats for {webcamName}.");
                foreach (var vidFmt in formats)
                {
                    // Cast to float to avoid integer division truncating fractional frame rates.
                    float vidFps = vidFmt.MediaFrameFormat.FrameRate.Numerator / (float)vidFmt.MediaFrameFormat.FrameRate.Denominator;
                    string pixFmt = vidFmt.MediaFrameFormat.Subtype == WindowsVideoEndPoint.MF_I420_PIXEL_FORMAT ? "I420" : vidFmt.MediaFrameFormat.Subtype;
                    Console.WriteLine($"{vidFmt.Width}x{vidFmt.Height} {vidFps:0.##}fps {pixFmt}");
                }
            }

            #endregion
        }
        else
        {
            string webcamName = null;

            if (_options.WebcamIndex != null)
            {
                var webcams = await WindowsVideoEndPoint.GetVideoCatpureDevices();
                if (webcams == null || webcams.Count == 0)
                {
                    Console.WriteLine("No webcams were found.");
                    Application.Exit();
                }
                else if (webcams.Count < _options.WebcamIndex)
                {
                    Console.WriteLine($"No webcam available for index {_options.WebcamIndex}.");
                    Application.Exit();
                }
                else
                {
                    webcamName = webcams[_options.WebcamIndex.Value].Name;
                    Console.WriteLine($"Using webcam {webcamName}.");
                }
            }

            _sipTransport = new SIPTransport();

            if (string.IsNullOrEmpty(_options.CallDestination))
            {
                // We haven't been asked to place a call so we're listening.
                IPAddress listenAddress = (System.Net.Sockets.Socket.OSSupportsIPv6) ? IPAddress.IPv6Any : IPAddress.Any;
                var listenEndPoint = new IPEndPoint(listenAddress, SIP_PORT_DEFAULT);

                try
                {
                    SIPUDPChannel udpChannel = new SIPUDPChannel(listenEndPoint, true);
                    _sipTransport.AddSIPChannel(udpChannel);
                }
                catch (ApplicationException appExcp)
                {
                    Console.WriteLine($"Failed to create UDP SIP channel on {listenEndPoint}, error {appExcp.Message}.");
                    SIPUDPChannel udpChannel = new SIPUDPChannel(new IPEndPoint(listenAddress, 0), true);
                    _sipTransport.AddSIPChannel(udpChannel);
                }

                var listeningEP = _sipTransport.GetSIPChannels().First().ListeningSIPEndPoint;
                Console.WriteLine($"Listening for incoming call on {listeningEP}.");
            }

            EnableTraceLogs(_sipTransport);

            // Open a window to display the video feed from the remote SIP party.
            _form = new Form();
            _form.Text = string.IsNullOrEmpty(_options.CallDestination) ? "Listener" : "Caller";
            _form.AutoSize = true;
            _form.BackgroundImageLayout = ImageLayout.Center;
            _localVideoPicBox = new PictureBox
            {
                Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
                Location = new Point(0, 0),
                Visible = true
            };
            _remoteVideoPicBox = new PictureBox
            {
                Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
                Location = new Point(0, VIDEO_FRAME_HEIGHT),
                Visible = true
            };
            _form.Controls.Add(_localVideoPicBox);
            _form.Controls.Add(_remoteVideoPicBox);

            var userAgent = new SIPUserAgent(_sipTransport, null, true);
            userAgent.OnCallHungup += (dialog) => exitMRE.Set();

            WindowsAudioEndPoint windowsAudioEndPoint = null;
            if (!_options.NoAudio)
            {
                windowsAudioEndPoint = new WindowsAudioEndPoint(new AudioEncoder());
                windowsAudioEndPoint.RestrictFormats(x => x.Codec == AudioCodecsEnum.G722);
            }

            MediaEndPoints mediaEndPoints = null;

            if (_options.TestPattern && _options.WebcamIndex == null)
            {
                var testPattern = new VideoTestPatternSource(new FFmpegVideoEncoder());
                var decoderSink = new DecoderVideoSink(new FFmpegVideoEncoder());
                //var decoderSink = new DecoderVideoSink(new VpxVideoEncoder());

                testPattern.RestrictFormats(format => format.Codec == VIDEO_CODEC);
                decoderSink.RestrictFormats(format => format.Codec == VIDEO_CODEC);

                mediaEndPoints = new MediaEndPoints
                {
                    AudioSink = windowsAudioEndPoint,
                    AudioSource = windowsAudioEndPoint,
                    VideoSink = decoderSink,
                    VideoSource = testPattern,
                };
            }
            else
            {
                WindowsVideoEndPoint windowsVideoEndPoint = webcamName switch
                {
                    null => new WindowsVideoEndPoint(new FFmpegVideoEncoder()),
                    _ => new WindowsVideoEndPoint(new FFmpegVideoEncoder(), webcamName),
                };
                windowsVideoEndPoint.RestrictFormats(format => format.Codec == VIDEO_CODEC);

                mediaEndPoints = new MediaEndPoints
                {
                    AudioSink = windowsAudioEndPoint,
                    AudioSource = windowsAudioEndPoint,
                    VideoSink = windowsVideoEndPoint,
                    VideoSource = windowsVideoEndPoint,
                };
            }

            mediaEndPoints.VideoSource.OnVideoSourceRawSample += (uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat) =>
            {
                if (_isFormActivated)
                {
                    _form?.BeginInvoke(new Action(() =>
                    {
                        if (_form.Handle != IntPtr.Zero)
                        {
                            int stride = width * 3;

                            if (pixelFormat == VideoPixelFormatsEnum.I420)
                            {
                                sample = PixelConverter.I420toBGR(sample, width, height, out stride);
                            }

                            if (_localVideoPicBox.Width != width || _localVideoPicBox.Height != height)
                            {
                                Log.LogDebug($"Adjusting local video display from {_localVideoPicBox.Width}x{_localVideoPicBox.Height} to {width}x{height}.");
                                _localVideoPicBox.Width = width;
                                _localVideoPicBox.Height = height;
                            }

                            unsafe
                            {
                                fixed (byte* s = sample)
                                {
                                    System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap(width, height, stride, System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s);
                                    _localVideoPicBox.Image = bmpImage;
                                }
                            }
                        }
                    }));
                }
            };

            Console.CancelKeyPress += delegate (object sender, ConsoleCancelEventArgs e)
            {
                e.Cancel = true;
                Log.LogInformation("Exiting...");
                waitForCallMre.Set();
                exitMRE.Set();
            };

            if (string.IsNullOrEmpty(_options.CallDestination))
            {
                ActivateForm();

                userAgent.OnIncomingCall += async (ua, req) =>
                {
                    var voipMediaSession = new VoIPMediaSession(mediaEndPoints);
                    voipMediaSession.AcceptRtpFromAny = true;
                    if (voipMediaSession.VideoLocalTrack != null)
                    {
                        voipMediaSession.VideoLocalTrack.MaximumBandwidth = MAXIMUM_VIDEO_BANDWIDTH;
                    }

                    var uas = userAgent.AcceptCall(req);
                    await userAgent.Answer(uas, voipMediaSession);

                    Console.WriteLine("Starting local video source...");
                    await mediaEndPoints.VideoSource.StartVideo().ConfigureAwait(false);

                    waitForCallMre.Set();
                };

                Console.WriteLine("Waiting for incoming call...");
                waitForCallMre.WaitOne();
            }
            else
            {
                var voipMediaSession = new VoIPMediaSession(mediaEndPoints);
                voipMediaSession.AcceptRtpFromAny = true;
                if (voipMediaSession.VideoLocalTrack != null)
                {
                    voipMediaSession.VideoLocalTrack.MaximumBandwidth = MAXIMUM_VIDEO_BANDWIDTH;
                }

                ActivateForm();

                Console.WriteLine("Starting local video source...");
                await mediaEndPoints.VideoSource.StartVideo().ConfigureAwait(false);

                // Place the call and wait for the result.
                Task<bool> callTask = userAgent.Call(_options.CallDestination, null, null, voipMediaSession);
                callTask.Wait(CALL_TIMEOUT_SECONDS * 1000);
            }

            if (userAgent.IsCallActive)
            {
                Log.LogInformation("Call attempt successful.");

                mediaEndPoints.VideoSink.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
                {
                    if (_isFormActivated)
                    {
                        _form?.BeginInvoke(new Action(() =>
                        {
                            if (_form.Handle != IntPtr.Zero)
                            {
                                unsafe
                                {
                                    if (_remoteVideoPicBox.Width != (int)width || _remoteVideoPicBox.Height != (int)height)
                                    {
                                        Log.LogDebug($"Adjusting remote video display from {_remoteVideoPicBox.Width}x{_remoteVideoPicBox.Height} to {width}x{height}.");
                                        _remoteVideoPicBox.Width = (int)width;
                                        _remoteVideoPicBox.Height = (int)height;
                                    }

                                    fixed (byte* s = bmp)
                                    {
                                        System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap((int)width, (int)height, stride, System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s);
                                        _remoteVideoPicBox.Image = bmpImage;
                                    }
                                }
                            }
                        }));
                    }
                };
            }
            else
            {
                Log.LogWarning("Call attempt failed.");
                Console.WriteLine("Press ctrl-c to exit.");
            }

            exitMRE.WaitOne();

            if (userAgent.IsCallActive)
            {
                Log.LogInformation("Hanging up.");
                userAgent.Hangup();
            }

            Task.Delay(1000).Wait();

            // Clean up.
            if (_form.Handle != IntPtr.Zero)
            {
                _form.BeginInvoke(new Action(() => _form.Close()));
            }
            _sipTransport.Shutdown();
        }
    }
}
static async Task Main()
{
    Console.WriteLine("SIPSorcery Asterisk + ICE Demo");

    AddConsoleLogger();

    CancellationTokenSource exitCts = new CancellationTokenSource();

    var sipTransport = new SIPTransport();
    EnableTraceLogs(sipTransport);

    var userAgent = new SIPUserAgent(sipTransport, null);
    userAgent.ClientCallFailed += (uac, error, sipResponse) => Console.WriteLine($"Call failed {error}.");
    userAgent.OnCallHungup += async (dialog) =>
    {
        // Give time for the BYE response before exiting.
        await Task.Delay(1000);
        exitCts.Cancel();
    };

    var audioExtras = new AudioExtrasSource();
    audioExtras.SetSource(AudioSourcesEnum.PinkNoise);
    var testPattern = new VideoTestPatternSource(new VpxVideoEncoder());

    var pc = new RTCPeerConnection(null);
    pc.OnAudioFormatsNegotiated += (formats) => audioExtras.SetAudioSourceFormat(formats.First());
    pc.OnVideoFormatsNegotiated += (formats) => testPattern.SetVideoSourceFormat(formats.First());

    var audioTrack = new MediaStreamTrack(audioExtras.GetAudioSourceFormats(), MediaStreamStatusEnum.SendOnly);
    pc.addTrack(audioTrack);
    audioExtras.OnAudioSourceEncodedSample += pc.SendAudio;
    var videoTrack = new MediaStreamTrack(testPattern.GetVideoSourceFormats(), MediaStreamStatusEnum.SendOnly);
    pc.addTrack(videoTrack);
    testPattern.OnVideoSourceEncodedSample += pc.SendVideo;

    // Diagnostics.
    pc.OnReceiveReport += (re, media, rr) => Console.WriteLine($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
    pc.OnSendReport += (media, sr) => Console.WriteLine($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
    pc.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => Console.WriteLine($"STUN {msg.Header.MessageType} received from {ep}.");
    pc.oniceconnectionstatechange += (state) => Console.WriteLine($"ICE connection state change to {state}.");

    // Peer connection state handler.
    pc.onconnectionstatechange += (state) =>
    {
        Console.WriteLine($"Peer connection state change to {state}.");
        if (state == RTCPeerConnectionState.connected)
        {
            audioExtras.StartAudio();
            testPattern.StartVideo();
        }
        else if (state == RTCPeerConnectionState.failed)
        {
            if (userAgent.IsCallActive)
            {
                Console.WriteLine("ICE connection failed, hanging up active call.");
                userAgent.Hangup();
            }
        }
    };

    // Place the call and wait for the result.
    var callTask = userAgent.Call(DESTINATION, USERNAME, PASSWORD, pc);

    Console.CancelKeyPress += delegate (object sender, ConsoleCancelEventArgs e)
    {
        e.Cancel = true;

        if (userAgent != null)
        {
            if (userAgent.IsCalling || userAgent.IsRinging)
            {
                Console.WriteLine("Cancelling in progress call.");
                userAgent.Cancel();
            }
            else if (userAgent.IsCallActive)
            {
                Console.WriteLine("Hanging up established call.");
                userAgent.Hangup();
            }
        }

        exitCts.Cancel();
    };

    Console.WriteLine("press ctrl-c to exit...");

    bool callResult = await callTask;

    if (callResult)
    {
        Console.WriteLine($"Call to {DESTINATION} succeeded.");
        exitCts.Token.WaitHandle.WaitOne();
    }
    else
    {
        Console.WriteLine($"Call to {DESTINATION} failed.");
    }

    Console.WriteLine("Exiting...");

    if (userAgent?.IsHangingUp == true)
    {
        Console.WriteLine("Waiting 1s for the call hangup or cancel to complete...");
        await Task.Delay(1000);
    }

    // Clean up.
    sipTransport.Shutdown();
}
static async Task Main()
{
    Console.WriteLine("WebRTC Echo Test Client");

    logger = AddConsoleLogger();

    CancellationTokenSource cts = new CancellationTokenSource();

    #region Set up a simple Windows Form with two picture boxes.

    _form = new Form();
    _form.AutoSize = true;
    _form.BackgroundImageLayout = ImageLayout.Center;
    _sourceVideoPicBox = new PictureBox
    {
        Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
        Location = new Point(0, 0),
        Visible = true
    };
    _echoVideoPicBox = new PictureBox
    {
        Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
        Location = new Point(0, VIDEO_FRAME_HEIGHT),
        Visible = true
    };
    _form.Controls.Add(_sourceVideoPicBox);
    _form.Controls.Add(_echoVideoPicBox);

    Application.EnableVisualStyles();
    ThreadPool.QueueUserWorkItem(delegate { Application.Run(_form); });
    _form.FormClosing += (sender, e) => _isFormActivated = false;
    _form.Activated += (sender, e) => _isFormActivated = true;
    //_form.FormClosed += (sender, e) => // TODO.

    #endregion

    // Video sink and source to generate and consume VP8 video streams.
    var testPattern = new VideoTestPatternSource(new VpxVideoEncoder());
    var vp8VideoSink = new VideoEncoderEndPoint();

    #region Connect the video frames generated from the sink and source to the Windows form.

    testPattern.OnVideoSourceRawSample += (uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat) =>
    {
        if (_isFormActivated)
        {
            _form?.BeginInvoke(new Action(() =>
            {
                if (_form.Handle != IntPtr.Zero)
                {
                    unsafe
                    {
                        fixed (byte* s = sample)
                        {
                            var bmpImage = new Bitmap(width, height, width * 3, System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s);
                            _sourceVideoPicBox.Image = bmpImage;
                        }
                    }
                }
            }));
        }
    };

    vp8VideoSink.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
    {
        if (_isFormActivated)
        {
            _form?.BeginInvoke(new Action(() =>
            {
                if (_form.Handle != IntPtr.Zero)
                {
                    unsafe
                    {
                        fixed (byte* s = bmp)
                        {
                            var bmpImage = new Bitmap((int)width, (int)height, stride, PixelFormat.Format24bppRgb, (IntPtr)s);
                            _echoVideoPicBox.Image = bmpImage;
                        }
                    }
                }
            }));
        }
    };

    #endregion

    await testPattern.StartVideo().ConfigureAwait(false);

    var pc = await CreatePeerConnection(testPattern, vp8VideoSink).ConfigureAwait(false);

    Console.WriteLine($"Sending offer to {SIGNALING_SERVER}.");

    var signaler = new HttpClient();

    var offer = pc.createOffer(null);
    await pc.setLocalDescription(offer).ConfigureAwait(false);

    var content = new StringContent(offer.toJSON(), Encoding.UTF8, "application/json");
    var response = await signaler.PostAsync($"{SIGNALING_SERVER}", content).ConfigureAwait(false);
    var answerStr = await response.Content.ReadAsStringAsync().ConfigureAwait(false);

    if (RTCSessionDescriptionInit.TryParse(answerStr, out var answerInit))
    {
        var setAnswerResult = pc.setRemoteDescription(answerInit);
        if (setAnswerResult != SetDescriptionResultEnum.OK)
        {
            Console.WriteLine($"Set remote description failed {setAnswerResult}.");
        }
    }
    else
    {
        Console.WriteLine("Failed to parse SDP answer from signaling server.");
    }

    Console.WriteLine("Press any key to exit.");
    Console.ReadLine();
}
static async Task Main()
{
    Console.WriteLine("SIPSorcery Getting Started Video Call Demo");
    Console.WriteLine("Press ctrl-c to exit.");

    Log = AddConsoleLogger();

    ManualResetEvent exitMRE = new ManualResetEvent(false);

    var sipTransport = new SIPTransport();
    sipTransport.EnableTraceLogs();

    var userAgent = new SIPUserAgent(sipTransport, null, true);

    #region Set up a simple Windows Form with two picture boxes.

    _form = new Form();
    _form.AutoSize = true;
    _form.BackgroundImageLayout = ImageLayout.Center;
    _localVideoPicBox = new PictureBox
    {
        Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
        Location = new Point(0, 0),
        Visible = true
    };
    _remoteVideoPicBox = new PictureBox
    {
        Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
        Location = new Point(0, VIDEO_FRAME_HEIGHT),
        Visible = true
    };
    _form.Controls.Add(_localVideoPicBox);
    _form.Controls.Add(_remoteVideoPicBox);

    #endregion

    Application.EnableVisualStyles();
    ThreadPool.QueueUserWorkItem(delegate { Application.Run(_form); });
    _form.FormClosing += (sender, e) => _isFormActivated = false;
    _form.Activated += (sender, e) => _isFormActivated = true;
    _form.FormClosed += (sender, e) => userAgent.Hangup();
    userAgent.OnCallHungup += (dialog) =>
    {
        if (_isFormActivated)
        {
            _form.Close();
        }
    };

    // Video sink and source to generate and consume VP8 video streams.
    var testPattern = new VideoTestPatternSource(new VpxVideoEncoder());
    var vp8VideoSink = new VideoEncoderEndPoint();

    // Add the video sink and source to the media session.
    MediaEndPoints mediaEndPoints = new MediaEndPoints
    {
        VideoSink = vp8VideoSink,
        VideoSource = testPattern,
    };

    var voipMediaSession = new VoIPMediaSession(mediaEndPoints);
    voipMediaSession.AcceptRtpFromAny = true;

    #region Connect the video frames generated from the sink and source to the Windows form.

    testPattern.OnVideoSourceRawSample += (uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat) =>
    {
        if (_isFormActivated)
        {
            _form?.BeginInvoke(new Action(() =>
            {
                if (_form.Handle != IntPtr.Zero)
                {
                    unsafe
                    {
                        fixed (byte* s = sample)
                        {
                            var bmpImage = new Bitmap(width, height, width * 3, System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s);
                            _localVideoPicBox.Image = bmpImage;
                        }
                    }
                }
            }));
        }
    };

    vp8VideoSink.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
    {
        if (_isFormActivated)
        {
            _form?.BeginInvoke(new Action(() =>
            {
                if (_form.Handle != IntPtr.Zero)
                {
                    unsafe
                    {
                        fixed (byte* s = bmp)
                        {
                            var bmpImage = new Bitmap((int)width, (int)height, stride, PixelFormat.Format24bppRgb, (IntPtr)s);
                            _remoteVideoPicBox.Image = bmpImage;
                        }
                    }
                }
            }));
        }
    };

    #endregion

    // Place the call.
    var callResult = await userAgent.Call(DESTINATION, null, null, voipMediaSession).ConfigureAwait(false);

    Console.WriteLine($"Call result {(callResult ? "success" : "failure")}.");
    Console.WriteLine("Press any key to hangup and exit.");
    Console.ReadLine();

    if (userAgent.IsCallActive)
    {
        _isFormActivated = false;
        userAgent.Hangup();
        await Task.Delay(1000).ConfigureAwait(false);
    }

    sipTransport.Shutdown();
}
static async Task Main()
{
    Console.WriteLine("Janus Echo Test Demo");

    AddConsoleLogger();

    CancellationTokenSource cts = new CancellationTokenSource();
    bool isFormActivated = false;

    #region Set up a simple Windows Form with two picture boxes.

    var form = new Form();
    form.AutoSize = true;
    form.BackgroundImageLayout = ImageLayout.Center;
    var localVideoPicBox = new PictureBox
    {
        Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
        Location = new Point(0, 0),
        Visible = true
    };
    var remoteVideoPicBox = new PictureBox
    {
        Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
        Location = new Point(0, VIDEO_FRAME_HEIGHT),
        Visible = true
    };
    form.Controls.Add(localVideoPicBox);
    form.Controls.Add(remoteVideoPicBox);

    Application.EnableVisualStyles();
    ThreadPool.QueueUserWorkItem(delegate { Application.Run(form); });
    form.FormClosing += (sender, e) => isFormActivated = false;
    form.Activated += (sender, e) => isFormActivated = true;

    #endregion

    Console.WriteLine("Creating peer connection.");
    RTCPeerConnection pc = new RTCPeerConnection(null);

    var videoSource = new VideoTestPatternSource(new VpxVideoEncoder());
    var videoSink = new VideoEncoderEndPoint();

    MediaStreamTrack videoTrack = new MediaStreamTrack(videoSink.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);
    pc.addTrack(videoTrack);

    pc.OnVideoFrameReceived += videoSink.GotVideoFrame;
    videoSource.OnVideoSourceEncodedSample += pc.SendVideo;

    pc.OnVideoFormatsNegotiated += (formats) =>
    {
        videoSink.SetVideoSourceFormat(formats.First());
        videoSource.SetVideoSourceFormat(formats.First());
    };

    pc.OnTimeout += (mediaType) => Console.WriteLine($"Peer connection timeout on media {mediaType}.");
    pc.oniceconnectionstatechange += (state) => Console.WriteLine($"ICE connection state changed to {state}.");
    pc.onconnectionstatechange += async (state) =>
    {
        Console.WriteLine($"Peer connection state changed to {state}.");
        if (state == RTCPeerConnectionState.closed || state == RTCPeerConnectionState.failed)
        {
            await videoSource.CloseVideo().ConfigureAwait(false);
            videoSource.Dispose();
        }
    };

    #region Wire up the video source and sink to the picture boxes.

    videoSource.OnVideoSourceRawSample += (uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat) =>
    {
        if (isFormActivated)
        {
            form?.BeginInvoke(new Action(() =>
            {
                if (form.Handle != IntPtr.Zero)
                {
                    unsafe
                    {
                        fixed (byte* s = sample)
                        {
                            var bmpImage = new Bitmap(width, height, width * 3, PixelFormat.Format24bppRgb, (IntPtr)s);
                            localVideoPicBox.Image = bmpImage;
                        }
                    }
                }
            }));
        }
    };

    videoSink.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
    {
        if (isFormActivated)
        {
            form.BeginInvoke(new Action(() =>
            {
                unsafe
                {
                    fixed (byte* s = bmp)
                    {
                        Bitmap bmpImage = new Bitmap((int)width, (int)height, (int)(bmp.Length / height), PixelFormat.Format24bppRgb, (IntPtr)s);
                        remoteVideoPicBox.Image = bmpImage;
                    }
                }
            }));
        }
    };

    #endregion

    var offer = pc.CreateOffer(null);
    await pc.setLocalDescription(new RTCSessionDescriptionInit { type = RTCSdpType.offer, sdp = offer.ToString() }).ConfigureAwait(false);

    Console.WriteLine($"SDP Offer: {pc.localDescription.sdp}");

    await videoSource.StartVideo().ConfigureAwait(false);

    if (_useJanusRest)
    {
        JanusRestClient janusClient = new JanusRestClient(
            JANUS_BASE_URI,
            SIPSorcery.LogFactory.CreateLogger<JanusRestClient>(),
            cts.Token);

        //var serverInfo = await janusClient.GetServerInfo().ConfigureAwait(false);
        //Console.WriteLine($"Name={serverInfo.name}.");
        //Console.WriteLine($"Version={serverInfo.version}.");

        janusClient.OnJanusEvent += async (resp) =>
        {
            if (resp.jsep != null)
            {
                Console.WriteLine($"get event jsep={resp.jsep.type}.");
                Console.WriteLine($"SDP Answer: {resp.jsep.sdp}");

                var result = pc.setRemoteDescription(new RTCSessionDescriptionInit { type = RTCSdpType.answer, sdp = resp.jsep.sdp });
                Console.WriteLine($"SDP Answer: {pc.remoteDescription.sdp}");

                if (result == SetDescriptionResultEnum.OK)
                {
                    Console.WriteLine("Starting peer connection.");
                    await pc.Start().ConfigureAwait(false);
                }
                else
                {
                    Console.WriteLine($"Error setting remote SDP description {result}.");
                }
            }
        };

        await janusClient.StartSession().ConfigureAwait(false);
        await janusClient.StartEcho(offer.ToString()).ConfigureAwait(false);
    }
    else
    {
        //RestClient signalingClient = new RestClient($"{WEBRTC_SIGNALING_JANUS_URL}?duration=15");
        RestClient signalingClient = new RestClient($"{WEBRTC_SIGNALING_JANUS_URL}");

        var echoTestReq = new RestRequest(string.Empty, Method.POST, DataFormat.Json);
        echoTestReq.AddJsonBody(pc.localDescription.sdp.ToString());
        var echoTestResp = await signalingClient.ExecutePostAsync<string>(echoTestReq).ConfigureAwait(false);

        if (echoTestResp.IsSuccessful)
        {
            var sdpAnswer = echoTestResp.Data;
            Console.WriteLine($"SDP Answer: {sdpAnswer}");

            var result = pc.setRemoteDescription(new RTCSessionDescriptionInit { type = RTCSdpType.answer, sdp = sdpAnswer });
            Console.WriteLine($"SDP Answer: {pc.remoteDescription.sdp}");

            if (result == SetDescriptionResultEnum.OK)
            {
                Console.WriteLine("Starting peer connection.");
                await pc.Start().ConfigureAwait(false);
            }
            else
            {
                Console.WriteLine($"Error setting remote SDP description {result}.");
            }
        }
        else
        {
            Console.WriteLine($"Janus echo test plugin request failed {echoTestResp.ErrorMessage}.");
        }
    }

    Console.WriteLine("Press any key to exit...");
    Console.ReadLine();

    isFormActivated = false;
    cts.Cancel();
    //await janusClient.DestroySession().ConfigureAwait(false);
}