/// <summary>
/// Builds an RTCPeerConnection that sends a generated test-pattern video stream and a
/// music audio stream, and wires up logging for state changes and RTCP/STUN diagnostics.
/// </summary>
/// <returns>A completed task wrapping the configured (not yet connected) peer connection.</returns>
private static Task<RTCPeerConnection> CreatePeerConnection()
{
    // ICE configuration: a single STUN server for server-reflexive candidate gathering.
    RTCConfiguration config = new RTCConfiguration
    {
        iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } }
    };
    var peer = new RTCPeerConnection(config);

    // Media plumbing: synthetic video pattern -> encoder end point, plus a music audio source.
    var testPattern = new VideoTestPatternSource();
    var videoEndPoint = new VideoEncoderEndPoint();
    var musicSource = new AudioExtrasSource(
        new AudioEncoder(),
        new AudioSourceOptions { AudioSource = AudioSourcesEnum.Music });

    // Advertise one send/receive track per media type.
    peer.addTrack(new MediaStreamTrack(videoEndPoint.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv));
    peer.addTrack(new MediaStreamTrack(musicSource.GetAudioSourceFormats(), MediaStreamStatusEnum.SendRecv));

    // Raw pattern frames feed the encoder; encoded samples feed the RTP session.
    testPattern.OnVideoSourceRawSample += videoEndPoint.ExternalVideoSourceRawSample;
    videoEndPoint.OnVideoSourceEncodedSample += peer.SendVideo;
    musicSource.OnAudioSourceEncodedSample += peer.SendAudio;
    peer.OnVideoFormatsNegotiated += (formats) => videoEndPoint.SetVideoSourceFormat(formats.First());

    peer.onconnectionstatechange += async (state) =>
    {
        logger.LogDebug($"Peer connection state change to {state}.");
        if (state == RTCPeerConnectionState.connected)
        {
            // Only start producing media once the connection is up.
            await musicSource.StartAudio();
            await testPattern.StartVideo();
        }
        else if (state == RTCPeerConnectionState.failed)
        {
            peer.Close("ice disconnection");
        }
        else if (state == RTCPeerConnectionState.closed)
        {
            await testPattern.CloseVideo();
            await musicSource.CloseAudio();
        }
    };

    // Diagnostics.
    peer.OnReceiveReport += (re, media, rr) => logger.LogDebug($"RTCP Receive for {media} from {re}\n{rr.GetDebugSummary()}");
    peer.OnSendReport += (media, sr) => logger.LogDebug($"RTCP Send for {media}\n{sr.GetDebugSummary()}");
    peer.GetRtpChannel().OnStunMessageReceived += (msg, ep, isRelay) => logger.LogDebug($"STUN {msg.Header.MessageType} received from {ep}.");
    peer.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");

    return Task.FromResult(peer);
}
/// <summary>
/// WebRTC echo test client: shows a locally generated test pattern and the echoed
/// remote stream side by side in a WinForms window, exchanging SDP with the echo
/// server via a single HTTP POST.
/// Fix: the original never cancelled <c>cts</c>, never closed the peer connection and
/// never stopped the test-pattern source on exit; shutdown is now explicit.
/// </summary>
static async Task Main()
{
    Console.WriteLine("WebRTC Echo Test Client");

    logger = AddConsoleLogger();
    CancellationTokenSource cts = new CancellationTokenSource();

    #region Set up a simple Windows Form with two picture boxes.
    _form = new Form();
    _form.AutoSize = true;
    _form.BackgroundImageLayout = ImageLayout.Center;
    _sourceVideoPicBox = new PictureBox
    {
        Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
        Location = new Point(0, 0),
        Visible = true
    };
    _echoVideoPicBox = new PictureBox
    {
        Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
        Location = new Point(0, VIDEO_FRAME_HEIGHT),
        Visible = true
    };
    _form.Controls.Add(_sourceVideoPicBox);
    _form.Controls.Add(_echoVideoPicBox);

    Application.EnableVisualStyles();
    // Run the message loop off the main thread so Main can continue with signaling.
    ThreadPool.QueueUserWorkItem(delegate { Application.Run(_form); });
    _form.FormClosing += (sender, e) => _isFormActivated = false;
    _form.Activated += (sender, e) => _isFormActivated = true;
    //_form.FormClosed += (sender, e) => // TODO.
    #endregion

    // Video sink and source to generate and consume VP8 video streams.
    var testPattern = new VideoTestPatternSource(new VpxVideoEncoder());
    var vp8VideoSink = new VideoEncoderEndPoint();

    #region Connect the video frames generated from the sink and source to the Windows form.
    testPattern.OnVideoSourceRawSample += (uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat) =>
    {
        if (_isFormActivated)
        {
            _form?.BeginInvoke(new Action(() =>
            {
                if (_form.Handle != IntPtr.Zero)
                {
                    unsafe
                    {
                        fixed (byte* s = sample)
                        {
                            // NOTE(review): this Bitmap constructor does NOT copy the pixel
                            // buffer; the image is only valid while `sample` stays pinned/alive.
                            // Confirm the PictureBox paints before the next frame arrives.
                            var bmpImage = new Bitmap(width, height, width * 3, PixelFormat.Format24bppRgb, (IntPtr)s);
                            _sourceVideoPicBox.Image = bmpImage;
                        }
                    }
                }
            }));
        }
    };

    vp8VideoSink.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
    {
        if (_isFormActivated)
        {
            _form?.BeginInvoke(new Action(() =>
            {
                if (_form.Handle != IntPtr.Zero)
                {
                    unsafe
                    {
                        fixed (byte* s = bmp)
                        {
                            var bmpImage = new Bitmap((int)width, (int)height, stride, PixelFormat.Format24bppRgb, (IntPtr)s);
                            _echoVideoPicBox.Image = bmpImage;
                        }
                    }
                }
            }));
        }
    };
    #endregion

    await testPattern.StartVideo().ConfigureAwait(false);

    var pc = await CreatePeerConnection(testPattern, vp8VideoSink).ConfigureAwait(false);

    // SDP offer/answer exchange over a single HTTP POST to the signaling server.
    Console.WriteLine($"Sending offer to {SIGNALING_SERVER}.");

    var signaler = new HttpClient();
    var offer = pc.createOffer(null);
    await pc.setLocalDescription(offer).ConfigureAwait(false);

    var content = new StringContent(offer.toJSON(), Encoding.UTF8, "application/json");
    var response = await signaler.PostAsync($"{SIGNALING_SERVER}", content).ConfigureAwait(false);
    var answerStr = await response.Content.ReadAsStringAsync().ConfigureAwait(false);

    if (RTCSessionDescriptionInit.TryParse(answerStr, out var answerInit))
    {
        var setAnswerResult = pc.setRemoteDescription(answerInit);
        if (setAnswerResult != SetDescriptionResultEnum.OK)
        {
            Console.WriteLine($"Set remote description failed {setAnswerResult}.");
        }
    }
    else
    {
        Console.WriteLine("Failed to parse SDP answer from signaling server.");
    }

    Console.WriteLine("Press any key to exit.");
    Console.ReadLine();

    // Shut down cleanly: stop painting, cancel background work, close the peer
    // connection and stop the test-pattern source.
    _isFormActivated = false;
    cts.Cancel();
    pc.Close("normal");
    await testPattern.CloseVideo().ConfigureAwait(false);
}
/// <summary>
/// SIP video call demo: places a call to DESTINATION, sending a generated VP8 test
/// pattern and rendering both the local and remote streams in a WinForms window.
/// </summary>
static async Task Main()
{
    Console.WriteLine("SIPSorcery Getting Started Video Call Demo");
    Console.WriteLine("Press ctrl-c to exit.");

    Log = AddConsoleLogger();

    ManualResetEvent exitMRE = new ManualResetEvent(false);

    var transport = new SIPTransport();
    transport.EnableTraceLogs();
    var ua = new SIPUserAgent(transport, null, true);

    #region Set up a simple Windows Form with two picture boxes.
    _form = new Form();
    _form.AutoSize = true;
    _form.BackgroundImageLayout = ImageLayout.Center;
    _localVideoPicBox = new PictureBox
    {
        Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
        Location = new Point(0, 0),
        Visible = true
    };
    _remoteVideoPicBox = new PictureBox
    {
        Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
        Location = new Point(0, VIDEO_FRAME_HEIGHT),
        Visible = true
    };
    _form.Controls.Add(_localVideoPicBox);
    _form.Controls.Add(_remoteVideoPicBox);
    #endregion

    Application.EnableVisualStyles();
    // Run the message loop off the main thread so Main can continue with the call.
    ThreadPool.QueueUserWorkItem(delegate { Application.Run(_form); });
    _form.FormClosing += (sender, e) => _isFormActivated = false;
    _form.Activated += (sender, e) => _isFormActivated = true;

    // Closing the window ends the call, and vice versa.
    _form.FormClosed += (sender, e) => ua.Hangup();
    ua.OnCallHungup += (dialog) =>
    {
        if (_isFormActivated)
        {
            _form.Close();
        }
    };

    // Video sink and source to generate and consume VP8 video streams.
    var patternSource = new VideoTestPatternSource(new VpxVideoEncoder());
    var vp8Sink = new VideoEncoderEndPoint();

    // Add the video sink and source to the media session.
    MediaEndPoints mediaEndPoints = new MediaEndPoints
    {
        VideoSink = vp8Sink,
        VideoSource = patternSource,
    };
    var mediaSession = new VoIPMediaSession(mediaEndPoints);
    mediaSession.AcceptRtpFromAny = true;

    #region Connect the video frames generated from the sink and source to the Windows form.
    patternSource.OnVideoSourceRawSample += (uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat) =>
    {
        if (_isFormActivated)
        {
            _form?.BeginInvoke(new Action(() =>
            {
                if (_form.Handle != IntPtr.Zero)
                {
                    unsafe
                    {
                        fixed (byte* raw = sample)
                        {
                            var frame = new Bitmap(width, height, width * 3,
                                System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)raw);
                            _localVideoPicBox.Image = frame;
                        }
                    }
                }
            }));
        }
    };

    vp8Sink.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
    {
        if (_isFormActivated)
        {
            _form?.BeginInvoke(new Action(() =>
            {
                if (_form.Handle != IntPtr.Zero)
                {
                    unsafe
                    {
                        fixed (byte* raw = bmp)
                        {
                            var frame = new Bitmap((int)width, (int)height, stride,
                                PixelFormat.Format24bppRgb, (IntPtr)raw);
                            _remoteVideoPicBox.Image = frame;
                        }
                    }
                }
            }));
        }
    };
    #endregion

    // Place the call.
    var callResult = await ua.Call(DESTINATION, null, null, mediaSession).ConfigureAwait(false);
    Console.WriteLine($"Call result {((callResult) ? "success" : "failure")}.");

    Console.WriteLine("Press any key to hangup and exit.");
    Console.ReadLine();

    if (ua.IsCallActive)
    {
        // Stop painting before tearing the call down, then give the BYE a moment to go out.
        _isFormActivated = false;
        ua.Hangup();
        await Task.Delay(1000).ConfigureAwait(false);
    }

    transport.Shutdown();
}
/// <summary>
/// Janus echo test demo: sends a generated VP8 test pattern to a Janus echo plugin
/// (via its REST API or a simple signaling proxy) and renders both local and echoed
/// video in a WinForms window.
/// Fixes: the decoded-frame handler now uses the event-supplied <c>stride</c> instead
/// of recomputing it as <c>bmp.Length / height</c> (wrong for padded rows, and
/// inconsistent with the other demos in this file); the same handler gains the
/// <c>form?.</c>/<c>Handle</c> guards every other handler has; the peer connection is
/// closed on exit.
/// </summary>
static async Task Main()
{
    Console.WriteLine("Janus Echo Test Demo");

    AddConsoleLogger();
    CancellationTokenSource cts = new CancellationTokenSource();
    bool isFormActivated = false;

    #region Set up a simple Windows Form with two picture boxes.
    var form = new Form();
    form.AutoSize = true;
    form.BackgroundImageLayout = ImageLayout.Center;
    var localVideoPicBox = new PictureBox
    {
        Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
        Location = new Point(0, 0),
        Visible = true
    };
    var remoteVideoPicBox = new PictureBox
    {
        Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
        Location = new Point(0, VIDEO_FRAME_HEIGHT),
        Visible = true
    };
    form.Controls.Add(localVideoPicBox);
    form.Controls.Add(remoteVideoPicBox);

    Application.EnableVisualStyles();
    // Run the message loop off the main thread so Main can continue with signaling.
    ThreadPool.QueueUserWorkItem(delegate { Application.Run(form); });
    form.FormClosing += (sender, e) => isFormActivated = false;
    form.Activated += (sender, e) => isFormActivated = true;
    #endregion

    Console.WriteLine("Creating peer connection.");
    RTCPeerConnection pc = new RTCPeerConnection(null);

    var videoSource = new VideoTestPatternSource(new VpxVideoEncoder());
    var videoSink = new VideoEncoderEndPoint();

    MediaStreamTrack videoTrack = new MediaStreamTrack(videoSink.GetVideoSourceFormats(), MediaStreamStatusEnum.SendRecv);
    pc.addTrack(videoTrack);

    // Received frames go to the decoder sink; encoded pattern frames go out over RTP.
    pc.OnVideoFrameReceived += videoSink.GotVideoFrame;
    videoSource.OnVideoSourceEncodedSample += pc.SendVideo;
    pc.OnVideoFormatsNegotiated += (formats) =>
    {
        videoSink.SetVideoSourceFormat(formats.First());
        videoSource.SetVideoSourceFormat(formats.First());
    };

    pc.OnTimeout += (mediaType) => Console.WriteLine($"Peer connection timeout on media {mediaType}.");
    pc.oniceconnectionstatechange += (state) => Console.WriteLine($"ICE connection state changed to {state}.");
    pc.onconnectionstatechange += async (state) =>
    {
        Console.WriteLine($"Peer connection connected changed to {state}.");
        if (state == RTCPeerConnectionState.closed || state == RTCPeerConnectionState.failed)
        {
            await videoSource.CloseVideo().ConfigureAwait(false);
            videoSource.Dispose();
        }
    };

    #region Wire up the video source and sink to the picture boxes.
    videoSource.OnVideoSourceRawSample += (uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat) =>
    {
        if (isFormActivated)
        {
            form?.BeginInvoke(new Action(() =>
            {
                if (form.Handle != IntPtr.Zero)
                {
                    unsafe
                    {
                        fixed (byte* s = sample)
                        {
                            var bmpImage = new Bitmap(width, height, width * 3, PixelFormat.Format24bppRgb, (IntPtr)s);
                            localVideoPicBox.Image = bmpImage;
                        }
                    }
                }
            }));
        }
    };

    videoSink.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
    {
        if (isFormActivated)
        {
            form?.BeginInvoke(new Action(() =>
            {
                if (form.Handle != IntPtr.Zero)
                {
                    unsafe
                    {
                        fixed (byte* s = bmp)
                        {
                            // Use the stride supplied by the decoder; bmp.Length / height is
                            // only correct when rows have no padding.
                            Bitmap bmpImage = new Bitmap((int)width, (int)height, stride, PixelFormat.Format24bppRgb, (IntPtr)s);
                            remoteVideoPicBox.Image = bmpImage;
                        }
                    }
                }
            }));
        }
    };
    #endregion

    var offer = pc.CreateOffer(null);
    await pc.setLocalDescription(new RTCSessionDescriptionInit { type = RTCSdpType.offer, sdp = offer.ToString() }).ConfigureAwait(false);

    Console.WriteLine($"SDP Offer: {pc.localDescription.sdp}");

    await videoSource.StartVideo().ConfigureAwait(false);

    if (_useJanusRest)
    {
        // Talk to the Janus REST API directly: create a session, start the echo test
        // plugin and wait for the answer JSEP in the event stream.
        JanusRestClient janusClient = new JanusRestClient(
            JANUS_BASE_URI,
            SIPSorcery.LogFactory.CreateLogger<JanusRestClient>(),
            cts.Token);

        //var serverInfo = await janusClient.GetServerInfo().ConfigureAwait(false);
        //Console.WriteLine($"Name={serverInfo.name}.");
        //Console.WriteLine($"Version={serverInfo.version}.");

        janusClient.OnJanusEvent += async (resp) =>
        {
            if (resp.jsep != null)
            {
                Console.WriteLine($"get event jsep={resp.jsep.type}.");
                Console.WriteLine($"SDP Answer: {resp.jsep.sdp}");

                var result = pc.setRemoteDescription(new RTCSessionDescriptionInit { type = RTCSdpType.answer, sdp = resp.jsep.sdp });
                Console.WriteLine($"SDP Answer: {pc.remoteDescription.sdp}");

                if (result == SetDescriptionResultEnum.OK)
                {
                    Console.WriteLine("Starting peer connection.");
                    await pc.Start().ConfigureAwait(false);
                }
                else
                {
                    Console.WriteLine($"Error setting remote SDP description {result}.");
                }
            }
        };

        await janusClient.StartSession().ConfigureAwait(false);
        await janusClient.StartEcho(offer.ToString()).ConfigureAwait(false);
    }
    else
    {
        // Go via a simple HTTP signaling proxy that forwards the SDP to Janus.
        //RestClient signalingClient = new RestClient($"{WEBRTC_SIGNALING_JANUS_URL}?duration=15");
        RestClient signalingClient = new RestClient($"{WEBRTC_SIGNALING_JANUS_URL}");
        var echoTestReq = new RestRequest(string.Empty, Method.POST, DataFormat.Json);
        echoTestReq.AddJsonBody(pc.localDescription.sdp.ToString());
        var echoTestResp = await signalingClient.ExecutePostAsync<string>(echoTestReq).ConfigureAwait(false);

        if (echoTestResp.IsSuccessful)
        {
            var sdpAnswer = echoTestResp.Data;
            Console.WriteLine($"SDP Answer: {sdpAnswer}");

            var result = pc.setRemoteDescription(new RTCSessionDescriptionInit { type = RTCSdpType.answer, sdp = sdpAnswer });
            Console.WriteLine($"SDP Answer: {pc.remoteDescription.sdp}");

            if (result == SetDescriptionResultEnum.OK)
            {
                Console.WriteLine("Starting peer connection.");
                await pc.Start().ConfigureAwait(false);
            }
            else
            {
                Console.WriteLine($"Error setting remote SDP description {result}.");
            }
        }
        else
        {
            Console.WriteLine($"Janus echo test plugin request failed {echoTestResp.ErrorMessage}.");
        }
    }

    Console.WriteLine("Press any key to exit...");
    Console.ReadLine();

    // Shut down: stop painting, close the peer connection (triggers the state-change
    // handler above, which stops the video source) and cancel background work.
    isFormActivated = false;
    pc.Close("normal");
    cts.Cancel();
    //await janusClient.DestroySession().ConfigureAwait(false);
}