/// <summary>
/// Creates a new VoIP media session and wires the supplied media end points
/// (audio/video sources and sinks) into the underlying RTP session.
/// </summary>
/// <param name="mediaEndPoint">The media end points supplying and consuming audio/video. Cannot be null.</param>
/// <param name="bindAddress">Optional. The IP address to bind the RTP sockets to. Defaults to null (any).</param>
/// <param name="bindPort">Optional. The port to bind the RTP socket to. 0 lets the OS choose.</param>
/// <param name="testPatternSource">Optional. A test pattern video source used as a failover if the
/// webcam initialisation fails and as the video stream when the call is placed on hold.</param>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="mediaEndPoint"/> is null.</exception>
public VoIPMediaSession(
    MediaEndPoints mediaEndPoint,
    IPAddress bindAddress = null,
    int bindPort = 0,
    VideoTestPatternSource testPatternSource = null)
    : base(false, false, false, bindAddress, bindPort)
{
    if (mediaEndPoint == null)
    {
        throw new ArgumentNullException(nameof(mediaEndPoint), "The media end point parameter cannot be null.");
    }

    Media = mediaEndPoint;

    // The audio extras source is used for on-hold music.
    _audioExtrasSource = new AudioExtrasSource();
    _audioExtrasSource.OnAudioSourceEncodedSample += SendAudio;

    // Wire up the audio and video sample event handlers.
    if (Media.AudioSource != null)
    {
        var audioTrack = new MediaStreamTrack(Media.AudioSource.GetAudioSourceFormats());
        base.addTrack(audioTrack);
        Media.AudioSource.OnAudioSourceEncodedSample += base.SendAudio;
    }

    if (Media.VideoSource != null)
    {
        var videoTrack = new MediaStreamTrack(Media.VideoSource.GetVideoSourceFormats());
        base.addTrack(videoTrack);
        Media.VideoSource.OnVideoSourceEncodedSample += base.SendVideo;
        Media.VideoSource.OnVideoSourceError += VideoSource_OnVideoSourceError;

        if (testPatternSource != null)
        {
            // The test pattern source is used as failover if the webcam initialisation fails.
            // It's also used as the video stream if the call is put on hold.
            _videoTestPatternSource = testPatternSource;
            _videoTestPatternSource.OnVideoSourceEncodedSample += base.SendVideo;
        }
    }

    if (Media.VideoSink != null)
    {
        Media.VideoSink.OnVideoSinkDecodedSample += VideoSinkSampleReady;
        base.OnVideoFrameReceived += Media.VideoSink.GotVideoFrame;
    }

    if (Media.AudioSink != null)
    {
        base.OnRtpPacketReceived += RtpMediaPacketReceived;
    }

    base.OnAudioFormatsNegotiated += AudioFormatsNegotiated;
    base.OnVideoFormatsNegotiated += VideoFormatsNegotiated;
}
/// <summary>
/// Builds a VoIP media session backed by a mock audio-only end point, optionally
/// restricting the audio formats the mock source will offer.
/// </summary>
/// <param name="audioFormatFilter">Optional predicate limiting the supported audio formats.</param>
/// <returns>A new VoIP media session using the mock end point.</returns>
private VoIPMediaSession CreateMockVoIPMediaEndPoint(Func<AudioFormat, bool> audioFormatFilter = null)
{
    var source = new AudioExtrasSource();
    source.RestrictFormats(audioFormatFilter);

    var endPoints = new MediaEndPoints { AudioSource = source };

    return new VoIPMediaSession(endPoints);
}
/// <summary>
/// Builds a VoIP media session backed by a mock audio-only end point, optionally
/// restricting the audio codecs the mock source will offer.
/// </summary>
/// <param name="supportedCodecs">Optional list of audio codecs the mock source is limited to.</param>
/// <returns>A new VoIP media session using the mock end point.</returns>
private VoIPMediaSession CreateMockVoIPMediaEndPoint(List<AudioCodecsEnum> supportedCodecs = null)
{
    var source = new AudioExtrasSource();
    source.RestrictCodecs(supportedCodecs);

    var endPoints = new MediaEndPoints { AudioSource = source };

    return new VoIPMediaSession(endPoints);
}
/// <summary>
/// Creates the media session to use with the SIP call. The Windows audio and video
/// end points are used for both capture (source) and rendering (sink).
/// </summary>
/// <returns>A new media session object.</returns>
private VoIPMediaSession CreateMediaSession()
{
    var windowsAudioEndPoint = new WindowsAudioEndPoint(new AudioEncoder(), m_audioOutDeviceIndex);
    var windowsVideoEndPoint = new WindowsVideoEndPoint();

    // The same end point instances act as both source and sink for each media type.
    MediaEndPoints mediaEndPoints = new MediaEndPoints
    {
        AudioSink = windowsAudioEndPoint,
        AudioSource = windowsAudioEndPoint,
        VideoSink = windowsVideoEndPoint,
        VideoSource = windowsVideoEndPoint,
    };

    var voipMediaSession = new VoIPMediaSession(mediaEndPoints);
    // Accept RTP from any remote address; required when the remote party is behind NAT.
    voipMediaSession.AcceptRtpFromAny = true;

    return voipMediaSession;
}
/// <summary>
/// Creates the media session to use with the SIP call. A video test pattern source is
/// supplied as a fallback in case the Windows webcam cannot be accessed.
/// </summary>
/// <returns>A new media session object.</returns>
private VoIPMediaSession CreateMediaSession()
{
    var windowsAudioEndPoint = new WindowsAudioEndPoint(new AudioEncoder(), m_audioOutDeviceIndex);
    var windowsVideoEndPoint = new WindowsVideoEndPoint();

    // The same end point instances act as both source and sink for each media type.
    MediaEndPoints mediaEndPoints = new MediaEndPoints
    {
        AudioSink = windowsAudioEndPoint,
        AudioSource = windowsAudioEndPoint,
        VideoSink = windowsVideoEndPoint,
        VideoSource = windowsVideoEndPoint,
    };

    // Fallback video source if a Windows webcam cannot be accessed.
    var testPatternSource = new VideoTestPatternSource(new VideoEncoder());

    var voipMediaSession = new VoIPMediaSession(mediaEndPoints, testPatternSource);

    return voipMediaSession;
}
/// <summary>
/// Entry point. Places a video call to DESTINATION, rendering the local webcam feed and the
/// remote party's decoded video in a WinForms window, then waits for ctrl-c (or a remote
/// hangup) before cleaning up.
/// </summary>
static async Task Main()
{
    Console.WriteLine("SIPSorcery Getting Started Video Call Demo");
    Console.WriteLine("Press ctrl-c to exit.");

    Log = AddConsoleLogger();

    // Signalled by ctrl-c or by the remote party hanging up; gates program exit.
    ManualResetEvent exitMRE = new ManualResetEvent(false);

    _sipTransport = new SIPTransport();

    EnableTraceLogs(_sipTransport);

    // Open a window to display the video feed from the remote SIP party.
    _form = new Form();
    _form.AutoSize = true;
    _form.BackgroundImageLayout = ImageLayout.Center;
    // Local feed on top, remote feed stacked directly below it.
    _localVideoPicBox = new PictureBox
    {
        Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
        Location = new Point(0, 0),
        Visible = true
    };
    _remoteVideoPicBox = new PictureBox
    {
        Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
        Location = new Point(0, VIDEO_FRAME_HEIGHT),
        Visible = true
    };
    _form.Controls.Add(_localVideoPicBox);
    _form.Controls.Add(_remoteVideoPicBox);

    Application.EnableVisualStyles();
    // Run the WinForms message loop on a thread-pool thread so Main can continue.
    ThreadPool.QueueUserWorkItem(delegate { Application.Run(_form); });

    // Block until the form has been activated so subsequent BeginInvoke calls have a live handle.
    ManualResetEvent formMre = new ManualResetEvent(false);
    _form.Activated += (object sender, EventArgs e) => formMre.Set();
    Console.WriteLine("Waiting for form activation.");
    formMre.WaitOne();

    _sipTransport.SIPTransportRequestReceived += OnSIPTransportRequestReceived;

    // NOTE(review): executableDir is computed but never used in this method.
    string executableDir = Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location);

    var userAgent = new SIPUserAgent(_sipTransport, null, true);
    userAgent.OnCallHungup += (dialog) => exitMRE.Set();

    var windowsAudioEndPoint = new WindowsAudioEndPoint(new AudioEncoder());
    // Limit audio to PCMU (G.711 mu-law).
    windowsAudioEndPoint.RestrictFormats(format => format.Codec == AudioCodecsEnum.PCMU);
    var windowsVideoEndPoint = new WindowsVideoEndPoint(new VpxVideoEncoder());
    // Fallback to a test pattern source if accessing the Windows webcam fails.
    var testPattern = new VideoTestPatternSource(new VpxVideoEncoder());

    MediaEndPoints mediaEndPoints = new MediaEndPoints
    {
        AudioSink = windowsAudioEndPoint,
        AudioSource = windowsAudioEndPoint,
        VideoSink = windowsVideoEndPoint,
        VideoSource = windowsVideoEndPoint,
    };

    var voipMediaSession = new VoIPMediaSession(mediaEndPoints, testPattern);
    voipMediaSession.AcceptRtpFromAny = true;

    // Render each raw local video frame in the local picture box. Assumes the raw sample is
    // 24bpp with stride = width * 3 — TODO confirm against the video end point's raw format.
    windowsVideoEndPoint.OnVideoSourceRawSample += (uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat) =>
    {
        _form?.BeginInvoke(new Action(() =>
        {
            unsafe
            {
                fixed (byte* s = sample)
                {
                    // The Bitmap wraps the pinned buffer; it is assigned to the PictureBox
                    // inside the fixed block while the pointer is still valid.
                    System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap(width, height, width * 3, System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s);
                    _localVideoPicBox.Image = bmpImage;
                }
            }
        }));
    };

    Console.WriteLine("Starting local video source...");
    await windowsVideoEndPoint.StartVideo().ConfigureAwait(false);

    // Place the call and wait for the result.
    // NOTE(review): the Wait timeout result is ignored; if the call has not completed within
    // CALL_TIMEOUT_SECONDS, reading callTask.Result below will still block until it does.
    Task<bool> callTask = userAgent.Call(DESTINATION, null, null, voipMediaSession);
    callTask.Wait(CALL_TIMEOUT_SECONDS * 1000);

    if (callTask.Result)
    {
        Log.LogInformation("Call attempt successful.");

        // Render each decoded remote video frame in the bottom picture box.
        windowsVideoEndPoint.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
        {
            _form?.BeginInvoke(new Action(() =>
            {
                unsafe
                {
                    fixed (byte* s = bmp)
                    {
                        System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap((int)width, (int)height, stride, System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s);
                        _remoteVideoPicBox.Image = bmpImage;
                    }
                }
            }));
        };

        // Pause microphone capture and send on-hold music to the remote party instead.
        windowsAudioEndPoint.PauseAudio().Wait();
        voipMediaSession.AudioExtrasSource.SetSource(AudioSourcesEnum.Music);
    }
    else
    {
        Log.LogWarning("Call attempt failed.");
        Console.WriteLine("Press ctrl-c to exit.");
    }

    Console.CancelKeyPress += delegate (object sender, ConsoleCancelEventArgs e)
    {
        e.Cancel = true;
        Log.LogInformation("Exiting...");
        exitMRE.Set();
    };

    exitMRE.WaitOne();

    if (userAgent.IsCallActive)
    {
        Log.LogInformation("Hanging up.");
        userAgent.Hangup();
        // Give the hangup a moment to complete before shutting down the transport.
        Task.Delay(1000).Wait();
    }

    // Clean up.
    _form.BeginInvoke(new Action(() => _form.Close()));
    _sipTransport.Shutdown();
}
/// <summary>
/// Entry point for the video phone command line demo. Depending on the parsed options this
/// either: lists the available webcams, lists a webcam's frame formats, listens for and
/// answers an incoming call, or places an outgoing call to the specified destination.
/// </summary>
/// <param name="args">Command line arguments, parsed into an Options instance.</param>
static async Task Main(string[] args)
{
    Console.WriteLine("SIPSorcery Video Phone Command Line Demo");
    Console.WriteLine("Press ctrl-c to exit.");

    Log = AddConsoleLogger();

    // exitMRE gates final shutdown; waitForCallMre gates the listener until a call arrives.
    ManualResetEvent exitMRE = new ManualResetEvent(false);
    ManualResetEvent waitForCallMre = new ManualResetEvent(false);

    var parseResult = Parser.Default.ParseArguments<Options>(args);
    _options = (parseResult as Parsed<Options>)?.Value;

    if (parseResult.Tag != ParserResultType.NotParsed)
    {
        if (_options.ListCameras)
        {
            #region List webcams.

            var webcams = await WindowsVideoEndPoint.GetVideoCatpureDevices();
            if (webcams == null || webcams.Count == 0)
            {
                Console.WriteLine("No webcams were found.");
            }
            else
            {
                var index = 0;
                foreach (var webcam in webcams)
                {
                    Console.WriteLine($"{index}: \"{webcam.Name}\", use --cam={index}.");
                    index++;
                }
            }

            #endregion
        }
        else if (_options.ListFormats != null)
        {
            #region List webcam formats.

            var webcams = await WindowsVideoEndPoint.GetVideoCatpureDevices();
            if (webcams == null || webcams.Count == 0)
            {
                Console.WriteLine("No webcams were found.");
            }
            else if (_options.ListFormats >= webcams.Count)
            {
                Console.WriteLine($"No webcam available for index {_options.ListFormats}.");
            }
            else
            {
                string webcamName = webcams[_options.ListFormats.Value].Name;
                var formats = await WindowsVideoEndPoint.GetDeviceFrameFormats(webcamName);
                Console.WriteLine($"Video frame formats for {webcamName}.");
                foreach (var vidFmt in formats)
                {
                    float vidFps = vidFmt.MediaFrameFormat.FrameRate.Numerator / vidFmt.MediaFrameFormat.FrameRate.Denominator;
                    string pixFmt = vidFmt.MediaFrameFormat.Subtype == WindowsVideoEndPoint.MF_I420_PIXEL_FORMAT ? "I420" : vidFmt.MediaFrameFormat.Subtype;
                    Console.WriteLine($"{vidFmt.Width}x{vidFmt.Height} {vidFps:0.##}fps {pixFmt}");
                }
            }

            #endregion
        }
        else
        {
            // Normal call mode (placing or receiving a call).
            string webcamName = null;

            if (_options.WebcamIndex != null)
            {
                // Resolve the requested webcam index to a device name.
                var webcams = await WindowsVideoEndPoint.GetVideoCatpureDevices();
                if (webcams == null || webcams.Count == 0)
                {
                    Console.WriteLine("No webcams were found.");
                    // NOTE(review): Application.Exit does not stop execution of this method;
                    // the code below continues to run after it is called. Verify intended.
                    Application.Exit();
                }
                else if (webcams.Count < _options.WebcamIndex)
                {
                    Console.WriteLine($"No webcam available for index {_options.WebcamIndex}.");
                    Application.Exit();
                }
                else
                {
                    webcamName = webcams[_options.WebcamIndex.Value].Name;
                    Console.WriteLine($"Using webcam {webcamName}.");
                }
            }

            _sipTransport = new SIPTransport();

            if (string.IsNullOrEmpty(_options.CallDestination))
            {
                // We haven't been asked to place a call so we're listening.
                IPAddress listenAddress = (System.Net.Sockets.Socket.OSSupportsIPv6) ? IPAddress.IPv6Any : IPAddress.Any;
                var listenEndPoint = new IPEndPoint(listenAddress, SIP_PORT_DEFAULT);

                try
                {
                    SIPUDPChannel udpChannel = new SIPUDPChannel(listenEndPoint, true);
                    _sipTransport.AddSIPChannel(udpChannel);
                }
                catch (ApplicationException appExcp)
                {
                    // The default port was unavailable; fall back to an OS-assigned port.
                    Console.WriteLine($"Failed to create UDP SIP channel on {listenEndPoint}, error {appExcp.Message}.");
                    SIPUDPChannel udpChannel = new SIPUDPChannel(new IPEndPoint(listenAddress, 0), true);
                    _sipTransport.AddSIPChannel(udpChannel);
                }

                var listeningEP = _sipTransport.GetSIPChannels().First().ListeningSIPEndPoint;
                Console.WriteLine($"Listening for incoming call on {listeningEP}.");
            }

            EnableTraceLogs(_sipTransport);

            // Open a window to display the video feed from the remote SIP party.
            _form = new Form();
            _form.Text = string.IsNullOrEmpty(_options.CallDestination) ? "Listener" : "Caller";
            _form.AutoSize = true;
            _form.BackgroundImageLayout = ImageLayout.Center;
            // Local feed on top, remote feed stacked directly below it.
            _localVideoPicBox = new PictureBox
            {
                Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
                Location = new Point(0, 0),
                Visible = true
            };
            _remoteVideoPicBox = new PictureBox
            {
                Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
                Location = new Point(0, VIDEO_FRAME_HEIGHT),
                Visible = true
            };
            _form.Controls.Add(_localVideoPicBox);
            _form.Controls.Add(_remoteVideoPicBox);

            var userAgent = new SIPUserAgent(_sipTransport, null, true);
            userAgent.OnCallHungup += (dialog) => exitMRE.Set();

            WindowsAudioEndPoint windowsAudioEndPoint = null;
            if (!_options.NoAudio)
            {
                windowsAudioEndPoint = new WindowsAudioEndPoint(new AudioEncoder());
                // Limit audio to G.722.
                windowsAudioEndPoint.RestrictFormats(x => x.Codec == AudioCodecsEnum.G722);
            }

            MediaEndPoints mediaEndPoints = null;
            if (_options.TestPattern && _options.WebcamIndex == null)
            {
                // Use a generated test pattern as the video source instead of a webcam.
                var testPattern = new VideoTestPatternSource(new FFmpegVideoEncoder());
                var decoderSink = new DecoderVideoSink(new FFmpegVideoEncoder());
                //var decoderSink = new DecoderVideoSink(new VpxVideoEncoder());
                testPattern.RestrictFormats(format => format.Codec == VIDEO_CODEC);
                decoderSink.RestrictFormats(format => format.Codec == VIDEO_CODEC);
                mediaEndPoints = new MediaEndPoints
                {
                    AudioSink = windowsAudioEndPoint,
                    AudioSource = windowsAudioEndPoint,
                    VideoSink = decoderSink,
                    VideoSource = testPattern,
                };
            }
            else
            {
                // Use the default webcam, or the named device when an index was supplied.
                WindowsVideoEndPoint windowsVideoEndPoint = webcamName switch
                {
                    null => new WindowsVideoEndPoint(new FFmpegVideoEncoder()),
                    _ => new WindowsVideoEndPoint(new FFmpegVideoEncoder(), webcamName),
                };
                windowsVideoEndPoint.RestrictFormats(format => format.Codec == VIDEO_CODEC);
                mediaEndPoints = new MediaEndPoints
                {
                    AudioSink = windowsAudioEndPoint,
                    AudioSource = windowsAudioEndPoint,
                    VideoSink = windowsVideoEndPoint,
                    VideoSource = windowsVideoEndPoint,
                };
            }

            // Render each raw local video frame in the local picture box: I420 frames are
            // converted to BGR first, and the picture box is resized if the frame size changes.
            mediaEndPoints.VideoSource.OnVideoSourceRawSample += (uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat) =>
            {
                if (_isFormActivated)
                {
                    _form?.BeginInvoke(new Action(() =>
                    {
                        if (_form.Handle != IntPtr.Zero)
                        {
                            int stride = width * 3;

                            if (pixelFormat == VideoPixelFormatsEnum.I420)
                            {
                                sample = PixelConverter.I420toBGR(sample, width, height, out stride);
                            }

                            if (_localVideoPicBox.Width != width || _localVideoPicBox.Height != height)
                            {
                                Log.LogDebug($"Adjusting local video display from {_localVideoPicBox.Width}x{_localVideoPicBox.Height} to {width}x{height}.");
                                _localVideoPicBox.Width = width;
                                _localVideoPicBox.Height = height;
                            }

                            unsafe
                            {
                                fixed (byte* s = sample)
                                {
                                    System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap(width, height, stride, System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s);
                                    _localVideoPicBox.Image = bmpImage;
                                }
                            }
                        }
                    }));
                }
            };

            Console.CancelKeyPress += delegate (object sender, ConsoleCancelEventArgs e)
            {
                e.Cancel = true;
                Log.LogInformation("Exiting...");
                // Release both waits so the program can fall through to cleanup.
                waitForCallMre.Set();
                exitMRE.Set();
            };

            if (string.IsNullOrEmpty(_options.CallDestination))
            {
                // Listener mode: wait for an incoming call and answer it.
                ActivateForm();

                userAgent.OnIncomingCall += async (ua, req) =>
                {
                    var voipMediaSession = new VoIPMediaSession(mediaEndPoints);
                    voipMediaSession.AcceptRtpFromAny = true;
                    if (voipMediaSession.VideoLocalTrack != null)
                    {
                        voipMediaSession.VideoLocalTrack.MaximumBandwidth = MAXIMUM_VIDEO_BANDWIDTH;
                    }
                    var uas = userAgent.AcceptCall(req);
                    await userAgent.Answer(uas, voipMediaSession);
                    Console.WriteLine("Starting local video source...");
                    await mediaEndPoints.VideoSource.StartVideo().ConfigureAwait(false);
                    waitForCallMre.Set();
                };

                Console.WriteLine("Waiting for incoming call...");
                waitForCallMre.WaitOne();
            }
            else
            {
                // Caller mode: start the local video source and place the call.
                var voipMediaSession = new VoIPMediaSession(mediaEndPoints);
                voipMediaSession.AcceptRtpFromAny = true;
                if (voipMediaSession.VideoLocalTrack != null)
                {
                    voipMediaSession.VideoLocalTrack.MaximumBandwidth = MAXIMUM_VIDEO_BANDWIDTH;
                }

                ActivateForm();

                Console.WriteLine("Starting local video source...");
                await mediaEndPoints.VideoSource.StartVideo().ConfigureAwait(false);

                // Place the call and wait for the result.
                // NOTE(review): the Wait timeout result is not checked; the success test below
                // relies on userAgent.IsCallActive instead.
                Task<bool> callTask = userAgent.Call(_options.CallDestination, null, null, voipMediaSession);
                callTask.Wait(CALL_TIMEOUT_SECONDS * 1000);
            }

            if (userAgent.IsCallActive)
            {
                Log.LogInformation("Call attempt successful.");

                // Render each decoded remote frame in the remote picture box, resizing it
                // if the remote resolution changes.
                mediaEndPoints.VideoSink.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
                {
                    if (_isFormActivated)
                    {
                        _form?.BeginInvoke(new Action(() =>
                        {
                            if (_form.Handle != IntPtr.Zero)
                            {
                                unsafe
                                {
                                    if (_remoteVideoPicBox.Width != (int)width || _remoteVideoPicBox.Height != (int)height)
                                    {
                                        Log.LogDebug($"Adjusting remote video display from {_remoteVideoPicBox.Width}x{_remoteVideoPicBox.Height} to {width}x{height}.");
                                        _remoteVideoPicBox.Width = (int)width;
                                        _remoteVideoPicBox.Height = (int)height;
                                    }

                                    fixed (byte* s = bmp)
                                    {
                                        System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap((int)width, (int)height, stride, System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s);
                                        _remoteVideoPicBox.Image = bmpImage;
                                    }
                                }
                            }
                        }));
                    }
                };
            }
            else
            {
                Log.LogWarning("Call attempt failed.");
                Console.WriteLine("Press ctrl-c to exit.");
            }

            exitMRE.WaitOne();

            if (userAgent.IsCallActive)
            {
                Log.LogInformation("Hanging up.");
                userAgent.Hangup();
            }

            // Give any in-flight SIP transactions a moment to complete.
            Task.Delay(1000).Wait();

            // Clean up.
            if (_form.Handle != IntPtr.Zero)
            {
                _form.BeginInvoke(new Action(() => _form.Close()));
            }
            _sipTransport.Shutdown();
        }
    }
}
/// <summary>
/// Entry point. Places a video call to DESTINATION using a generated test pattern as the
/// video source, displays both the local and decoded remote video streams in a WinForms
/// window, and hangs up when a key is pressed (or the form is closed).
/// </summary>
static async Task Main()
{
    Console.WriteLine("SIPSorcery Getting Started Video Call Demo");
    Console.WriteLine("Press ctrl-c to exit.");

    Log = AddConsoleLogger();

    ManualResetEvent exitMRE = new ManualResetEvent(false);

    var sipTransport = new SIPTransport();
    sipTransport.EnableTraceLogs();

    var userAgent = new SIPUserAgent(sipTransport, null, true);

    #region Set up a simple Windows Form with two picture boxes.

    _form = new Form();
    _form.AutoSize = true;
    _form.BackgroundImageLayout = ImageLayout.Center;
    // Local (test pattern) feed on top, decoded remote feed below it.
    _localVideoPicBox = new PictureBox
    {
        Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
        Location = new Point(0, 0),
        Visible = true
    };
    _remoteVideoPicBox = new PictureBox
    {
        Size = new Size(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT),
        Location = new Point(0, VIDEO_FRAME_HEIGHT),
        Visible = true
    };
    _form.Controls.Add(_localVideoPicBox);
    _form.Controls.Add(_remoteVideoPicBox);

    #endregion

    Application.EnableVisualStyles();
    // Run the WinForms message loop on a thread-pool thread so Main can continue.
    ThreadPool.QueueUserWorkItem(delegate { Application.Run(_form); });
    _form.FormClosing += (sender, e) => _isFormActivated = false;
    _form.Activated += (sender, e) => _isFormActivated = true;
    // Closing the form hangs up the call; conversely a remote hangup closes the form.
    _form.FormClosed += (sender, e) => userAgent.Hangup();
    userAgent.OnCallHungup += (dialog) => { if (_isFormActivated) { _form.Close(); } };

    // Video sink and source to generate and consume VP8 video streams.
    var testPattern = new VideoTestPatternSource(new VpxVideoEncoder());
    var vp8VideoSink = new VideoEncoderEndPoint();

    // Add the video sink and source to the media session.
    MediaEndPoints mediaEndPoints = new MediaEndPoints
    {
        VideoSink = vp8VideoSink,
        VideoSource = testPattern,
    };

    var voipMediaSession = new VoIPMediaSession(mediaEndPoints);
    voipMediaSession.AcceptRtpFromAny = true;

    #region Connect the video frames generate from the sink and source to the Windows form.

    // Display each raw test pattern frame. Assumes a 24bpp sample with stride = width * 3 —
    // TODO confirm against the test pattern source's raw sample format.
    testPattern.OnVideoSourceRawSample += (uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat) =>
    {
        if (_isFormActivated)
        {
            _form?.BeginInvoke(new Action(() =>
            {
                if (_form.Handle != IntPtr.Zero)
                {
                    unsafe
                    {
                        fixed (byte* s = sample)
                        {
                            var bmpImage = new Bitmap(width, height, width * 3, System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)s);
                            _localVideoPicBox.Image = bmpImage;
                        }
                    }
                }
            }));
        }
    };

    // Display each decoded remote frame.
    vp8VideoSink.OnVideoSinkDecodedSample += (byte[] bmp, uint width, uint height, int stride, VideoPixelFormatsEnum pixelFormat) =>
    {
        if (_isFormActivated)
        {
            _form?.BeginInvoke(new Action(() =>
            {
                if (_form.Handle != IntPtr.Zero)
                {
                    unsafe
                    {
                        fixed (byte* s = bmp)
                        {
                            var bmpImage = new Bitmap((int)width, (int)height, stride, PixelFormat.Format24bppRgb, (IntPtr)s);
                            _remoteVideoPicBox.Image = bmpImage;
                        }
                    }
                }
            }));
        }
    };

    #endregion

    // Place the call.
    var callResult = await userAgent.Call(DESTINATION, null, null, voipMediaSession).ConfigureAwait(false);
    Console.WriteLine($"Call result {((callResult) ? "success" : "failure")}.");

    Console.WriteLine("Press any key to hangup and exit.");
    Console.ReadLine();

    if (userAgent.IsCallActive)
    {
        // Prevent the hangup handler from re-entering via the form close path.
        _isFormActivated = false;
        userAgent.Hangup();
        // Give the hangup a moment to complete before shutting down the transport.
        await Task.Delay(1000).ConfigureAwait(false);
    }

    sipTransport.Shutdown();
}
/// <summary>
/// Convenience constructor. Creates a VoIP media session with a test pattern video source,
/// using the default bind address (null) and an OS-selected bind port (0).
/// </summary>
/// <param name="mediaEndPoint">The media end points supplying and consuming audio/video.</param>
/// <param name="testPatternSource">The test pattern video source, used as a webcam failover
/// and as the on-hold video stream.</param>
public VoIPMediaSession(MediaEndPoints mediaEndPoint, VideoTestPatternSource testPatternSource)
    : this(mediaEndPoint, null, 0, testPatternSource)
{ }