Example #1
        /// <summary>
        /// This class manages the different media renderers that can be included in a call, e.g. audio and video.
        /// </summary>
        /// <param name="dispatcher">Need a UI dispatcher so tasks can be executed on the UI thread. For example this object
        /// gets created when a button is clicked on and is therefore owned by the UI thread. When a call transfer completes the
        /// resources need to be closed without any UI interaction. In that case need to marshal back to the UI thread.</param>
        /// <param name="useVideo">Set to true if the current call is going to be using video.</param>
        public MediaManager(Dispatcher dispatcher, bool useVideo = false)
        {
            _dispatcher = dispatcher;
            _useVideo   = useVideo;

            if (_useVideo)
            {
                // The VpxEncoder class wraps both the VP8 encode and decode functions,
                // hence it is also used to initialise the decoder. Check the result,
                // as the other examples below do.
                _vpxDecoder = new VpxEncoder();
                int res = _vpxDecoder.InitDecoder();

                if (res != 0)
                {
                    throw new ApplicationException("VPX decoder initialisation failed.");
                }

                _imageConverter = new ImageConvert();
            }
        }
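
A minimal usage sketch for this constructor, assuming a WPF window with a call button (the CallButton_Click handler and _mediaManager field are illustrative names, not part of the class above):

        private MediaManager _mediaManager;

        private void CallButton_Click(object sender, RoutedEventArgs e)
        {
            // The click handler runs on the UI thread, so the window's Dispatcher
            // property is the UI dispatcher the MediaManager needs for marshalling.
            _mediaManager = new MediaManager(Dispatcher, useVideo: true);
        }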
Example #2
        /// <summary>
        /// Creates a new RTP audio visual session with audio/video capturing and rendering capabilities.
        /// </summary>
        /// <param name="addrFamily">The address family to create the underlying socket on (IPv4 or IPv6).</param>
        /// <param name="audioOptions">Options for the send and receive audio streams on this session.</param>
        /// <param name="videoOptions">Options for the send and receive video streams on this session</param>
        public RtpAVSession(AddressFamily addrFamily, AudioOptions audioOptions, VideoOptions videoOptions)
            : base(addrFamily, false, false, false)
        {
            _audioOpts = audioOptions ?? DefaultAudioOptions;
            _videoOpts = videoOptions ?? DefaultVideoOptions;

            // Initialise the video decoding objects. Even if we are not sourcing video
            // we need to be ready to receive and render.
            _vpxDecoder = new VpxEncoder();
            int res = _vpxDecoder.InitDecoder();

            if (res != 0)
            {
                throw new ApplicationException("VPX decoder initialisation failed.");
            }
            _imgConverter = new ImageConvert();

            if (_audioOpts.AudioSource != AudioSourcesEnum.None)
            {
                var pcmu = new SDPMediaFormat(SDPMediaFormatsEnum.PCMU);

                // RTP event support (RFC 4733 telephone-events, used to signal DTMF tones).
                int            clockRate      = pcmu.GetClockRate();
                SDPMediaFormat rtpEventFormat = new SDPMediaFormat(DTMF_EVENT_PAYLOAD_ID);
                rtpEventFormat.SetFormatAttribute($"{TELEPHONE_EVENT_ATTRIBUTE}/{clockRate}");
                rtpEventFormat.SetFormatParameterAttribute("0-16");

                var audioCapabilities = new List<SDPMediaFormat> { pcmu, rtpEventFormat };

                MediaStreamTrack audioTrack = new MediaStreamTrack(null, SDPMediaTypesEnum.audio, false, audioCapabilities);
                addTrack(audioTrack);
            }

            if (_videoOpts.VideoSource != VideoSourcesEnum.None)
            {
                MediaStreamTrack videoTrack = new MediaStreamTrack(null, SDPMediaTypesEnum.video, false, new List<SDPMediaFormat> {
                    new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
                });
                addTrack(videoTrack);
            }

            // Where the magic (for processing received media) happens.
            base.OnRtpPacketReceived += RtpPacketReceived;
        }
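
A hedged usage sketch for this constructor, capturing audio only (AudioSourcesEnum.Microphone is assumed to be a member of the same enum as the None value checked above):

            var audioOpts = new AudioOptions { AudioSource = AudioSourcesEnum.Microphone };

            // Passing null for the video options falls back to DefaultVideoOptions,
            // and the IPv4 address family selects an IPv4 RTP socket.
            var rtpSession = new RtpAVSession(AddressFamily.InterNetwork, audioOpts, null);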
Example #3
        static void Main(string[] args)
        {
            AddConsoleLogger();

            // Initialise the VP8 decoder. As in the earlier examples, the VpxEncoder
            // class wraps both the encode and decode functions.
            _vpxEncoder = new VpxEncoder();
            int res = _vpxEncoder.InitDecoder();

            if (res != 0)
            {
                throw new ApplicationException("VPX decoder initialisation failed.");
            }

            _imgConverter = new ImageConvert();

            // Start web socket.
            Console.WriteLine("Starting web socket server...");
            _webSocketServer = new WebSocketServer(IPAddress.Any, WEBSOCKET_PORT, true);
            _webSocketServer.SslConfiguration.ServerCertificate          = new System.Security.Cryptography.X509Certificates.X509Certificate2(LOCALHOST_CERTIFICATE_PATH);
            _webSocketServer.SslConfiguration.CheckCertificateRevocation = false;
            _webSocketServer.AddWebSocketService<SDPExchange>("/", (sdpExchanger) =>
            {
                sdpExchanger.WebSocketOpened   += WebSocketOpened;
                sdpExchanger.OnMessageReceived += WebSocketMessageReceived;
            });
            _webSocketServer.Start();

            Console.WriteLine($"Waiting for browser web socket connection to {_webSocketServer.Address}:{_webSocketServer.Port}...");

            // Open a Window to display the video feed from the WebRTC peer.
            _form          = new Form();
            _form.AutoSize = true;
            _form.BackgroundImageLayout = ImageLayout.Center;
            _picBox = new PictureBox
            {
                Size     = new Size(640, 480),
                Location = new Point(0, 0),
                Visible  = true
            };
            _form.Controls.Add(_picBox);

            Application.EnableVisualStyles();
            Application.Run(_form);
        }
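
The AddWebSocketService call above relies on an SDPExchange class that is not shown. A minimal sketch of what it could look like, built on websocket-sharp's WebSocketBehavior (the event names match the wiring above, but their signatures are assumptions):

        // Requires the WebSocketSharp and WebSocketSharp.Server namespaces.
        public class SDPExchange : WebSocketBehavior
        {
            // Event signatures are assumed; the WebSocketOpened and
            // WebSocketMessageReceived handlers in Main must match them.
            public event Action<WebSocketContext> WebSocketOpened;
            public event Action<string> OnMessageReceived;

            protected override void OnOpen()
            {
                base.OnOpen();
                WebSocketOpened?.Invoke(Context);
            }

            protected override void OnMessage(MessageEventArgs e)
            {
                OnMessageReceived?.Invoke(e.Data);
            }
        }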
Example #4
        /// <summary>
        /// Creates a new RTP audio visual session with audio/video capturing and rendering capabilities.
        /// </summary>
        /// <param name="addrFamily">The address family to create the underlying socket on (IPv4 or IPv6).</param>
        /// <param name="audioOptions">Options for the send and receive audio streams on this session.</param>
        /// <param name="videoOptions">Options for the send and receive video streams on this session</param>
        /// <param name="bindAddress">Optional. If specified this address will be used as the bind address for any RTP
        /// and control sockets created. Generally this address does not need to be set. The default behaviour
        /// is to bind to [::] or 0.0.0.0, depending on system support, which minimises network routing
        /// causing connection issues.</param>
        /// <param name="disableExternalAudioSource">If true then no attempt will be made to use an external audio
        /// source, e.g. microphone.</param>
        public RtpAVSession(AudioOptions audioOptions, VideoOptions videoOptions, IPAddress bindAddress = null, bool disableExternalAudioSource = false)
            : base(false, false, false, bindAddress)
        {
            _audioOpts = audioOptions ?? DefaultAudioOptions;
            _videoOpts = videoOptions ?? DefaultVideoOptions;
            _disableExternalAudioSource = disableExternalAudioSource;

            if (_audioOpts != null && _audioOpts.AudioCodecs != null &&
                _audioOpts.AudioCodecs.Any(x => !(x == SDPMediaFormatsEnum.PCMU || x == SDPMediaFormatsEnum.PCMA || x == SDPMediaFormatsEnum.G722)))
            {
                throw new ApplicationException("Only PCMA, PCMU and G722 are supported for audio codec options.");
            }

            // Initialise the video decoding objects. Even if we are not sourcing video
            // we need to be ready to receive and render.
            _vpxDecoder = new VpxEncoder();
            int res = _vpxDecoder.InitDecoder();

            if (res != 0)
            {
                throw new ApplicationException("VPX decoder initialisation failed.");
            }
            _imgConverter = new ImageConvert();

            if (_audioOpts.AudioSource != AudioSourcesEnum.None)
            {
                var pcmu = new SDPMediaFormat(SDPMediaFormatsEnum.PCMU);

                //// RTP event support.
                //int clockRate = pcmu.GetClockRate();
                //SDPMediaFormat rtpEventFormat = new SDPMediaFormat(DTMF_EVENT_PAYLOAD_ID);
                //rtpEventFormat.SetFormatAttribute($"{SDP.TELEPHONE_EVENT_ATTRIBUTE}/{clockRate}");
                //rtpEventFormat.SetFormatParameterAttribute("0-16");

                var audioCapabilities = new List<SDPMediaFormat>();
                if (_audioOpts.AudioCodecs == null || _audioOpts.AudioCodecs.Count == 0)
                {
                    audioCapabilities.Add(pcmu);
                }
                else
                {
                    foreach (var codec in _audioOpts.AudioCodecs)
                    {
                        audioCapabilities.Add(new SDPMediaFormat(codec));
                    }
                }
                //audioCapabilities.Add(rtpEventFormat);

                if (audioCapabilities.Any(x => x.FormatCodec == SDPMediaFormatsEnum.G722))
                {
                    // 64000 bits/s is the standard G722 rate; separate codec state is
                    // kept for the encode and decode directions.
                    _g722Encode      = new G722Codec();
                    _g722EncodeState = new G722CodecState(64000, G722Flags.None);
                    _g722Decode      = new G722Codec();
                    _g722DecodeState = new G722CodecState(64000, G722Flags.None);
                }

                MediaStreamTrack audioTrack = new MediaStreamTrack(SDPMediaTypesEnum.audio, false, audioCapabilities);
                addTrack(audioTrack);
            }

            if (_videoOpts.VideoSource != VideoSourcesEnum.None)
            {
                MediaStreamTrack videoTrack = new MediaStreamTrack(SDPMediaTypesEnum.video, false, new List<SDPMediaFormat> {
                    new SDPMediaFormat(SDPMediaFormatsEnum.VP8)
                });
                addTrack(videoTrack);
            }

            // Where the magic (for processing received media) happens.
            base.OnRtpPacketReceived += RtpPacketReceived;
        }
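
A hedged usage sketch for this overload, restricting the audio offer to G722 and binding to a specific local address (the address and the Microphone enum member are illustrative assumptions):

            var audioOpts = new AudioOptions
            {
                AudioSource = AudioSourcesEnum.Microphone, // assumed enum member
                AudioCodecs = new List<SDPMediaFormatsEnum> { SDPMediaFormatsEnum.G722 }
            };

            // Bind the RTP and control sockets to a specific local interface rather
            // than the default wildcard address.
            var session = new RtpAVSession(audioOpts, null, IPAddress.Parse("192.168.0.50"));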