Example #1
        public IEnumerator VideoStreamAddTrackAndRemoveTrack()
        {
            var width  = 256;
            var height = 256;
            var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
            var rt     = new UnityEngine.RenderTexture(width, height, 0, format);

            rt.Create();
            var stream = new MediaStream();
            var track  = new VideoStreamTrack("video", rt);

            // Wait for the encoder to finish initializing on the render thread.
            yield return 0;

            Assert.That(track.Kind, Is.EqualTo(TrackKind.Video));
            Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(0));
            Assert.That(stream.AddTrack(track), Is.True);
            Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(1));
            Assert.That(stream.GetVideoTracks(), Has.All.Not.Null);
            Assert.That(stream.RemoveTrack(track), Is.True);
            Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(0));
            track.Dispose();
            // Wait for the video track to be disposed.
            yield return 0;

            stream.Dispose();
            Object.DestroyImmediate(rt);
        }
Example #2
        public IEnumerator MediaStreamAddTrack()
        {
            var width  = 256;
            var height = 256;
            var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
            var rt     = new UnityEngine.RenderTexture(width, height, 0, format);

            rt.Create();
            var stream = new MediaStream();
            var track  = new VideoStreamTrack("video", rt);

            yield return new WaitForSeconds(0.1f);

            Assert.AreEqual(TrackKind.Video, track.Kind);
            Assert.AreEqual(0, stream.GetVideoTracks().Count());
            Assert.True(stream.AddTrack(track));
            Assert.AreEqual(1, stream.GetVideoTracks().Count());
            Assert.NotNull(stream.GetVideoTracks().First());
            Assert.True(stream.RemoveTrack(track));
            Assert.AreEqual(0, stream.GetVideoTracks().Count());
            track.Dispose();
            yield return new WaitForSeconds(0.1f);

            stream.Dispose();
            Object.DestroyImmediate(rt);
        }
Example #3
 /// <summary>
 /// Enables the local video stream.
 /// </summary>
 public void EnableLocalVideoStream()
 {
     lock (MediaLock)
     {
         if (_mediaStream != null)
         {
             foreach (MediaVideoTrack videoTrack in _mediaStream.GetVideoTracks())
             {
                 videoTrack.Enabled = true;
             }
         }
         VideoEnabled = true;
     }
 }
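The matching disable path is not shown in this sample; a minimal sketch, assuming it mirrors the method above and reuses the same MediaLock, _mediaStream, and VideoEnabled members:

 /// <summary>
 /// Disables the local video stream (hypothetical counterpart to the method above).
 /// </summary>
 public void DisableLocalVideoStream()
 {
     lock (MediaLock)
     {
         if (_mediaStream != null)
         {
             foreach (MediaVideoTrack videoTrack in _mediaStream.GetVideoTracks())
             {
                 // Disable each track rather than removing it from the stream.
                 videoTrack.Enabled = false;
             }
         }
         VideoEnabled = false;
     }
 }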
Example #4
        public IEnumerator VideoStreamAddTrackAndRemoveTrack()
        {
            var width  = 256;
            var height = 256;
            var format = WebRTC.GetSupportedRenderTextureFormat(UnityEngine.SystemInfo.graphicsDeviceType);
            var rt     = new UnityEngine.RenderTexture(width, height, 0, format);

            rt.Create();
            var stream = new MediaStream();
            var track  = new VideoStreamTrack(rt);

            bool isCalledOnAddTrack    = false;
            bool isCalledOnRemoveTrack = false;

            stream.OnAddTrack = e =>
            {
                Assert.That(e.Track, Is.EqualTo(track));
                isCalledOnAddTrack = true;
            };
            stream.OnRemoveTrack = e =>
            {
                Assert.That(e.Track, Is.EqualTo(track));
                isCalledOnRemoveTrack = true;
            };

            // Wait for the encoder to finish initializing on the render thread.
            yield return 0;

            Assert.That(track.Kind, Is.EqualTo(TrackKind.Video));
            Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(0));
            Assert.That(stream.AddTrack(track), Is.True);
            Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(1));
            Assert.That(stream.GetVideoTracks(), Has.All.Not.Null);
            Assert.That(stream.RemoveTrack(track), Is.True);
            Assert.That(stream.GetVideoTracks(), Has.Count.EqualTo(0));

            var op1 = new WaitUntilWithTimeout(() => isCalledOnAddTrack, 5000);
            yield return op1;

            var op2 = new WaitUntilWithTimeout(() => isCalledOnRemoveTrack, 5000);
            yield return op2;

            track.Dispose();

            stream.Dispose();
            Object.DestroyImmediate(rt);
        }
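WaitUntilWithTimeout is a helper from the test project rather than a Unity or WebRTC API; a minimal sketch of what such a helper might look like, assuming it is a CustomYieldInstruction that polls a predicate until it returns true or a millisecond timeout elapses:

using System;
using UnityEngine;

// Hypothetical sketch of the WaitUntilWithTimeout helper used above.
class WaitUntilWithTimeout : CustomYieldInstruction
{
    private readonly Func<bool> _predicate;
    private readonly float _deadline;

    public bool IsCompleted { get; private set; }

    public WaitUntilWithTimeout(Func<bool> predicate, int timeoutMilliseconds)
    {
        _predicate = predicate;
        _deadline = Time.realtimeSinceStartup + timeoutMilliseconds * 0.001f;
    }

    // Unity keeps yielding while keepWaiting is true.
    public override bool keepWaiting
    {
        get
        {
            IsCompleted = _predicate();
            return !IsCompleted && Time.realtimeSinceStartup < _deadline;
        }
    }
}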
Example #5
        //private void _remoteRtcPeerConnection_OnIceConnectionChange(RTCPeerConnectionIceStateChangeEvent __param0)
        //{
        //    Debug.WriteLine($"Entered _remoteRtcPeerConnection_OnIceConnectionChange {__param0.State}");
        //}

        //private async void _remoteRtcPeerConnection_OnIceCandidate(RTCPeerConnectionIceEvent __param0)
        //{
        //    Debug.WriteLine("Entered _remoteRtcPeerConnection_OnIceCandidate");
        //    await _remoteRtcPeerConnection.AddIceCandidate(__param0.Candidate);
        //}

        //private void _remoteRtcPeerConnection_OnAddStream(MediaStreamEvent __param0)
        //{
        //    MediaStream remoteStream = __param0.Stream;
        //    var tracks = remoteStream.GetVideoTracks();
        //    _media.AddVideoTrackMediaElementPair(tracks.FirstOrDefault(), _remoteVideo, remoteStream.Id);
        //    Debug.WriteLine("Received a remote stream");
        //}

        private void _peerConnection_OnAddStream(MediaStreamEvent __param0)
        {
            MediaStream remoteStream = __param0.Stream;
            var         tracks       = remoteStream.GetVideoTracks();

            _media.AddVideoTrackMediaElementPair(tracks.FirstOrDefault(), _remoteVideo, remoteStream.Id);
            Debug.WriteLine("Received a remote stream");
        }
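For context, this handler is subscribed to the peer connection's OnAddStream event; Example #13 below shows the same wiring:

        // Subscribing the handler (same pattern as in Example #13):
        _peerConnection.OnAddStream += _peerConnection_OnAddStream;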
Example #6
 internal override async Task OnAddStream(MediaStream stream)
 {
     Context.RemoteStream = stream;
     var tracks = stream.GetVideoTracks();
     if (tracks.Count > 0)
     {
         var source = Context.Media.CreateMediaStreamSource(tracks[0], 30, "PEER");
         Context.RemoteVideoRenderer.SetupRenderer(Context.ForegroundProcessId, source);
     }
 }
        internal override async Task OnAddStreamAsync(MediaStream stream)
        {
            Context.RemoteStream = stream;
            var tracks = stream.GetVideoTracks();

            if (tracks.Count > 0)
            {
                var source = RtcManager.Instance.Media.CreateMediaSource(tracks[0], CallContext.PeerMediaStreamId);
                Context.RemoteVideoRenderer.SetupRenderer(Context.ForegroundProcessId, source, Context.RemoteVideoControlSize);
            }
        }
Example #8
        internal override async Task OnAddStream(MediaStream stream)
        {
            Context.RemoteStream = stream;
            var tracks = stream.GetVideoTracks();
            if (tracks.Count > 0)
            {
#if WIN10
                var source = Context.Media.CreateMediaSource(tracks[0], VoipContext.PeerMediaStreamId);
#else
                var source = Context.Media.CreateMediaStreamSource(tracks[0], 30, VoipContext.PeerMediaStreamId);
#endif
                Context.RemoteVideoRenderer.SetupRenderer(Context.ForegroundProcessId, source, Context.RemoteVideoControlSize);
            }
        }
Example #9
        internal override async Task OnAddStream(MediaStream stream)
        {
            Context.RemoteStream = stream;
            var tracks = stream.GetVideoTracks();

            if (tracks.Count > 0)
            {
#if WIN10
                var source = Context.Media.CreateMediaSource(tracks[0], VoipContext.PeerMediaStreamId);
#else
                var source = Context.Media.CreateMediaStreamSource(tracks[0], 30, VoipContext.PeerMediaStreamId);
#endif
                Context.RemoteVideoRenderer.SetupRenderer(Context.ForegroundProcessId, source, Context.RemoteVideoControlSize);
            }
        }
        public override async void _play(JObject parameters)
        {
            Messenger.Broadcast(SympleLog.LogTrace, "symple:webrtc: _play");

            // if there is an active stream, play it now
            if (this.activeStream != null)
            {
                Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: active stream is not null, shuld play it now (TODO)");
                //this.video.src = URL.createObjectURL(this.activeStream);
                //this.video.play();
                this.setState("playing");
            }
            else
            {
                // otherwise, wait for ICE to complete before setting the playing state

                // if we are the ICE initiator, then attempt to open the local video device and send the SDP offer to the peer
                if (this.initiator)
                {
                    Messenger.Broadcast(SympleLog.LogInfo, "symple:webrtc: initiating");

                    var videoCaptureDevices = GetMedia().GetVideoCaptureDevices();

                    Messenger.Broadcast(SympleLog.LogInfo, "videoCaptureDevices:");
                    foreach (var dev in videoCaptureDevices)
                    {
                        Messenger.Broadcast(SympleLog.LogInfo, "id = " + dev.Id + ", name = " + dev.Name + ", location = " + dev.Location);
                        var capabilities = await dev.GetVideoCaptureCapabilities();

                        foreach (var capability in capabilities)
                        {
                            Messenger.Broadcast(SympleLog.LogDebug, "\t" + capability.FullDescription);
                        }
                    }

                    int requestedWebRtcCameraIndex = parameters["requestedWebRtcCameraIndex"].ToObject<int>();
                    int usedWebRtcCameraIndex      = requestedWebRtcCameraIndex;
                    if (requestedWebRtcCameraIndex >= videoCaptureDevices.Count)
                    {
                        Messenger.Broadcast(SympleLog.LogInfo, "NOTE: requested WebRTC camera index of " + requestedWebRtcCameraIndex + " is out of range of the number of available video capture devices (" + videoCaptureDevices.Count + "). Resetting to 0.");
                        usedWebRtcCameraIndex = 0;
                    }
                    Messenger.Broadcast(SympleLog.LogInfo, "Selecting WebRTC camera with index " + usedWebRtcCameraIndex);

                    var selectedVideoDevice = videoCaptureDevices[usedWebRtcCameraIndex];

                    Messenger.Broadcast(SympleLog.LogDebug, "getting videoCaptureCapabilities");
                    var videoCaptureCapabilities = await selectedVideoDevice.GetVideoCaptureCapabilities();

                    Messenger.Broadcast(SympleLog.LogDebug, "got videoCaptureCapabilities");

                    GetMedia().SelectVideoDevice(selectedVideoDevice);


                    int requestedVideoWidth;
                    if (parameters["requestedVideoWidth"] != null)
                    {
                        requestedVideoWidth = parameters["requestedVideoWidth"].ToObject<int>();
                    }
                    else
                    {
                        Messenger.Broadcast(SympleLog.LogDebug, "requestedVideoWidth set to default");
                        requestedVideoWidth = 640;
                    }

                    int requestedVideoHeight;
                    if (parameters["requestedVideoHeight"] != null)
                    {
                        requestedVideoHeight = parameters["requestedVideoHeight"].ToObject<int>();
                    }
                    else
                    {
                        Messenger.Broadcast(SympleLog.LogDebug, "requestedVideoHeight set to default");
                        requestedVideoHeight = 480;
                    }

                    int numRequestedPixels = requestedVideoWidth * requestedVideoHeight;

                    // We need to specify a preferred video capture format; it must be one of the device's supported capabilities.
                    // Choose the capability closest to the requested resolution, preferring the highest frame rate at that resolution.
                    var chosenCapability = videoCaptureCapabilities[0];
                    foreach (var capability in videoCaptureCapabilities)
                    {
                        int numPixelsInThisCapability   = (int)(capability.Width * capability.Height);
                        int numPixelsInChosenCapability = (int)(chosenCapability.Width * chosenCapability.Height);

                        long thisPixelDeltaFromRequested   = Math.Abs(numPixelsInThisCapability - numRequestedPixels);
                        long chosenPixelDeltaFromRequested = Math.Abs(numPixelsInChosenCapability - numRequestedPixels);

                        if (thisPixelDeltaFromRequested < chosenPixelDeltaFromRequested)
                        {
                            chosenCapability = capability;
                        }
                        else if (thisPixelDeltaFromRequested == chosenPixelDeltaFromRequested && capability.FrameRate > chosenCapability.FrameRate)
                        {
                            chosenCapability = capability;
                        }
                    }

                    Messenger.Broadcast(SympleLog.LogDebug, "chosenCapability:");
                    Messenger.Broadcast(SympleLog.LogDebug, "\tWidth: " + (int)chosenCapability.Width);
                    Messenger.Broadcast(SympleLog.LogDebug, "\tHeight: " + (int)chosenCapability.Height);
                    Messenger.Broadcast(SympleLog.LogDebug, "\tFrameRate: " + (int)chosenCapability.FrameRate);
                    WebRTC.SetPreferredVideoCaptureFormat((int)chosenCapability.Width, (int)chosenCapability.Height, (int)chosenCapability.FrameRate, chosenCapability.MrcEnabled);

                    //Org.WebRtc.Media.SetDisplayOrientation(Windows.Graphics.Display.DisplayOrientations.None);

                    Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: before getUserMedia");
                    if (_localStream == null)
                    {
                        _localStream = await GetMedia().GetUserMedia(this.userMediaConstraints);
                    }
                    Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: after getUserMedia");

                    // play the local video stream and create the SDP offer
                    this.pc.AddStream(_localStream);

                    Messenger.Broadcast(SympleLog.LogDebug, "localStream: " + _localStream);
                    var videoTracks = _localStream.GetVideoTracks();
                    Messenger.Broadcast(SympleLog.LogDebug, "videoTracks in localStream: ");
                    foreach (var track in videoTracks)
                    {
                        Messenger.Broadcast(SympleLog.LogDebug, track.Id + ", enabled = " + track.Enabled + ", kind = " + track.Kind);
                    }
                    var audioTracks = _localStream.GetAudioTracks();
                    Messenger.Broadcast(SympleLog.LogDebug, "audioTracks in localStream: ");
                    foreach (var track in audioTracks)
                    {
                        Messenger.Broadcast(SympleLog.LogDebug, track.Id + ", enabled = " + track.Enabled + ", kind = " + track.Kind);
                    }

                    if (videoTracks.Count > 0)
                    {
                        //var source = GetMedia().CreateMediaSource(videoTracks[0], Symple.LocalMediaStreamId); // was valid for org.webrtc 1.54, not existing anymore
                        //var source = GetMedia().CreateMediaStreamSource(Symple.LocalMediaStreamId);
                        var source = GetMedia().CreateMediaStreamSource(videoTracks[0], "I420", Symple.LocalMediaStreamId);

                        Messenger.Broadcast(SympleLog.CreatedMediaSource, source);

                        if (this.pc != null)
                        {
                            RTCSessionDescription desc = await this.pc.CreateOffer();

                            Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: offer: " + desc);
                            this._onLocalSDP(desc);

                            // store the active local stream
                            this.activeStream = _localStream;
                        }
                        else
                        {
                            Messenger.Broadcast(SympleLog.LogError, "peer connection was destroyed while trying to creat offer");
                        }
                    }
                    else
                    {
                        Messenger.Broadcast(SympleLog.LogError, "ERROR: No video track found locally");
                    }
                }
            }
        }
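The selection loop above picks the capability whose pixel count is closest to the request, breaking ties in favor of the higher frame rate. A hypothetical LINQ equivalent, assuming using System.Linq and the same videoCaptureCapabilities and numRequestedPixels:

        // Order by distance from the requested pixel count, then prefer the
        // higher frame rate among equally close capabilities.
        var chosenCapability = videoCaptureCapabilities
            .OrderBy(c => Math.Abs((long)(c.Width * c.Height) - numRequestedPixels))
            .ThenByDescending(c => c.FrameRate)
            .First();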
Example #11
        public async void testStartVideoLocal()
        {
            Messenger.Broadcast(SympleLog.LogDebug, "basicTestVideo()");

            if (!webrtcInitialized)
            {
                WebRTC.Initialize(null);    // needed before calling any webrtc functions http://stackoverflow.com/questions/43331677/webrtc-for-uwp-new-rtcpeerconnection-doesnt-complete-execution
                webrtcInitialized = true;
            }

            Messenger.Broadcast(SympleLog.LogDebug, "creating media");

            if (_media == null)
            {
                _media = Media.CreateMedia();
                _media.OnMediaDevicesChanged += (MediaDeviceType mediaType) =>
                {
                    Messenger.Broadcast(SympleLog.LogDebug, "OnMediaDevicesChanged(), mediaType = " + mediaType);
                };
            }

            Messenger.Broadcast(SympleLog.LogDebug, "created media");

            var videoCaptureDevices = _media.GetVideoCaptureDevices();

            Messenger.Broadcast(SympleLog.LogDebug, "num videoCaptureDevices: " + videoCaptureDevices.Count);

            var videoDevice = videoCaptureDevices[0];

            Messenger.Broadcast(SympleLog.LogDebug, "getting videoCaptureCapabilities");
            var videoCaptureCapabilities = await videoDevice.GetVideoCaptureCapabilities();

            Messenger.Broadcast(SympleLog.LogDebug, "got videoCaptureCapabilities");

            var chosenCapability = videoCaptureCapabilities[0];

            Messenger.Broadcast(SympleLog.LogDebug, "chosenCapability:");
            Messenger.Broadcast(SympleLog.LogDebug, "\tWidth: " + (int)chosenCapability.Width);
            Messenger.Broadcast(SympleLog.LogDebug, "\tHeight: " + (int)chosenCapability.Height);
            Messenger.Broadcast(SympleLog.LogDebug, "\tFrameRate: " + (int)chosenCapability.FrameRate);
            WebRTC.SetPreferredVideoCaptureFormat((int)chosenCapability.Width, (int)chosenCapability.Height, (int)chosenCapability.FrameRate);

            Messenger.Broadcast(SympleLog.LogDebug, "getting usermedia");
            if (_localStream == null)
            {
                _localStream = await _media.GetUserMedia(new RTCMediaStreamConstraints { videoEnabled = true, audioEnabled = true });
            }

            Messenger.Broadcast(SympleLog.LogDebug, "got usermedia");

            Messenger.Broadcast(SympleLog.LogDebug, "localStream id: " + _localStream.Id);
            Messenger.Broadcast(SympleLog.LogDebug, "localStream Active?: " + _localStream.Active);

            var videoTracks = _localStream.GetVideoTracks();

            Messenger.Broadcast(SympleLog.LogDebug, "num videoTracks: " + videoTracks.Count);

            _selectedVideoTrack = videoTracks[0];

            Messenger.Broadcast(SympleLog.LogDebug, "selected video track id: " + _selectedVideoTrack.Id);
            Messenger.Broadcast(SympleLog.LogDebug, "selected video track suspended?: " + _selectedVideoTrack.Suspended);
            Messenger.Broadcast(SympleLog.LogDebug, "selected video track enabled?: " + _selectedVideoTrack.Enabled);

            var source = _media.CreateMediaSource(_selectedVideoTrack, Symple.LocalMediaStreamId);

            Messenger.Broadcast(SympleLog.LogDebug, "created mediasource");

            Messenger.Broadcast(SympleLog.CreatedMediaSource, source);
        }
Example #12
        public override async void _play(JObject parameters)
        {
            Messenger.Broadcast(SympleLog.LogTrace, "symple:webrtc: _play");

            // if there is an active stream, play it now
            if (this.activeStream != null)
            {
                Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: active stream is not null, shuld play it now (TODO)");
                //this.video.src = URL.createObjectURL(this.activeStream);
                //this.video.play();
                this.setState("playing");
            }
            else
            {
                // otherwise, wait for ICE to complete before setting the playing state

                // if we are the ICE initiator, then attempt to open the local video device and send the SDP offer to the peer
                if (this.initiator)
                {
                    Messenger.Broadcast(SympleLog.LogInfo, "symple:webrtc: initiating");

                    var videoCaptureDevices = GetMedia().GetVideoCaptureDevices();

                    Messenger.Broadcast(SympleLog.LogInfo, "videoCaptureDevices:");
                    foreach (var dev in videoCaptureDevices)
                    {
                        Messenger.Broadcast(SympleLog.LogInfo, "id = " + dev.Id + ", name = " + dev.Name + ", location = " + dev.Location);
                        var capabilities = await dev.GetVideoCaptureCapabilities();

                        foreach (var capability in capabilities)
                        {
                            Messenger.Broadcast(SympleLog.LogInfo, "\t" + capability.FullDescription);
                        }
                    }

                    var videoDevice = videoCaptureDevices[0];

                    Messenger.Broadcast(SympleLog.LogDebug, "getting videoCaptureCapabilities");
                    var videoCaptureCapabilities = await videoDevice.GetVideoCaptureCapabilities();

                    Messenger.Broadcast(SympleLog.LogDebug, "got videoCaptureCapabilities");

                    GetMedia().SelectVideoDevice(videoCaptureDevices[0]);


                    // We need to specify a preferred video capture format; it has to be one of the supported capabilities of the device.
                    // We will choose the capability that has the lowest resolution and the highest frame rate for that resolution.
                    var chosenCapability = videoCaptureCapabilities[0];
                    foreach (var capability in videoCaptureCapabilities)
                    {
                        if (capability.Width == 640 && capability.Height == 480)
                        {
                            // we'd prefer to just do 640x480 if possible
                            chosenCapability = capability;
                            break;
                        }

                        if ((capability.Width < chosenCapability.Width && capability.Height < chosenCapability.Height) ||
                            (capability.Width == chosenCapability.Width && capability.Height == chosenCapability.Height && capability.FrameRate > chosenCapability.FrameRate))
                        {
                            chosenCapability = capability;
                        }
                    }

                    Messenger.Broadcast(SympleLog.LogDebug, "chosenCapability:");
                    Messenger.Broadcast(SympleLog.LogDebug, "\tWidth: " + (int)chosenCapability.Width);
                    Messenger.Broadcast(SympleLog.LogDebug, "\tHeight: " + (int)chosenCapability.Height);
                    Messenger.Broadcast(SympleLog.LogDebug, "\tFrameRate: " + (int)chosenCapability.FrameRate);
                    WebRTC.SetPreferredVideoCaptureFormat((int)chosenCapability.Width, (int)chosenCapability.Height, (int)chosenCapability.FrameRate);

                    //WebRTC.SetPreferredVideoCaptureFormat(640, 480, 30);

                    //Org.WebRtc.Media.SetDisplayOrientation(Windows.Graphics.Display.DisplayOrientations.None);

                    Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: before getUserMedia");
                    if (_localStream == null)
                    {
                        _localStream = await GetMedia().GetUserMedia(new RTCMediaStreamConstraints {
                            videoEnabled = true, audioEnabled = true
                        });
                    }
                    Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: after getUserMedia");

                    // play the local video stream and create the SDP offer
                    this.pc.AddStream(_localStream);

                    Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: should play the local stream and create the SDP offer (TODO)");

                    Messenger.Broadcast(SympleLog.LogInfo, "localStream: " + _localStream);
                    var videoTracks = _localStream.GetVideoTracks();
                    Messenger.Broadcast(SympleLog.LogInfo, "videoTracks in localStream: ");
                    foreach (var track in videoTracks)
                    {
                        Messenger.Broadcast(SympleLog.LogInfo, track.Id + ", enabled = " + track.Enabled + ", kind = " + track.Kind + ", suspended = " + track.Suspended);
                    }
                    var audioTracks = _localStream.GetAudioTracks();
                    Messenger.Broadcast(SympleLog.LogInfo, "audioTracks in localStream: ");
                    foreach (var track in audioTracks)
                    {
                        Messenger.Broadcast(SympleLog.LogInfo, track.Id + ", enabled = " + track.Enabled + ", kind = " + track.Kind);
                    }

                    if (videoTracks.Count > 0)
                    {
                        var source = GetMedia().CreateMediaSource(videoTracks[0], Symple.LocalMediaStreamId);

                        Messenger.Broadcast(SympleLog.CreatedMediaSource, source);

                        if (this.pc != null)
                        {
                            RTCSessionDescription desc = await this.pc.CreateOffer();

                            Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: offer: " + desc);
                            this._onLocalSDP(desc);

                            // store the active local stream
                            this.activeStream = _localStream;
                        }
                        else
                        {
                            Messenger.Broadcast(SympleLog.LogError, "peer connection was destroyed while trying to creat offer");
                        }
                    }
                    else
                    {
                        Messenger.Broadcast(SympleLog.LogError, "ERROR: No video track found locally");
                    }
                }
            }
        }
Example #13
        private async Task Initialize()
        {
            //Initialization of WebRTC worker threads, etc
            Org.WebRtc.WebRTC.Initialize(Dispatcher);

            _media = Media.CreateMedia();

            //Selecting video device to use, setting preferred capabilities
            var videoDevices        = _media.GetVideoCaptureDevices();
            var selectedVideoDevice = videoDevices.First();
            var videoCapabilities   = await selectedVideoDevice.GetVideoCaptureCapabilities();

            var selectedVideoCapability = videoCapabilities.FirstOrDefault();

            // Needed for the HoloLens camera, which will not select a compatible video capability automatically.
            // HoloLens camera default capability: 1280x720@30.
            Org.WebRtc.WebRTC.SetPreferredVideoCaptureFormat(
                (int)selectedVideoCapability.Width,
                (int)selectedVideoCapability.Height,
                (int)selectedVideoCapability.FrameRate);

            //Setting up local stream
            RTCMediaStreamConstraints mediaStreamConstraints = new RTCMediaStreamConstraints
            {
                audioEnabled = false,
                videoEnabled = true
            };

            _localStream = await _media.GetUserMedia(mediaStreamConstraints);

            _media.SelectVideoDevice(selectedVideoDevice);

            // Get the video tracks
            var videoTracks = _localStream.GetVideoTracks();

            foreach (var videoTrack in videoTracks) // This foreach may not be necessary
            {
                videoTrack.Enabled = true;
            }
            var selectedVideoTrack = videoTracks.FirstOrDefault();

            Debug.WriteLine("Creating RTCPeerConnection");
            var config = new RTCConfiguration()
            {
                BundlePolicy       = RTCBundlePolicy.Balanced,
                IceTransportPolicy = RTCIceTransportPolicy.All,
                IceServers         = GetDefaultList()
            };

            _peerConnection = new RTCPeerConnection(config);
            _peerConnection.OnIceCandidate        += _localRtcPeerConnection_OnIceCandidate;
            _peerConnection.OnIceConnectionChange += _localRtcPeerConnection_OnIceConnectionChange;
            _peerConnection.OnAddStream           += _peerConnection_OnAddStream;

            //_peerConnection.AddStream(_localStream);
            _media.AddVideoTrackMediaElementPair(selectedVideoTrack, _localVideo, _localStream.Id);

            // Send event started
            Element.SendStarted();

            //Debug.WriteLine("Creating 'remote' RTCPeerConnection");
            //_remoteRtcPeerConnection = new RTCPeerConnection(config);
            //_remoteRtcPeerConnection.OnIceCandidate += _remoteRtcPeerConnection_OnIceCandidate;
            //_remoteRtcPeerConnection.OnIceConnectionChange += _remoteRtcPeerConnection_OnIceConnectionChange;
            //// Wait for Stream
            //_remoteRtcPeerConnection.OnAddStream += _remoteRtcPeerConnection_OnAddStream;
        }
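Example #13 stops after wiring up the connection. A hypothetical sketch of the offer step that would typically follow, reusing the CreateOffer call seen in Examples #9 and #12 together with SetLocalDescription (assumed awaitable here):

        private async Task CreateOfferAsync()
        {
            // Create the SDP offer and apply it as the local description.
            RTCSessionDescription offer = await _peerConnection.CreateOffer();
            await _peerConnection.SetLocalDescription(offer);
            // offer.Sdp would then be sent to the remote peer over the signaling channel.
        }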