public IAsyncAction SetVideoDeviceAsync(DtoMediaDevice device)
        {
            _videoDevice = device;
            var settings = ApplicationData.Current.LocalSettings;

            if (device != null)
            {
                settings.Values[MediaSettingsIds.VideoDeviceSettings] = device.Id;
                ETWEventLogger.Instance.LogEvent("Video Device Selected",
                                                 "name = " + device.Name,
                                                 DateTimeOffset.Now.ToUnixTimeMilliseconds().ToString());

                var  selectedVideoDevice       = device.FromDto();
                int  preferredCaptureWidth     = (int)settings.Values[MediaSettingsIds.PreferredVideoCaptureWidth];
                int  preferredCaptureHeight    = (int)settings.Values[MediaSettingsIds.PreferredVideoCaptureHeight];
                int  preferredCaptureFrameRate = (int)settings.Values[MediaSettingsIds.PreferredVideoCaptureFrameRate];
                bool isCapabilityValid         = false;
                if (selectedVideoDevice != null)
                {
                    // Note: .Wait() blocks the calling thread until the capability query completes.
                    var getTask = selectedVideoDevice.GetVideoCaptureCapabilities().AsTask();
                    getTask.Wait();
                    var capabilities = getTask.Result;
                    foreach (var capability in capabilities)
                    {
                        if (capability.FrameRate == preferredCaptureFrameRate &&
                            capability.Height == preferredCaptureHeight &&
                            capability.Width == preferredCaptureWidth)
                        {
                            isCapabilityValid = true;
                        }
                    }
                }
                if (!isCapabilityValid)
                {
                    preferredCaptureWidth = 640;
                    settings.Values[MediaSettingsIds.PreferredVideoCaptureWidth] = preferredCaptureWidth;
                    preferredCaptureHeight = 480;
                    settings.Values[MediaSettingsIds.PreferredVideoCaptureHeight] = preferredCaptureHeight;
                    preferredCaptureFrameRate = 30;
                    settings.Values[MediaSettingsIds.PreferredVideoCaptureFrameRate] = preferredCaptureFrameRate;
                }
                WebRTC.SetPreferredVideoCaptureFormat(preferredCaptureWidth, preferredCaptureHeight,
                                                      preferredCaptureFrameRate);
            }
            else
            {
                settings.Values.Remove(MediaSettingsIds.VideoDeviceSettings);
            }
            OnVideoDeviceSelectionChanged?.Invoke();
            return Task.CompletedTask.AsAsyncAction();
        }
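The capability check above blocks on Task.Wait(); an equivalent non-blocking helper is sketched below (it only assumes the Org.WebRtc MediaDevice/CaptureCapability types used throughout this page plus System.Linq), and the caller can apply the same 640x480@30 fallback when it returns false.

        private static async Task<bool> IsCapabilitySupportedAsync(
            Org.WebRtc.MediaDevice device, int width, int height, int frameRate)
        {
            // Await the capability query instead of calling Wait() on it.
            var capabilities = await device.GetVideoCaptureCapabilities();
            return capabilities.Any(c => c.Width == width &&
                                         c.Height == height &&
                                         c.FrameRate == frameRate);
        }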
Example #2
        private void setWebRtcDeviceAndProfile()
        {
            this.media.SelectVideoDevice(
                new Org.WebRtc.MediaDevice(
                    this.selectedDevice.Id,
                    this.selectedDevice.Name));

            var mrcEnabled = false;

            WebRTC.SetPreferredVideoCaptureFormat(
                (int)this.selectedProfile.Width,
                (int)this.selectedProfile.Height,
                (int)this.selectedProfile.FrameRate,
                mrcEnabled
                );
        }
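A sketch of the same selection done directly against Org.WebRtc, without the intermediate selectedDevice/selectedProfile fields whose types the snippet does not show (assumptions: this.media is the Org.WebRtc Media instance, the first capture device is acceptable, and mixed-reality capture stays off).

        private async Task setWebRtcDeviceAndProfileAsync()
        {
            var device = this.media.GetVideoCaptureDevices().First();
            this.media.SelectVideoDevice(device);

            // Prefer the highest-resolution capability; any other policy would do.
            var capability = (await device.GetVideoCaptureCapabilities())
                             .OrderByDescending(cap => cap.Width * cap.Height)
                             .First();

            WebRTC.SetPreferredVideoCaptureFormat(
                (int)capability.Width,
                (int)capability.Height,
                (int)capability.FrameRate,
                false /* mrcEnabled */);
        }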
Example #3
    async void Start()
    {
        CheapContainer.Register<ISignallingService, Signaller>();
        CheapContainer.Register<IDispatcherProvider, DispatcherProvider>();
        CheapContainer.Register<ITextureDetailsProvider, TextureDetailsProvider>();

        var provider = CheapContainer.Resolve<ITextureDetailsProvider>();

        provider.Details = this.TextureDetails;

        CheapContainer.Register<IMediaManager, MediaManager>();
        CheapContainer.Register<IPeerManager, PeerManager>();
        CheapContainer.Register<IConversationManager, ConversationManager>();

        var conversationManager = CheapContainer.Resolve<IConversationManager>();

        conversationManager.IsInitiator = this.IsInitiator;

        // TODO: not really found a good way of abstracting this but I think it has to be called.
        // Does it need moving into the Media Manager and linking to the widths/heights in there?
        // I think I ramped it down to 856? 896? some such.
        WebRTC.SetPreferredVideoCaptureFormat(896, 504, 30);

        // TODO: This is here right now as it feels like a bunch of work gets marshalled
        // back (via Sync Context?) to this thread which then gums up the UI but we'd
        // like to understand better what that work is.
        Task.Run(
            async () =>
            {
                await conversationManager.InitialiseAsync(this.HostName, this.remotePeerName);

                if (await conversationManager.ConnectToSignallingAsync(this.ServerIP, this.PortNumber,
                                                                       "H264", 90000))
                {
                    // We're good!
                }
            });
    }
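The TODO above hard-codes 896x504@30; below is a sketch of validating that request against the device's actual capabilities before committing to it. Creating a Media instance here and taking the first device are assumptions, since the MediaManager in this sample hides those details.

    private static async Task SetPreferredFormatCheckedAsync(int width, int height, int frameRate)
    {
        var device = Media.CreateMedia().GetVideoCaptureDevices().FirstOrDefault();
        if (device == null) return;

        var caps  = await device.GetVideoCaptureCapabilities();
        // Use the exact match if the device supports it, otherwise the closest resolution.
        var match = caps.FirstOrDefault(c => c.Width == width && c.Height == height && c.FrameRate == frameRate)
                    ?? caps.OrderBy(c => Math.Abs((long)(c.Width * c.Height) - (long)width * height)).First();

        WebRTC.SetPreferredVideoCaptureFormat((int)match.Width, (int)match.Height, (int)match.FrameRate);
    }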
Example #4
        public async Task Initialize(CoreDispatcher coreDispatcher)
        {
            if (this.media != null || this.mediaStream != null)
            {
                throw new Exception("Media lock is alreay initialized.");
            }

            var allowed = await WebRTC.RequestAccessForMediaCapture();

            if (!allowed)
            {
                throw new Exception("Failed to access media for WebRtc...");
            }

            WebRTC.Initialize(coreDispatcher);

            this.media = Media.CreateMedia();

            var videoDevice  = this.media.GetVideoCaptureDevices().First();
            var capabilities = await videoDevice.GetVideoCaptureCapabilities();

            // Note: OrderBy (ascending) selects the lowest width*height*frameRate capability;
            // use OrderByDescending here to prefer the highest-quality format instead.
            var selectedFormat = capabilities
                                 .OrderBy(cap => cap.Width * cap.Height * cap.FrameRate)
                                 .FirstOrDefault();

            if (selectedFormat != null)
            {
                WebRTC.SetPreferredVideoCaptureFormat(
                    (int)selectedFormat.Width,
                    (int)selectedFormat.Height,
                    (int)selectedFormat.FrameRate,
                    selectedFormat.MrcEnabled
                    );
            }

            this.mediaStream = await this.media.GetUserMedia(this.Constraints);
        }
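The Constraints property consumed by GetUserMedia above is not shown; judging by the other samples on this page it is presumably an RTCMediaStreamConstraints along these lines (an assumption):

        private RTCMediaStreamConstraints Constraints { get; } =
            new RTCMediaStreamConstraints
            {
                videoEnabled = true,   // use the camera whose format was just selected
                audioEnabled = true    // capture the default microphone as well
            };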
        public override async void _play(JObject parameters)
        {
            Messenger.Broadcast(SympleLog.LogTrace, "symple:webrtc: _play");

            // if there is an active stream, play it now
            if (this.activeStream != null)
            {
                Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: active stream is not null, shuld play it now (TODO)");
                //this.video.src = URL.createObjectURL(this.activeStream);
                //this.video.play();
                this.setState("playing");
            }
            else
            {
                // otherwise, wait until ICE to complete before setting the playing state

                // if we are the ICE initiator, then attempt to open the local video device and send the SDP offer to the peer
                if (this.initiator)
                {
                    Messenger.Broadcast(SympleLog.LogInfo, "symple:webrtc: initiating");

                    var videoCaptureDevices = GetMedia().GetVideoCaptureDevices();

                    Messenger.Broadcast(SympleLog.LogInfo, "videoCaptureDevices:");
                    foreach (var dev in videoCaptureDevices)
                    {
                        Messenger.Broadcast(SympleLog.LogInfo, "id = " + dev.Id + ", name = " + dev.Name + ", location = " + dev.Location);
                        var capabilities = await dev.GetVideoCaptureCapabilities();

                        foreach (var capability in capabilities)
                        {
                            Messenger.Broadcast(SympleLog.LogDebug, "\t" + capability.FullDescription);
                        }
                    }

                    int requestedWebRtcCameraIndex = parameters["requestedWebRtcCameraIndex"].ToObject<int>();
                    int usedWebRtcCameraIndex      = requestedWebRtcCameraIndex;
                    if (requestedWebRtcCameraIndex >= videoCaptureDevices.Count)
                    {
                        Messenger.Broadcast(SympleLog.LogInfo, "NOTE: requested WebRTC camera index of " + requestedWebRtcCameraIndex + " is out of range of the number of available video capture devices (" + videoCaptureDevices.Count + "). Resetting to 0.");
                        usedWebRtcCameraIndex = 0;
                    }
                    Messenger.Broadcast(SympleLog.LogInfo, "Selecting WebRTC camera with index " + usedWebRtcCameraIndex);

                    var selectedVideoDevice = videoCaptureDevices[usedWebRtcCameraIndex];

                    Messenger.Broadcast(SympleLog.LogDebug, "getting videoCaptureCapabilities");
                    var videoCaptureCapabilities = await selectedVideoDevice.GetVideoCaptureCapabilities();

                    Messenger.Broadcast(SympleLog.LogDebug, "got videoCaptureCapabilities");

                    GetMedia().SelectVideoDevice(selectedVideoDevice);


                    int requestedVideoWidth;
                    if (parameters["requestedVideoWidth"] != null)
                    {
                        requestedVideoWidth = parameters["requestedVideoWidth"].ToObject<int>();
                    }
                    else
                    {
                        Messenger.Broadcast(SympleLog.LogDebug, "requestedVideoWidth set to default");
                        requestedVideoWidth = 640;
                    }

                    int requestedVideoHeight;
                    if (parameters["requestedVideoHeight"] != null)
                    {
                        requestedVideoHeight = parameters["requestedVideoHeight"].ToObject<int>();
                    }
                    else
                    {
                        Messenger.Broadcast(SympleLog.LogDebug, "requestedVideoHeight set to default");
                        requestedVideoHeight = 480;
                    }

                    int numRequestedPixels = requestedVideoWidth * requestedVideoHeight;

                    // We need to specify a preferred video capture format; it has to be one of the supported capabilities of the device.
                    // We choose the capability whose resolution is closest to the requested one, breaking ties by the highest frame rate.
                    var chosenCapability = videoCaptureCapabilities[0];
                    foreach (var capability in videoCaptureCapabilities)
                    {
                        int numPixelsInThisCapability   = (int)(capability.Width * capability.Height);
                        int numPixelsInChosenCapability = (int)(chosenCapability.Width * chosenCapability.Height);

                        long thisPixelDeltaFromRequested   = Math.Abs(numPixelsInThisCapability - numRequestedPixels);
                        long chosenPixelDeltaFromRequested = Math.Abs(numPixelsInChosenCapability - numRequestedPixels);

                        if (thisPixelDeltaFromRequested < chosenPixelDeltaFromRequested)
                        {
                            chosenCapability = capability;
                        }
                        else if (thisPixelDeltaFromRequested == chosenPixelDeltaFromRequested && capability.FrameRate > chosenCapability.FrameRate)
                        {
                            chosenCapability = capability;
                        }
                    }

                    Messenger.Broadcast(SympleLog.LogDebug, "chosenCapability:");
                    Messenger.Broadcast(SympleLog.LogDebug, "\tWidth: " + (int)chosenCapability.Width);
                    Messenger.Broadcast(SympleLog.LogDebug, "\tHeight: " + (int)chosenCapability.Height);
                    Messenger.Broadcast(SympleLog.LogDebug, "\tFrameRate: " + (int)chosenCapability.FrameRate);
                    WebRTC.SetPreferredVideoCaptureFormat((int)chosenCapability.Width, (int)chosenCapability.Height, (int)chosenCapability.FrameRate, chosenCapability.MrcEnabled);

                    //Org.WebRtc.Media.SetDisplayOrientation(Windows.Graphics.Display.DisplayOrientations.None);

                    Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: before getUserMedia");
                    if (_localStream == null)
                    {
                        _localStream = await GetMedia().GetUserMedia(this.userMediaConstraints);
                    }
                    Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: after getUserMedia");

                    // play the local video stream and create the SDP offer
                    this.pc.AddStream(_localStream);

                    Messenger.Broadcast(SympleLog.LogDebug, "localStream: " + _localStream);
                    var videoTracks = _localStream.GetVideoTracks();
                    Messenger.Broadcast(SympleLog.LogDebug, "videoTracks in localStream: ");
                    foreach (var track in videoTracks)
                    {
                        Messenger.Broadcast(SympleLog.LogDebug, track.Id + ", enabled = " + track.Enabled + ", kind = " + track.Kind);
                    }
                    var audioTracks = _localStream.GetAudioTracks();
                    Messenger.Broadcast(SympleLog.LogDebug, "audioTracks in localStream: ");
                    foreach (var track in audioTracks)
                    {
                        Messenger.Broadcast(SympleLog.LogDebug, track.Id + ", enabled = " + track.Enabled + ", kind = " + track.Kind);
                    }

                    if (videoTracks.Count > 0)
                    {
                        //var source = GetMedia().CreateMediaSource(videoTracks[0], Symple.LocalMediaStreamId); // was valid for org.webrtc 1.54, not existing anymore
                        //var source = GetMedia().CreateMediaStreamSource(Symple.LocalMediaStreamId);
                        var source = GetMedia().CreateMediaStreamSource(videoTracks[0], "I420", Symple.LocalMediaStreamId);

                        Messenger.Broadcast(SympleLog.CreatedMediaSource, source);

                        if (this.pc != null)
                        {
                            RTCSessionDescription desc = await this.pc.CreateOffer();

                            Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: offer: " + desc);
                            this._onLocalSDP(desc);

                            // store the active local stream
                            this.activeStream = _localStream;
                        }
                        else
                        {
                            Messenger.Broadcast(SympleLog.LogError, "peer connection was destroyed while trying to creat offer");
                        }
                    }
                    else
                    {
                        Messenger.Broadcast(SympleLog.LogError, "ERROR: No video track found locally");
                    }
                }
            }
        }
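The capability-selection loop above (closest resolution first, then highest frame rate) can be written more compactly with LINQ; a sketch, assuming the same videoCaptureCapabilities list and numRequestedPixels value:

                    // Order by pixel-count distance from the request, break ties by frame rate.
                    var chosenCapability = videoCaptureCapabilities
                        .OrderBy(cap => Math.Abs((long)(cap.Width * cap.Height) - numRequestedPixels))
                        .ThenByDescending(cap => cap.FrameRate)
                        .First();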
Example #6
        public async void testStartVideoLocal()
        {
            Messenger.Broadcast(SympleLog.LogDebug, "basicTestVideo()");

            if (!webrtcInitialized)
            {
                WebRTC.Initialize(null);    // needed before calling any webrtc functions http://stackoverflow.com/questions/43331677/webrtc-for-uwp-new-rtcpeerconnection-doesnt-complete-execution
                webrtcInitialized = true;
            }

            Messenger.Broadcast(SympleLog.LogDebug, "creating media");

            if (_media == null)
            {
                _media = Media.CreateMedia();
                _media.OnMediaDevicesChanged += (MediaDeviceType mediaType) =>
                {
                    Messenger.Broadcast(SympleLog.LogDebug, "OnMediaDevicesChanged(), mediaType = " + mediaType);
                };
            }

            Messenger.Broadcast(SympleLog.LogDebug, "created media");

            var videoCaptureDevices = _media.GetVideoCaptureDevices();

            Messenger.Broadcast(SympleLog.LogDebug, "num videoCaptureDevices: " + videoCaptureDevices.Count);

            var videoDevice = videoCaptureDevices[0];

            Messenger.Broadcast(SympleLog.LogDebug, "getting videoCaptureCapabilities");
            var videoCaptureCapabilities = await videoDevice.GetVideoCaptureCapabilities();

            Messenger.Broadcast(SympleLog.LogDebug, "got videoCaptureCapabilities");

            var chosenCapability = videoCaptureCapabilities[0];

            Messenger.Broadcast(SympleLog.LogDebug, "chosenCapability:");
            Messenger.Broadcast(SympleLog.LogDebug, "\tWidth: " + (int)chosenCapability.Width);
            Messenger.Broadcast(SympleLog.LogDebug, "\tHeight: " + (int)chosenCapability.Height);
            Messenger.Broadcast(SympleLog.LogDebug, "\tFrameRate: " + (int)chosenCapability.FrameRate);
            WebRTC.SetPreferredVideoCaptureFormat((int)chosenCapability.Width, (int)chosenCapability.Height, (int)chosenCapability.FrameRate);

            Messenger.Broadcast(SympleLog.LogDebug, "getting usermedia");
            if (_localStream == null)
            {
                _localStream = await _media.GetUserMedia(new RTCMediaStreamConstraints { videoEnabled = true, audioEnabled = true });
            }

            Messenger.Broadcast(SympleLog.LogDebug, "got usermedia");

            Messenger.Broadcast(SympleLog.LogDebug, "localStream id: " + _localStream.Id);
            Messenger.Broadcast(SympleLog.LogDebug, "localStream Active?: " + _localStream.Active);

            var videoTracks = _localStream.GetVideoTracks();

            Messenger.Broadcast(SympleLog.LogDebug, "num videoTracks: " + videoTracks.Count);

            _selectedVideoTrack = videoTracks[0];

            Messenger.Broadcast(SympleLog.LogDebug, "selected video track id: " + _selectedVideoTrack.Id);
            Messenger.Broadcast(SympleLog.LogDebug, "selected video track suspended?: " + _selectedVideoTrack.Suspended);
            Messenger.Broadcast(SympleLog.LogDebug, "selected video track enabled?: " + _selectedVideoTrack.Enabled);

            var source = _media.CreateMediaSource(_selectedVideoTrack, Symple.LocalMediaStreamId);

            Messenger.Broadcast(SympleLog.LogDebug, "created mediasource");

            Messenger.Broadcast(SympleLog.CreatedMediaSource, source);
        }
        public override async void _play(JObject parameters)
        {
            Messenger.Broadcast(SympleLog.LogTrace, "symple:webrtc: _play");

            // if there is an active stream, play it now
            if (this.activeStream != null)
            {
                Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: active stream is not null, shuld play it now (TODO)");
                //this.video.src = URL.createObjectURL(this.activeStream);
                //this.video.play();
                this.setState("playing");
            }
            else
            {
                // otherwise, wait until ICE to complete before setting the playing state

                // if we are the ICE initiator, then attempt to open the local video device and send the SDP offer to the peer
                if (this.initiator)
                {
                    Messenger.Broadcast(SympleLog.LogInfo, "symple:webrtc: initiating");

                    var videoCaptureDevices = GetMedia().GetVideoCaptureDevices();

                    Messenger.Broadcast(SympleLog.LogInfo, "videoCaptureDevices:");
                    foreach (var dev in videoCaptureDevices)
                    {
                        Messenger.Broadcast(SympleLog.LogInfo, "id = " + dev.Id + ", name = " + dev.Name + ", location = " + dev.Location);
                        var capabilities = await dev.GetVideoCaptureCapabilities();

                        foreach (var capability in capabilities)
                        {
                            Messenger.Broadcast(SympleLog.LogInfo, "\t" + capability.FullDescription);
                        }
                    }

                    var videoDevice = videoCaptureDevices[0];

                    Messenger.Broadcast(SympleLog.LogDebug, "getting videoCaptureCapabilities");
                    var videoCaptureCapabilities = await videoDevice.GetVideoCaptureCapabilities();

                    Messenger.Broadcast(SympleLog.LogDebug, "got videoCaptureCapabilities");

                    GetMedia().SelectVideoDevice(videoCaptureDevices[0]);


                    // We need to specify a preferred video capture format; it has to be one of the supported capabilities of the device.
                    // We choose the capability with the lowest resolution and, at that resolution, the highest frame rate.
                    var chosenCapability = videoCaptureCapabilities[0];
                    foreach (var capability in videoCaptureCapabilities)
                    {
                        if (capability.Width == 640 && capability.Height == 480)
                        {
                            // we'd prefer to just do 640x480 if possible
                            chosenCapability = capability;
                            break;
                        }

                        if ((capability.Width < chosenCapability.Width && capability.Height < chosenCapability.Height) ||
                            (capability.Width == chosenCapability.Width && capability.Height == chosenCapability.Height && capability.FrameRate > chosenCapability.FrameRate))
                        {
                            chosenCapability = capability;
                        }
                    }

                    Messenger.Broadcast(SympleLog.LogDebug, "chosenCapability:");
                    Messenger.Broadcast(SympleLog.LogDebug, "\tWidth: " + (int)chosenCapability.Width);
                    Messenger.Broadcast(SympleLog.LogDebug, "\tHeight: " + (int)chosenCapability.Height);
                    Messenger.Broadcast(SympleLog.LogDebug, "\tFrameRate: " + (int)chosenCapability.FrameRate);
                    WebRTC.SetPreferredVideoCaptureFormat((int)chosenCapability.Width, (int)chosenCapability.Height, (int)chosenCapability.FrameRate);

                    //WebRTC.SetPreferredVideoCaptureFormat(640, 480, 30);

                    //Org.WebRtc.Media.SetDisplayOrientation(Windows.Graphics.Display.DisplayOrientations.None);

                    Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: before getUserMedia");
                    if (_localStream == null)
                    {
                        _localStream = await GetMedia().GetUserMedia(new RTCMediaStreamConstraints {
                            videoEnabled = true, audioEnabled = true
                        });
                    }
                    Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: after getUserMedia");

                    // play the local video stream and create the SDP offer
                    this.pc.AddStream(_localStream);

                    Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: should play the local stream and create the SDP offer (TODO)");

                    Messenger.Broadcast(SympleLog.LogInfo, "localStream: " + _localStream);
                    var videoTracks = _localStream.GetVideoTracks();
                    Messenger.Broadcast(SympleLog.LogInfo, "videoTracks in localStream: ");
                    foreach (var track in videoTracks)
                    {
                        Messenger.Broadcast(SympleLog.LogInfo, track.Id + ", enabled = " + track.Enabled + ", kind = " + track.Kind + ", suspended = " + track.Suspended);
                    }
                    var audioTracks = _localStream.GetAudioTracks();
                    Messenger.Broadcast(SympleLog.LogInfo, "audioTracks in localStream: ");
                    foreach (var track in audioTracks)
                    {
                        Messenger.Broadcast(SympleLog.LogInfo, track.Id + ", enabled = " + track.Enabled + ", kind = " + track.Kind);
                    }

                    if (videoTracks.Count > 0)
                    {
                        var source = GetMedia().CreateMediaSource(videoTracks[0], Symple.LocalMediaStreamId);

                        Messenger.Broadcast(SympleLog.CreatedMediaSource, source);

                        if (this.pc != null)
                        {
                            RTCSessionDescription desc = await this.pc.CreateOffer();

                            Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: offer: " + desc);
                            this._onLocalSDP(desc);

                            // store the active local stream
                            this.activeStream = _localStream;
                        }
                        else
                        {
                            Messenger.Broadcast(SympleLog.LogError, "peer connection was destroyed while trying to creat offer");
                        }
                    }
                    else
                    {
                        Messenger.Broadcast(SympleLog.LogError, "ERROR: No video track found locally");
                    }
                }
            }
        }
Example #8
        public async Task ConfigureRtcAsync()
        {
            var settings      = ApplicationData.Current.LocalSettings;
            var videoDeviceId = string.Empty;

            if (settings.Values.ContainsKey(MediaSettingsIds.VideoDeviceSettings))
            {
                videoDeviceId = (string)settings.Values[MediaSettingsIds.VideoDeviceSettings];
            }
            var videoDevices        = Media.GetVideoCaptureDevices();
            var selectedVideoDevice = videoDevices.FirstOrDefault(d => d.Id.Equals(videoDeviceId));

            selectedVideoDevice = selectedVideoDevice ?? videoDevices.FirstOrDefault();
            if (selectedVideoDevice != null)
            {
                Media.SelectVideoDevice(selectedVideoDevice);
            }

            if (settings.Values.ContainsKey(MediaSettingsIds.AudioDeviceSettings))
            {
                var audioDeviceId       = (string)settings.Values[MediaSettingsIds.AudioDeviceSettings];
                var audioDevices        = Media.GetAudioCaptureDevices();
                var selectedAudioDevice = audioDevices.FirstOrDefault(d => d.Id.Equals(audioDeviceId));
                if (selectedAudioDevice == null)
                {
                    settings.Values.Remove(MediaSettingsIds.AudioDeviceSettings);
                }
                Media.SelectAudioCaptureDevice(selectedAudioDevice);
            }
            else
            {
                Media.SelectAudioCaptureDevice(null);
            }

            if (settings.Values.ContainsKey(MediaSettingsIds.AudioPlayoutDeviceSettings))
            {
                var audioPlayoutDeviceId       = (string)settings.Values[MediaSettingsIds.AudioPlayoutDeviceSettings];
                var audioPlayoutDevices        = Media.GetAudioPlayoutDevices();
                var selectedAudioPlayoutDevice =
                    audioPlayoutDevices.FirstOrDefault(d => d.Id.Equals(audioPlayoutDeviceId));
                if (selectedAudioPlayoutDevice == null)
                {
                    settings.Values.Remove(MediaSettingsIds.AudioPlayoutDeviceSettings);
                }
                Media.SelectAudioPlayoutDevice(selectedAudioPlayoutDevice);
            }
            else
            {
                Media.SelectAudioPlayoutDevice(null);
            }

            var videoCodecId = int.MinValue;

            if (settings.Values.ContainsKey(MediaSettingsIds.VideoCodecSettings))
            {
                videoCodecId = (int)settings.Values[MediaSettingsIds.VideoCodecSettings];
            }
            var videoCodecs        = WebRTC.GetVideoCodecs();
            var selectedVideoCodec = videoCodecs.FirstOrDefault(c => c.Id.Equals(videoCodecId));
            await
            Hub.Instance.MediaSettingsChannel.SetVideoCodecAsync(
                (selectedVideoCodec ?? videoCodecs.FirstOrDefault()).ToDto());

            var audioCodecId = int.MinValue;

            if (settings.Values.ContainsKey(MediaSettingsIds.AudioCodecSettings))
            {
                audioCodecId = (int)settings.Values[MediaSettingsIds.AudioCodecSettings];
            }
            var audioCodecs        = WebRTC.GetAudioCodecs();
            var selectedAudioCodec = audioCodecs.FirstOrDefault(c => c.Id.Equals(audioCodecId));
            await
            Hub.Instance.MediaSettingsChannel.SetAudioCodecAsync(
                (selectedAudioCodec ?? audioCodecs.FirstOrDefault()).ToDto());

            if (settings.Values.ContainsKey(MediaSettingsIds.PreferredVideoCaptureWidth) &&
                settings.Values.ContainsKey(MediaSettingsIds.PreferredVideoCaptureHeight) &&
                settings.Values.ContainsKey(MediaSettingsIds.PreferredVideoCaptureFrameRate))
            {
                WebRTC.SetPreferredVideoCaptureFormat(
                    (int)settings.Values[MediaSettingsIds.PreferredVideoCaptureWidth],
                    (int)settings.Values[MediaSettingsIds.PreferredVideoCaptureHeight],
                    (int)settings.Values[MediaSettingsIds.PreferredVideoCaptureFrameRate]);
            }
        }
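ConfigureRtcAsync only applies a preferred capture format when all three settings keys are present; a sketch of seeding them once with the 640x480@30 fallback used in the first example on this page (the default values here are an assumption):

            var localSettings = ApplicationData.Current.LocalSettings;
            if (!localSettings.Values.ContainsKey(MediaSettingsIds.PreferredVideoCaptureWidth))
            {
                // Seed a conservative default once; ConfigureRtcAsync picks it up later.
                localSettings.Values[MediaSettingsIds.PreferredVideoCaptureWidth]     = 640;
                localSettings.Values[MediaSettingsIds.PreferredVideoCaptureHeight]    = 480;
                localSettings.Values[MediaSettingsIds.PreferredVideoCaptureFrameRate] = 30;
            }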
        /// <summary>
        /// On Win10, in a background task, WebRTC initialization has to be done
        /// when we have access to the resources, i.e. inside an active
        /// VoIP call.
        /// This function must be called after VoipCoordinator.StartVoipTask().
        /// </summary>
        /// <returns></returns>
        public void InitializeRTC()
        {
            if (Media == null)
            {
                WebRTC.Initialize(_dispatcher);
                Media = WebRTCMedia.CreateMedia();
                WebRTCMedia.SetDisplayOrientation(_displayOrientation);

                // Uncomment the following line to enable WebRTC logging.
                // Logs are:
                //  - Saved to local storage. Log folder location can be obtained using WebRTC.LogFolder()
                //  - Sent over network if client is connected to TCP port 47003
                //WebRTC.EnableLogging(LogLevel.LOGLVL_INFO);
            }

            if (DisplayOrientations.None != _displayOrientation)
            {
                WebRTCMedia.SetDisplayOrientation(_displayOrientation);
            }

            string videoDeviceId = string.Empty;

            if (_localSettings.Values.ContainsKey(MediaSettingsIds.VideoDeviceSettings))
            {
                videoDeviceId = (string)_localSettings.Values[MediaSettingsIds.VideoDeviceSettings];
            }
            var videoDevices        = Media.GetVideoCaptureDevices();
            var selectedVideoDevice = videoDevices.FirstOrDefault(d => d.Id.Equals(videoDeviceId));

            selectedVideoDevice = selectedVideoDevice ?? videoDevices.FirstOrDefault();
            if (selectedVideoDevice != null)
            {
                Media.SelectVideoDevice(selectedVideoDevice);
            }

            string audioDeviceId = string.Empty;

            if (_localSettings.Values.ContainsKey(MediaSettingsIds.AudioDeviceSettings))
            {
                audioDeviceId = (string)_localSettings.Values[MediaSettingsIds.AudioDeviceSettings];
            }
            var audioDevices        = Media.GetAudioCaptureDevices();
            var selectedAudioDevice = audioDevices.FirstOrDefault(d => d.Id.Equals(audioDeviceId));

            selectedAudioDevice = selectedAudioDevice ?? audioDevices.FirstOrDefault();
            if (selectedAudioDevice != null)
            {
                Media.SelectAudioDevice(selectedAudioDevice);
            }

            string audioPlayoutDeviceId = string.Empty;

            if (_localSettings.Values.ContainsKey(MediaSettingsIds.AudioPlayoutDeviceSettings))
            {
                audioPlayoutDeviceId = (string)_localSettings.Values[MediaSettingsIds.AudioPlayoutDeviceSettings];
            }
            var audioPlayoutDevices        = Media.GetAudioPlayoutDevices();
            var selectedAudioPlayoutDevice = audioPlayoutDevices.FirstOrDefault(d => d.Id.Equals(audioPlayoutDeviceId));

            selectedAudioPlayoutDevice = selectedAudioPlayoutDevice ?? audioPlayoutDevices.FirstOrDefault();
            if (selectedAudioPlayoutDevice != null)
            {
                Media.SelectAudioPlayoutDevice(selectedAudioPlayoutDevice);
            }

            int videoCodecId = int.MinValue;

            if (_localSettings.Values.ContainsKey(MediaSettingsIds.VideoCodecSettings))
            {
                videoCodecId = (int)_localSettings.Values[MediaSettingsIds.VideoCodecSettings];
            }
            var videoCodecs        = WebRTC.GetVideoCodecs();
            var selectedVideoCodec = videoCodecs.FirstOrDefault(c => c.Id.Equals(videoCodecId));

            SetVideoCodec(DtoExtensions.ToDto(selectedVideoCodec ?? videoCodecs.FirstOrDefault()));

            int audioCodecId = int.MinValue;

            if (_localSettings.Values.ContainsKey(MediaSettingsIds.AudioCodecSettings))
            {
                audioCodecId = (int)_localSettings.Values[MediaSettingsIds.AudioCodecSettings];
            }
            var audioCodecs        = WebRTC.GetAudioCodecs();
            var selectedAudioCodec = audioCodecs.FirstOrDefault(c => c.Id.Equals(audioCodecId));

            SetAudioCodec(DtoExtensions.ToDto(selectedAudioCodec ?? audioCodecs.FirstOrDefault()));

            if (_localSettings.Values.ContainsKey(MediaSettingsIds.PreferredVideoCaptureWidth) &&
                _localSettings.Values.ContainsKey(MediaSettingsIds.PreferredVideoCaptureHeight) &&
                _localSettings.Values.ContainsKey(MediaSettingsIds.PreferredVideoCaptureFrameRate))
            {
                WebRTC.SetPreferredVideoCaptureFormat((int)_localSettings.Values[MediaSettingsIds.PreferredVideoCaptureWidth],
                                                      (int)_localSettings.Values[MediaSettingsIds.PreferredVideoCaptureHeight],
                                                      (int)_localSettings.Values[MediaSettingsIds.PreferredVideoCaptureFrameRate]);
            }

            ResolutionHelper.ResolutionChanged += (id, width, height) =>
            {
                if (id == LocalMediaStreamId)
                {
                    LocalVideoRenderer.ResolutionChanged(width, height);
                }
                else if (id == PeerMediaStreamId)
                {
                    RemoteVideoRenderer.ResolutionChanged(width, height);
                }
            };

            FrameCounterHelper.FramesPerSecondChanged += (id, frameRate) =>
            {
                if (id == LocalMediaStreamId)
                {
                    LocalVideo_FrameRateUpdate(frameRate);
                }
                else if (id == PeerMediaStreamId)
                {
                    RemoteVideo_FrameRateUpdate(frameRate);
                }
            };
        }
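Per the summary comment above, InitializeRTC must run only after the VoIP task is active; a sketch of that ordering (the voipCoordinator and rtcController names, and whether StartVoipTask is awaitable, are assumptions):

            await voipCoordinator.StartVoipTask();   // acquire the VoIP background resources first
            rtcController.InitializeRTC();           // now WebRTC.Initialize and device selection can succeed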
        /// <summary>
        /// The initializer for MainViewModel.
        /// </summary>
        /// <param name="uiDispatcher">The UI dispatcher.</param>
        public void Initialize(CoreDispatcher uiDispatcher)
        {
            WebRTC.Initialize(uiDispatcher);

            // For the HoloLens
            if (_isHoloLens)
            {
                WebRTC.SetPreferredVideoCaptureFormat(896, 504, 30);
            }

            // Pick the codec
            var videoCodecs = WebRTC.GetVideoCodecs();

            foreach (var codec in videoCodecs)
            {
                if (codec.Name == "H264")
                {
                    Conductor.Instance.VideoCodec = codec;
                    break;
                }
            }

            // Pick the bitrate
            Conductor.Instance.VideoBitrate = 512;

            var settings = ApplicationData.Current.LocalSettings;

            // A Peer is connected to the server event handler
            Conductor.Instance.Signaller.OnPeerConnected += (peerId, peerName) =>
            {
                RunOnUiThread(() =>
                {
                    if (Peers == null)
                    {
                        Peers = new ObservableCollection<Peer>();
                        Conductor.Instance.Peers = Peers;
                    }
                    Peers.Add(new Peer {
                        Id = peerId, Name = peerName
                    });
                });
            };

            // A Peer is disconnected from the server event handler
            Conductor.Instance.Signaller.OnPeerDisconnected += peerId =>
            {
                RunOnUiThread(() =>
                {
                    var peerToRemove = Peers?.FirstOrDefault(p => p.Id == peerId);
                    if (peerToRemove != null)
                    {
                        Peers.Remove(peerToRemove);
                    }
                });
            };

            // The user is Signed in to the server event handler
            Conductor.Instance.Signaller.OnSignedIn += () =>
            {
                RunOnUiThread(() =>
                {
                    IsConnected         = true;
                    IsMicrophoneEnabled = true;
                    IsCameraEnabled     = true;
                    IsConnecting        = false;
                });
            };

            // Failed to connect to the server event handler
            Conductor.Instance.Signaller.OnServerConnectionFailure += () =>
            {
                RunOnUiThread(async () =>
                {
                    IsConnecting            = false;
                    MessageDialog msgDialog = new MessageDialog("Failed to connect to server!");
                    await msgDialog.ShowAsync();
                });
            };

            // The current user is disconnected from the server event handler
            Conductor.Instance.Signaller.OnDisconnected += () =>
            {
                RunOnUiThread(() =>
                {
                    IsConnected         = false;
                    IsMicrophoneEnabled = false;
                    IsCameraEnabled     = false;
                    IsDisconnecting     = false;
                    Peers?.Clear();
                });
            };

            LoadSettings();

            Connect();


            // Event handlers for managing the media streams

            Conductor.Instance.OnAddRemoteStream    += Conductor_OnAddRemoteStream;
            Conductor.Instance.OnRemoveRemoteStream += Conductor_OnRemoveRemoteStream;
            Conductor.Instance.OnAddLocalStream     += Conductor_OnAddLocalStream;

            /**
             * // Connected to a peer event handler
             * Conductor.Instance.OnPeerConnectionCreated += () =>
             * {
             *  RunOnUiThread(() =>
             *  {
             *      IsReadyToConnect = false;
             *      IsConnectedToPeer = true;
             *      if (SettingsButtonChecked)
             *      {
             *          // close settings screen if open
             *          SettingsButtonChecked = false;
             *          ScrollBarVisibilityType = ScrollBarVisibility.Disabled;
             *      }
             *      IsReadyToDisconnect = false;
             *      if (SettingsButtonChecked)
             *      {
             *          // close settings screen if open
             *          SettingsButtonChecked = false;
             *          ScrollBarVisibilityType = ScrollBarVisibility.Disabled;
             *      }
             *
             *      // Make sure the screen is always active while on call
             *      if (!_keepOnScreenRequested)
             *      {
             *          _keepScreenOnRequest.RequestActive();
             *          _keepOnScreenRequested = true;
             *      }
             *
             *      UpdateScrollBarVisibilityTypeHelper();
             *  });
             * };
             *
             * // Connection between the current user and a peer is closed event handler
             * Conductor.Instance.OnPeerConnectionClosed += () =>
             * {
             *  RunOnUiThread(() =>
             *  {
             *      IsConnectedToPeer = false;
             *      Conductor.Instance.Media.RemoveVideoTrackMediaElementPair(_peerVideoTrack);
             *      //PeerVideo.Source = null;
             *
             *      Conductor.Instance.Media.RemoveVideoTrackMediaElementPair(_selfVideoTrack);
             *      //SelfVideo.Stop();
             *      //SelfVideo.ClearValue(MediaElement.SourceProperty);
             *      //SelfVideo.Source = null;
             *
             *      _peerVideoTrack = null;
             *      _selfVideoTrack = null;
             *      GC.Collect(); // Ensure all references are truly dropped.
             *      IsMicrophoneEnabled = true;
             *      IsCameraEnabled = true;
             *      SelfVideoFps = PeerVideoFps = "";
             *
             *      // Make sure to allow the screen to be locked after the call
             *      if (_keepOnScreenRequested)
             *      {
             *          _keepScreenOnRequest.RequestRelease();
             *          _keepOnScreenRequested = false;
             *      }
             *      UpdateScrollBarVisibilityTypeHelper();
             *  });
             * };
             *
             * // Ready to connect to the server event handler
             * Conductor.Instance.OnReadyToConnect += () => { RunOnUiThread(() => { IsReadyToConnect = true; }); };
             *
             * // Initialize the Ice servers list
             * IceServers = new ObservableCollection<IceServer>();
             * NewIceServer = new IceServer();
             *
             * // Prepare to list supported audio codecs
             * AudioCodecs = new ObservableCollection<CodecInfo>();
             * var audioCodecList = WebRTC.GetAudioCodecs();
             *
             * // These are features added to existing codecs, they can't decode/encode real audio data so ignore them
             * string[] incompatibleAudioCodecs = new string[] { "CN32000", "CN16000", "CN8000", "red8000", "telephone-event8000" };
             *
             * // Prepare to list supported video codecs
             * VideoCodecs = new ObservableCollection<CodecInfo>();
             *
             * // Order the video codecs so that the stable VP8 is in front.
             * var videoCodecList = WebRTC.GetVideoCodecs().OrderBy(codec =>
             * {
             *  switch (codec.Name)
             *  {
             *      case "VP8": return 1;
             *      case "VP9": return 2;
             *      case "H264": return 3;
             *      default: return 99;
             *  }
             * });
             *
             * // Load the supported audio/video information into the Settings controls
             * RunOnUiThread(() =>
             * {
             *  foreach (var audioCodec in audioCodecList)
             *  {
             *      if (!incompatibleAudioCodecs.Contains(audioCodec.Name + audioCodec.ClockRate))
             *      {
             *          AudioCodecs.Add(audioCodec);
             *      }
             *  }
             *
             *  if (AudioCodecs.Count > 0)
             *  {
             *      if (settings.Values["SelectedAudioCodecId"] != null)
             *      {
             *
             *          int id = Convert.ToInt32(settings.Values["SelectedAudioCodecId"]);
             *
             *          foreach (var audioCodec in AudioCodecs)
             *          {
             *
             *              int audioCodecId = audioCodec.Id;
             *              if (audioCodecId == id)
             *              {
             *                  SelectedAudioCodec = audioCodec;
             *                  break;
             *              }
             *          }
             *      }
             *      if (SelectedAudioCodec == null)
             *      {
             *          SelectedAudioCodec = AudioCodecs.First();
             *      }
             *  }
             *
             *  foreach (var videoCodec in videoCodecList)
             *  {
             *      VideoCodecs.Add(videoCodec);
             *  }
             *
             *  if (VideoCodecs.Count > 0)
             *  {
             *      if (settings.Values["SelectedVideoCodecId"] != null)
             *      {
             *
             *          int id = Convert.ToInt32(settings.Values["SelectedVideoCodecId"]);
             *          foreach (var videoCodec in VideoCodecs)
             *          {
             *              int videoCodecId = videoCodec.Id;
             *              if (videoCodecId == id)
             *              {
             *                  SelectedVideoCodec = videoCodec;
             *                  break;
             *              }
             *          }
             *      }
             *      if (SelectedVideoCodec == null)
             *      {
             *          SelectedVideoCodec = VideoCodecs.First();
             *      }
             *  }
             * });
             * LoadSettings();
             * RunOnUiThread(() =>
             * {
             *  OnInitialized?.Invoke();
             * });*/
        }