Example #1
    private DelegateOnMessage onDataChannelMessage; // callback invoked when a message arrives on the data channel


    private void Awake()
    {
        WebRTC.Initialize();
        // Handle an incoming data-channel message.
        onDataChannelMessage = new DelegateOnMessage(bytes => {
            recieveText.text = System.Text.Encoding.UTF8.GetString(bytes);
            if (!_connectRTC)
            {
                SendMsg_data("Connected");
            }
            _connectRTC = true;
        });
        _matchingNCMB = GetComponent <MatchingNCMB>();
    }
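
Example #1 above calls WebRTC.Initialize() in Awake but the snippet never tears the plugin down; in the Unity samples further down this page (Examples #3 and #13) the call is paired with WebRTC.Dispose(). A minimal sketch of that pairing, assuming only the Unity.WebRTC package:

 using Unity.WebRTC;
 using UnityEngine;

 public class WebRtcLifecycle : MonoBehaviour
 {
     private void Awake()
     {
         // Load the native plugin once, before any other WebRTC call is made.
         WebRTC.Initialize();

         // Pump the video/encoder update loop for the lifetime of the scene
         // (the same coroutine Example #6 starts).
         StartCoroutine(WebRTC.Update());
     }

     private void OnDestroy()
     {
         // Release the native resources acquired in Awake.
         WebRTC.Dispose();
     }
 }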
Example #2
 private void Awake()
 {
     WebRTC.Initialize(WebRTCSettings.EncoderType, WebRTCSettings.LimitTextureSize);
      bandwidthSelector.options = bandwidthOptions
          .Select(pair => new Dropdown.OptionData { text = pair.Key })
          .ToList();
     bandwidthSelector.onValueChanged.AddListener(ChangeBandwitdh);
     callButton.onClick.AddListener(Call);
     hangUpButton.onClick.AddListener(HangUp);
     copyClipboard.onClick.AddListener(CopyClipboard);
     receiveStream = new MediaStream();
 }
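
Example #2 only wires the dropdown; the ChangeBandwitdh handler itself is not shown. A sketch of what it might do with the Unity.WebRTC sender API; the peer connection field _pc1 and the assumption that bandwidthOptions maps each label to a bitrate in kbps are illustrative, not taken from the snippet above:

     private void ChangeBandwitdh(int index)
     {
         // Assumed: bandwidthOptions maps the dropdown label to a kbps value (null = uncapped).
         ulong? bandwidth = bandwidthOptions.Values.ElementAt(index);

         // Assumed: _pc1 is the RTCPeerConnection whose outgoing video should be capped.
         foreach (RTCRtpSender sender in _pc1.GetSenders())
         {
             RTCRtpSendParameters parameters = sender.GetParameters();
             foreach (RTCRtpEncodingParameters encoding in parameters.encodings)
             {
                 encoding.maxBitrate = bandwidth * 1000UL; // kbps -> bps
             }
             sender.SetParameters(parameters);
         }
     }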
Example #3
        public static bool CheckVideoSendRecvCodecSupport(EncoderType encoderType)
        {
            WebRTC.Initialize(encoderType);
            var capabilitiesSenderCodec = RTCRtpSender.GetCapabilities(TrackKind.Video)
                                          .codecs
                                          .Select(x => x.mimeType)
                                          .Except(excludeCodecMimeType);
            var capabilitiesReceiverCodec = RTCRtpReceiver.GetCapabilities(TrackKind.Video)
                                            .codecs
                                            .Select(x => x.mimeType)
                                            .Except(excludeCodecMimeType);
            var isSupported = capabilitiesSenderCodec.Any(x => capabilitiesReceiverCodec.Contains(x));

            WebRTC.Dispose();
            return(isSupported);
        }
Example #4
 private void Awake()
 {
     WebRTC.Initialize(WebRTCSettings.EncoderType, WebRTCSettings.LimitTextureSize);
     callButton.onClick.AddListener(() =>
     {
         callButton.interactable   = false;
         hangupButton.interactable = true;
         StartCoroutine(Call());
     });
     hangupButton.onClick.AddListener(() =>
     {
         callButton.interactable   = true;
         hangupButton.interactable = false;
         Dispose();
     });
 }
Example #5
 private void Awake()
 {
     WebRTC.Initialize();
     callButton.onClick.AddListener(() =>
     {
         callButton.interactable   = false;
         hangupButton.interactable = true;
         StartCoroutine(Call());
     });
     hangupButton.onClick.AddListener(() =>
     {
         callButton.interactable   = true;
         hangupButton.interactable = false;
         Dispose();
     });
 }
Example #6
    private void InitializeWebRTC()
    {
        WebRTC.Initialize(EncoderType.Software);
        var configuration = GetSelectedSdpSemantics();

        pc = new RTCPeerConnection(ref configuration);
        pc.OnIceCandidate        = OnIceCandidate;
        pc.OnIceConnectionChange = OnIceConnectionChange;

        videoStreamTrack = cam.CaptureStreamTrack(width, height, 1000000);
        pc.AddTrack(videoStreamTrack);

        if (!videoUpdateStarted)
        {
            StartCoroutine(WebRTC.Update());
            videoUpdateStarted = true;
        }
    }
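
Example #6 stops after adding the camera track. The natural next step in the same Unity.WebRTC coroutine style is to create and apply the local SDP offer; a sketch assuming the same class, its pc field, and the usual System.Collections/UnityEngine usings (the signalling transport that would carry the offer is left out):

    private IEnumerator CreateOfferAndSetLocal()
    {
        // Ask the peer connection created in InitializeWebRTC() for an SDP offer.
        var offerOp = pc.CreateOffer();
        yield return offerOp;
        if (offerOp.IsError)
        {
            Debug.LogError(offerOp.Error.message);
            yield break;
        }

        // Apply it locally; offerOp.Desc.sdp is what would go to the remote peer.
        var desc = offerOp.Desc;
        var localOp = pc.SetLocalDescription(ref desc);
        yield return localOp;
        if (localOp.IsError)
        {
            Debug.LogError(localOp.Error.message);
        }
    }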
Example #7
        public SymplePlayerEngineWebRTC(SymplePlayer player) : base(player)
        {
            Messenger.Broadcast(SympleLog.LogInfo, "symple:webrtc: init");

#if NETFX_CORE
            if (!webrtcInitialized)
            {
                WebRTC.Initialize(null);    // needed before calling any webrtc functions http://stackoverflow.com/questions/43331677/webrtc-for-uwp-new-rtcpeerconnection-doesnt-complete-execution
                webrtcInitialized = true;
            }


            if (player.options.rtcConfig != null)
            {
                this.rtcConfig = player.options.rtcConfig;
            }
            else
            {
                this.rtcConfig = new RTCConfiguration();
                this.rtcConfig.IceServers.Add(new RTCIceServer()
                {
                    Url = "stun:stun.l.google.com:19302", Username = string.Empty, Credential = string.Empty
                });
            }
#endif

            /*
             * this.rtcOptions = player.options.rtcOptions || {
             *  optional: [
             *      { DtlsSrtpKeyAgreement: true } // required for FF <=> Chrome interop
             *  ]
             * };
             */

            // Specifies that this client will be the ICE initiator,
            // and will be sending the initial SDP Offer.
            this.initiator = player.options.initiator;
            Messenger.Broadcast(SympleLog.LogInfo, "symple:webrtc: constructor, set this.initiator to " + this.initiator);

#if NETFX_CORE
            // Reference to the active local or remote media stream
            this.activeStream = null;
#endif
        }
Example #8
 void Start()
 {
     webRTC.Initialize();
     if (PhotonNetwork.LocalPlayer.NickName == null || PhotonNetwork.LocalPlayer.NickName == "")
     {
         // clickToPlayPage.SetActive(true);
         clickToPlayPage.Open();
         // nameInputPage.Open();
     }
     else if (PhotonNetwork.CurrentRoom != null)
     {
         themeAudio.Play();
         lobbyPage.Open();
     }
     else
     {
         themeAudio.Play();
         joinRoomPage.Open();
     }
 }
Example #9
        public async Task InitialiseAsync(string localHostName, string remotePeerName)
        {
            if (!this.initialised)
            {
                this.initialised = true;

                this.hostName       = localHostName;
                this.remotePeerName = remotePeerName;

                // I find that if I don't do this before Initialize() then I crash.
                await WebRTC.RequestAccessForMediaCapture();

                // TODO: we need a dispatcher here.
                WebRTC.Initialize(dispatcherProvider.Dispatcher);

                await this.mediaManager.CreateAsync();

                await this.mediaManager.AddLocalStreamAsync(this.mediaManager.UserMedia);
            }
        }
Example #10
 private void Awake()
 {
     WebRTC.Initialize(WebRTCSettings.EncoderType, WebRTCSettings.LimitTextureSize);
     callButton.onClick.AddListener(Call);
     hangUpButton.onClick.AddListener(HangUp);
     addVideoObjectButton.onClick.AddListener(AddVideoObject);
     addTracksButton.onClick.AddListener(AddTracks);
     widthInput.onValueChanged.AddListener(w =>
     {
         if (!int.TryParse(w, out width))
         {
             width = DefaultWidth;
         }
     });
     heightInput.onValueChanged.AddListener(h =>
     {
         if (!int.TryParse(h, out height))
         {
             height = DefaultHeight;
         }
     });
 }
Example #11
        public async Task Initialize(ConductorConfig config)
        {
            if (config == null)
            {
                throw new ArgumentException();
            }

            this.coreDispatcher = config.CoreDispatcher ?? throw new ArgumentException();

            if (config.Signaller != null)
            {
                this.signaller = new WebRtcSignaller(
                    config.Signaller ?? throw new ArgumentException());

                this.signaller.ReceivedIceCandidate += this.signaller_ReceivedIceCandidate;
                this.signaller.ReceivedAnswer       += this.signaller_ReceivedAnswer;
                this.signaller.ReceivedOffer        += this.signaller_ReceivedOffer;
            }

            this.localVideo  = config.LocalVideo;
            this.remoteVideo = config.RemoteVideo;

            var allowed = await WebRTC.RequestAccessForMediaCapture();

            if (!allowed)
            {
                throw new Exception("Failed to access media for WebRtc...");
            }
            WebRTC.Initialize(this.coreDispatcher);


            this.MediaDevices   = this.getMediaDevices().ToList();
            this.selectedDevice = this.MediaDevices.First();

            this.CaptureProfiles = await this.getCaptureProfiles(this.selectedDevice);

            this.selectedProfile = this.CaptureProfiles.First();
        }
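
A sketch of how a caller might drive Example #11 from inside an async method, assuming ConductorConfig exposes settable properties with the names read above; the signaller and video sinks stand in for whatever the host app provides (all names below are illustrative):

            var config = new ConductorConfig
            {
                CoreDispatcher = CoreWindow.GetForCurrentThread().Dispatcher,
                Signaller      = mySignaller,        // the app's signalling client
                LocalVideo     = localVideoElement,  // e.g. a MediaElement for the self view
                RemoteVideo    = remoteVideoElement, // e.g. a MediaElement for the peer
            };
            await conductor.Initialize(config);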
Example #12
        public void EnsureRtcIsInitialized()
        {
            lock (_lock)
            {
                if (!_rtcIsInitialized)
                {
                    // On Windows 10, we don't need to use the CoreDispatcher.
                    // Pass null to initialize.
                    WebRTC.Initialize(null);
                    // Cache the media object for later use.
                    Media                        = WebRTCMedia.CreateMedia();
                    _rtcIsInitialized            = true;
                    Media.OnMediaDevicesChanged += OnMediaDevicesChanged;
                    WebRTCMedia.SetDisplayOrientation(_displayOrientation);

                    // Uncomment the following line to enable WebRTC logging.
                    // Logs are:
                    //  - Saved to local storage. Log folder location can be obtained using WebRTC.LogFolder()
                    //  - Sent over network if client is connected to TCP port 47003
                    //WebRTC.EnableLogging(LogLevel.LOGLVL_INFO);
                }
            }
        }
Example #13
        public static bool CheckVideoSendRecvCodecSupport(EncoderType encoderType)
        {
            // hardware encoder is not supported
            if (encoderType == EncoderType.Hardware &&
                !NativeMethods.GetHardwareEncoderSupport())
            {
                return(false);
            }

            WebRTC.Initialize(encoderType);
            var capabilitiesSenderCodec = RTCRtpSender.GetCapabilities(TrackKind.Video)
                                          .codecs
                                          .Select(x => x.mimeType)
                                          .Except(excludeCodecMimeType);
            var capabilitiesReceiverCodec = RTCRtpReceiver.GetCapabilities(TrackKind.Video)
                                            .codecs
                                            .Select(x => x.mimeType)
                                            .Except(excludeCodecMimeType);
            var isSupported = capabilitiesSenderCodec.Any(x => capabilitiesReceiverCodec.Contains(x));

            WebRTC.Dispose();
            return(isSupported);
        }
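
A sketch of how a test fixture might consume this helper to skip hardware-only runs (assuming NUnit, as in Example #25, and access to the same excludeCodecMimeType list):

        [SetUp]
        public void SetUp()
        {
            if (!CheckVideoSendRecvCodecSupport(EncoderType.Hardware))
            {
                Assert.Ignore("Hardware video encode/decode is not supported on this machine.");
            }
            WebRTC.Initialize(EncoderType.Hardware);
        }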
Example #14
        public MainPage()
        {
            this.InitializeComponent();
            WebRTC.Initialize(this.Dispatcher);

            Random R = new Random();

            Uid         = R.Next(1000, 10000);
            uidTbk.Text = "Uid: " + Uid.ToString();
            CurrentRoom = new Room
            {
                Id    = 10001,
                Uid   = Uid,
                Recvs = new Dictionary <long, RTCPeerConnection>()
            };

            Candidates = new List <SendCadidatModel>();
            var iceServers = new List <RTCIceServer>()
            {
                new RTCIceServer {
                    Url = "stun:stun.ideasip.com"
                },
                new RTCIceServer {
                    Url = "stun:stun.voipgate.com:3478"
                }
            };

            RtcConfig = new RTCConfiguration()
            {
                BundlePolicy       = RTCBundlePolicy.Balanced,
                IceServers         = iceServers,
                IceTransportPolicy = RTCIceTransportPolicy.All,
            };

            //var test = Http.GetAsync("Test", "").Result;
            //Debug.WriteLine(test);
        }
Example #15
        public async Task Initialize(CoreDispatcher coreDispatcher)
        {
            if (this.media != null || this.mediaStream != null)
            {
                throw new Exception("Media lock is alreay initialized.");
            }

            var allowed = await WebRTC.RequestAccessForMediaCapture();

            if (!allowed)
            {
                throw new Exception("Failed to access media for WebRtc...");
            }

            WebRTC.Initialize(coreDispatcher);

            this.media = Media.CreateMedia();

            var videoDevice  = this.media.GetVideoCaptureDevices().First();
            var capabilities = await videoDevice.GetVideoCaptureCapabilities();

            var selectedFormat = capabilities
                                 .OrderBy(cap => cap.Width * cap.Height * cap.FrameRate)
                                 .FirstOrDefault();

            if (selectedFormat != null)
            {
                WebRTC.SetPreferredVideoCaptureFormat(
                    (int)selectedFormat.Width,
                    (int)selectedFormat.Height,
                    (int)selectedFormat.FrameRate,
                    selectedFormat.MrcEnabled
                    );
            }

            this.mediaStream = await this.media.GetUserMedia(this.Constraints);
        }
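
Example #15 deliberately selects the cheapest capture format (smallest width x height x frame rate product). If the richest available format were wanted instead, only the ordering changes; a sketch using the same capabilities variable as above:

            var selectedFormat = capabilities
                                 .OrderByDescending(cap => cap.Width * cap.Height * cap.FrameRate)
                                 .FirstOrDefault();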
Example #16
 private void Awake()
 {
     WebRTC.Initialize(WebRTCSettings.EncoderType, WebRTCSettings.LimitTextureSize);
 }
Example #17
 public void SetUp()
 {
     WebRTC.Initialize(encoderType);
 }
Example #18
 public void SetUp()
 {
     WebRTC.Initialize(type: encoderType, limitTextureSize: true, forTest: true);
 }
Example #19
        public void SetUp()
        {
            var value = TestHelper.HardwareCodecSupport();

            WebRTC.Initialize(value ? EncoderType.Hardware : EncoderType.Software);
        }
Example #20
        public void Initialize()
        {
            WebRTC.Initialize(_uiDispatcher);
            Conductor.Instance.ETWStatsEnabled = false;

            Cameras             = new ObservableCollection <MediaDevice>();
            Microphones         = new ObservableCollection <MediaDevice>();
            AudioPlayoutDevices = new ObservableCollection <MediaDevice>();

            // WebRTCUWP M58 library does not support audio capture/playout devices
            //foreach (MediaDevice audioCaptureDevice in Conductor.Instance.Media.GetAudioCaptureDevices())
            //{
            //    Microphones.Add(audioCaptureDevice);
            //}

            //foreach (MediaDevice audioPlayoutDevice in Conductor.Instance.Media.GetAudioPlayoutDevices())
            //{
            //    AudioPlayoutDevices.Add(audioPlayoutDevice);
            //}

            // HACK Remove Automatic Device Assignment
            if (SelectedCamera == null && Cameras.Count > 0)
            {
                SelectedCamera = Cameras.First();
            }

            if (SelectedMicrophone == null && Microphones.Count > 0)
            {
                SelectedMicrophone = Microphones.First();
            }

            Debug.WriteLine("Device Status: SelectedCamera: {0} - SelectedMic: {1}", SelectedCamera == null ? "NULL" : "OK", SelectedMicrophone == null ? "NULL" : "OK");
            if (SelectedAudioPlayoutDevice == null && AudioPlayoutDevices.Count > 0)
            {
                SelectedAudioPlayoutDevice = AudioPlayoutDevices.First();
            }

            Conductor.Instance.Media.OnMediaDevicesChanged += OnMediaDevicesChanged;
            Conductor.Instance.Signaller.OnPeerConnected   += (peerId, peerName) =>
            {
                RunOnUiThread(() =>
                {
                    if (Peers == null)
                    {
                        Peers = new ObservableCollection <Peer>();
                        Conductor.Instance.Peers = Peers;
                    }

                    Peers.Add(new Peer {
                        Id = peerId, Name = peerName
                    });
                });
            };

            Conductor.Instance.Signaller.OnPeerDisconnected += peerId =>
            {
                RunOnUiThread(() =>
                {
                    var peerToRemove = Peers?.FirstOrDefault(p => p.Id == peerId);
                    if (peerToRemove != null)
                    {
                        Peers.Remove(peerToRemove);
                    }
                });
            };

            Conductor.Instance.Signaller.OnSignedIn += () =>
            {
                RunOnUiThread(() =>
                {
                    IsConnected         = true;
                    IsMicrophoneEnabled = false;
                    IsCameraEnabled     = false;
                    IsConnecting        = false;

                    OnStatusMessageUpdate?.Invoke("Signed-In");
                });
            };

            Conductor.Instance.Signaller.OnServerConnectionFailure += (Exception ex) =>
            {
                RunOnUiThread(() =>
                {
                    IsConnecting = false;
                    OnStatusMessageUpdate?.Invoke("Server Connection Failure: " + ex.Message + "\n" + ex.StackTrace);
                });
            };

            Conductor.Instance.Signaller.OnDisconnected += () =>
            {
                RunOnUiThread(() =>
                {
                    IsConnected         = false;
                    IsMicrophoneEnabled = false;
                    IsCameraEnabled     = false;
                    IsDisconnecting     = false;
                    Peers?.Clear();
                    OnStatusMessageUpdate?.Invoke("Disconnected");
                });
            };

            Conductor.Instance.Signaller.OnMessageFromPeer += (id, message) =>
            {
                RunOnUiThread(() =>
                {
                    // TODO: Handles All Peer Messages (Signal Channel)
                });
            };

            Conductor.Instance.Signaller.OnPeerConnected += (id, name) =>
            {
                RunOnUiThread(() =>
                {
                    SelectedPeer = Peers.First(x => x.Id == id);
                    OnStatusMessageUpdate?.Invoke(string.Format("Connected Peer: {0}-{1}", SelectedPeer.Id, SelectedPeer.Name));
                });
            };

            // TODO: Restore Event Handler in Utility Wrapper
            // Implemented in Unity Consumer due to Event Handling Issue
            // Conductor.Instance.OnAddRemoteStream += Conductor_OnAddRemoteStream does not propagate

            Conductor.Instance.OnRemoveRemoteStream    += Conductor_OnRemoveRemoteStream;
            Conductor.Instance.OnAddLocalStream        += Conductor_OnAddLocalStream;
            Conductor.Instance.OnConnectionHealthStats += Conductor_OnPeerConnectionHealthStats;
            Conductor.Instance.OnPeerConnectionCreated += () =>
            {
                RunOnUiThread(() =>
                {
                    IsReadyToConnect    = false;
                    IsConnectedToPeer   = true;
                    IsReadyToDisconnect = false;
                    IsMicrophoneEnabled = false;
                    OnStatusMessageUpdate?.Invoke("Peer Connection Created");
                });
            };

            Conductor.Instance.OnPeerConnectionClosed += () =>
            {
                RunOnUiThread(() =>
                {
                    IsConnectedToPeer   = false;
                    _peerVideoTrack     = null;
                    _selfVideoTrack     = null;
                    IsMicrophoneEnabled = false;
                    IsCameraEnabled     = false;

                    // TODO: Clean-up References
                    //GC.Collect(); // Ensure all references are truly dropped.

                    OnStatusMessageUpdate?.Invoke("Peer Connection Closed");
                });
            };

            Conductor.Instance.OnPeerMessageDataReceived += (peerId, message) =>
            {
                OnPeerMessageDataReceived?.Invoke(peerId, message);
            };

            // DATA Channel Setup
            Conductor.Instance.OnPeerMessageDataReceived += (i, s) =>
            {
            };

            Conductor.Instance.OnReadyToConnect += () => { RunOnUiThread(() => { IsReadyToConnect = true; }); };

            IceServers   = new ObservableCollection <IceServer>();
            NewIceServer = new IceServer();
            AudioCodecs  = new ObservableCollection <CodecInfo>();
            var audioCodecList = WebRTC.GetAudioCodecs();

            string[] incompatibleAudioCodecs = new string[] { "CN32000", "CN16000", "CN8000", "red8000", "telephone-event8000" };
            VideoCodecs = new ObservableCollection <CodecInfo>();

            // TODO: REMOVE DISPLAY LIST SUPPORT
            var videoCodecList = WebRTC.GetVideoCodecs().OrderBy(codec =>
            {
                switch (codec.Name)
                {
                case "VP8": return(1);

                case "VP9": return(2);

                case "H264": return(3);

                default: return(99);
                }
            });

            RunOnUiThread(() =>
            {
                foreach (var audioCodec in audioCodecList)
                {
                    if (!incompatibleAudioCodecs.Contains(audioCodec.Name + audioCodec.ClockRate))
                    {
                        AudioCodecs.Add(audioCodec);
                    }
                }

                if (AudioCodecs.Count > 0)
                {
                    SelectedAudioCodec = AudioCodecs.FirstOrDefault(x => x.Name.Contains("PCMU"));
                }

                foreach (var videoCodec in videoCodecList)
                {
                    VideoCodecs.Add(videoCodec);
                }

                if (VideoCodecs.Count > 0)
                {
                    SelectedVideoCodec = VideoCodecs.FirstOrDefault(x => x.Name.Contains("H264"));
                }
            });

            RunOnUiThread(() =>
            {
                OnInitialized?.Invoke();
            });
        }
Example #21
 public void SetUp()
 {
     WebRTC.Initialize(true);
 }
Example #22
 private void Awake()
 {
     WebRTC.Initialize();
     callButton.onClick.AddListener(() => { StartCoroutine(Call()); });
 }
Example #23
 private void Awake()
 {
     WebRTC.Initialize(WebRTCSettings.EncoderType, WebRTCSettings.LimitTextureSize);
     callButton.onClick.AddListener(() => { StartCoroutine(Call()); });
     hangupButton.onClick.AddListener(() => { Hangup(); });
 }
Example #24
 private void Awake()
 {
     WebRTC.Initialize(EncoderType.Software);
 }
Example #25
 public void InitializeTwiceThrowException()
 {
     Assert.That(() => WebRTC.Initialize(), Throws.InvalidOperationException);
 }
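
Example #25 documents that a second Initialize() call throws InvalidOperationException; callers elsewhere on this page (Examples #7, #12 and #28) avoid that with a run-once flag. A minimal thread-safe version of that guard (the names below are illustrative, not taken from any of the snippets):

     private static readonly object s_initLock = new object();
     private static bool s_initialized;

     public static void EnsureWebRtcInitialized()
     {
         lock (s_initLock)
         {
             if (s_initialized)
             {
                 return;
             }
             WebRTC.Initialize();
             s_initialized = true;
         }
     }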
Example #26
 public void SetUp()
 {
     WebRTC.Initialize(EncoderType.Software);
 }
Example #27
        public void SetUp()
        {
            var value = NativeMethods.GetHardwareEncoderSupport();

            WebRTC.Initialize(value ? EncoderType.Hardware : EncoderType.Software);
        }
Example #28
        public async void testStartVideoLocal()
        {
            Messenger.Broadcast(SympleLog.LogDebug, "basicTestVideo()");

            if (!webrtcInitialized)
            {
                WebRTC.Initialize(null);    // needed before calling any webrtc functions http://stackoverflow.com/questions/43331677/webrtc-for-uwp-new-rtcpeerconnection-doesnt-complete-execution
                webrtcInitialized = true;
            }

            Messenger.Broadcast(SympleLog.LogDebug, "creating media");

            if (_media == null)
            {
                _media = Media.CreateMedia();
                _media.OnMediaDevicesChanged += (MediaDeviceType mediaType) =>
                {
                    Messenger.Broadcast(SympleLog.LogDebug, "OnMediaDevicesChanged(), mediaType = " + mediaType);
                };
            }

            Messenger.Broadcast(SympleLog.LogDebug, "created media");

            var videoCaptureDevices = _media.GetVideoCaptureDevices();

            Messenger.Broadcast(SympleLog.LogDebug, "num videoCaptureDevices: " + videoCaptureDevices.Count);

            var videoDevice = videoCaptureDevices[0];

            Messenger.Broadcast(SympleLog.LogDebug, "getting videoCaptureCapabilities");
            var videoCaptureCapabilities = await videoDevice.GetVideoCaptureCapabilities();

            Messenger.Broadcast(SympleLog.LogDebug, "got videoCaptureCapabilities");

            var chosenCapability = videoCaptureCapabilities[0];

            Messenger.Broadcast(SympleLog.LogDebug, "chosenCapability:");
            Messenger.Broadcast(SympleLog.LogDebug, "\tWidth: " + (int)chosenCapability.Width);
            Messenger.Broadcast(SympleLog.LogDebug, "\tHeight: " + (int)chosenCapability.Height);
            Messenger.Broadcast(SympleLog.LogDebug, "\tFrameRate: " + (int)chosenCapability.FrameRate);
            WebRTC.SetPreferredVideoCaptureFormat((int)chosenCapability.Width, (int)chosenCapability.Height, (int)chosenCapability.FrameRate);

            Messenger.Broadcast(SympleLog.LogDebug, "getting usermedia");
            if (_localStream == null)
            {
                _localStream = await _media.GetUserMedia(new RTCMediaStreamConstraints { videoEnabled = true, audioEnabled = true });
            }

            Messenger.Broadcast(SympleLog.LogDebug, "got usermedia");

            Messenger.Broadcast(SympleLog.LogDebug, "localStream id: " + _localStream.Id);
            Messenger.Broadcast(SympleLog.LogDebug, "localStream Active?: " + _localStream.Active);

            var videoTracks = _localStream.GetVideoTracks();

            Messenger.Broadcast(SympleLog.LogDebug, "num videoTracks: " + videoTracks.Count);

            _selectedVideoTrack = videoTracks[0];

            Messenger.Broadcast(SympleLog.LogDebug, "selected video track id: " + _selectedVideoTrack.Id);
            Messenger.Broadcast(SympleLog.LogDebug, "selected video track suspended?: " + _selectedVideoTrack.Suspended);
            Messenger.Broadcast(SympleLog.LogDebug, "selected video track enabled?: " + _selectedVideoTrack.Enabled);

            var source = _media.CreateMediaSource(_selectedVideoTrack, Symple.LocalMediaStreamId);

            Messenger.Broadcast(SympleLog.LogDebug, "created mediasource");

            Messenger.Broadcast(SympleLog.CreatedMediaSource, source);
        }
Example #29
        public void SetUp()
        {
            var type = TestHelper.HardwareCodecSupport() ? EncoderType.Hardware : EncoderType.Software;

            WebRTC.Initialize(type: type, limitTextureSize: true, forTest: true);
        }
Example #30
        public SymplePlayerEngineWebRTC(SymplePlayer player) : base(player)
        {
            Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: init");

#if NETFX_CORE
            if (!webrtcInitialized)
            {
                // needed before calling any webrtc functions http://stackoverflow.com/questions/43331677/webrtc-for-uwp-new-rtcpeerconnection-doesnt-complete-execution
                if (player.options.CoreDispatcher != null)
                {
                    WebRTC.Initialize(player.options.CoreDispatcher);
                }
                else
                {
                    WebRTC.Initialize(null);
                }


                WebRTC.EnableLogging(LogLevel.LOGLVL_ERROR);
                WebRTC.EnableLogging(LogLevel.LOGLVL_INFO);
                WebRTC.EnableLogging(LogLevel.LOGLVL_SENSITIVE);
                WebRTC.EnableLogging(LogLevel.LOGLVL_VERBOSE);
                WebRTC.EnableLogging(LogLevel.LOGLVL_WARNING);
                Messenger.Broadcast(SympleLog.LogInfo, "WebRTC logging enabled, log folder = " + WebRTC.LogFolder.Path + ", filename = " + WebRTC.LogFileName);
                webrtcInitialized = true;
            }

            this.userMediaConstraints = player.options.userMediaConstraints;

            if (player.options.rtcConfig != null)
            {
                this.rtcConfig = player.options.rtcConfig;
            }
            else
            {
                this.rtcConfig = new RTCConfiguration();
                this.rtcConfig.IceServers.Add(new RTCIceServer()
                {
                    Url = "stun:stun.l.google.com:19302", Username = string.Empty, Credential = string.Empty
                });
            }
#endif

            /*
             * this.rtcOptions = player.options.rtcOptions || {
             *  optional: [
             *      { DtlsSrtpKeyAgreement: true } // required for FF <=> Chrome interop
             *  ]
             * };
             */

            // Specifies that this client will be the ICE initiator,
            // and will be sending the initial SDP Offer.
            this.initiator = player.options.initiator;
            Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: constructor, set this.initiator to " + this.initiator);

#if NETFX_CORE
            // Reference to the active local or remote media stream
            this.activeStream = null;
#endif
        }