Example #1
        private void Instance_OnAddRemoteStream(MediaStreamEvent evt)
        {
            RunOnUiThread(() =>
            {
                System.Diagnostics.Debug.WriteLine("Conductor_OnAddRemoteStream()");

                _peerVideoTrack = evt.Stream.GetVideoTracks().FirstOrDefault();
                if (_peerVideoTrack != null)
                {
                    System.Diagnostics.Debug.WriteLine(
                        "Conductor_OnAddRemoteStream() - GetVideoTracks: {0}",
                        evt.Stream.GetVideoTracks().Count);
                    // Raw Video from VP8 Encoded Sender
                    // H264 Encoded Stream does not trigger this event

                    // TODO:  Switch between RAW or ENCODED Frame
#if HACK_VP8
                    rawVideo = Media.CreateMedia().CreateRawVideoSource(_peerVideoTrack);
                    rawVideo.OnRawVideoFrame += Source_OnRawVideoFrame;
#else
                    encodedVideo = Media.CreateMedia().CreateEncodedVideoSource(_peerVideoTrack);
                    encodedVideo.OnEncodedVideoFrame += EncodedVideo_OnEncodedVideoFrame;
#endif
                }
                else
                {
                    System.Diagnostics.Debug.WriteLine("Conductor_OnAddRemoteStream() - peerVideoTrack NULL");
                }
                _webRtcControl.IsReadyToDisconnect = true;
            });
        }
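The handler above is only the callback; the snippet does not show it being attached to the conductor. A minimal wiring sketch, following the event-subscription pattern of Example #8 (the helper name SetupConductorEvents is an assumption):

        private void SetupConductorEvents()
        {
            // Hypothetical helper: attach the handler shown above, mirroring the
            // Conductor.Instance.On... subscriptions used in Example #8.
            Conductor.Instance.OnAddRemoteStream += Instance_OnAddRemoteStream;
        }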
Example #2
        /// <summary>
        /// Add local stream event handler.
        /// </summary>
        /// <param name="evt">Details about Media stream event.</param>
        private void Conductor_OnAddLocalStream(MediaStreamEvent evt)
        {
            _selfVideoTrack = evt.Stream.GetVideoTracks().FirstOrDefault();
            if (_selfVideoTrack != null)
            {
                //var source = Media.CreateMedia().CreateMediaSource(_selfVideoTrack, "SELF");
                RunOnUiThread(() =>
                {
                    if (_cameraEnabled)
                    {
                        Conductor.Instance.EnableLocalVideoStream();
                    }
                    else
                    {
                        Conductor.Instance.DisableLocalVideoStream();
                    }

                    if (_microphoneIsOn)
                    {
                        Conductor.Instance.UnmuteMicrophone();
                    }
                    else
                    {
                        Conductor.Instance.MuteMicrophone();
                    }
                });
            }
        }
Example #3
    public void CreateRemoteMediaStreamSource(object track, string type, string id)
    {
#if !UNITY_EDITOR
        MediaVideoTrack videoTrack = (MediaVideoTrack)track;
        var             source     = Media.CreateMedia().CreateMediaStreamSource(videoTrack, type, id);
        Plugin.LoadRemoteMediaStreamSource((MediaStreamSource)source);
#endif
    }
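A usage sketch for the method above: calling it from a remote-stream handler like the ones in the other examples, handing over the first remote video track. The handler shape and the "H264"/"PEER" arguments are illustrative assumptions, not part of the original snippet.

    private void Conductor_OnAddRemoteStream(MediaStreamEvent evt)
    {
        // Take the first remote video track, as the other examples do,
        // and pass it to the Unity-side plugin via CreateRemoteMediaStreamSource.
        var peerVideoTrack = evt.Stream.GetVideoTracks().FirstOrDefault();
        if (peerVideoTrack != null)
        {
            CreateRemoteMediaStreamSource(peerVideoTrack, "H264", "PEER");
        }
    }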
Example #4
 private void Conductor_OnAddRemoteStream(MediaStreamEvent evt)
 {
     _peerVideoTrack = evt.Stream.GetVideoTracks().FirstOrDefault();
     if (_peerVideoTrack != null)
     {
         _decodedVideo = Media.CreateMedia().CreateDecodedVideoSource(_peerVideoTrack);
         _decodedVideo.OnDecodedVideoFrame += Source_OnDecodedVideoFrame;
     }
 }
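For symmetry, a minimal sketch of the matching remove handler, modeled on the clean-up done in OnPeerConnectionClosed in Example #8; the event signature and the unsubscription of the frame callback are assumptions:

 private void Conductor_OnRemoveRemoteStream(MediaStreamEvent evt)
 {
     // Detach the frame callback and drop the references created in Conductor_OnAddRemoteStream.
     if (_decodedVideo != null)
     {
         _decodedVideo.OnDecodedVideoFrame -= Source_OnDecodedVideoFrame;
         _decodedVideo = null;
     }
     _peerVideoTrack = null;
 }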
Example #5
 private void Conductor_OnAddRemoteStream(MediaStreamEvent evt)
 {
     System.Diagnostics.Debug.WriteLine("Conductor_OnAddRemoteStream()");
     _peerVideoTrack = evt.Stream.GetVideoTracks().FirstOrDefault();
     if (_peerVideoTrack != null)
     {
         System.Diagnostics.Debug.WriteLine(
             "Conductor_OnAddRemoteStream() - GetVideoTracks: {0}",
             evt.Stream.GetVideoTracks().Count);
     }
     else
     {
         System.Diagnostics.Debug.WriteLine("Conductor_OnAddRemoteStream() - peerVideoTrack NULL");
     }
     _webRtcControl.IsReadyToDisconnect = true;
 }
Example #6
        private void Conductor_OnAddLocalStream(MediaStreamEvent evt)
        {
            if (evt == null)
            {
                var msg = "Conductor_OnAddLocalStream--media stream NULL";
                Debug.WriteLine(msg);
                OnStatusMessageUpdate?.Invoke(msg);
                return;
            }
            _selfVideoTrack = evt.Stream.GetVideoTracks().FirstOrDefault();
            //if ((_selfVideoTrack != null) && (SelectedCamera != null))
            if (_selfVideoTrack != null)
            {
                Debug.WriteLine("selfVideoTrack Setup-IsCameraEnabled:{0}-IsMicrophoneEnabled:{1}", IsCameraEnabled, IsMicrophoneEnabled);
                RunOnUiThread(() =>
                {
                    if (IsCameraEnabled)
                    {
                        Conductor.Instance.EnableLocalVideoStream();
                    }
                    else
                    {
                        Conductor.Instance.DisableLocalVideoStream();
                    }

                    if (IsMicrophoneEnabled)
                    {
                        Conductor.Instance.UnmuteMicrophone();
                    }
                    else
                    {
                        Conductor.Instance.MuteMicrophone();
                    }

                    // TODO: Handle Video Loopback
                    //if ((VideoLoopbackEnabled) && (SelfVideo != null))
                    //{
                    //    var source = Media.CreateMedia().CreateMediaSource(_selfVideoTrack, "SELF");
                    //    SelfVideo.SetMediaStreamSource(source);
                    //}
                });
            }
            else
            {
                Debug.WriteLine("selfVideoTrack NULL");
            }
        }
Example #7
        /// <summary>
        /// Add remote stream event handler.
        /// </summary>
        /// <param name="evt">Details about Media stream event.</param>
        private void Conductor_OnAddRemoteStream(MediaStreamEvent evt)
        {
            _peerVideoTrack = evt.Stream.GetVideoTracks().FirstOrDefault();
            if (_peerVideoTrack != null)
            {
                //if (!_isHoloLens)
                //{
                //    Conductor.Instance.Media.AddVideoTrackMediaElementPair(_peerVideoTrack, PeerVideo, "PEER");
                //}

                //var source = Media.CreateMedia().CreateMediaSource(_peerVideoTrack, "PEER");
                //RunOnUiThread(() =>
                //{
                //  PeerVideo.SetMediaStreamSource(source);
                //});

                // Pass the spatial coordinate system to webrtc.
                var spatialCoordinateSystem = UrhoAppView.Current.ReferenceFrame.CoordinateSystem;
                Media.SetSpatialCoordinateSystem(spatialCoordinateSystem);
            }

            IsReadyToDisconnect = true;
        }
Example #8
        public void Initialize()
        {
            WebRTC.Initialize(_uiDispatcher);
            Conductor.Instance.ETWStatsEnabled = false;

            Cameras             = new ObservableCollection <MediaDevice>();
            Microphones         = new ObservableCollection <MediaDevice>();
            AudioPlayoutDevices = new ObservableCollection <MediaDevice>();

            // WebRTCUWP M58 library does not support audio capture/playout devices
            //foreach (MediaDevice audioCaptureDevice in Conductor.Instance.Media.GetAudioCaptureDevices())
            //{
            //    Microphones.Add(audioCaptureDevice);
            //}

            //foreach (MediaDevice audioPlayoutDevice in Conductor.Instance.Media.GetAudioPlayoutDevices())
            //{
            //    AudioPlayoutDevices.Add(audioPlayoutDevice);
            //}

            // HACK Remove Automatic Device Assignment
            if (SelectedCamera == null && Cameras.Count > 0)
            {
                SelectedCamera = Cameras.First();
            }

            if (SelectedMicrophone == null && Microphones.Count > 0)
            {
                SelectedMicrophone = Microphones.First();
            }

            Debug.WriteLine("Device Status: SelectedCamera: {0} - SelectedMic: {1}", SelectedCamera == null ? "NULL" : "OK", SelectedMicrophone == null ? "NULL" : "OK");
            if (SelectedAudioPlayoutDevice == null && AudioPlayoutDevices.Count > 0)
            {
                SelectedAudioPlayoutDevice = AudioPlayoutDevices.First();
            }

            Conductor.Instance.Media.OnMediaDevicesChanged += OnMediaDevicesChanged;
            Conductor.Instance.Signaller.OnPeerConnected   += (peerId, peerName) =>
            {
                RunOnUiThread(() =>
                {
                    if (Peers == null)
                    {
                        Peers = new ObservableCollection <Peer>();
                        Conductor.Instance.Peers = Peers;
                    }

                    Peers.Add(new Peer {
                        Id = peerId, Name = peerName
                    });
                });
            };

            Conductor.Instance.Signaller.OnPeerDisconnected += peerId =>
            {
                RunOnUiThread(() =>
                {
                    var peerToRemove = Peers?.FirstOrDefault(p => p.Id == peerId);
                    if (peerToRemove != null)
                    {
                        Peers.Remove(peerToRemove);
                    }
                });
            };

            Conductor.Instance.Signaller.OnSignedIn += () =>
            {
                RunOnUiThread(() =>
                {
                    IsConnected         = true;
                    IsMicrophoneEnabled = false;
                    IsCameraEnabled     = false;
                    IsConnecting        = false;

                    OnStatusMessageUpdate?.Invoke("Signed-In");
                });
            };

            Conductor.Instance.Signaller.OnServerConnectionFailure += (Exception ex) =>
            {
                RunOnUiThread(() =>
                {
                    IsConnecting = false;
                    OnStatusMessageUpdate?.Invoke("Server Connection Failure: " + ex.Message + "\n" + ex.StackTrace);
                });
            };

            Conductor.Instance.Signaller.OnDisconnected += () =>
            {
                RunOnUiThread(() =>
                {
                    IsConnected         = false;
                    IsMicrophoneEnabled = false;
                    IsCameraEnabled     = false;
                    IsDisconnecting     = false;
                    Peers?.Clear();
                    OnStatusMessageUpdate?.Invoke("Disconnected");
                });
            };

            Conductor.Instance.Signaller.OnMessageFromPeer += (id, message) =>
            {
                RunOnUiThread(() =>
                {
                    // TODO: Handles All Peer Messages (Signal Channel)
                });
            };

            Conductor.Instance.Signaller.OnPeerConnected += (id, name) =>
            {
                RunOnUiThread(() =>
                {
                    SelectedPeer = Peers.First(x => x.Id == id);
                    OnStatusMessageUpdate?.Invoke(string.Format("Connected Peer: {0}-{1}", SelectedPeer.Id, SelectedPeer.Name));
                });
            };

            // TODO: Restore Event Handler in Utility Wrapper
            // Implemented in Unity Consumer due to Event Handling Issue
            // Conductor.Instance.OnAddRemoteStream += Conductor_OnAddRemoteStream does not propagate

            Conductor.Instance.OnRemoveRemoteStream    += Conductor_OnRemoveRemoteStream;
            Conductor.Instance.OnAddLocalStream        += Conductor_OnAddLocalStream;
            Conductor.Instance.OnConnectionHealthStats += Conductor_OnPeerConnectionHealthStats;
            Conductor.Instance.OnPeerConnectionCreated += () =>
            {
                RunOnUiThread(() =>
                {
                    IsReadyToConnect    = false;
                    IsConnectedToPeer   = true;
                    IsReadyToDisconnect = false;
                    IsMicrophoneEnabled = false;
                    OnStatusMessageUpdate?.Invoke("Peer Connection Created");
                });
            };

            Conductor.Instance.OnPeerConnectionClosed += () =>
            {
                RunOnUiThread(() =>
                {
                    IsConnectedToPeer   = false;
                    _peerVideoTrack     = null;
                    _selfVideoTrack     = null;
                    IsMicrophoneEnabled = false;
                    IsCameraEnabled     = false;

                    // TODO: Clean-up References
                    //GC.Collect(); // Ensure all references are truly dropped.

                    OnStatusMessageUpdate?.Invoke("Peer Connection Closed");
                });
            };

            Conductor.Instance.OnPeerMessageDataReceived += (peerId, message) =>
            {
                OnPeerMessageDataReceived?.Invoke(peerId, message);
            };

            // DATA Channel Setup
            Conductor.Instance.OnPeerMessageDataReceived += (i, s) =>
            {
            };

            Conductor.Instance.OnReadyToConnect += () => { RunOnUiThread(() => { IsReadyToConnect = true; }); };

            IceServers   = new ObservableCollection <IceServer>();
            NewIceServer = new IceServer();
            AudioCodecs  = new ObservableCollection <CodecInfo>();
            var audioCodecList = WebRTC.GetAudioCodecs();

            string[] incompatibleAudioCodecs = new string[] { "CN32000", "CN16000", "CN8000", "red8000", "telephone-event8000" };
            VideoCodecs = new ObservableCollection <CodecInfo>();

            // TODO: REMOVE DISPLAY LIST SUPPORT
            var videoCodecList = WebRTC.GetVideoCodecs().OrderBy(codec =>
            {
                switch (codec.Name)
                {
                case "VP8": return(1);

                case "VP9": return(2);

                case "H264": return(3);

                default: return(99);
                }
            });

            RunOnUiThread(() =>
            {
                foreach (var audioCodec in audioCodecList)
                {
                    if (!incompatibleAudioCodecs.Contains(audioCodec.Name + audioCodec.ClockRate))
                    {
                        AudioCodecs.Add(audioCodec);
                    }
                }

                if (AudioCodecs.Count > 0)
                {
                    SelectedAudioCodec = AudioCodecs.FirstOrDefault(x => x.Name.Contains("PCMU"));
                }

                foreach (var videoCodec in videoCodecList)
                {
                    VideoCodecs.Add(videoCodec);
                }

                if (VideoCodecs.Count > 0)
                {
                    SelectedVideoCodec = VideoCodecs.FirstOrDefault(x => x.Name.Contains("H264"));
                }
            });

            RunOnUiThread(() =>
            {
                OnInitialized?.Invoke();
            });
        }
Example #9
        public Action <RTCIceCandidate> sendLocalCandidate = null; // new Function

        private void _createPeerConnection()
        {
            if (this.pc != null)
            {
                throw new Exception("the peer connection is already initialized");
            }

            Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: create peer connection: " + this.rtcConfig);

            this.pc = new RTCPeerConnection(this.rtcConfig);
            Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: created this.pc");
            pc.OnIceCandidate += (RTCPeerConnectionIceEvent iceEvent) =>
            {
                if (iceEvent.Candidate != null)
                {
                    Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: candidate gathered: " + iceEvent.Candidate);
                    if (sendLocalCandidate != null)
                    {
                        this.sendLocalCandidate(iceEvent.Candidate);
                    }
                }
                else
                {
                    Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: candidate gathering complete");
                }
            };

            pc.OnAddStream += (MediaStreamEvent mediaStreamEvent) =>
            {
                //string objectURL = createObjectURL(mediaStreamEvent.Stream);
                Messenger.Broadcast(SympleLog.LogInfo, "symple:webrtc: remote stream added");

                // Set the state to playing once candidates have completed gathering.
                // This is the best we can do until ICE onstatechange is implemented.
                this.setState("playing");

                // ====== here we would play the video element ======
                Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: remote stream added, should play it now (TODO)");

                MediaVideoTrack peerVideoTrack = mediaStreamEvent.Stream.GetVideoTracks().FirstOrDefault();
                if (peerVideoTrack != null)
                {
                    Messenger.Broadcast(SympleLog.RemoteStreamAdded, GetMedia(), peerVideoTrack);


                    //IMediaSource mediaSource = GetMedia().CreateMediaSource(peerVideoTrack, RemotePeerVideoTrackId); // was valid in org.webrt 1.54, not valid anymore
                    //Messenger.Broadcast(SympleLog.LogInfo, "Creating video source for remote peer video");
                    //IMediaSource mediaSource = GetMedia().CreateMediaStreamSource(RemotePeerVideoTrackId);
                    //Messenger.Broadcast(SympleLog.LogInfo, "Created video source for remote peer video");
                    //Messenger.Broadcast(SympleLog.CreatedMediaSource, mediaSource);
                }
                else
                {
                    Messenger.Broadcast(SympleLog.LogError, "ERROR: Received remote media stream, but there was no video track");
                }

                // store the active stream
                this.activeStream = mediaStreamEvent.Stream;
            };

            pc.OnRemoveStream += (MediaStreamEvent mediaStreamEvent) =>
            {
                Messenger.Broadcast(SympleLog.LogInfo, "symple:webrtc: remote stream removed: " + mediaStreamEvent);

                MediaVideoTrack peerVideoTrack = mediaStreamEvent.Stream.GetVideoTracks().FirstOrDefault();

                Messenger.Broadcast(SympleLog.RemoteStreamRemoved, GetMedia(), peerVideoTrack);



                //this.video.stop();
                //this.video.src = "";
            };

            // NOTE: The following state events are still very unreliable.
            // Hopefully when the spec is complete this will change, but until then
            // we need to 'guess' the state.
            // this.pc.onconnecting = function(event) { Symple.log('symple:webrtc: onconnecting:', event); };
            // this.pc.onopen = function(event) { Symple.log('symple:webrtc: onopen:', event); };
            // this.pc.onicechange = function(event) { Symple.log('symple:webrtc: onicechange :', event); };
            // this.pc.onstatechange = function(event) { Symple.log('symple:webrtc: onstatechange :', event); };
        }
Example #10
        public async void testStartVideoLocal()
        {
            Messenger.Broadcast(SympleLog.LogDebug, "basicTestVideo()");

            if (!webrtcInitialized)
            {
                WebRTC.Initialize(null);    // needed before calling any webrtc functions http://stackoverflow.com/questions/43331677/webrtc-for-uwp-new-rtcpeerconnection-doesnt-complete-execution
                webrtcInitialized = true;
            }

            Messenger.Broadcast(SympleLog.LogDebug, "creating media");

            if (_media == null)
            {
                _media = Media.CreateMedia();
                _media.OnMediaDevicesChanged += (MediaDeviceType mediaType) =>
                {
                    Messenger.Broadcast(SympleLog.LogDebug, "OnMediaDevicesChanged(), mediaType = " + mediaType);
                };
            }

            Messenger.Broadcast(SympleLog.LogDebug, "created media");

            var videoCaptureDevices = _media.GetVideoCaptureDevices();

            Messenger.Broadcast(SympleLog.LogDebug, "num videoCaptureDevices: " + videoCaptureDevices.Count);

            var videoDevice = videoCaptureDevices[0];

            Messenger.Broadcast(SympleLog.LogDebug, "getting videoCaptureCapabilities");
            var videoCaptureCapabilities = await videoDevice.GetVideoCaptureCapabilities();

            Messenger.Broadcast(SympleLog.LogDebug, "got videoCaptureCapabilities");

            var chosenCapability = videoCaptureCapabilities[0];

            Messenger.Broadcast(SympleLog.LogDebug, "chosenCapability:");
            Messenger.Broadcast(SympleLog.LogDebug, "\tWidth: " + (int)chosenCapability.Width);
            Messenger.Broadcast(SympleLog.LogDebug, "\tHeight: " + (int)chosenCapability.Height);
            Messenger.Broadcast(SympleLog.LogDebug, "\tFrameRate: " + (int)chosenCapability.FrameRate);
            WebRTC.SetPreferredVideoCaptureFormat((int)chosenCapability.Width, (int)chosenCapability.Height, (int)chosenCapability.FrameRate);

            Messenger.Broadcast(SympleLog.LogDebug, "getting usermedia");
            if (_localStream == null)
            {
                _localStream = await _media.GetUserMedia(new RTCMediaStreamConstraints { videoEnabled = true, audioEnabled = true });
            }

            Messenger.Broadcast(SympleLog.LogDebug, "got usermedia");

            Messenger.Broadcast(SympleLog.LogDebug, "localStream id: " + _localStream.Id);
            Messenger.Broadcast(SympleLog.LogDebug, "localStream Active?: " + _localStream.Active);

            var videoTracks = _localStream.GetVideoTracks();

            Messenger.Broadcast(SympleLog.LogDebug, "num videoTracks: " + videoTracks.Count);

            _selectedVideoTrack = videoTracks[0];

            Messenger.Broadcast(SympleLog.LogDebug, "selected video track id: " + _selectedVideoTrack.Id);
            Messenger.Broadcast(SympleLog.LogDebug, "selected video track suspended?: " + _selectedVideoTrack.Suspended);
            Messenger.Broadcast(SympleLog.LogDebug, "selected video track enabled?: " + _selectedVideoTrack.Enabled);

            var source = _media.CreateMediaSource(_selectedVideoTrack, Symple.LocalMediaStreamId);

            Messenger.Broadcast(SympleLog.LogDebug, "created mediasource");

            Messenger.Broadcast(SympleLog.CreatedMediaSource, source);
        }
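The local source is only broadcast here; rendering is left to whoever listens for SympleLog.CreatedMediaSource. A consumer sketch, assuming Messenger exposes a subscribe counterpart to Broadcast (AddListener below is a hypothetical name) and that the target is a MediaElement, as in the commented-out loopback code of Example #6:

        private void SetupLocalPreview(MediaElement selfVideo)
        {
            // Messenger.AddListener is a hypothetical counterpart to Messenger.Broadcast.
            Messenger.AddListener(SympleLog.CreatedMediaSource, (object source) =>
            {
                // Render the local capture, as the commented-out loopback code in Example #6 does.
                selfVideo.SetMediaStreamSource((MediaStreamSource)source);
            });
        }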
Example #11
        public override async Task OnSdpOfferAsync(RelayMessage message)
        {
            bool isHold = SdpUtils.IsHold(message.Payload);

            if (isHold)
            {
                Context.VoipHelper.SetCallHeld();
            }
            else
            {
                Context.VoipHelper.SetCallActive(Context.PeerId, Context.IsVideoEnabled);
            }

            // If PeerConnection is not null, then this is an SDP renegotiation.
            if (Context.PeerConnection == null)
            {
                var config = new RTCConfiguration
                {
                    IceServers = WebRtcSettingsUtils.ToRTCIceServer(IceServerSettings.IceServers)
                };
                Context.PeerConnection = new RTCPeerConnection(config);
            }

            if (isHold)
            {
                // Even for just a renegotiation, it's easier to just teardown the media capture and start over.
                if (Context.LocalStream != null)
                {
                    Context.PeerConnection.RemoveStream(Context.LocalStream);
                }
                Context.LocalStream?.Stop();
                Context.LocalStream = null;
                Context.RemoteStream?.Stop();
                Context.RemoteStream = null;
                Context.ResetRenderers();
            }

            MediaVideoTrack oldVideoTrack = Context.RemoteStream?.GetVideoTracks()?.FirstOrDefault();

            await Context.PeerConnection.SetRemoteDescription(new RTCSessionDescription(RTCSdpType.Offer, message.Payload));

            MediaVideoTrack newVideoTrack = Context.RemoteStream?.GetVideoTracks()?.FirstOrDefault();

            bool videoTrackChanged = oldVideoTrack != null && newVideoTrack != null &&
                                     oldVideoTrack.Id.CompareTo(newVideoTrack.Id) != 0;

            if (videoTrackChanged)
            {
                Context.ResetRemoteRenderer();
                var source = RtcManager.Instance.Media.CreateMediaSource(newVideoTrack, CallContext.PeerMediaStreamId);
                Context.RemoteVideoRenderer.SetupRenderer(Context.ForegroundProcessId, source, Context.RemoteVideoControlSize);
            }
            else if (!isHold)
            {
                Context.LocalStream = await RtcManager.Instance.Media.GetUserMedia(new RTCMediaStreamConstraints
                {
                    videoEnabled = Context.IsVideoEnabled,
                    audioEnabled = true
                });

                Context.PeerConnection.AddStream(Context.LocalStream);

                // Setup the rendering of the local capture.
                var tracks = Context.LocalStream.GetVideoTracks();
                if (tracks.Count > 0)
                {
                    var source = RtcManager.Instance.Media.CreateMediaSource(tracks[0], CallContext.LocalMediaStreamId);
                    Context.LocalVideoRenderer.SetupRenderer(Context.ForegroundProcessId, source, Context.LocalVideoControlSize);
                }
            }

            var sdpAnswer = await Context.PeerConnection.CreateAnswer();

            await Context.PeerConnection.SetLocalDescription(sdpAnswer);

            var sdpVideoCodecIds = SdpUtils.GetVideoCodecIds(message.Payload);

            if (sdpVideoCodecIds.Count > 0)
            {
                Context.VideoCodecUsed = Array.Find((await Hub.Instance.MediaSettingsChannel.GetVideoCodecsAsync())?.Codecs,
                                                    it => it.Id == sdpVideoCodecIds.First())?.FromDto();
            }

            Context.SendToPeer(RelayMessageTags.SdpAnswer, sdpAnswer.Sdp);
            if (isHold)
            {
                await Context.SwitchState(new Held());
            }
            else
            {
                await Context.SwitchState(new Active());
            }
        }
Example #12
        public void Initialize()
        {
            // Initialize the WebRTC library
            // WebRTC.Initialize(_uiDispatcher);
            // Conductor.Instance.ETWStatsEnabled = false;

            /*
             * Cameras = new List<MediaDevice>();
             * Microphones = new List<MediaDevice>();
             * AudioPlayoutDevices = new List<MediaDevice>();
             * // Enumerate all the media devices available on the machine
             * foreach(var videoCaptureDevice in Conductor.Instance.Media.GetVideoCaptureDevices())
             * {
             *  Cameras.Add(videoCaptureDevice);
             * }
             * foreach(var audioCaptureDevice in Conductor.Instance.Media.GetAudioCaptureDevices())
             * {
             *  Microphones.Add(audioCaptureDevice);
             * }
             * foreach(var audioPlayoutDevice in Conductor.Instance.Media.GetAudioPlayoutDevices())
             * {
             *  AudioPlayoutDevices.Add(audioPlayoutDevice);
             * }
             */

            // Use the first device in each media device list
            // Fine for HoloLens, but this should be reconsidered for immersive headsets

            /*
             * if(SelectedCamera == null && Cameras.Count > 0)
             * {
             *  SelectedCamera = Cameras.First();
             * }
             *
             * if(SelectedMicrophone == null && Microphones.Count > 0)
             * {
             *  SelectedMicrophone = Microphones.First();
             * }
             *
             * if(SelectedAudioPlayoutDevice == null && AudioPlayoutDevices.Count >0)
             * {
             *  SelectedAudioPlayoutDevice = AudioPlayoutDevices.First();
             * }
             */

            // ================================
            // Signaling-related event handlers
            // ================================

            // Handler for when the media devices attached to the machine change
            // Conductor.Instance.Media.OnMediaDevicesChanged += OnMediaDeviceChanged;
            // Handler for when a remote user connects to the signaling server
            // Called on our own initial sign-in and whenever polling adds a new user
            // TODO: improve how the connected user is selected
            Conductor.Instance.Signaller.OnPeerConnected += (peerId, peerName) =>
            {
                // Update the list of remote users
                if (Peers == null)
                {
                    Peers = new List <Peer>();
                    Conductor.Instance.Peers = Peers;
                }
                Peers.Add(new Peer {
                    Id = peerId, Name = peerName
                });
                // Treat the remote user who just connected as the peer candidate
                SelectedPeer = Peers.First(x => x.Id == peerId);
            };
            // Handler for when a remote user signs out of the signaling server
            Conductor.Instance.Signaller.OnPeerDisconnected += (peerId) =>
            {
                var peerToRemove = Peers?.FirstOrDefault(p => p.Id == peerId);
                if (peerToRemove != null)
                {
                    Peers.Remove(peerToRemove);
                }
            };
            // Handler for when sign-in to the signaling server completes
            Conductor.Instance.Signaller.OnSignedIn += () =>
            {
                IsConnected         = true;
                IsMicrophoneEnabled = false;
                IsCameraEnabled     = false;
                IsConnecting        = false;

                OnStatusMessageUpdate?.Invoke("Signed in");
            };
            // Handler for when the connection to the signaling server fails
            Conductor.Instance.Signaller.OnServerConnectionFailure += () =>
            {
                IsConnecting = false;

                OnStatusMessageUpdate?.Invoke("Server Connection Failure");
            };
            // Handler for when we sign out of the signaling server
            Conductor.Instance.Signaller.OnDisconnected += () =>
            {
                IsConnected         = false;
                IsMicrophoneEnabled = false;
                IsCameraEnabled     = false;
                IsDisconnecting     = false;
                Peers?.Clear();

                OnStatusMessageUpdate?.Invoke("Disconnected");
            };
            //
            Conductor.Instance.OnReadyToConnect += () =>
            {
                IsReadyToConnect = true;
            };


            // =============================
            // Peer-connection-related event handlers
            // =============================

            // Handler for when the peer connection is created (call starts)
            Conductor.Instance.OnPeerConnectionCreated += () =>
            {
                IsReadyToConnect    = false;
                IsConnectedToPeer   = true;
                IsReadyToDisconnect = false;

                IsCameraEnabled     = true;
                IsMicrophoneEnabled = true; // ??

                OnStatusMessageUpdate?.Invoke("Peer Connection Created");
            };
            // Handler for when the peer connection is closed
            Conductor.Instance.OnPeerConnectionClosed += () =>
            {
                IsConnectedToPeer   = false;
                _peerVideoTrack     = null;
                _selfVideoTrack     = null;
                IsMicrophoneEnabled = false;
                IsCameraEnabled     = false;
            };
            // Handler for when a message is received from the peer (remote user)
            Conductor.Instance.OnPeerMessageDataReceived += (peerId, message) =>
            {
                OnPeerMessageDataReceived?.Invoke(peerId, message);
            };

            // =============================
            // Codec configuration
            // =============================

            /*
             * // Audio codec configuration
             * AudioCodecs = new List<CodecInfo>();
             * var audioCodecList = WebRTC.GetAudioCodecs();
             * string[] incompatibleAudioCodecs = new string[] { "CN32000", "CN16000", "CN8000", "red8000", "telephone-event8000" };
             *
             * foreach (var audioCodec in audioCodecList)
             * {
             *  if (!incompatibleAudioCodecs.Contains(audioCodec.Name + audioCodec.ClockRate))
             *  {
             *      AudioCodecs.Add(audioCodec);
             *  }
             * }
             * if (AudioCodecs.Count > 0)
             * {
             *  SelectedAudioCodec = AudioCodecs.First();
             * }
             *
             * // Video codec configuration; H.264 is used by default
             * VideoCodecs = new List<CodecInfo>();
             * var videoCodecList = WebRTC.GetVideoCodecs().OrderBy(codec =>
             * {
             *  switch (codec.Name)
             *  {
             *      case "VP8": return 1;
             *      case "VP9": return 2;
             *      case "H264": return 3;
             *      default: return 99;
             *  }
             * });
             *
             * foreach (var videoCodec in videoCodecList)
             * {
             *  VideoCodecs.Add(videoCodec);
             * }
             * if (VideoCodecs.Count > 0)
             * {
             *  SelectedVideoCodec = VideoCodecs.FirstOrDefault(codec => codec.Name.Contains("H264"));
             * }
             */
            /*
             * // =============================
             * // ICE server configuration
             * // =============================
             * IceServers = new List<IceServer>();
             * NewIceServer = new IceServer();
             *
             * IceServers.Add(new IceServer("stun.l.google.com:19302", IceServer.ServerType.STUN));
             * IceServers.Add(new IceServer("stun1.l.google.com:19302", IceServer.ServerType.STUN));
             * IceServers.Add(new IceServer("stun2.l.google.com:19302", IceServer.ServerType.STUN));
             * IceServers.Add(new IceServer("stun3.l.google.com:19302", IceServer.ServerType.STUN));
             * IceServers.Add(new IceServer("stun4.l.google.com:19302", IceServer.ServerType.STUN));
             *
             * Conductor.Instance.ConfigureIceServers(IceServers);
             */
            OnInitialized?.Invoke();
        }