/// <summary>
/// Delivers <paramref name="message"/> to the peer registered at
/// <paramref name="address"/>; silently does nothing when no peer matches.
/// </summary>
/// <param name="message">The message to deliver.</param>
/// <param name="address">The endpoint identifying the target peer.</param>
public void SendMessageToPeer(Message message, EndPoint address)
{
    var recipient = Peers.FirstOrDefault(peer => peer.Address.Equals(address));
    recipient?.SendMessage(message);
}
/// <summary>
/// Handles an incoming connection: reuses the connection for an already-known
/// peer, or registers a new peer for unknown non-tracker endpoints.
/// </summary>
/// <param name="connection">The newly established connection.</param>
private void OnConnectPeer(Connection connection)
{
    var remoteEndPoint = connection.ConnectionInfo.RemoteEndPoint;
    var existPeer = Peers.FirstOrDefault(p => p.Address.Equals(remoteEndPoint));
    if (existPeer != null)
    {
        // Known peer reconnecting — just attach the new connection.
        existPeer.SetupConnection(connection);
    }
    else if (remoteEndPoint is IPEndPoint ipEndPoint && ipEndPoint.Port != CommonHelpers.TrackerPort)
    {
        // Unknown peer on a non-tracker port — register it.
        var peer = new Peer(connection, Peers, t);
        Peers.Add(peer);
    }
    // else: unknown peer on the tracker port (or a non-IP endpoint) — ignore.
    // The original code dereferenced the null 'existPeer' here (NullReferenceException)
    // and cast to IPEndPoint without a check.
}
/// <summary>
/// Handles a tracker-relayed connection request: adds and connects an unknown
/// peer, or (re)connects a known but not-yet-connected peer through the tracker.
/// </summary>
/// <param name="sender">Expected to be the <c>Tracker</c> that relayed the message.</param>
/// <param name="e">Event args carrying a <c>ConnectToPeerWithTrackerMessage</c>.</param>
private void OnConnectToPeerWithTrackerMessage(object sender, MessageEventArgs e)
{
    // Guard: the 'as' cast yields null for unexpected message types; the
    // original dereferenced it unconditionally (NullReferenceException).
    var message = e.Message as ConnectToPeerWithTrackerMessage;
    if (message == null)
    {
        return;
    }

    var peerToConnect = Peers.FirstOrDefault(peer => peer.Address.Equals(message.SenderAddress));
    if (peerToConnect == null)
    {
        // Peer is not in the list yet: add it and connect directly through the tracker.
        AddPeer(message.SenderAddress, (sender as Tracker), true);
    }
    else if (peerToConnect.Status != PeerStatus.Connected)
    {
        // Peer is already known: just connect through the tracker.
        peerToConnect.Connect(true);
    }
}
/// <summary>
/// Reacts to a social-state change: if the character is in a party, records the
/// service channel it moved to and sends that service a full party snapshot.
/// </summary>
/// <param name="message">The social state update received from a peer service.</param>
private void HandleSocialUpdateState(SocialStateMessage message)
{
    // Hard-coded field the member is recorded as entering.
    // TODO: move to configuration instead of a magic number.
    const int DefaultFieldID = 310020100;

    Logger.Debug($"{message.CharacterID} has {message.State} in {message.Service}");

    // Resolve the character's party; nothing to do if they are not in one.
    var partyMember = PartyManager.GetPartyMember(message.CharacterID);
    var party = partyMember?.Party;
    if (party == null)
    {
        return;
    }

    Logger.Debug($"{message.CharacterID} is in a party of {party.Members.Count}");

    // Find the peer service the state update refers to.
    var service = Peers.FirstOrDefault(p => p.Name == message.Service);
    if (service == null)
    {
        return;
    }

    partyMember.Record.ChannelID = service.ID;
    partyMember.Record.FieldID = DefaultFieldID;

    // Push the current party composition to that service.
    // NOTE(review): .Wait() blocks this thread on async work — consider making
    // this handler async and awaiting SendMessage instead.
    SendMessage(service, new SocialInitPartyMessage
    {
        CharacterID = message.CharacterID,
        Data = new PartyData
        {
            ID = party.Record.ID,
            BossCharacterID = party.Record.BossCharacterID,
            Members = party.Members
                .Select(m => new PartyMemberData
                {
                    CharacterID = m.Record.CharacterID,
                    Name = m.Record.Name,
                    Job = m.Record.Job,
                    Level = m.Record.Level,
                    ChannelID = m.Record.ChannelID,
                    FieldID = m.Record.FieldID
                })
                .ToList()
        }
    }).Wait();
}
/// <summary>
/// Handles a TCP client disconnect: finds the CITP peer that owned the remote
/// port, detaches that connection, and stamps the peer's last-update time.
/// </summary>
/// <param name="sender">The TCP listen service raising the event.</param>
/// <param name="e">The remote endpoint that disconnected.</param>
private void tcpListenService_ClientDisconnect(object sender, IpEndpoint e)
{
    _log.LogDebug($"TCP client disconnected from {e}");

    // Match the disconnecting endpoint to a known peer by its remote TCP port.
    var disconnectedPeer = Peers.FirstOrDefault(p => p.RemoteTcpPorts.Contains(e.Port));

    if (disconnectedPeer == null)
    {
        _log.LogDebug("Failed to identify disconnecting peer");
        return;
    }

    disconnectedPeer.RemoveTcpConnection(e.Port);
    disconnectedPeer.LastUpdateReceived = DateTime.Now;

    _log.LogInfo($"CITP Peer '{disconnectedPeer}' disconnected on TCP Port {e.Port}");
}
/// <summary>
/// Processes a CITP peer-location broadcast: ignores our own announcement,
/// registers unknown peers, and refreshes the peer's type/state/last-seen time.
/// </summary>
/// <param name="message">The received peer-location packet.</param>
/// <param name="remoteIp">The IP address the packet arrived from.</param>
private void receivedPeerLocationMessage(PeerLocationMessagePacket message, IpAddress remoteIp)
{
    // Filter out the local CITP peer. Use Equals for value comparison — the
    // original used '==' here but '.Equals' everywhere else; '==' on a
    // reference type compares identity unless the operator is overloaded.
    if (remoteIp.Equals(_nicAddress) && message.Name == _device.PeerName
        && message.ListeningTcpPort == LocalTcpListenPort)
    {
        return;
    }

    var peer = Peers.FirstOrDefault(p => p.Ip.Equals(remoteIp) && p.Name == message.Name);
    if (peer == null)
    {
        peer = new CitpPeer(remoteIp, message.Name);
        // NOTE(review): lookup uses 'Peers' but insertion targets '_peers' —
        // presumably 'Peers' is a view over '_peers'; confirm.
        _peers.Add(peer);
    }

    peer.Type = message.Type;
    peer.State = message.State;
    peer.LastUpdateReceived = DateTime.Now;
}
/// <summary>
/// Finds a <seealso cref="BoundPeer"/> whose <see cref="Address"/> matches
/// the given <paramref name="addr"/>, if it exists.
/// </summary>
/// <param name="addr">The <see cref="Address"/> to search for.</param>
/// <returns>The first <see cref="BoundPeer"/> whose <see cref="Address"/> matches
/// the given <paramref name="addr"/>, or <see langword="null"/> if none does.</returns>
public BoundPeer? GetPeer(Address addr)
{
    return Peers.FirstOrDefault(peer => peer.Address.Equals(addr));
}
/// <summary>
/// One-time setup: initializes the WebRTC library and wires every signaller /
/// conductor event handler this view model reacts to, then loads codec lists.
/// </summary>
public void Initialize()
{
    WebRTC.Initialize(_uiDispatcher);
    Conductor.Instance.ETWStatsEnabled = false;

    // Device lists start empty; enumeration is disabled below because the
    // M58 library cannot list audio devices.
    Cameras = new ObservableCollection <MediaDevice>();
    Microphones = new ObservableCollection <MediaDevice>();
    AudioPlayoutDevices = new ObservableCollection <MediaDevice>();

    // WebRTCUWP M58 library does not support audio capture/playout devices
    //foreach (MediaDevice audioCaptureDevice in Conductor.Instance.Media.GetAudioCaptureDevices())
    //{
    //    Microphones.Add(audioCaptureDevice);
    //}
    //foreach (MediaDevice audioPlayoutDevice in Conductor.Instance.Media.GetAudioPlayoutDevices())
    //{
    //    AudioPlayoutDevices.Add(audioPlayoutDevice);
    //}

    // HACK Remove Automatic Device Assignment
    // NOTE(review): the collections above are always empty at this point, so these
    // branches never run — presumably kept for when enumeration is restored; confirm.
    if (SelectedCamera == null && Cameras.Count > 0)
    {
        SelectedCamera = Cameras.First();
    }
    if (SelectedMicrophone == null && Microphones.Count > 0)
    {
        SelectedMicrophone = Microphones.First();
    }
    Debug.WriteLine("Device Status: SelectedCamera: {0} - SelectedMic: {1}",
        SelectedCamera == null ? "NULL" : "OK",
        SelectedMicrophone == null ? "NULL" : "OK");
    if (SelectedAudioPlayoutDevice == null && AudioPlayoutDevices.Count > 0)
    {
        SelectedAudioPlayoutDevice = AudioPlayoutDevices.First();
    }

    Conductor.Instance.Media.OnMediaDevicesChanged += OnMediaDevicesChanged;

    // Signaller: a remote peer connected — lazily create the shared peer list and add it.
    Conductor.Instance.Signaller.OnPeerConnected += (peerId, peerName) =>
    {
        RunOnUiThread(() =>
        {
            if (Peers == null)
            {
                Peers = new ObservableCollection <Peer>();
                Conductor.Instance.Peers = Peers;
            }
            Peers.Add(new Peer { Id = peerId, Name = peerName });
        });
    };

    // Signaller: a remote peer disconnected — remove it from the list if present.
    Conductor.Instance.Signaller.OnPeerDisconnected += peerId =>
    {
        RunOnUiThread(() =>
        {
            var peerToRemove = Peers?.FirstOrDefault(p => p.Id == peerId);
            if (peerToRemove != null)
            {
                Peers.Remove(peerToRemove);
            }
        });
    };

    // Signaller: signed in to the signalling server — reset call-related flags.
    Conductor.Instance.Signaller.OnSignedIn += () =>
    {
        RunOnUiThread(() =>
        {
            IsConnected = true;
            IsMicrophoneEnabled = false;
            IsCameraEnabled = false;
            IsConnecting = false;
            OnStatusMessageUpdate?.Invoke("Signed-In");
        });
    };

    // Signaller: connecting to the signalling server failed — surface the exception.
    Conductor.Instance.Signaller.OnServerConnectionFailure += (Exception ex) =>
    {
        RunOnUiThread(() =>
        {
            IsConnecting = false;
            OnStatusMessageUpdate?.Invoke("Server Connection Failure: " + ex.Message + "\n" + ex.StackTrace);
        });
    };

    // Signaller: disconnected from the signalling server — reset state and clear peers.
    Conductor.Instance.Signaller.OnDisconnected += () =>
    {
        RunOnUiThread(() =>
        {
            IsConnected = false;
            IsMicrophoneEnabled = false;
            IsCameraEnabled = false;
            IsDisconnecting = false;
            Peers?.Clear();
            OnStatusMessageUpdate?.Invoke("Disconnected");
        });
    };

    Conductor.Instance.Signaller.OnMessageFromPeer += (id, message) =>
    {
        RunOnUiThread(() =>
        {
            // TODO: Handles All Peer Messages (Signal Channel)
        });
    };

    // NOTE(review): second OnPeerConnected subscription — fires in addition to the
    // one above and relies on the peer having been added there first; confirm both
    // registrations are intentional.
    Conductor.Instance.Signaller.OnPeerConnected += (id, name) =>
    {
        RunOnUiThread(() =>
        {
            SelectedPeer = Peers.First(x => x.Id == id);
            OnStatusMessageUpdate?.Invoke(string.Format("Connected Peer: {0}-{1}", SelectedPeer.Id, SelectedPeer.Name));
        });
    };

    // TODO: Restore Event Handler in Utility Wrapper
    // Implemented in Unity Consumer due to Event Handling Issue
    // Conductor.Instance.OnAddRemoteStream += Conductor_OnAddRemoteStream does not propagate
    Conductor.Instance.OnRemoveRemoteStream += Conductor_OnRemoveRemoteStream;
    Conductor.Instance.OnAddLocalStream += Conductor_OnAddLocalStream;
    Conductor.Instance.OnConnectionHealthStats += Conductor_OnPeerConnectionHealthStats;

    // Peer connection lifecycle: created.
    Conductor.Instance.OnPeerConnectionCreated += () =>
    {
        RunOnUiThread(() =>
        {
            IsReadyToConnect = false;
            IsConnectedToPeer = true;
            IsReadyToDisconnect = false;
            IsMicrophoneEnabled = false;
            OnStatusMessageUpdate?.Invoke("Peer Connection Created");
        });
    };

    // Peer connection lifecycle: closed — drop the video track references.
    Conductor.Instance.OnPeerConnectionClosed += () =>
    {
        RunOnUiThread(() =>
        {
            IsConnectedToPeer = false;
            _peerVideoTrack = null;
            _selfVideoTrack = null;
            IsMicrophoneEnabled = false;
            IsCameraEnabled = false;
            // TODO: Clean-up References
            //GC.Collect(); // Ensure all references are truly dropped.
            OnStatusMessageUpdate?.Invoke("Peer Connection Closed");
        });
    };

    // Forward data-channel messages to this view model's subscribers.
    Conductor.Instance.OnPeerMessageDataReceived += (peerId, message) =>
    {
        OnPeerMessageDataReceived?.Invoke(peerId, message);
    };

    // DATA Channel Setup
    // NOTE(review): empty duplicate subscription — looks like a placeholder; confirm.
    Conductor.Instance.OnPeerMessageDataReceived += (i, s) => { };

    Conductor.Instance.OnReadyToConnect += () => { RunOnUiThread(() => { IsReadyToConnect = true; }); };

    IceServers = new ObservableCollection <IceServer>();
    NewIceServer = new IceServer();

    AudioCodecs = new ObservableCollection <CodecInfo>();
    var audioCodecList = WebRTC.GetAudioCodecs();
    // Feature codecs that cannot encode/decode real audio data; filtered out below.
    string[] incompatibleAudioCodecs = new string[] { "CN32000", "CN16000", "CN8000", "red8000", "telephone-event8000" };

    VideoCodecs = new ObservableCollection <CodecInfo>();
    // TODO: REMOVE DISPLAY LIST SUPPORT
    // Order video codecs: VP8, then VP9, then H264, then everything else.
    var videoCodecList = WebRTC.GetVideoCodecs().OrderBy(codec =>
    {
        switch (codec.Name)
        {
            case "VP8": return(1);
            case "VP9": return(2);
            case "H264": return(3);
            default: return(99);
        }
    });

    // Populate the codec lists on the UI thread and choose defaults (PCMU / H264).
    RunOnUiThread(() =>
    {
        foreach (var audioCodec in audioCodecList)
        {
            if (!incompatibleAudioCodecs.Contains(audioCodec.Name + audioCodec.ClockRate))
            {
                AudioCodecs.Add(audioCodec);
            }
        }
        if (AudioCodecs.Count > 0)
        {
            SelectedAudioCodec = AudioCodecs.FirstOrDefault(x => x.Name.Contains("PCMU"));
        }
        foreach (var videoCodec in videoCodecList)
        {
            VideoCodecs.Add(videoCodec);
        }
        if (VideoCodecs.Count > 0)
        {
            SelectedVideoCodec = VideoCodecs.FirstOrDefault(x => x.Name.Contains("H264"));
        }
    });

    RunOnUiThread(() => { OnInitialized?.Invoke(); });
}
/// <summary>
/// Wires the signaller and peer-connection event handlers for this client and
/// raises <c>OnInitialized</c>. Device enumeration, codec and ICE configuration
/// are currently commented out.
/// </summary>
public void Initialize()
{
    // Initialize the WebRTC library (disabled)
    // WebRTC.Initialize(_uiDispathcer);
    // Conductor.Instance.ETWStatsEnabled = false;
    /*
     * Cameras = new List<MediaDevice>();
     * Microphones = new List<MediaDevice>();
     * AudioPlayoutDevices = new List<MediaDevice>();
     * // Get every media device usable on this machine
     * foreach(var videoCaptureDevice in Conductor.Instance.Media.GetVideoCaptureDevices())
     * {
     *     Cameras.Add(videoCaptureDevice);
     * }
     * foreach(var audioCaptureDevice in Conductor.Instance.Media.GetAudioCaptureDevices())
     * {
     *     Microphones.Add(audioCaptureDevice);
     * }
     * foreach(var audioPlayoutDevice in Conductor.Instance.Media.GetAudioPlayoutDevices())
     * {
     *     AudioPlayoutDevices.Add(audioPlayoutDevice);
     * }
     */

    // Use the first entry of each media-device list.
    // Fine for HoloLens, but should be reconsidered for immersive headsets.
    /*
     * if(SelectedCamera == null && Cameras.Count > 0)
     * {
     *     SelectedCamera = Cameras.First();
     * }
     *
     * if(SelectedMicrophone == null && Microphones.Count > 0)
     * {
     *     SelectedMicrophone = Microphones.First();
     * }
     *
     * if(SelectedAudioPlayoutDevice == null && AudioPlayoutDevices.Count >0)
     * {
     *     SelectedAudioPlayoutDevice = AudioPlayoutDevices.First();
     * }
     */

    // ================================
    // Signalling-related event handlers
    // ================================

    // Handler for when the media devices attached to the machine change (disabled)
    // Conductor.Instance.Media.OnMediaDevicesChanged += OnMediaDeviceChanged;

    // Handler for when a remote user connects to the signalling server.
    // Called on our own first login and whenever polling adds a new user.
    // TODO: the way the connected user is selected needs improvement
    Conductor.Instance.Signaller.OnPeerConnected += (peerId, peerName) =>
    {
        // Update the list of remote users
        if (Peers == null)
        {
            Peers = new List <Peer>();
            Conductor.Instance.Peers = Peers;
        }
        Peers.Add(new Peer { Id = peerId, Name = peerName });

        // Treat the remote user that just connected as the peer candidate
        SelectedPeer = Peers.First(x => x.Id == peerId);
    };

    // Handler for when a remote user logs out of the signalling server
    Conductor.Instance.Signaller.OnPeerDisconnected += (peerId) =>
    {
        var peerToRemove = Peers?.FirstOrDefault(p => p.Id == peerId);
        if (peerToRemove != null)
        {
            Peers.Remove(peerToRemove);
        }
    };

    // Handler for when the connection to the signalling server completes
    Conductor.Instance.Signaller.OnSignedIn += () =>
    {
        IsConnected = true;
        IsMicrophoneEnabled = false;
        IsCameraEnabled = false;
        IsConnecting = false;
        OnStatusMessageUpdate?.Invoke("Signed in");
    };

    // Handler for when connecting to the signalling server fails
    Conductor.Instance.Signaller.OnServerConnectionFailure += () =>
    {
        IsConnecting = false;
        OnStatusMessageUpdate?.Invoke("Server Connection Failure");
    };

    // Handler for when we log out of the signalling server
    Conductor.Instance.Signaller.OnDisconnected += () =>
    {
        IsConnected = false;
        IsMicrophoneEnabled = false;
        IsCameraEnabled = false;
        IsDisconnecting = false;
        Peers?.Clear();
        OnStatusMessageUpdate?.Invoke("Disconnected");
    };

    // Conductor.Instance.OnReadyToConnect += () => { IsReadyToConnect = true; };

    // =============================
    // Peer-connection-related event handlers
    // =============================

    // Handler for when a peer connection is created (call starts)
    Conductor.Instance.OnPeerConnectionCreated += () =>
    {
        IsReadyToConnect = false;
        IsConnectedToPeer = true;
        IsReadyToDisconnect = false;
        IsCameraEnabled = true;
        IsMicrophoneEnabled = true; // ??
        OnStatusMessageUpdate?.Invoke("Peer Connection Created");
    };

    // Handler for when the peer connection is torn down
    Conductor.Instance.OnPeerConnectionClosed += () =>
    {
        IsConnectedToPeer = false;
        _peerVideoTrack = null;
        _selfVideoTrack = null;
        IsMicrophoneEnabled = false;
        IsCameraEnabled = false;
    };

    // Handler for when a message is received from the peer (remote user)
    Conductor.Instance.OnPeerMessageDataReceived += (peerId, message) =>
    {
        OnPeerMessageDataReceived?.Invoke(peerId, message);
    };

    // =============================
    // Codec configuration
    // =============================
    /*
     * // Audio codec configuration
     * AudioCodecs = new List<CodecInfo>();
     * var audioCodecList = WebRTC.GetAudioCodecs();
     * string[] incompatibleAudioCodecs = new string[] { "CN32000", "CN16000", "CN8000", "red8000", "telephone-event8000" };
     *
     * foreach (var audioCodec in audioCodecList)
     * {
     *     if (!incompatibleAudioCodecs.Contains(audioCodec.Name + audioCodec.ClockRate))
     *     {
     *         AudioCodecs.Add(audioCodec);
     *     }
     * }
     * if (AudioCodecs.Count > 0)
     * {
     *     SelectedAudioCodec = AudioCodecs.First();
     * }
     *
     * // Video codec configuration. H.264 is used by default
     * VideoCodecs = new List<CodecInfo>();
     * var videoCodecList = WebRTC.GetVideoCodecs().OrderBy(codec =>
     * {
     *     switch (codec.Name)
     *     {
     *         case "VP8": return 1;
     *         case "VP9": return 2;
     *         case "H264": return 3;
     *         default: return 99;
     *     }
     * });
     *
     * foreach (var videoCodec in videoCodecList)
     * {
     *     VideoCodecs.Add(videoCodec);
     * }
     * if (VideoCodecs.Count > 0)
     * {
     *     SelectedVideoCodec = VideoCodecs.FirstOrDefault(codec => codec.Name.Contains("H264"));
     * }
     */

    /*
     * // =============================
     * // ICE server configuration
     * // =============================
     * IceServers = new List<IceServer>();
     * NewIceServer = new IceServer();
     *
     * IceServers.Add(new IceServer("stun.l.google.com:19302", IceServer.ServerType.STUN));
     * IceServers.Add(new IceServer("stun1.l.google.com:19302", IceServer.ServerType.STUN));
     * IceServers.Add(new IceServer("stun2.l.google.com:19302", IceServer.ServerType.STUN));
     * IceServers.Add(new IceServer("stun3.l.google.com:19302", IceServer.ServerType.STUN));
     * IceServers.Add(new IceServer("stun4.l.google.com:19302", IceServer.ServerType.STUN));
     *
     * Conductor.Instance.ConfigureIceServers(IceServers);
     */

    OnInitialized?.Invoke();
}
/// <summary>
/// The initializer for MainViewModel: initializes WebRTC, picks the H264 codec
/// and bitrate, wires signaller event handlers, then loads settings and connects.
/// Peer-connection and codec-settings handlers are currently commented out.
/// </summary>
/// <param name="uiDispatcher">The UI dispatcher.</param>
public void Initialize(CoreDispatcher uiDispatcher)
{
    WebRTC.Initialize(uiDispatcher);

    // For the HoloLens — constrain capture to a format the device handles well.
    if (_isHoloLens)
    {
        WebRTC.SetPreferredVideoCaptureFormat(896, 504, 30);
    }

    // Pick the codec: first H264 entry reported by the library, if any.
    var videoCodecs = WebRTC.GetVideoCodecs();
    foreach (var codec in videoCodecs)
    {
        if (codec.Name == "H264")
        {
            Conductor.Instance.VideoCodec = codec;
            break;
        }
    }

    // Pick the bitrate
    Conductor.Instance.VideoBitrate = 512;

    // NOTE(review): 'settings' is only referenced from the commented-out block
    // below; remove or restore together with that block.
    var settings = ApplicationData.Current.LocalSettings;

    // A Peer is connected to the server event handler
    Conductor.Instance.Signaller.OnPeerConnected += (peerId, peerName) =>
    {
        RunOnUiThread(() =>
        {
            if (Peers == null)
            {
                Peers = new ObservableCollection <Peer>();
                Conductor.Instance.Peers = Peers;
            }
            Peers.Add(new Peer { Id = peerId, Name = peerName });
        });
    };

    // A Peer is disconnected from the server event handler
    Conductor.Instance.Signaller.OnPeerDisconnected += peerId =>
    {
        RunOnUiThread(() =>
        {
            var peerToRemove = Peers?.FirstOrDefault(p => p.Id == peerId);
            if (peerToRemove != null)
            {
                Peers.Remove(peerToRemove);
            }
        });
    };

    // The user is Signed in to the server event handler
    Conductor.Instance.Signaller.OnSignedIn += () =>
    {
        RunOnUiThread(() =>
        {
            IsConnected = true;
            IsMicrophoneEnabled = true;
            IsCameraEnabled = true;
            IsConnecting = false;
        });
    };

    // Failed to connect to the server event handler
    Conductor.Instance.Signaller.OnServerConnectionFailure += () =>
    {
        RunOnUiThread(async() =>
        {
            IsConnecting = false;
            MessageDialog msgDialog = new MessageDialog("Failed to connect to server!");
            await msgDialog.ShowAsync();
        });
    };

    // The current user is disconnected from the server event handler
    Conductor.Instance.Signaller.OnDisconnected += () =>
    {
        RunOnUiThread(() =>
        {
            IsConnected = false;
            IsMicrophoneEnabled = false;
            IsCameraEnabled = false;
            IsDisconnecting = false;
            Peers?.Clear();
        });
    };

    LoadSettings();
    Connect();

    // Event handlers for managing the media streams
    Conductor.Instance.OnAddRemoteStream += Conductor_OnAddRemoteStream;
    Conductor.Instance.OnRemoveRemoteStream += Conductor_OnRemoveRemoteStream;
    Conductor.Instance.OnAddLocalStream += Conductor_OnAddLocalStream;

    /**
     * // Connected to a peer event handler
     * Conductor.Instance.OnPeerConnectionCreated += () =>
     * {
     *     RunOnUiThread(() =>
     *     {
     *         IsReadyToConnect = false;
     *         IsConnectedToPeer = true;
     *         if (SettingsButtonChecked)
     *         {
     *             // close settings screen if open
     *             SettingsButtonChecked = false;
     *             ScrollBarVisibilityType = ScrollBarVisibility.Disabled;
     *         }
     *         IsReadyToDisconnect = false;
     *         if (SettingsButtonChecked)
     *         {
     *             // close settings screen if open
     *             SettingsButtonChecked = false;
     *             ScrollBarVisibilityType = ScrollBarVisibility.Disabled;
     *         }
     *
     *         // Make sure the screen is always active while on call
     *         if (!_keepOnScreenRequested)
     *         {
     *             _keepScreenOnRequest.RequestActive();
     *             _keepOnScreenRequested = true;
     *         }
     *
     *         UpdateScrollBarVisibilityTypeHelper();
     *     });
     * };
     *
     * // Connection between the current user and a peer is closed event handler
     * Conductor.Instance.OnPeerConnectionClosed += () =>
     * {
     *     RunOnUiThread(() =>
     *     {
     *         IsConnectedToPeer = false;
     *         Conductor.Instance.Media.RemoveVideoTrackMediaElementPair(_peerVideoTrack);
     *         //PeerVideo.Source = null;
     *
     *         Conductor.Instance.Media.RemoveVideoTrackMediaElementPair(_selfVideoTrack);
     *         //SelfVideo.Stop();
     *         //SelfVideo.ClearValue(MediaElement.SourceProperty);
     *         //SelfVideo.Source = null;
     *
     *         _peerVideoTrack = null;
     *         _selfVideoTrack = null;
     *         GC.Collect(); // Ensure all references are truly dropped.
     *         IsMicrophoneEnabled = true;
     *         IsCameraEnabled = true;
     *         SelfVideoFps = PeerVideoFps = "";
     *
     *         // Make sure to allow the screen to be locked after the call
     *         if (_keepOnScreenRequested)
     *         {
     *             _keepScreenOnRequest.RequestRelease();
     *             _keepOnScreenRequested = false;
     *         }
     *         UpdateScrollBarVisibilityTypeHelper();
     *     });
     * };
     *
     * // Ready to connect to the server event handler
     * Conductor.Instance.OnReadyToConnect += () => { RunOnUiThread(() => { IsReadyToConnect = true; }); };
     *
     * // Initialize the Ice servers list
     * IceServers = new ObservableCollection<IceServer>();
     * NewIceServer = new IceServer();
     *
     * // Prepare to list supported audio codecs
     * AudioCodecs = new ObservableCollection<CodecInfo>();
     * var audioCodecList = WebRTC.GetAudioCodecs();
     *
     * // These are features added to existing codecs, they can't decode/encode real audio data so ignore them
     * string[] incompatibleAudioCodecs = new string[] { "CN32000", "CN16000", "CN8000", "red8000", "telephone-event8000" };
     *
     * // Prepare to list supported video codecs
     * VideoCodecs = new ObservableCollection<CodecInfo>();
     *
     * // Order the video codecs so that the stable VP8 is in front.
     * var videoCodecList = WebRTC.GetVideoCodecs().OrderBy(codec =>
     * {
     *     switch (codec.Name)
     *     {
     *         case "VP8": return 1;
     *         case "VP9": return 2;
     *         case "H264": return 3;
     *         default: return 99;
     *     }
     * });
     *
     * // Load the supported audio/video information into the Settings controls
     * RunOnUiThread(() =>
     * {
     *     foreach (var audioCodec in audioCodecList)
     *     {
     *         if (!incompatibleAudioCodecs.Contains(audioCodec.Name + audioCodec.ClockRate))
     *         {
     *             AudioCodecs.Add(audioCodec);
     *         }
     *     }
     *
     *     if (AudioCodecs.Count > 0)
     *     {
     *         if (settings.Values["SelectedAudioCodecId"] != null)
     *         {
     *
     *             int id = Convert.ToInt32(settings.Values["SelectedAudioCodecId"]);
     *
     *             foreach (var audioCodec in AudioCodecs)
     *             {
     *
     *                 int audioCodecId = audioCodec.Id;
     *                 if (audioCodecId == id)
     *                 {
     *                     SelectedAudioCodec = audioCodec;
     *                     break;
     *                 }
     *             }
     *         }
     *         if (SelectedAudioCodec == null)
     *         {
     *             SelectedAudioCodec = AudioCodecs.First();
     *         }
     *     }
     *
     *     foreach (var videoCodec in videoCodecList)
     *     {
     *         VideoCodecs.Add(videoCodec);
     *     }
     *
     *     if (VideoCodecs.Count > 0)
     *     {
     *         if (settings.Values["SelectedVideoCodecId"] != null)
     *         {
     *
     *             int id = Convert.ToInt32(settings.Values["SelectedVideoCodecId"]);
     *             foreach (var videoCodec in VideoCodecs)
     *             {
     *                 int videoCodecId = videoCodec.Id;
     *                 if (videoCodecId == id)
     *                 {
     *                     SelectedVideoCodec = videoCodec;
     *                     break;
     *                 }
     *             }
     *         }
     *         if (SelectedVideoCodec == null)
     *         {
     *             SelectedVideoCodec = VideoCodecs.First();
     *         }
     *     }
     * });
     * LoadSettings();
     * RunOnUiThread(() =>
     * {
     *     OnInitialized?.Invoke();
     * });*/
}