public CallViewModel(CoreDispatcher uiDispatcher) : base(uiDispatcher)
{
    if (AnalyticsInfo.VersionInfo.DeviceFamily == "Windows.Holographic")
    {
        _isHoloLens = true;
    }

    // We don't want the PC to send video to the HoloLens
    if (!_isHoloLens)
    {
        _cameraEnabled = false;
    }

    // Display a permission dialog to request access to the microphone and camera
    WebRTC.RequestAccessForMediaCapture().AsTask().ContinueWith(antecedent =>
    {
        if (antecedent.Result)
        {
            Initialize(uiDispatcher);
        }
        else
        {
            RunOnUiThread(async () =>
            {
                var msgDialog = new MessageDialog(
                    "Failed to obtain access to multimedia devices!");
                await msgDialog.ShowAsync();
            });
        }
    });
}
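The HoloLens check above relies on AnalyticsInfo from the Windows.System.Profile namespace. As a small illustration (the IsHoloLens helper name is an assumption, not part of the sample), the same test can be isolated so that view models do not repeat the string comparison:

// Hypothetical helper (not in the sample) wrapping the device-family check above.
// AnalyticsInfo lives in the Windows.System.Profile namespace.
private static bool IsHoloLens()
{
    // "Windows.Holographic" is the device family reported on HoloLens.
    return AnalyticsInfo.VersionInfo.DeviceFamily == "Windows.Holographic";
}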
/// <summary>
/// Constructor for MainViewModel.
/// </summary>
/// <param name="uiDispatcher">Core event message dispatcher.</param>
public MainViewModel(CoreDispatcher uiDispatcher) : base(uiDispatcher)
{
    // Initialize all the action commands
    InitStartWebRtcCommand = new ActionCommand(InitStartWebRtcCommandExecute, InitStartWebRtcCommandCanExecute);
    SendTestMessageToAnnotationReceiverCommand = new ActionCommand(SendTestMessageToAnnotationReceiverCommandExecute, SendTestMessageToAnnotationReceiverCommandCanExecute);

    starWebrtcContext = StarWebrtcContext.CreateMentorContext();
    starWebrtcContext.CoreDispatcher = uiDispatcher;
    // Right after creating the context (before starting the connections), we could edit some parameters such as the signalling server.

    // Comment these out if not needed
    Messenger.AddListener<string>(SympleLog.LogTrace, OnLog);
    Messenger.AddListener<string>(SympleLog.LogDebug, OnLog);
    Messenger.AddListener<string>(SympleLog.LogInfo, OnLog);
    Messenger.AddListener<string>(SympleLog.LogError, OnLog);

    //Messenger.AddListener<IMediaSource>(SympleLog.CreatedMediaSource, OnCreatedMediaSource);
    //Messenger.AddListener(SympleLog.DestroyedMediaSource, OnDestroyedMediaSource);

    Messenger.AddListener(SympleLog.RemoteAnnotationReceiverConnected, OnRemoteAnnotationReceiverConnected);
    Messenger.AddListener(SympleLog.RemoteAnnotationReceiverDisconnected, OnRemoteAnnotationReceiverDisconnected);

    Messenger.AddListener<Org.WebRtc.Media, Org.WebRtc.MediaVideoTrack>(SympleLog.RemoteStreamAdded, OnRemoteStreamAdded);
    Messenger.AddListener<Org.WebRtc.Media, Org.WebRtc.MediaVideoTrack>(SympleLog.RemoteStreamRemoved, OnRemoteStreamRemoved);

    // Display a permission dialog to request access to the microphone and camera
    WebRTC.RequestAccessForMediaCapture().AsTask().ContinueWith(antecedent =>
    {
        if (antecedent.Result)
        {
            Initialize(uiDispatcher);
        }
        else
        {
            RunOnUiThread(async () =>
            {
                var msgDialog = new MessageDialog(
                    "Failed to obtain access to multimedia devices!");
                await msgDialog.ShowAsync();
            });
        }
    });
}
public async Task InitialiseAsync(string localHostName, string remotePeerName)
{
    if (!this.initialised)
    {
        this.initialised = true;
        this.hostName = localHostName;
        this.remotePeerName = remotePeerName;

        // I find that if I don't do this before Initialize() then I crash.
        await WebRTC.RequestAccessForMediaCapture();

        // TODO: we need a dispatcher here.
        WebRTC.Initialize(dispatcherProvider.Dispatcher);

        await this.mediaManager.CreateAsync();
        await this.mediaManager.AddLocalStreamAsync(this.mediaManager.UserMedia);
    }
}
public async Task Initialize(ConductorConfig config)
{
    if (config == null)
    {
        throw new ArgumentException();
    }

    this.coreDispatcher = config.CoreDispatcher ?? throw new ArgumentException();

    if (config.Signaller != null)
    {
        this.signaller = new WebRtcSignaller(
            config.Signaller ?? throw new ArgumentException());

        this.signaller.ReceivedIceCandidate += this.signaller_ReceivedIceCandidate;
        this.signaller.ReceivedAnswer += this.signaller_ReceivedAnswer;
        this.signaller.ReceivedOffer += this.signaller_ReceivedOffer;
    }

    this.localVideo = config.LocalVideo;
    this.remoteVideo = config.RemoteVideo;

    var allowed = await WebRTC.RequestAccessForMediaCapture();
    if (!allowed)
    {
        throw new Exception("Failed to access media for WebRtc...");
    }

    WebRTC.Initialize(this.coreDispatcher);

    this.MediaDevices = this.getMediaDevices().ToList();
    this.selectedDevice = this.MediaDevices.First();
    this.CaptureProfiles = await this.getCaptureProfiles(this.selectedDevice);
    this.selectedProfile = this.CaptureProfiles.First();
}
public async Task Initialize(CoreDispatcher coreDispatcher)
{
    if (this.media != null || this.mediaStream != null)
    {
        throw new Exception("Media lock is already initialized.");
    }

    var allowed = await WebRTC.RequestAccessForMediaCapture();
    if (!allowed)
    {
        throw new Exception("Failed to access media for WebRtc...");
    }

    WebRTC.Initialize(coreDispatcher);

    this.media = Media.CreateMedia();

    var videoDevice = this.media.GetVideoCaptureDevices().First();
    var capabilities = await videoDevice.GetVideoCaptureCapabilities();

    // Pick the cheapest capture format (smallest width * height * frame rate).
    var selectedFormat = capabilities
        .OrderBy(cap => cap.Width * cap.Height * cap.FrameRate)
        .FirstOrDefault();

    if (selectedFormat != null)
    {
        WebRTC.SetPreferredVideoCaptureFormat(
            (int)selectedFormat.Width,
            (int)selectedFormat.Height,
            (int)selectedFormat.FrameRate,
            selectedFormat.MrcEnabled
        );
    }

    this.mediaStream = await this.media.GetUserMedia(this.Constraints);
}
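The example above deliberately selects the lowest-bandwidth capture format by ordering ascending and taking the first entry. If higher quality is preferred, the same calls work with the order reversed. The sketch below is an assumed variation, not part of the original sample; the PickBestFormatAsync name and its media parameter are hypothetical.

// Hypothetical variation: prefer the highest resolution/frame-rate format,
// using the same Media / WebRTC calls as the example above.
private async Task PickBestFormatAsync(Media media)
{
    var videoDevice = media.GetVideoCaptureDevices().First();
    var capabilities = await videoDevice.GetVideoCaptureCapabilities();

    // OrderByDescending instead of OrderBy: largest width * height * frame rate first.
    var best = capabilities
        .OrderByDescending(cap => cap.Width * cap.Height * cap.FrameRate)
        .FirstOrDefault();

    if (best != null)
    {
        WebRTC.SetPreferredVideoCaptureFormat(
            (int)best.Width, (int)best.Height, (int)best.FrameRate, best.MrcEnabled);
    }
}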
public void Initialize()
{
    WebRTC.RequestAccessForMediaCapture().Completed += (asyncInfo, status) =>
    {
        if (status == Windows.Foundation.AsyncStatus.Completed && asyncInfo.GetResults() == true)
        {
            WebRTC.Initialize(_uiDispatcher);
            Conductor.Instance.ETWStatsEnabled = false;

            Cameras = new ObservableCollection<MediaDevice>();
            Microphones = new ObservableCollection<MediaDevice>();
            AudioPlayoutDevices = new ObservableCollection<MediaDevice>();

            // WebRTCUWP M58 library does not support audio capture/playout devices
            //foreach (MediaDevice audioCaptureDevice in Conductor.Instance.Media.GetAudioCaptureDevices())
            //{
            //    Microphones.Add(audioCaptureDevice);
            //}
            //foreach (MediaDevice audioPlayoutDevice in Conductor.Instance.Media.GetAudioPlayoutDevices())
            //{
            //    AudioPlayoutDevices.Add(audioPlayoutDevice);
            //}

            // HACK Remove Automatic Device Assignment
            if (SelectedCamera == null && Cameras.Count > 0)
            {
                SelectedCamera = Cameras.First();
            }

            if (SelectedMicrophone == null && Microphones.Count > 0)
            {
                SelectedMicrophone = Microphones.First();
            }

            Debug.WriteLine("Device Status: SelectedCamera: {0} - SelectedMic: {1}",
                SelectedCamera == null ? "NULL" : "OK",
                SelectedMicrophone == null ? "NULL" : "OK");

            if (SelectedAudioPlayoutDevice == null && AudioPlayoutDevices.Count > 0)
            {
                SelectedAudioPlayoutDevice = AudioPlayoutDevices.First();
            }

            Conductor.Instance.Media.OnMediaDevicesChanged += OnMediaDevicesChanged;

            Conductor.Instance.Signaller.OnPeerConnected += (peerId, peerName) =>
            {
                RunOnUiThread(() =>
                {
                    if (Peers == null)
                    {
                        Peers = new ObservableCollection<Peer>();
                        Conductor.Instance.Peers = Peers;
                    }
                    Peers.Add(new Peer { Id = peerId, Name = peerName });
                });
            };

            Conductor.Instance.Signaller.OnPeerDisconnected += peerId =>
            {
                RunOnUiThread(() =>
                {
                    var peerToRemove = Peers?.FirstOrDefault(p => p.Id == peerId);
                    if (peerToRemove != null)
                    {
                        Peers.Remove(peerToRemove);
                    }
                });
            };

            Conductor.Instance.Signaller.OnSignedIn += () =>
            {
                RunOnUiThread(() =>
                {
                    IsConnected = true;
                    IsMicrophoneEnabled = false;
                    IsCameraEnabled = false;
                    IsConnecting = false;
                    OnStatusMessageUpdate?.Invoke("Signed-In");
                });
            };

            Conductor.Instance.Signaller.OnServerConnectionFailure += (Exception ex) =>
            {
                RunOnUiThread(() =>
                {
                    IsConnecting = false;
                    OnStatusMessageUpdate?.Invoke("Server Connection Failure: " + ex.Message + "\n" + ex.StackTrace);
                });
            };

            Conductor.Instance.Signaller.OnDisconnected += () =>
            {
                RunOnUiThread(() =>
                {
                    IsConnected = false;
                    IsMicrophoneEnabled = false;
                    IsCameraEnabled = false;
                    IsDisconnecting = false;
                    Peers?.Clear();
                    OnStatusMessageUpdate?.Invoke("Disconnected");
                });
            };

            Conductor.Instance.Signaller.OnMessageFromPeer += (id, message) =>
            {
                RunOnUiThread(() =>
                {
                    // TODO: Handles All Peer Messages (Signal Channel)
                });
            };

            Conductor.Instance.Signaller.OnPeerConnected += (id, name) =>
            {
                RunOnUiThread(() =>
                {
                    SelectedPeer = Peers.First(x => x.Id == id);
                    OnStatusMessageUpdate?.Invoke(string.Format("Connected Peer: {0}-{1}", SelectedPeer.Id, SelectedPeer.Name));
                });
            };

            // TODO: Restore Event Handler in Utility Wrapper
            // Implemented in Unity Consumer due to Event Handling Issue
            // Conductor.Instance.OnAddRemoteStream += Conductor_OnAddRemoteStream does not propagate

            Conductor.Instance.OnRemoveRemoteStream += Conductor_OnRemoveRemoteStream;
            Conductor.Instance.OnAddLocalStream += Conductor_OnAddLocalStream;
            Conductor.Instance.OnConnectionHealthStats += Conductor_OnPeerConnectionHealthStats;

            Conductor.Instance.OnPeerConnectionCreated += () =>
            {
                RunOnUiThread(() =>
                {
                    IsReadyToConnect = false;
                    IsConnectedToPeer = true;
                    IsReadyToDisconnect = false;
                    IsMicrophoneEnabled = false;
                    OnStatusMessageUpdate?.Invoke("Peer Connection Created");
                });
            };

            Conductor.Instance.OnPeerConnectionClosed += () =>
            {
                RunOnUiThread(() =>
                {
                    IsConnectedToPeer = false;
                    _peerVideoTrack = null;
                    _selfVideoTrack = null;
                    IsMicrophoneEnabled = false;
                    IsCameraEnabled = false;

                    // TODO: Clean-up References
                    //GC.Collect(); // Ensure all references are truly dropped.

                    OnStatusMessageUpdate?.Invoke("Peer Connection Closed");
                });
            };

            Conductor.Instance.OnPeerMessageDataReceived += (peerId, message) =>
            {
                OnPeerMessageDataReceived?.Invoke(peerId, message);
            };

            // DATA Channel Setup
            Conductor.Instance.OnPeerMessageDataReceived += (i, s) =>
            {
            };

            Conductor.Instance.OnReadyToConnect += () =>
            {
                RunOnUiThread(() => { IsReadyToConnect = true; });
            };

            IceServers = new ObservableCollection<IceServer>();
            NewIceServer = new IceServer();

            AudioCodecs = new ObservableCollection<CodecInfo>();
            var audioCodecList = WebRTC.GetAudioCodecs();
            string[] incompatibleAudioCodecs = new string[]
            {
                "CN32000", "CN16000", "CN8000", "red8000", "telephone-event8000"
            };

            VideoCodecs = new ObservableCollection<CodecInfo>();

            // TODO: REMOVE DISPLAY LIST SUPPORT
            var videoCodecList = WebRTC.GetVideoCodecs().OrderBy(codec =>
            {
                switch (codec.Name)
                {
                    case "VP8": return 1;
                    case "VP9": return 2;
                    case "H264": return 3;
                    default: return 99;
                }
            });

            RunOnUiThread(() =>
            {
                foreach (var audioCodec in audioCodecList)
                {
                    if (!incompatibleAudioCodecs.Contains(audioCodec.Name + audioCodec.ClockRate))
                    {
                        AudioCodecs.Add(audioCodec);
                    }
                }

                if (AudioCodecs.Count > 0)
                {
                    SelectedAudioCodec = AudioCodecs.FirstOrDefault(x => x.Name.Contains("PCMU"));
                }

                foreach (var videoCodec in videoCodecList)
                {
                    VideoCodecs.Add(videoCodec);
                }

                if (VideoCodecs.Count > 0)
                {
                    SelectedVideoCodec = VideoCodecs.FirstOrDefault(x => x.Name.Contains("H264"));
                }
            });

            RunOnUiThread(() =>
            {
                OnInitialized?.Invoke();
            });
        }
    };
}
public async Task<bool> RequestAccessForMediaCaptureAsync()
{
    return await WebRTC.RequestAccessForMediaCapture().AsTask();
}
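The examples above share one ordering: request media-capture access first, then call WebRTC.Initialize with a CoreDispatcher, and only then touch media or conductor APIs. The sketch below is a minimal, hypothetical caller of the wrapper in the last example; the StartAsync method name is an assumption and not part of the original code.

// Minimal sketch (assumed caller): await the access request via the wrapper above,
// then initialize WebRTC on the UI dispatcher before using any media APIs.
public async Task StartAsync(CoreDispatcher uiDispatcher)
{
    // Hypothetical call into the wrapper shown in the last example.
    bool allowed = await RequestAccessForMediaCaptureAsync();
    if (!allowed)
    {
        throw new InvalidOperationException("Media capture access was denied.");
    }

    // Same ordering the earlier examples rely on: request access first, Initialize second.
    WebRTC.Initialize(uiDispatcher);
}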