// Per-test initialization: bring up WebRTC with the hardware encoder
// when the current environment supports it, otherwise fall back to software.
public void SetUp()
{
    var hardwareSupported = TestHelper.HardwareCodecSupport();
    var encoder = hardwareSupported ? EncoderType.Hardware : EncoderType.Software;
    WebRTC.Initialize(encoder);
}
// Per-test initialization: always use the software encoder so the
// fixture behaves identically on machines without hardware codecs.
public void SetUp()
{
    const EncoderType encoder = EncoderType.Software;
    WebRTC.Initialize(encoder);
}
// Returns the available video codecs as DTOs, wrapped in a WinRT async
// operation. The result is computed synchronously; the async wrapper only
// adapts it to the IAsyncOperation projection for WinRT callers.
public IAsyncOperation<DtoCodecInfos> GetVideoCodecsAsync()
{
    RtcManager.Instance.EnsureRtcIsInitialized();
    var codecDtos = WebRTC.GetVideoCodecs().ToArray().ToDto();
    return Task.FromResult(codecDtos).AsAsyncOperation();
}
// Unity lifecycle hook: initialize the WebRTC runtime with the
// project-configured encoder and texture-size limit before any
// other component needs it.
private void Awake()
{
    var encoder = WebRTCSettings.EncoderType;
    var limitTextureSize = WebRTCSettings.LimitTextureSize;
    WebRTC.Initialize(encoder, limitTextureSize);
}
// Queries the sender capabilities for a VP8 video codec. A temporary
// WebRTC context is created solely for the capability lookup and torn
// down immediately afterwards. (No try/finally on purpose: the original
// contract leaves the context alive if the lookup throws.)
protected override void SetUpCodecCapability()
{
    WebRTC.Initialize();
    var senderCodecs = RTCRtpSender.GetCapabilities(TrackKind.Video).codecs;
    videoCodec = senderCodecs.FirstOrDefault(c => c.mimeType.Contains("VP8"));
    WebRTC.Dispose();
}
// Initializes the WebRTC runtime and wires up all signalling / conductor
// event handlers for this view model. Handler registration order matters:
// signaller events are attached before conductor events, and codec lists
// are populated on the UI thread at the end. Must be called once, on the
// thread that owns _uiDispatcher (presumably the UI thread — TODO confirm).
public void Initialize()
{
    // Native WebRTC init must happen before any other WebRTC call.
    WebRTC.Initialize(_uiDispatcher);
    Conductor.Instance.ETWStatsEnabled = false;
    Cameras = new ObservableCollection<MediaDevice>();
    Microphones = new ObservableCollection<MediaDevice>();
    AudioPlayoutDevices = new ObservableCollection<MediaDevice>();
    // WebRTCUWP M58 library does not support audio capture/playout devices
    //foreach (MediaDevice audioCaptureDevice in Conductor.Instance.Media.GetAudioCaptureDevices())
    //{
    //    Microphones.Add(audioCaptureDevice);
    //}
    //foreach (MediaDevice audioPlayoutDevice in Conductor.Instance.Media.GetAudioPlayoutDevices())
    //{
    //    AudioPlayoutDevices.Add(audioPlayoutDevice);
    //}
    // HACK Remove Automatic Device Assignment
    // NOTE: the collections above were just created empty, so these
    // defaults only take effect if device enumeration is restored.
    if (SelectedCamera == null && Cameras.Count > 0)
    {
        SelectedCamera = Cameras.First();
    }
    if (SelectedMicrophone == null && Microphones.Count > 0)
    {
        SelectedMicrophone = Microphones.First();
    }
    Debug.WriteLine("Device Status: SelectedCamera: {0} - SelectedMic: {1}", SelectedCamera == null ? "NULL" : "OK", SelectedMicrophone == null ? "NULL" : "OK");
    if (SelectedAudioPlayoutDevice == null && AudioPlayoutDevices.Count > 0)
    {
        SelectedAudioPlayoutDevice = AudioPlayoutDevices.First();
    }
    Conductor.Instance.Media.OnMediaDevicesChanged += OnMediaDevicesChanged;

    // --- Signaller events (all marshalled to the UI thread) ---

    // Peer list bookkeeping: lazily create the shared Peers collection.
    Conductor.Instance.Signaller.OnPeerConnected += (peerId, peerName) =>
    {
        RunOnUiThread(() =>
        {
            if (Peers == null)
            {
                Peers = new ObservableCollection<Peer>();
                Conductor.Instance.Peers = Peers;
            }
            Peers.Add(new Peer { Id = peerId, Name = peerName });
        });
    };
    Conductor.Instance.Signaller.OnPeerDisconnected += peerId =>
    {
        RunOnUiThread(() =>
        {
            var peerToRemove = Peers?.FirstOrDefault(p => p.Id == peerId);
            if (peerToRemove != null)
            {
                Peers.Remove(peerToRemove);
            }
        });
    };
    Conductor.Instance.Signaller.OnSignedIn += () =>
    {
        RunOnUiThread(() =>
        {
            IsConnected = true;
            IsMicrophoneEnabled = false;
            IsCameraEnabled = false;
            IsConnecting = false;
            OnStatusMessageUpdate?.Invoke("Signed-In");
        });
    };
    Conductor.Instance.Signaller.OnServerConnectionFailure += (Exception ex) =>
    {
        RunOnUiThread(() =>
        {
            IsConnecting = false;
            OnStatusMessageUpdate?.Invoke("Server Connection Failure: " + ex.Message + "\n" + ex.StackTrace);
        });
    };
    Conductor.Instance.Signaller.OnDisconnected += () =>
    {
        RunOnUiThread(() =>
        {
            IsConnected = false;
            IsMicrophoneEnabled = false;
            IsCameraEnabled = false;
            IsDisconnecting = false;
            Peers?.Clear();
            OnStatusMessageUpdate?.Invoke("Disconnected");
        });
    };
    Conductor.Instance.Signaller.OnMessageFromPeer += (id, message) =>
    {
        RunOnUiThread(() =>
        {
            // TODO: Handles All Peer Messages (Signal Channel)
        });
    };
    // Second OnPeerConnected subscription: selects the newly connected peer.
    // Relies on the first subscription (above) having added it to Peers.
    Conductor.Instance.Signaller.OnPeerConnected += (id, name) =>
    {
        RunOnUiThread(() =>
        {
            SelectedPeer = Peers.First(x => x.Id == id);
            OnStatusMessageUpdate?.Invoke(string.Format("Connected Peer: {0}-{1}", SelectedPeer.Id, SelectedPeer.Name));
        });
    };
    // TODO: Restore Event Handler in Utility Wrapper
    // Implemented in Unity Consumer due to Event Handling Issue
    // Conductor.Instance.OnAddRemoteStream += Conductor_OnAddRemoteStream does not propagate

    // --- Conductor events ---
    Conductor.Instance.OnRemoveRemoteStream += Conductor_OnRemoveRemoteStream;
    Conductor.Instance.OnAddLocalStream += Conductor_OnAddLocalStream;
    Conductor.Instance.OnConnectionHealthStats += Conductor_OnPeerConnectionHealthStats;
    Conductor.Instance.OnPeerConnectionCreated += () =>
    {
        RunOnUiThread(() =>
        {
            IsReadyToConnect = false;
            IsConnectedToPeer = true;
            IsReadyToDisconnect = false;
            IsMicrophoneEnabled = false;
            OnStatusMessageUpdate?.Invoke("Peer Connection Created");
        });
    };
    Conductor.Instance.OnPeerConnectionClosed += () =>
    {
        RunOnUiThread(() =>
        {
            IsConnectedToPeer = false;
            _peerVideoTrack = null;
            _selfVideoTrack = null;
            IsMicrophoneEnabled = false;
            IsCameraEnabled = false;
            // TODO: Clean-up References
            //GC.Collect(); // Ensure all references are truly dropped.
            OnStatusMessageUpdate?.Invoke("Peer Connection Closed");
        });
    };
    // Forward raw peer data messages to this view model's own event.
    Conductor.Instance.OnPeerMessageDataReceived += (peerId, message) =>
    {
        OnPeerMessageDataReceived?.Invoke(peerId, message);
    };
    // DATA Channel Setup
    // NOTE(review): this second subscription is a no-op placeholder.
    Conductor.Instance.OnPeerMessageDataReceived += (i, s) => { };
    Conductor.Instance.OnReadyToConnect += () =>
    {
        RunOnUiThread(() =>
        {
            IsReadyToConnect = true;
        });
    };

    // --- ICE server and codec list setup ---
    IceServers = new ObservableCollection<IceServer>();
    NewIceServer = new IceServer();
    AudioCodecs = new ObservableCollection<CodecInfo>();
    var audioCodecList = WebRTC.GetAudioCodecs();
    // Codecs filtered out by "<name><clock-rate>" key.
    string[] incompatibleAudioCodecs = new string[] { "CN32000", "CN16000", "CN8000", "red8000", "telephone-event8000" };
    VideoCodecs = new ObservableCollection<CodecInfo>();
    // TODO: REMOVE DISPLAY LIST SUPPORT
    // Preferred display order: VP8, VP9, H264, then everything else.
    var videoCodecList = WebRTC.GetVideoCodecs().OrderBy(codec =>
    {
        switch (codec.Name)
        {
        case "VP8": return(1);
        case "VP9": return(2);
        case "H264": return(3);
        default: return(99);
        }
    });
    // Populate the observable codec collections on the UI thread and pick
    // defaults: PCMU for audio, H264 for video (when present).
    RunOnUiThread(() =>
    {
        foreach (var audioCodec in audioCodecList)
        {
            if (!incompatibleAudioCodecs.Contains(audioCodec.Name + audioCodec.ClockRate))
            {
                AudioCodecs.Add(audioCodec);
            }
        }
        if (AudioCodecs.Count > 0)
        {
            SelectedAudioCodec = AudioCodecs.FirstOrDefault(x => x.Name.Contains("PCMU"));
        }
        foreach (var videoCodec in videoCodecList)
        {
            VideoCodecs.Add(videoCodec);
        }
        if (VideoCodecs.Count > 0)
        {
            SelectedVideoCodec = VideoCodecs.FirstOrDefault(x => x.Name.Contains("H264"));
        }
    });
    // Signal listeners that initialization has completed.
    RunOnUiThread(() => { OnInitialized?.Invoke(); });
}
// Starts the WebRTC frame-update pump as a Unity coroutine and returns
// the handle so the caller can stop it via StopCoroutine if needed.
public Coroutine CoroutineWebRTCUpdate()
{
    var updatePump = WebRTC.Update();
    return StartCoroutine(updatePump);
}
// The format reported as supported for the current graphics device
// must pass validation without throwing.
public void ValidateGraphicsFormat()
{
    var deviceType = SystemInfo.graphicsDeviceType;
    var supportedFormat = WebRTC.GetSupportedGraphicsFormat(deviceType);
    Assert.DoesNotThrow(() => WebRTC.ValidateGraphicsFormat(supportedFormat));
}
// Legacy formats (raw enum value 88 = LegacyARGB32_UNorm) must still be
// accepted by the validator without throwing.
[TestCase((GraphicsFormat)88)]
public void ValidateLegacyGraphicsFormat(GraphicsFormat format)
{
    Assert.DoesNotThrow(() => WebRTC.ValidateGraphicsFormat(format));
}
// Binds the app to the given CoreWindow: enters full screen, initializes
// DirectX and WebRTC, wires peer-list signalling events, and selects
// default codecs on a background task. Should be called once during app
// startup, on the UI thread that owns the window — TODO confirm.
public void SetWindow(CoreWindow window)
{
    ApplicationView.GetForCurrentView().TryEnterFullScreenMode();
    // Initializes DirectX.
    _appCallbacks.SetWindow(window);
    // Initializes webrtc.
    WebRTC.Initialize(CoreApplication.MainView.CoreWindow.Dispatcher);
    Conductor.Instance.ETWStatsEnabled = false;
    // Track peers as they come and go; also remembers the last-connected
    // peer in _selectedPeer.
    Conductor.Instance.Signaller.OnPeerConnected += (peerId, peerName) =>
    {
        Conductor.Instance.Peers.Add(
            _selectedPeer = new Peer { Id = peerId, Name = peerName });
    };
    Conductor.Instance.Signaller.OnPeerDisconnected += peerId =>
    {
        var peerToRemove = Conductor.Instance.Peers?.FirstOrDefault(p => p.Id == peerId);
        if (peerToRemove != null)
        {
            Conductor.Instance.Peers.Remove(peerToRemove);
        }
    };
    Conductor.Instance.OnAddRemoteStream += Conductor_OnAddRemoteStream;
    Conductor.Instance.OnRemoveRemoteStream += Conductor_OnRemoveRemoteStream;
    Conductor.Instance.OnAddLocalStream += Conductor_OnAddLocalStream;
    // NOTE(review): Peers is created AFTER the OnPeerConnected handler is
    // registered; a peer connecting before this line would hit a null list —
    // presumably signalling cannot fire that early, but verify.
    if (Conductor.Instance.Peers == null)
    {
        Conductor.Instance.Peers = new ObservableCollection<Peer>();
    }
    // Codec selection is done off the UI thread.
    Task.Run(() =>
    {
        // Preferred order: VP8, VP9, H264, then everything else.
        var videoCodecList = WebRTC.GetVideoCodecs().OrderBy(codec =>
        {
            switch (codec.Name)
            {
            case "VP8": return(1);
            case "VP9": return(2);
            case "H264": return(3);
            default: return(99);
            }
        });
        //Conductor.Instance.VideoCodec = videoCodecList.FirstOrDefault(x => x.Name.Contains("VP8"));
        Conductor.Instance.VideoCodec = videoCodecList.FirstOrDefault(x => x.Name.Contains("H264"));
        var audioCodecList = WebRTC.GetAudioCodecs();
        // Codecs filtered out by "<name><clock-rate>" key.
        string[] incompatibleAudioCodecs = new string[] { "CN32000", "CN16000", "CN8000", "red8000", "telephone-event8000" };
        var audioCodecs = new List<CodecInfo>();
        foreach (var audioCodec in audioCodecList)
        {
            if (!incompatibleAudioCodecs.Contains(audioCodec.Name + audioCodec.ClockRate))
            {
                audioCodecs.Add(audioCodec);
            }
        }
        if (audioCodecs.Count > 0)
        {
            Conductor.Instance.AudioCodec = audioCodecs.FirstOrDefault(x => x.Name.Contains("PCMU"));
        }
        // Start muted with local video disabled.
        Conductor.Instance.DisableLocalVideoStream();
        Conductor.Instance.MuteMicrophone();
    });
}
// Pushes an NTP timestamp into the native WebRTC layer for clock sync.
// The work is synchronous; the IAsyncAction wrapper only satisfies the
// WinRT projection. (SynNTPTime is the external API's spelling.)
public IAsyncAction SyncWithNtpAsync(long ntpTime)
{
    RtcManager.Instance.EnsureRtcIsInitialized();
    WebRTC.SynNTPTime(ntpTime);
    var done = Task.CompletedTask;
    return done.AsAsyncAction();
}
// Instructs WebRTC to send its trace output to the given trace server.
// Synchronous under the hood; wrapped as IAsyncAction for WinRT callers.
public IAsyncAction SaveTraceAsync(TraceServerConfig traceServer)
{
    RtcManager.Instance.EnsureRtcIsInitialized();
    WebRTC.SaveTrace(traceServer.Ip, traceServer.Port);
    var done = Task.CompletedTask;
    return done.AsAsyncAction();
}
// Per-test initialization: pick hardware encoding when available and
// initialize WebRTC in test mode with texture-size limiting enabled.
public void SetUp()
{
    EncoderType encoder;
    if (TestHelper.HardwareCodecSupport())
    {
        encoder = EncoderType.Hardware;
    }
    else
    {
        encoder = EncoderType.Software;
    }
    WebRTC.Initialize(type: encoder, limitTextureSize: true, forTest: true);
}
// Unity lifecycle hook: initialize WebRTC, then wire the call button
// so a click launches the Call coroutine.
private void Awake()
{
    WebRTC.Initialize();

    callButton.onClick.AddListener(() =>
    {
        StartCoroutine(Call());
    });
}
// Per-test initialization: query the native layer for hardware-encoder
// support and initialize WebRTC with the best available encoder.
public void SetUp()
{
    var hardwareAvailable = NativeMethods.GetHardwareEncoderSupport();
    var encoder = hardwareAvailable ? EncoderType.Hardware : EncoderType.Software;
    WebRTC.Initialize(encoder);
}
// Starts playback. If a stream is already active it only flips the state
// to "playing"; otherwise, when this client is the ICE initiator, it
// selects a camera and capture format from `parameters`, opens the local
// media stream, attaches it to the peer connection, and sends the SDP
// offer. NOTE(review): async void — exceptions thrown here are
// unobservable by callers; presumably invoked as an event-style entry
// point. Expected parameters keys: requestedWebRtcCameraIndex (required),
// requestedVideoWidth / requestedVideoHeight (optional).
public override async void _play(JObject parameters)
{
    Messenger.Broadcast(SympleLog.LogTrace, "symple:webrtc: _play");
    // if there is an active stream, play it now
    if (this.activeStream != null)
    {
        Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: active stream is not null, shuld play it now (TODO)");
        //this.video.src = URL.createObjectURL(this.activeStream);
        //this.video.play();
        this.setState("playing");
    }
    else
    {
        // otherwise, wait until ICE to complete before setting the playing state
        // if we are the ICE initiator, then attempt to open the local video device and send the SDP offer to the peer
        if (this.initiator)
        {
            Messenger.Broadcast(SympleLog.LogInfo, "symple:webrtc: initiating");
            // Log every available capture device and its capabilities.
            var videoCaptureDevices = GetMedia().GetVideoCaptureDevices();
            Messenger.Broadcast(SympleLog.LogInfo, "videoCaptureDevices:");
            foreach (var dev in videoCaptureDevices)
            {
                Messenger.Broadcast(SympleLog.LogInfo, "id = " + dev.Id + ", name = " + dev.Name + ", location = " + dev.Location);
                var capabilities = await dev.GetVideoCaptureCapabilities();
                foreach (var capability in capabilities)
                {
                    Messenger.Broadcast(SympleLog.LogDebug, "\t" + capability.FullDescription);
                }
            }
            // Clamp the requested camera index into range (fall back to 0).
            int requestedWebRtcCameraIndex = parameters["requestedWebRtcCameraIndex"].ToObject<int>();
            int usedWebRtcCameraIndex = requestedWebRtcCameraIndex;
            if (requestedWebRtcCameraIndex >= videoCaptureDevices.Count)
            {
                Messenger.Broadcast(SympleLog.LogInfo, "NOTE: requested WebRTC camera index of " + requestedWebRtcCameraIndex + " is out of range of the number of available video capture devices (" + videoCaptureDevices.Count + "). Resetting to 0.");
                usedWebRtcCameraIndex = 0;
            }
            Messenger.Broadcast(SympleLog.LogInfo, "Selecting WebRTC camera with index " + usedWebRtcCameraIndex);
            var selectedVideoDevice = videoCaptureDevices[usedWebRtcCameraIndex];
            Messenger.Broadcast(SympleLog.LogDebug, "getting videoCaptureCapabilities");
            var videoCaptureCapabilities = await selectedVideoDevice.GetVideoCaptureCapabilities();
            Messenger.Broadcast(SympleLog.LogDebug, "got videoCaptureCapabilities");
            GetMedia().SelectVideoDevice(selectedVideoDevice);
            // Requested resolution; defaults to 640x480 when not supplied.
            int requestedVideoWidth;
            if (parameters["requestedVideoWidth"] != null)
            {
                requestedVideoWidth = parameters["requestedVideoWidth"].ToObject<int>();
            }
            else
            {
                Messenger.Broadcast(SympleLog.LogDebug, "requestedVideoWidth set to default");
                requestedVideoWidth = 640;
            }
            int requestedVideoHeight;
            if (parameters["requestedVideoHeight"] != null)
            {
                requestedVideoHeight = parameters["requestedVideoHeight"].ToObject<int>();
            }
            else
            {
                Messenger.Broadcast(SympleLog.LogDebug, "requestedVideoHeight set to default");
                requestedVideoHeight = 480;
            }
            int numRequestedPixels = requestedVideoWidth * requestedVideoHeight;
            // We need to specify a preferred video capture format; it has to be one of the supported capabilities of the device.
            // We will choose the capability that is as close as possible to the requested resolution while also having the highest frame rate for that resolution;
            var chosenCapability = videoCaptureCapabilities[0];
            foreach (var capability in videoCaptureCapabilities)
            {
                int numPixelsInThisCapability = (int)(capability.Width * capability.Height);
                int numPixelsInChosenCapability = (int)(chosenCapability.Width * chosenCapability.Height);
                long thisPixelDeltaFromRequested = Math.Abs(numPixelsInThisCapability - numRequestedPixels);
                long chosenPixelDeltaFromRequested = Math.Abs(numPixelsInChosenCapability - numRequestedPixels);
                if (thisPixelDeltaFromRequested < chosenPixelDeltaFromRequested)
                {
                    // Closer to the requested pixel count wins.
                    chosenCapability = capability;
                }
                else if (thisPixelDeltaFromRequested == chosenPixelDeltaFromRequested && capability.FrameRate > chosenCapability.FrameRate)
                {
                    // Tie on resolution: higher frame rate wins.
                    chosenCapability = capability;
                }
            }
            Messenger.Broadcast(SympleLog.LogDebug, "chosenCapability:");
            Messenger.Broadcast(SympleLog.LogDebug, "\tWidth: " + (int)chosenCapability.Width);
            Messenger.Broadcast(SympleLog.LogDebug, "\tHeight: " + (int)chosenCapability.Height);
            Messenger.Broadcast(SympleLog.LogDebug, "\tFrameRate: " + (int)chosenCapability.FrameRate);
            WebRTC.SetPreferredVideoCaptureFormat((int)chosenCapability.Width, (int)chosenCapability.Height, (int)chosenCapability.FrameRate);
            //Org.WebRtc.Media.SetDisplayOrientation(Windows.Graphics.Display.DisplayOrientations.None);
            Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: before getUserMedia");
            // Open the local media stream once and cache it.
            if (_localStream == null)
            {
                _localStream = await GetMedia().GetUserMedia(this.userMediaConstraints);
            }
            Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: after getUserMedia");
            // play the local video stream and create the SDP offer
            this.pc.AddStream(_localStream);
            Messenger.Broadcast(SympleLog.LogDebug, "localStream: " + _localStream);
            var videoTracks = _localStream.GetVideoTracks();
            Messenger.Broadcast(SympleLog.LogDebug, "videoTracks in localStream: ");
            foreach (var track in videoTracks)
            {
                Messenger.Broadcast(SympleLog.LogDebug, track.Id + ", enabled = " + track.Enabled + ", kind = " + track.Kind);
            }
            var audioTracks = _localStream.GetAudioTracks();
            Messenger.Broadcast(SympleLog.LogDebug, "audioTracks in localStream: ");
            foreach (var track in audioTracks)
            {
                Messenger.Broadcast(SympleLog.LogDebug, track.Id + ", enabled = " + track.Enabled + ", kind = " + track.Kind);
            }
            if (videoTracks.Count > 0)
            {
                //var source = GetMedia().CreateMediaSource(videoTracks[0], Symple.LocalMediaStreamId); // was valid for org.webrtc 1.54, not existing anymore
                var source = GetMedia().CreateMediaStreamSource(Symple.LocalMediaStreamId);
                Messenger.Broadcast(SympleLog.CreatedMediaSource, source);
                // pc may have been torn down while we were awaiting above.
                if (this.pc != null)
                {
                    RTCSessionDescription desc = await this.pc.CreateOffer();
                    Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: offer: " + desc);
                    this._onLocalSDP(desc);
                    // store the active local stream
                    this.activeStream = _localStream;
                }
                else
                {
                    Messenger.Broadcast(SympleLog.LogError, "peer connection was destroyed while trying to creat offer");
                }
            }
            else
            {
                Messenger.Broadcast(SympleLog.LogError, "ERROR: No video track found locally");
            }
        }
    }
}
// Unity teardown hook: releases the native WebRTC context.
// NOTE(review): other teardown paths in this codebase call WebRTC.Dispose();
// Finalize here is presumably an older Unity WebRTC package API for the same
// cleanup — confirm against the package version this file builds with.
private void OnDestroy() { WebRTC.Finalize(); }
// Constructs the WebRTC player engine. On UWP (NETFX_CORE) it performs
// one-time WebRTC initialization (guarded by the webrtcInitialized flag),
// enables logging, and builds the RTC configuration (caller-supplied or a
// default with Google's public STUN server). On all platforms it records
// whether this client is the ICE initiator.
public SymplePlayerEngineWebRTC(SymplePlayer player) : base(player)
{
    Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: init");
#if NETFX_CORE
    // One-time global init; webrtcInitialized is shared across instances.
    // NOTE(review): not thread-safe — presumably only constructed on one
    // thread; confirm.
    if (!webrtcInitialized)
    {
        // needed before calling any webrtc functions http://stackoverflow.com/questions/43331677/webrtc-for-uwp-new-rtcpeerconnection-doesnt-complete-execution
        if (player.options.CoreDispatcher != null)
        {
            WebRTC.Initialize(player.options.CoreDispatcher);
        }
        else
        {
            WebRTC.Initialize(null);
        }
        // NOTE(review): EnableLogging is called once per level — presumably
        // each call enables that level additively; verify the library's
        // semantics (a later call may instead override earlier ones).
        WebRTC.EnableLogging(LogLevel.LOGLVL_ERROR);
        WebRTC.EnableLogging(LogLevel.LOGLVL_INFO);
        WebRTC.EnableLogging(LogLevel.LOGLVL_SENSITIVE);
        WebRTC.EnableLogging(LogLevel.LOGLVL_VERBOSE);
        WebRTC.EnableLogging(LogLevel.LOGLVL_WARNING);
        Messenger.Broadcast(SympleLog.LogInfo, "WebRTC logging enabled, log folder = " + WebRTC.LogFolder.Path + ", filename = " + WebRTC.LogFileName);
        webrtcInitialized = true;
    }
    this.userMediaConstraints = player.options.userMediaConstraints;
    // Use the caller's RTC config when given, otherwise fall back to a
    // default config with Google's public STUN server.
    if (player.options.rtcConfig != null)
    {
        this.rtcConfig = player.options.rtcConfig;
    }
    else
    {
        this.rtcConfig = new RTCConfiguration();
        this.rtcConfig.IceServers.Add(new RTCIceServer() { Url = "stun:stun.l.google.com:19302", Username = string.Empty, Credential = string.Empty });
    }
#endif
    /*
     * this.rtcOptions = player.options.rtcOptions || {
     *  optional: [
     *      { DtlsSrtpKeyAgreement: true } // required for FF <=> Chrome interop
     *  ]
     * };
     */
    // Specifies that this client will be the ICE initiator,
    // and will be sending the initial SDP Offer.
    this.initiator = player.options.initiator;
    Messenger.Broadcast(SympleLog.LogDebug, "symple:webrtc: constructor, set this.initiator to " + this.initiator);
#if NETFX_CORE
    // Reference to the active local or remote media stream
    this.activeStream = null;
#endif
}
// Unity lifecycle hook: initialize WebRTC with the software encoder so
// behavior is identical regardless of GPU codec support.
private void Awake()
{
    const EncoderType encoder = EncoderType.Software;
    WebRTC.Initialize(encoder);
}
// Unity teardown hook: close the signalling channel (if one was ever
// created) before releasing the WebRTC runtime.
private void OnDestroy()
{
    if (_signaller != null)
    {
        _signaller.Close();
    }
    WebRTC.Dispose();
}
// Unity teardown hook: release the WebRTC runtime with this component.
private void OnDestroy() => WebRTC.Dispose();
// Unity teardown hook.
private void OnDestroy()
{
    // Stop audio capture first, then tear down the WebRTC runtime.
    Audio.Stop();
    WebRTC.Dispose();
}