/// <summary>
/// Initialize the underlying WebRTC peer connection.
/// </summary>
/// <remarks>
/// This method must be called once before using the peer connection. If <see cref="AutoInitializeOnStart"/>
/// is <c>true</c> then it is automatically called during <a href="https://docs.unity3d.com/ScriptReference/MonoBehaviour.Start.html">MonoBehaviour.Start()</a>.
///
/// This method is asynchronous and completes its task when the initialization completed.
/// On successful completion, it also triggers the <see cref="OnInitialized"/> event.
/// Note however that this completion is free-threaded and completes immediately when the
/// underlying peer connection is initialized, whereas any <see cref="OnInitialized"/>
/// event handler is invoked when control returns to the main Unity app thread. The former
/// is faster, but does not allow accessing the underlying peer connection because it
/// returns before <see cref="OnPostInitialize"/> executed. Therefore it is generally
/// recommended to listen to the <see cref="OnInitialized"/> event, and ignore the returned
/// <see xref="System.Threading.Tasks.Task"/> object.
///
/// If the peer connection is already initialized, this method returns immediately with
/// a <see xref="System.Threading.Tasks.Task.CompletedTask"/> object. The caller can check
/// that the <see cref="Peer"/> property is non-<c>null</c> to confirm that the connection
/// is in fact initialized.
/// </remarks>
public Task InitializeAsync(CancellationToken token = default(CancellationToken))
{
    // Check in case Awake() was called first
    if (_nativePeer == null)
    {
        CreateNativePeerConnection();
    }

    // if the peer is already set, we refuse to initialize again.
    // Note: for multi-peer scenarios, use multiple WebRTC components.
    if (_nativePeer.Initialized)
    {
        return (Task.CompletedTask);
    }

    // Ensure Android binding is initialized before accessing the native implementation
    Android.Initialize();

    // Conditional-compilation trick: on UWP outside the editor, media access must be
    // requested from the UI thread, so the block below is guarded by a thread check
    // and an else-branch dispatches to the UI thread. On every other platform the
    // #if regions compile out, leaving only the bare block — which always executes.
#if UNITY_WSA && !UNITY_EDITOR
    if (UnityEngine.WSA.Application.RunningOnUIThread())
#endif
    {
        return (RequestAccessAndInitAsync(token));
    }
#if UNITY_WSA && !UNITY_EDITOR
    else
    {
        // NOTE(review): waitUntilDone only waits for the delegate to RETURN its Task,
        // not for initialization to finish; the CompletedTask returned here therefore
        // completes before init does — callers should listen to OnInitialized (see remarks).
        UnityEngine.WSA.Application.InvokeOnUIThread(() => RequestAccessAndInitAsync(token), waitUntilDone: true);
        return (Task.CompletedTask);
    }
#endif
}
/// <summary>
/// Create and initialize the underlying WebRTC peer connection plugin wrapper.
/// </summary>
/// <remarks>
/// This method is asynchronous and completes its task when the initialization completed.
/// On successful completion, it also triggers the <see cref="OnInitialized"/> event.
/// Note however that this completion is free-threaded and completes immediately when the
/// underlying peer connection is initialized, whereas any <see cref="OnInitialized"/>
/// event handler is invoked when control returns to the main Unity app thread. The former
/// is faster, but does not allow accessing the underlying peer connection because it
/// returns before <see cref="OnPostInitialize"/> executed. Therefore it is generally
/// recommended to listen to the <see cref="OnInitialized"/> event, and ignore the returned
/// <see xref="System.Threading.Tasks.Task"/> object.
/// </remarks>
/// <param name="token">Cancellation token for the initialization.</param>
/// <returns>
/// The initialized native peer connection wrapper, or <c>null</c> on UWP when the user
/// denied audio access while the legacy audio device module (ADM1) is in use.
/// </returns>
private async Task<WebRTC.PeerConnection> InitializePluginAsync(CancellationToken token)
{
    // Ensure Android binding is initialized before accessing the native implementation
    Android.Initialize();

#if UNITY_WSA && !UNITY_EDITOR
    if (Library.UsedAudioDeviceModule == AudioDeviceModule.LegacyModule)
    {
        // Preventing access to audio crashes the ADM1 at startup and the entire application.
        bool permissionGranted = await UwpUtils.RequestAccessAsync(StreamingCaptureMode.Audio);
        if (!permissionGranted)
        {
            return null;
        }
    }
#endif

    // Create the peer connection managed wrapper and its native implementation
    var nativePeer = new WebRTC.PeerConnection();
    nativePeer.AudioTrackAdded += (RemoteAudioTrack track) =>
    {
        // Tracks will be output by AudioReceivers, so avoid outputting them twice.
        track.OutputToDevice(false);
    };

    Debug.Log("Initializing WebRTC Peer Connection...");
    var config = new PeerConnectionConfiguration();
    foreach (var server in IceServers)
    {
        config.IceServers.Add(new IceServer
        {
            Urls = { server.ToString() },
            TurnUserName = IceUsername,
            TurnPassword = IceCredential
        });
    }

    try
    {
        await nativePeer.InitializeAsync(config, token);
        return nativePeer;
    }
    catch (OperationCanceledException)
    {
        // Propagate cancellation untouched. Use a bare `throw;` to preserve the
        // original stack trace; rethrowing the caught variable resets it (CA2200).
        throw;
    }
    catch (Exception ex)
    {
        // Clean up the partially-initialized native wrapper before reporting.
        nativePeer.Dispose();

        // If the failure was actually a cancellation surfacing as another exception
        // type, prefer reporting cancellation to the caller.
        token.ThrowIfCancellationRequested();

        EnsureIsMainAppThread();
        var errorMessage = new StringBuilder();
        errorMessage.Append("WebRTC plugin initializing failed. See full log for exception details.\n");
        errorMessage.Append($"Exception: {ex.Message}");
        OnError.Invoke(errorMessage.ToString());

        // Bare `throw;` again to keep the original stack trace (was `throw ex;`).
        throw;
    }
}
/// <inheritdoc/>
protected async Task OnEnable()
{
    // Media senders are standalone objects, therefore can be created before any
    // peer connection. Ensure the Android binding is initialized before touching
    // the native implementation.
    Android.Initialize();

    // Only kick off capture here when the component is configured to start
    // automatically on enable; otherwise the user starts it explicitly.
    if (!AutoStartOnEnabled)
    {
        return;
    }

    await StartCaptureAsync();
}
/// <summary>
/// Initialize the underlying WebRTC peer connection.
/// </summary>
/// <remarks>
/// This method must be called once before using the peer connection. If <see cref="AutoInitializeOnStart"/>
/// is <c>true</c> then it is automatically called during <a href="https://docs.unity3d.com/ScriptReference/MonoBehaviour.Start.html">MonoBehaviour.Start()</a>.
///
/// This method is asynchronous and completes its task when the initialization completed.
/// On successful completion, it also triggers the <see cref="OnInitialized"/> event.
/// Note however that this completion is free-threaded and completes immediately when the
/// underlying peer connection is initialized, whereas any <see cref="OnInitialized"/>
/// event handler is invoked when control returns to the main Unity app thread. The former
/// is faster, but does not allow accessing the underlying peer connection because it
/// returns before <see cref="OnPostInitialize"/> executed. Therefore it is generally
/// recommended to listen to the <see cref="OnInitialized"/> event, and ignore the returned
/// <see xref="System.Threading.Tasks.Task"/> object.
///
/// If the peer connection is already initialized, this method returns immediately with
/// a <see xref="System.Threading.Tasks.Task.CompletedTask"/> object. The caller can check
/// that the <see cref="Peer"/> property is non-<c>null</c> to confirm that the connection
/// is in fact initialized.
/// </remarks>
private Task InitializeAsync(CancellationToken token = default(CancellationToken))
{
    // Unconditionally (re)create the native peer connection wrapper.
    // NOTE(review): unlike the public overload, this variant does not check
    // _nativePeer.Initialized first — presumably callers guarantee a fresh state; verify.
    CreateNativePeerConnection();

    // Ensure Android binding is initialized before accessing the native implementation
    Android.Initialize();

    // Conditional-compilation trick: on UWP outside the editor, media access must be
    // requested from the UI thread, so the block below is guarded by a thread check
    // with an else-branch dispatching to the UI thread. On every other platform the
    // #if regions compile out, leaving only the bare block — which always executes.
#if UNITY_WSA && !UNITY_EDITOR
    if (UnityEngine.WSA.Application.RunningOnUIThread())
#endif
    {
        return (RequestAccessAndInitAsync(token));
    }
#if UNITY_WSA && !UNITY_EDITOR
    else
    {
        // NOTE(review): waitUntilDone only waits for the delegate to RETURN its Task,
        // not for initialization to finish; the CompletedTask returned here completes
        // before init does — callers should rely on the OnInitialized event.
        UnityEngine.WSA.Application.InvokeOnUIThread(() => RequestAccessAndInitAsync(token), waitUntilDone: true);
        return (Task.CompletedTask);
    }
#endif
}
/// <summary>
/// Unity callback invoked when the component is enabled. Requests any platform
/// permission required for audio capture, then creates the local microphone
/// track source and signals that streaming started.
/// </summary>
/// <remarks>
/// <c>async void</c> is acceptable here because Unity message handlers cannot
/// return a <see cref="Task"/>; all awaited failures are handled locally.
/// </remarks>
protected async void OnEnable()
{
    // Already have a source (e.g. component re-enabled after a successful start).
    if (Source != null)
    {
        return;
    }

#if PLATFORM_ANDROID
    // Ensure Android binding is initialized before accessing the native implementation
    Android.Initialize();

    // Check for permission to access the microphone
    // (fixed comment: this is the RecordAudio/microphone permission, not the camera)
    if (!Permission.HasUserAuthorizedPermission(Permission.Microphone))
    {
        if (!_androidRecordAudioRequestPending)
        {
            // Monitor the OnApplicationFocus(true) event during the next 5 minutes,
            // and check for permission again each time (see below why).
            _androidRecordAudioRequestPending = true;
            _androidRecordAudioRequestRetryUntilTime = Time.time + 300;

            // Display dialog requesting user permission. This will return immediately,
            // and unfortunately there's no good way to tell when this completes. As a rule
            // of thumb, application should lose focus, so check when focus resumes should
            // be sufficient without having to poll every frame.
            Permission.RequestUserPermission(Permission.Microphone);
        }
        return;
    }
#endif

#if UNITY_WSA && !UNITY_EDITOR
    // Request access to audio capture. The OS may show some popup dialog to the
    // user to request permission. This will succeed only if the user approves it.
    try
    {
        if (UnityEngine.WSA.Application.RunningOnUIThread())
        {
            await RequestAccessAsync();
        }
        else
        {
            UnityEngine.WSA.Application.InvokeOnUIThread(() => RequestAccessAsync(), waitUntilDone: true);
        }
    }
    catch (Exception ex)
    {
        // Log an error and prevent activation
        Debug.LogError($"Audio access failure: {ex.Message}.");
        this.enabled = false;
        return;
    }
#endif

    var initConfig = new LocalAudioDeviceInitConfig
    {
        AutoGainControl = _autoGainControl,
    };
    try
    {
        Source = await DeviceAudioTrackSource.CreateAsync(initConfig);
        if (Source == null)
        {
            // A NULL source is an unrecoverable setup failure; use a specific
            // exception type (not bare Exception) and surface it via the catch below.
            throw new InvalidOperationException("DeviceAudioTrackSource.CreateAsync() returned a NULL source.");
        }
    }
    catch (Exception ex)
    {
        Debug.LogError($"Failed to create device track source for {nameof(MicrophoneSource)} component '{name}'.");
        Debug.LogException(ex, this);
        return;
    }

    IsStreaming = true;
    AudioSourceStarted.Invoke(this);
}
/// <summary>
/// Unity callback invoked when the component is enabled. Requests camera access
/// on the current platform, resolves automatic capture-format constraints, then
/// creates the webcam video track source and attaches it to the component.
/// </summary>
protected async void OnEnable()
{
    // Already have a source (e.g. component re-enabled after a successful start).
    if (Source != null)
    {
        return;
    }

#if PLATFORM_ANDROID
    // Ensure Android binding is initialized before accessing the native implementation
    Android.Initialize();

    // Check for permission to access the camera
    if (!Permission.HasUserAuthorizedPermission(Permission.Camera))
    {
        if (!_androidCameraRequestPending)
        {
            // Monitor the OnApplicationFocus(true) event during the next 5 minutes,
            // and check for permission again each time (see below why).
            _androidCameraRequestPending = true;
            _androidCameraRequestRetryUntilTime = Time.time + 300;

            // Display dialog requesting user permission. This will return immediately,
            // and unfortunately there's no good way to tell when this completes. As a rule
            // of thumb, application should lose focus, so check when focus resumes should
            // be sufficient without having to poll every frame.
            Permission.RequestUserPermission(Permission.Camera);
        }
        return;
    }
#elif UNITY_WSA && !UNITY_EDITOR
    // Request UWP access to video capture. The OS may show some popup dialog to the
    // user to request permission. This will succeed only if the user grants permission.
    try
    {
        // Note that the UWP UI thread and the main Unity app thread are always different.
        // https://docs.unity3d.com/Manual/windowsstore-appcallbacks.html
        // We leave the code below as an example of generic handling in case this would be used in
        // some other place, and in case a future version of Unity decided to change that assumption,
        // but currently OnEnable() is always invoked from the main Unity app thread so here the first
        // branch is never taken.
        if (UnityEngine.WSA.Application.RunningOnUIThread())
        {
            await RequestAccessAsync();
        }
        else
        {
            UnityEngine.WSA.Application.InvokeOnUIThread(() => RequestAccessAsync(), waitUntilDone: true);
        }
    }
    catch (Exception ex)
    {
        // Log an error and prevent activation
        Debug.LogError($"Video access failure: {ex.Message}.");
        this.enabled = false;
        return;
    }
#endif

    // Handle automatic capture format constraints.
    // Start from the user-provided constraints; automatic mode below may override them.
    string videoProfileId = VideoProfileId;
    var videoProfileKind = VideoProfileKind;
    int width = Constraints.width;
    int height = Constraints.height;
    double framerate = Constraints.framerate;

#if ENABLE_WINMD_SUPPORT
    if (FormatMode == LocalVideoSourceFormatMode.Automatic)
    {
        // Do not constrain resolution by default, unless the device calls for it (see below).
        width = 0; // auto
        height = 0; // auto

        // Avoid constraining the framerate; this is generally not necessary (formats are listed
        // with higher framerates first) and is error-prone as some formats report 30.0 FPS while
        // others report 29.97 FPS.
        framerate = 0; // auto

        // For HoloLens, use video profile to reduce resolution and save power/CPU/bandwidth
        if (global::Windows.Graphics.Holographic.HolographicSpace.IsAvailable)
        {
            if (!global::Windows.Graphics.Holographic.HolographicDisplay.GetDefault().IsOpaque)
            {
                if (global::Windows.ApplicationModel.Package.Current.Id.Architecture == global::Windows.System.ProcessorArchitecture.X86)
                {
                    // Holographic AR (transparent) x86 platform - Assume HoloLens 1
                    videoProfileKind = WebRTC.VideoProfileKind.VideoRecording; // No profile in VideoConferencing
                    width = 896; // Target 896 x 504
                }
                else
                {
                    // Holographic AR (transparent) non-x86 platform - Assume HoloLens 2
                    videoProfileKind = WebRTC.VideoProfileKind.VideoConferencing;
                    width = 960; // Target 960 x 540
                }
            }
        }
    }
#elif PLATFORM_ANDROID
    if (FormatMode == LocalVideoSourceFormatMode.Automatic)
    {
        // Avoid constraining the framerate; this is generally not necessary (formats are listed
        // with higher framerates first) and is error-prone as some formats report 30.0 FPS while
        // others report 29.97 FPS.
        framerate = 0; // auto

        // No explicit device selected: fall back to the first enumerated capture device.
        string deviceId = WebcamDevice.id;
        if (string.IsNullOrEmpty(deviceId))
        {
            List<VideoCaptureDevice> listedDevices = await PeerConnection.GetVideoCaptureDevicesAsync();
            if (listedDevices.Count > 0)
            {
                deviceId = listedDevices[0].id;
            }
        }
        if (!string.IsNullOrEmpty(deviceId))
        {
            // Find the closest format to 720x480, independent of framerate
            // (closeness measured as the sum of absolute width and height deltas).
            List<VideoCaptureFormat> formats = await DeviceVideoTrackSource.GetCaptureFormatsAsync(deviceId);
            double smallestDiff = double.MaxValue;
            bool hasFormat = false;
            foreach (var fmt in formats)
            {
                double diff = Math.Abs(fmt.width - 720) + Math.Abs(fmt.height - 480);
                if ((diff < smallestDiff) || !hasFormat)
                {
                    hasFormat = true;
                    smallestDiff = diff;
                    width = (int)fmt.width;
                    height = (int)fmt.height;
                }
            }
            if (hasFormat)
            {
                Debug.Log($"WebcamSource automated mode selected resolution {width}x{height} for Android video capture device #{deviceId}.");
            }
        }
    }
#endif

    // TODO - Fix codec selection (was as below before change)

    // Force again PreferredVideoCodec right before starting the local capture,
    // so that modifications to the property done after OnPeerInitialized() are
    // accounted for.
    //< FIXME
    //PeerConnection.Peer.PreferredVideoCodec = PreferredVideoCodec;

    // Check H.264 requests on Desktop (not supported)
    //#if !ENABLE_WINMD_SUPPORT
    //    if (PreferredVideoCodec == "H264")
    //    {
    //        Debug.LogError("H.264 encoding is not supported on Desktop platforms. Using VP8 instead.");
    //        PreferredVideoCodec = "VP8";
    //    }
    //#endif

    // Create the track. Zero means "unconstrained", so map 0 to null for the
    // optional width/height/framerate fields of the device configuration.
    var deviceConfig = new LocalVideoDeviceInitConfig
    {
        videoDevice = WebcamDevice,
        videoProfileId = videoProfileId,
        videoProfileKind = videoProfileKind,
        width = (width > 0 ? (uint?)width : null),
        height = (height > 0 ? (uint?)height : null),
        framerate = (framerate > 0 ? (double?)framerate : null),
        enableMrc = EnableMixedRealityCapture,
        enableMrcRecordingIndicator = EnableMRCRecordingIndicator
    };
    try
    {
        var source = await DeviceVideoTrackSource.CreateAsync(deviceConfig);
        AttachSource(source);
    }
    catch (Exception ex)
    {
        Debug.LogError($"Failed to create device track source for {nameof(WebcamSource)} component '{name}'.");
        Debug.LogException(ex, this);
        return;
    }
}
/// <summary>
/// Request access to the audio capture device (microphone) on the current platform.
/// </summary>
/// <param name="token">Token used to cancel a pending Android permission request.</param>
/// <returns>
/// A task completing with <c>true</c> if access is granted, <c>false</c> if the Android
/// permission request timed out after 5 minutes, and <c>true</c> immediately on platforms
/// where no runtime permission is needed. The task is canceled if <paramref name="token"/>
/// is signaled while the Android request is pending.
/// </returns>
private Task<bool> RequestAccessAsync(CancellationToken token)
{
#if !UNITY_EDITOR && UNITY_ANDROID
    // Ensure Android binding is initialized before accessing the native implementation
    Android.Initialize();

    // Check for permission to access the microphone
    if (!Permission.HasUserAuthorizedPermission(Permission.Microphone))
    {
        // Display dialog requesting user permission. This will return immediately,
        // and unfortunately there's no good way to tell when this completes.
        Permission.RequestUserPermission(Permission.Microphone);

        // As a rule of thumb, application should lose focus, so check when focus resumes should
        // be sufficient without having to poll every frame.
        // Monitor the OnApplicationFocus(true) event during the next 5 minutes,
        // and check for permission again each time.
        // Run continuations asynchronously so that completing the TCS while holding
        // _androidPermissionRequestLock never executes awaiting code inside the lock.
        var tcs = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
        lock (_androidPermissionRequestLock)
        {
            Debug.Assert(_androidPermissionRequestTcs == null);
            _androidPermissionRequestTcs = tcs;
        }

        // Time out the request after 5 minutes of the user not answering the dialog.
        Task.Delay(TimeSpan.FromMinutes(5)).ContinueWith(_ =>
        {
            lock (_androidPermissionRequestLock)
            {
                // Check if the component is still waiting on the same permission request.
                // If it has been disabled and then re-enabled, _androidPermissionRequestTcs will be different.
                if (_androidPermissionRequestTcs == tcs)
                {
                    Debug.LogError("User denied RecordAudio (microphone) permission; cannot use MicrophoneSource.");
                    // TrySet* is race-safe if the cancellation path completed this TCS first.
                    _androidPermissionRequestTcs.TrySetResult(false);
                    _androidPermissionRequestTcs = null;
                }
            }
        });

        // If the initialization is canceled, end the task and reset the TCS.
        token.Register(() =>
        {
            lock (_androidPermissionRequestLock)
            {
                // Only cancel if the component is still waiting on THIS request.
                // Comparing against tcs (not just null) prevents a stale registration
                // from a previous enable cycle from canceling a newer request —
                // the previous code only checked for null and relied on a
                // Debug.Assert, which is a no-op in release builds.
                if (_androidPermissionRequestTcs == tcs)
                {
                    _androidPermissionRequestTcs.TrySetCanceled();
                    _androidPermissionRequestTcs = null;
                }
            }
        });
        return tcs.Task;
    }

    // Already has permission.
    return Task.FromResult(true);
#elif UNITY_WSA && !UNITY_EDITOR
    // UWP: delegate to the shared helper which handles the OS consent prompt.
    return UwpUtils.RequestAccessAsync(StreamingCaptureMode.Audio);
#else
    // Desktop editor/player: no runtime permission model for the microphone.
    return Task.FromResult(true);
#endif
}