/// <inheritdoc />
public bool CheckCapability(MixedRealityCapability capability)
{
    // IsSourceKindSupported shipped in Windows 10 1903; probe for it before calling.
    bool sourceKindApiPresent = WindowsApiChecker.IsMethodAvailable(
        "Windows.UI.Input.Spatial",
        "SpatialInteractionManager",
        "IsSourceKindSupported");

    if (sourceKindApiPresent)
    {
#if WINDOWS_UWP
        if (capability == MixedRealityCapability.ArticulatedHand ||
            capability == MixedRealityCapability.GGVHand)
        {
            return SpatialInteractionManager.IsSourceKindSupported(SpatialInteractionSourceKind.Hand);
        }

        if (capability == MixedRealityCapability.MotionController)
        {
            return SpatialInteractionManager.IsSourceKindSupported(SpatialInteractionSourceKind.Controller);
        }
#endif // WINDOWS_UWP
    }
    else if (!UnityEngine.XR.WSA.HolographicSettings.IsDisplayOpaque)
    {
        // HoloLens supports GGV hands
        return capability == MixedRealityCapability.GGVHand;
    }
    else
    {
        // Windows Mixed Reality immersive devices support motion controllers
        return capability == MixedRealityCapability.MotionController;
    }

    return false;
}
/// <summary>
/// Constructor.
/// </summary>
/// <param name="inputSystem">The <see cref="Microsoft.MixedReality.Toolkit.Input.IMixedRealityInputSystem"/> instance that receives data from this provider.</param>
/// <param name="name">Friendly name of the service.</param>
/// <param name="priority">Service priority. Used to determine order of instantiation.</param>
/// <param name="profile">The service's configuration profile.</param>
public WindowsMixedRealityEyeGazeDataProvider(
    IMixedRealityInputSystem inputSystem,
    string name,
    uint priority,
    BaseMixedRealityProfile profile) : base(inputSystem, name, priority, profile)
{
    // First check that the Eyes property exists on this Windows SDK at all.
    eyesApiAvailable = WindowsApiChecker.IsPropertyAvailable(
        "Windows.UI.Input.Spatial",
        "SpatialPointerPose",
        "Eyes");

#if (UNITY_WSA && DOTNETWINRT_PRESENT) || WINDOWS_UWP
    // Short-circuit keeps the call order identical: IsSupported is only queried
    // when the property itself was found.
    eyesApiAvailable = eyesApiAvailable && EyesPose.IsSupported();
#endif // (UNITY_WSA && DOTNETWINRT_PRESENT) || WINDOWS_UWP

    gazeSmoother = new EyeGazeSmoother();

    // Register for these events to forward along, in case code is still registering for the obsolete actions
    gazeSmoother.OnSaccade += GazeSmoother_OnSaccade;
    gazeSmoother.OnSaccadeX += GazeSmoother_OnSaccadeX;
    gazeSmoother.OnSaccadeY += GazeSmoother_OnSaccadeY;
}
/// <inheritdoc />
public override bool CheckCapability(MixedRealityCapability capability)
{
    bool sourceKindApiPresent = WindowsApiChecker.IsMethodAvailable(
        "Windows.UI.Input.Spatial",
        "SpatialInteractionManager",
        "IsSourceKindSupported");

    if (sourceKindApiPresent)
    {
#if WINDOWS_UWP
        if (capability == MixedRealityCapability.ArticulatedHand ||
            capability == MixedRealityCapability.GGVHand)
        {
            return SpatialInteractionManager.IsSourceKindSupported(SpatialInteractionSourceKind.Hand);
        }

        if (capability == MixedRealityCapability.MotionController)
        {
            return SpatialInteractionManager.IsSourceKindSupported(SpatialInteractionSourceKind.Controller);
        }
#endif // WINDOWS_UWP
    }
    else // Pre-Windows 10 1903.
    {
        var displaySubsystem = XRSDKSubsystemHelpers.DisplaySubsystem;
        if (displaySubsystem != null && !displaySubsystem.displayOpaque)
        {
            // HoloLens supports GGV hands
            return capability == MixedRealityCapability.GGVHand;
        }

        // Windows Mixed Reality immersive devices support motion controllers
        return capability == MixedRealityCapability.MotionController;
    }

    return false;
}
/// <summary>
/// Attempts to retrieve the renderable model stream for the controller backing the
/// given Unity <see cref="InteractionSource"/>.
/// </summary>
/// <param name="interactionSource">The Unity interaction source whose controller model is requested.</param>
/// <returns>
/// The async operation yielding the model stream, or null when the required Windows API is
/// unavailable or no currently-detected spatial interaction source matches the given id.
/// </returns>
public static IAsyncOperation<IRandomAccessStreamWithContentType> TryGetRenderableModelAsync(this InteractionSource interactionSource)
{
    IAsyncOperation<IRandomAccessStreamWithContentType> returnValue = null;

    // GetForCurrentView and GetDetectedSourcesAtTimestamp were both introduced in the same Windows version.
    // We need only check for one of them.
    if (WindowsApiChecker.IsMethodAvailable(
        "Windows.UI.Input.Spatial",
        "SpatialInteractionManager",
        "GetForCurrentView"))
    {
        IReadOnlyList<SpatialInteractionSourceState> sources = null;

        // GetForCurrentView must run on the UI thread; block until the snapshot is captured.
        UnityEngine.WSA.Application.InvokeOnUIThread(() =>
        {
            sources = SpatialInteractionManager.GetForCurrentView()?.GetDetectedSourcesAtTimestamp(PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now));
        }, true);

        for (var i = 0; i < sources?.Count; i++)
        {
            if (sources[i].Source.Id.Equals(interactionSource.id))
            {
                returnValue = sources[i].Source.Controller.TryGetRenderableModelAsync();

                // Source ids uniquely identify a detected source, so stop scanning once matched
                // (the original loop kept going and would have overwritten the result).
                break;
            }
        }
    }

    return returnValue;
}
/// <summary>
/// Constructor.
/// </summary>
public WindowsMixedRealityArticulatedHand(TrackingState trackingState, Handedness controllerHandedness, IMixedRealityInputSource inputSource = null, MixedRealityInteractionMapping[] interactions = null)
    : base(trackingState, controllerHandedness, inputSource, interactions)
{
    handDefinition = new WindowsMixedRealityArticulatedHandDefinition(inputSource, controllerHandedness);

    // Cache once whether TryGetHandPose exists on this platform, so callers need not re-probe.
    articulatedHandApiAvailable = WindowsApiChecker.IsMethodAvailable(
        "Windows.UI.Input.Spatial",
        "SpatialInteractionSourceState",
        "TryGetHandPose");
}
/// <summary>
/// Constructor.
/// </summary>
/// <param name="inputSystem">The <see cref="Microsoft.MixedReality.Toolkit.Input.IMixedRealityInputSystem"/> instance that receives data from this provider.</param>
/// <param name="name">Friendly name of the service.</param>
/// <param name="priority">Service priority. Used to determine order of instantiation.</param>
/// <param name="profile">The service's configuration profile.</param>
public WindowsMixedRealityEyeGazeDataProvider(
    IMixedRealityInputSystem inputSystem,
    string name,
    uint priority,
    BaseMixedRealityProfile profile) : base(inputSystem, name, priority, profile)
{
    // Cache whether the SpatialPointerPose.Eyes property exists on this Windows SDK.
    eyesApiAvailable = WindowsApiChecker.IsPropertyAvailable(
        "Windows.UI.Input.Spatial",
        "SpatialPointerPose",
        "Eyes");
}
/// <inheritdoc />
public bool CheckCapability(MixedRealityCapability capability)
{
    bool observerApiPresent = WindowsApiChecker.IsMethodAvailable(
        "Windows.Perception.Spatial.Surfaces",
        "SpatialSurfaceObserver",
        "IsSupported");

    if (observerApiPresent)
    {
#if WINDOWS_UWP
        // Mesh is the only spatial awareness capability this observer can provide.
        return (capability == MixedRealityCapability.SpatialAwarenessMesh) && WindowsSpatialSurfaces.SpatialSurfaceObserver.IsSupported();
#endif // WINDOWS_UWP
    }

    return false;
}
/// <summary>
/// Check whether the Windows Eyes API is available
/// </summary>
private void CheckIfEyesApiAvailable()
{
#if (UNITY_WSA && DOTNETWINRT_PRESENT) || WINDOWS_UWP
    // Make sure EyeTracking is available on the device; if the property exists,
    // also ask for permission to use it. Short-circuit && preserves the original
    // ordering: IsSupported is only invoked when the Eyes property was found.
    EyesApiAvailable = WindowsApiChecker.IsPropertyAvailable(
        "Windows.UI.Input.Spatial",
        "SpatialPointerPose",
        "Eyes") && EyesPose.IsSupported();
#endif
}
/// <summary>
/// Constructor.
/// </summary>
public WindowsMixedRealityArticulatedHand(
    TrackingState trackingState,
    Handedness controllerHandedness,
    IMixedRealityInputSource inputSource = null,
    MixedRealityInteractionMapping[] interactions = null)
    : base(trackingState, controllerHandedness, inputSource, interactions, new ArticulatedHandDefinition(inputSource, controllerHandedness))
{
    handDefinition = Definition as ArticulatedHandDefinition;

    // Pick the mesh provider matching this hand's sidedness and attach our input source to it.
    handMeshProvider = controllerHandedness == Handedness.Left
        ? WindowsMixedRealityHandMeshProvider.Left
        : WindowsMixedRealityHandMeshProvider.Right;
    handMeshProvider.SetInputSource(inputSource);

    // Cache once whether TryGetHandPose exists on this platform.
    articulatedHandApiAvailable = WindowsApiChecker.IsMethodAvailable(
        "Windows.UI.Input.Spatial",
        "SpatialInteractionSourceState",
        "TryGetHandPose");
}
/// <summary>
/// Constructor.
/// </summary>
/// <param name="inputSystem">The <see cref="Microsoft.MixedReality.Toolkit.Input.IMixedRealityInputSystem"/> instance that receives data from this provider.</param>
/// <param name="name">Friendly name of the service.</param>
/// <param name="priority">Service priority. Used to determine order of instantiation.</param>
/// <param name="profile">The service's configuration profile.</param>
public WindowsMixedRealityEyeGazeDataProvider(
    IMixedRealityInputSystem inputSystem,
    string name,
    uint priority,
    BaseMixedRealityProfile profile) : base(inputSystem, name, priority, profile)
{
    // First establish that the Eyes property exists on this Windows SDK.
    eyesApiAvailable = WindowsApiChecker.IsPropertyAvailable(
        "Windows.UI.Input.Spatial",
        "SpatialPointerPose",
        "Eyes");

#if (UNITY_WSA && DOTNETWINRT_PRESENT) || WINDOWS_UWP
    // Short-circuit && preserves the original behavior: IsSupported is only
    // queried when the property itself was found.
    eyesApiAvailable = eyesApiAvailable && EyesPose.IsSupported();
#endif // (UNITY_WSA && DOTNETWINRT_PRESENT) || WINDOWS_UWP
}