/// <summary>
/// Update the spatial pointer input from the device.
/// </summary>
/// <param name="interactionSourceState">The InteractionSourceState retrieved from the platform.</param>
/// <param name="interactionMapping">The interaction mapping that receives the updated pointer pose.</param>
private void UpdatePointerData(InteractionSourceState interactionSourceState, MixedRealityInteractionMapping interactionMapping)
{
    if (interactionSourceState.source.supportsPointing)
    {
        // NOTE(review): the TryGet* return values are ignored; on failure the cached
        // field values from a previous frame are reused — confirm this is intended.
        interactionSourceState.sourcePose.TryGetPosition(out currentPointerPosition, InteractionSourceNode.Pointer);
        interactionSourceState.sourcePose.TryGetRotation(out currentPointerRotation, InteractionSourceNode.Pointer);

        // We want the source to follow the Playspace, so fold in the playspace transform here to
        // put the source pose into world space.
        currentPointerPose.Position = MixedRealityPlayspace.TransformPoint(currentPointerPosition);
        currentPointerPose.Rotation = MixedRealityPlayspace.Rotation * currentPointerRotation;
    }
    // When the source does not support pointing, the previously cached pose is
    // re-published unchanged below.

    // Update the interaction data source
    interactionMapping.PoseData = currentPointerPose;

    // If our value changed raise it.
    if (interactionMapping.Changed)
    {
        // Raise input system Event if it enabled
        InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, interactionMapping.MixedRealityInputAction, currentPointerPose);
    }
}
/// <summary>
/// Update spatial pointer and spatial grip data.
/// </summary>
/// <param name="interactionMapping">The interaction mapping to update; must be a six-degree-of-freedom mapping.</param>
/// <param name="inputDevice">The XR SDK input device to read pointer features from.</param>
protected override void UpdatePoseData(MixedRealityInteractionMapping interactionMapping, InputDevice inputDevice)
{
    using (UpdatePoseDataPerfMarker.Auto())
    {
        Debug.Assert(interactionMapping.AxisType == AxisType.SixDof);

        base.UpdatePoseData(interactionMapping, inputDevice);

        // Only the spatial pointer is handled here; any other input type has
        // already been covered by the base implementation above.
        if (interactionMapping.InputType != DeviceInputType.SpatialPointer)
        {
            return;
        }

        // Fold the playspace transform into the raw device pose so the pointer
        // pose ends up in world space. A failed feature read leaves the cached
        // component untouched.
        if (inputDevice.TryGetFeatureValue(WindowsMRUsages.PointerPosition, out currentPointerPosition))
        {
            currentPointerPose.Position = MixedRealityPlayspace.TransformPoint(currentPointerPosition);
        }

        if (inputDevice.TryGetFeatureValue(WindowsMRUsages.PointerRotation, out currentPointerRotation))
        {
            currentPointerPose.Rotation = MixedRealityPlayspace.Rotation * currentPointerRotation;
        }

        interactionMapping.PoseData = currentPointerPose;

        // Only notify listeners when the mapped value actually changed.
        if (interactionMapping.Changed)
        {
            CoreServices.InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, interactionMapping.MixedRealityInputAction, interactionMapping.PoseData);
        }
    }
}
/// <inheritdoc />
public override void Update()
{
    using (UpdatePerfMarker.Auto())
    {
        // Bail out when the eyes API isn't available on this platform/SDK, or
        // when there is no spatial coordinate system to resolve poses against.
        if (!eyesApiAvailable || WindowsMixedRealityUtilities.SpatialCoordinateSystem == null)
        {
            return;
        }

        base.Update();

        // Query the platform's gaze pose at the current historical timestamp.
        SpatialPointerPose pointerPose = SpatialPointerPose.TryGetAtTimestamp(WindowsMixedRealityUtilities.SpatialCoordinateSystem, PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now));
        if (pointerPose != null)
        {
            var eyes = pointerPose.Eyes;
            if (eyes != null)
            {
                Service?.EyeGazeProvider?.UpdateEyeTrackingStatus(this, eyes.IsCalibrationValid);

                if (eyes.Gaze.HasValue)
                {
                    // Fold in the playspace transform so the gaze ray is in world space.
                    Vector3 origin = MixedRealityPlayspace.TransformPoint(eyes.Gaze.Value.Origin.ToUnityVector3());
                    Vector3 direction = MixedRealityPlayspace.TransformDirection(eyes.Gaze.Value.Direction.ToUnityVector3());

                    Ray newGaze = new Ray(origin, direction);

                    // Optionally smooth the raw gaze signal before publishing it.
                    if (SmoothEyeTracking)
                    {
                        newGaze = gazeSmoother.SmoothGaze(newGaze);
                    }

                    Service?.EyeGazeProvider?.UpdateEyeGaze(this, newGaze, eyes.UpdateTimestamp.TargetTime.UtcDateTime);
                }
            }
        }
    }
}
/// <inheritdoc />
protected override void UpdatePoseData(MixedRealityInteractionMapping interactionMapping, InputDevice inputDevice)
{
    using (UpdatePoseDataPerfMarker.Auto())
    {
        if (interactionMapping.InputType == DeviceInputType.IndexFinger)
        {
            // Index finger pose is handled by the hand definition, if present.
            handDefinition?.UpdateCurrentIndexPose(interactionMapping);
        }
        else if (interactionMapping.InputType == DeviceInputType.SpatialPointer)
        {
            // Fold the playspace transform into the raw device pose so the
            // pointer pose ends up in world space. A failed feature read
            // leaves the cached component untouched.
            if (inputDevice.TryGetFeatureValue(Input.CustomUsages.PointerPosition, out currentPointerPosition))
            {
                currentPointerPose.Position = MixedRealityPlayspace.TransformPoint(currentPointerPosition);
            }

            if (inputDevice.TryGetFeatureValue(Input.CustomUsages.PointerRotation, out currentPointerRotation))
            {
                currentPointerPose.Rotation = MixedRealityPlayspace.Rotation * currentPointerRotation;
            }

            interactionMapping.PoseData = currentPointerPose;

            // Only notify listeners when the mapped value actually changed.
            if (interactionMapping.Changed)
            {
                CoreServices.InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, interactionMapping.MixedRealityInputAction, interactionMapping.PoseData);
            }
        }
        else
        {
            // All other pose input types are handled by the base implementation.
            base.UpdatePoseData(interactionMapping, inputDevice);
        }
    }
}
/// <summary>
/// Parents the given controller model under the playspace and hooks up its visualizer.
/// </summary>
/// <param name="controllerObject">Instantiated controller model; may be null.</param>
/// <returns>True when the object carries an IMixedRealityControllerVisualizer and was wired up; false otherwise.</returns>
protected bool TryAddControllerModelToSceneHierarchy(GameObject controllerObject)
{
    if (controllerObject == null)
    {
        return false;
    }

    // Prefix with handedness so left/right instances are distinguishable in the hierarchy.
    controllerObject.name = $"{ControllerHandedness}_{controllerObject.name}";
    MixedRealityPlayspace.AddChild(controllerObject.transform);

    Visualizer = controllerObject.GetComponent<IMixedRealityControllerVisualizer>();
    if (Visualizer == null)
    {
        Debug.LogError($"{controllerObject.name} is missing a IMixedRealityControllerVisualizer component!");
        return (false);
    }

    Visualizer.Controller = this;
    return (true);
}
/// <summary>
/// Update the hand data from the device.
/// </summary>
/// <param name="interactionSourceState">The InteractionSourceState retrieved from the platform.</param>
private void UpdateHandData(InteractionSourceState interactionSourceState)
{
#if WINDOWS_UWP || DOTNETWINRT_PRESENT
    // Articulated hand support is only present in the 18362 version and beyond Windows
    // SDK (which contains the V8 drop of the Universal API Contract). In particular,
    // the HandPose related APIs are only present on this version and above.
    if (!articulatedHandApiAvailable)
    {
        return;
    }

    // Resolve the spatial interaction source whose id matches the legacy
    // interaction source so we can query its articulated hand pose.
    // NOTE(review): sources may be null when SpatialInteractionManager is
    // unavailable; the foreach below would then throw — confirm upstream guard.
    PerceptionTimestamp perceptionTimestamp = PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now);
    IReadOnlyList<SpatialInteractionSourceState> sources = SpatialInteractionManager?.GetDetectedSourcesAtTimestamp(perceptionTimestamp);

    foreach (SpatialInteractionSourceState sourceState in sources)
    {
        if (sourceState.Source.Id.Equals(interactionSourceState.source.id))
        {
            HandPose handPose = sourceState.TryGetHandPose();

#if WINDOWS_UWP
            if (CoreServices.InputSystem.InputSystemProfile.HandTrackingProfile.EnableHandMeshVisualization)
            {
                // Accessing the hand mesh data involves copying quite a bit of data, so only do it if application requests it.
                if (handMeshObserver == null && !hasRequestedHandMeshObserver)
                {
                    SetHandMeshObserver(sourceState);
                    hasRequestedHandMeshObserver = true;
                }

                // One-time setup: triangle indices, neutral-pose vertices and UVs
                // are fixed per observer, so compute them only once.
                if (handMeshObserver != null && handMeshTriangleIndices == null)
                {
                    uint indexCount = handMeshObserver.TriangleIndexCount;
                    ushort[] indices = new ushort[indexCount];
                    handMeshObserver.GetTriangleIndices(indices);
                    handMeshTriangleIndices = new int[indexCount];
                    Array.Copy(indices, handMeshTriangleIndices, (int)handMeshObserver.TriangleIndexCount);

                    // Compute neutral pose
                    Vector3[] neutralPoseVertices = new Vector3[handMeshObserver.VertexCount];
                    HandPose neutralPose = handMeshObserver.NeutralPose;
                    var vertexAndNormals = new HandMeshVertex[handMeshObserver.VertexCount];
                    HandMeshVertexState handMeshVertexState = handMeshObserver.GetVertexStateForPose(neutralPose);
                    handMeshVertexState.GetVertices(vertexAndNormals);

                    for (int i = 0; i < handMeshObserver.VertexCount; i++)
                    {
                        neutralPoseVertices[i] = WindowsMixedRealityUtilities.SystemVector3ToUnity(vertexAndNormals[i].Position);
                    }

                    // Compute UV mapping
                    InitializeUVs(neutralPoseVertices);
                }

                // Per-frame: convert the current pose's vertices/normals to Unity
                // space and publish the mesh to the input system.
                if (handPose != null && handMeshObserver != null && handMeshTriangleIndices != null)
                {
                    var vertexAndNormals = new HandMeshVertex[handMeshObserver.VertexCount];
                    var handMeshVertexState = handMeshObserver.GetVertexStateForPose(handPose);
                    handMeshVertexState.GetVertices(vertexAndNormals);

                    var meshTransform = handMeshVertexState.CoordinateSystem.TryGetTransformTo(WindowsMixedRealityUtilities.SpatialCoordinateSystem);
                    if (meshTransform.HasValue)
                    {
                        System.Numerics.Vector3 scale;
                        System.Numerics.Quaternion rotation;
                        System.Numerics.Vector3 translation;
                        System.Numerics.Matrix4x4.Decompose(meshTransform.Value, out scale, out rotation, out translation);

                        var handMeshVertices = new Vector3[handMeshObserver.VertexCount];
                        var handMeshNormals = new Vector3[handMeshObserver.VertexCount];

                        for (int i = 0; i < handMeshObserver.VertexCount; i++)
                        {
                            handMeshVertices[i] = WindowsMixedRealityUtilities.SystemVector3ToUnity(vertexAndNormals[i].Position);
                            handMeshNormals[i] = WindowsMixedRealityUtilities.SystemVector3ToUnity(vertexAndNormals[i].Normal);
                        }

                        // Hands should follow the Playspace to accommodate teleporting, so fold in the Playspace transform.
                        Vector3 unityPosition = WindowsMixedRealityUtilities.SystemVector3ToUnity(translation);
                        unityPosition = MixedRealityPlayspace.TransformPoint(unityPosition);
                        Quaternion unityRotation = WindowsMixedRealityUtilities.SystemQuaternionToUnity(rotation);
                        unityRotation = MixedRealityPlayspace.Rotation * unityRotation;

                        HandMeshInfo handMeshInfo = new HandMeshInfo
                        {
                            vertices = handMeshVertices,
                            normals = handMeshNormals,
                            triangles = handMeshTriangleIndices,
                            uvs = handMeshUVs,
                            position = unityPosition,
                            rotation = unityRotation
                        };

                        CoreServices.InputSystem?.RaiseHandMeshUpdated(InputSource, ControllerHandedness, handMeshInfo);
                    }
                }
            }
            else
            {
                // if hand mesh visualization is disabled make sure to destroy our hand mesh observer if it has already been created
                if (handMeshObserver != null)
                {
                    // notify that hand mesh has been updated (cleared)
                    HandMeshInfo handMeshInfo = new HandMeshInfo();
                    CoreServices.InputSystem?.RaiseHandMeshUpdated(InputSource, ControllerHandedness, handMeshInfo);
                    hasRequestedHandMeshObserver = false;
                    handMeshObserver = null;
                }
            }
#endif // WINDOWS_UWP

            // Joint tracking: convert every reported joint to Unity space and
            // publish the full joint-pose dictionary.
            if (handPose != null && handPose.TryGetJoints(WindowsMixedRealityUtilities.SpatialCoordinateSystem, jointIndices, jointPoses))
            {
                for (int i = 0; i < jointPoses.Length; i++)
                {
                    unityJointOrientations[i] = WindowsMixedRealityUtilities.SystemQuaternionToUnity(jointPoses[i].Orientation);
                    unityJointPositions[i] = WindowsMixedRealityUtilities.SystemVector3ToUnity(jointPoses[i].Position);

                    // We want the controller to follow the Playspace, so fold in the playspace transform here to
                    // put the controller pose into world space.
                    unityJointPositions[i] = MixedRealityPlayspace.TransformPoint(unityJointPositions[i]);
                    unityJointOrientations[i] = MixedRealityPlayspace.Rotation * unityJointOrientations[i];

                    // Remember the index-tip radius for pointer/poke interactions.
                    if (jointIndices[i] == HandJointKind.IndexTip)
                    {
                        lastIndexTipRadius = jointPoses[i].Radius;
                    }

                    TrackedHandJoint handJoint = ConvertHandJointKindToTrackedHandJoint(jointIndices[i]);

                    if (!unityJointPoses.ContainsKey(handJoint))
                    {
                        unityJointPoses.Add(handJoint, new MixedRealityPose(unityJointPositions[i], unityJointOrientations[i]));
                    }
                    else
                    {
                        unityJointPoses[handJoint] = new MixedRealityPose(unityJointPositions[i], unityJointOrientations[i]);
                    }
                }

                CoreServices.InputSystem?.RaiseHandJointsUpdated(InputSource, ControllerHandedness, unityJointPoses);
            }
        }
    }
#endif // WINDOWS_UWP || DOTNETWINRT_PRESENT
}
/// <summary>
/// Update the controller data from XR SDK.
/// </summary>
/// <param name="inputDevice">The XR SDK input device backing this controller.</param>
public virtual void UpdateController(InputDevice inputDevice)
{
    using (UpdateControllerPerfMarker.Auto())
    {
        if (!Enabled)
        {
            return;
        }

        if (Interactions == null)
        {
            Debug.LogError($"No interaction configuration for {GetType().Name}");
            Enabled = false;
            // NOTE(review): execution continues this frame even after disabling;
            // the interaction loop below is null-safe via Interactions?.Length,
            // but confirm an early return isn't the intended behavior.
        }

        var lastState = TrackingState;
        LastControllerPose = CurrentControllerPose;

        // Check for position and rotation.
        IsPositionAvailable = inputDevice.TryGetFeatureValue(CommonUsages.devicePosition, out CurrentControllerPosition);
        IsPositionApproximate = false;

        IsRotationAvailable = inputDevice.TryGetFeatureValue(CommonUsages.deviceRotation, out CurrentControllerRotation);

        // Devices are considered tracked if we receive position OR rotation data from the sensors.
        TrackingState = (IsPositionAvailable || IsRotationAvailable) ? TrackingState.Tracked : TrackingState.NotTracked;

        // Fold the playspace transform into the raw device pose so the
        // controller pose is in world space.
        CurrentControllerPosition = MixedRealityPlayspace.TransformPoint(CurrentControllerPosition);
        CurrentControllerRotation = MixedRealityPlayspace.Rotation * CurrentControllerRotation;

        CurrentControllerPose.Position = CurrentControllerPosition;
        CurrentControllerPose.Rotation = CurrentControllerRotation;

        // Raise input system events if it is enabled.
        if (lastState != TrackingState)
        {
            CoreServices.InputSystem?.RaiseSourceTrackingStateChanged(InputSource, this, TrackingState);
        }

        // Publish the most specific pose event available: full pose, position
        // only, or rotation only.
        if (TrackingState == TrackingState.Tracked && LastControllerPose != CurrentControllerPose)
        {
            if (IsPositionAvailable && IsRotationAvailable)
            {
                CoreServices.InputSystem?.RaiseSourcePoseChanged(InputSource, this, CurrentControllerPose);
            }
            else if (IsPositionAvailable && !IsRotationAvailable)
            {
                CoreServices.InputSystem?.RaiseSourcePositionChanged(InputSource, this, CurrentControllerPosition);
            }
            else if (!IsPositionAvailable && IsRotationAvailable)
            {
                CoreServices.InputSystem?.RaiseSourceRotationChanged(InputSource, this, CurrentControllerRotation);
            }
        }

        // Dispatch each configured interaction to its axis-type-specific updater.
        for (int i = 0; i < Interactions?.Length; i++)
        {
            switch (Interactions[i].AxisType)
            {
                case AxisType.None:
                    break;
                case AxisType.Digital:
                    UpdateButtonData(Interactions[i], inputDevice);
                    break;
                case AxisType.SingleAxis:
                    UpdateSingleAxisData(Interactions[i], inputDevice);
                    break;
                case AxisType.DualAxis:
                    UpdateDualAxisData(Interactions[i], inputDevice);
                    break;
                case AxisType.SixDof:
                    UpdatePoseData(Interactions[i], inputDevice);
                    break;
            }
        }
    }
}
/// <summary>
/// Update the source input from the device.
/// </summary>
/// <param name="interactionSourceState">The InteractionSourceState retrieved from the platform.</param>
private void UpdateSourceData(InteractionSourceState interactionSourceState)
{
    var lastState = TrackingState;
    var sourceKind = interactionSourceState.source.kind;

    lastSourcePose = currentSourcePose;

    if (sourceKind == InteractionSourceKind.Hand || (sourceKind == InteractionSourceKind.Controller && interactionSourceState.source.supportsPointing))
    {
        // The source is either a hand or a controller that supports pointing.
        // We can now check for position and rotation.
        IsPositionAvailable = interactionSourceState.sourcePose.TryGetPosition(out currentSourcePosition);

        if (IsPositionAvailable)
        {
            IsPositionApproximate = (interactionSourceState.sourcePose.positionAccuracy == InteractionSourcePositionAccuracy.Approximate);
        }
        else
        {
            IsPositionApproximate = false;
        }

        IsRotationAvailable = interactionSourceState.sourcePose.TryGetRotation(out currentSourceRotation);

        // We want the source to follow the Playspace, so fold in the playspace transform here to
        // put the source pose into world space.
        // NOTE(review): the transform is applied even when the TryGet* calls above
        // failed, re-transforming stale cached values — confirm this is intended.
        currentSourcePosition = MixedRealityPlayspace.TransformPoint(currentSourcePosition);
        currentSourceRotation = MixedRealityPlayspace.Rotation * currentSourceRotation;

        // Devices are considered tracked if we receive position OR rotation data from the sensors.
        TrackingState = (IsPositionAvailable || IsRotationAvailable) ? TrackingState.Tracked : TrackingState.NotTracked;
    }
    else
    {
        // The input source does not support tracking.
        TrackingState = TrackingState.NotApplicable;
    }

    currentSourcePose.Position = currentSourcePosition;
    currentSourcePose.Rotation = currentSourceRotation;

    // Raise input system events if it is enabled.
    if (lastState != TrackingState)
    {
        InputSystem?.RaiseSourceTrackingStateChanged(InputSource, this, TrackingState);
    }

    // Publish the most specific pose event available: full pose, position
    // only, or rotation only.
    if (TrackingState == TrackingState.Tracked && lastSourcePose != currentSourcePose)
    {
        if (IsPositionAvailable && IsRotationAvailable)
        {
            InputSystem?.RaiseSourcePoseChanged(InputSource, this, currentSourcePose);
        }
        else if (IsPositionAvailable && !IsRotationAvailable)
        {
            InputSystem?.RaiseSourcePositionChanged(InputSource, this, currentSourcePosition);
        }
        else if (!IsPositionAvailable && IsRotationAvailable)
        {
            InputSystem?.RaiseSourceRotationChanged(InputSource, this, currentSourceRotation);
        }
    }
}
/// <summary>
/// Play-mode test: verifies that a SurfaceMagnetism solver snaps its target to a wall
/// in front of the camera, follows camera rotation, and honors the orientation mode.
/// </summary>
public IEnumerator TestSurfaceMagnetism()
{
    // Reset view to origin
    MixedRealityPlayspace.PerformTransformation(p =>
    {
        p.position = Vector3.zero;
        p.LookAt(Vector3.forward);
    });

    // Build wall to collide against
    var wall = GameObject.CreatePrimitive(PrimitiveType.Cube);
    wall.transform.localScale = new Vector3(25.0f, 25.0f, 0.2f);
    wall.transform.Rotate(Vector3.up, 180.0f); // Rotate wall so forward faces camera
    wall.transform.position = Vector3.forward * 10.0f;
    yield return (WaitForFrames(2));

    // Instantiate our test GameObject with solver.
    // Set layer to ignore raycast so solver doesn't raycast itself (i.e BoxCollider)
    var testObjects = InstantiateTestSolver<SurfaceMagnetism>();
    testObjects.target.layer = LayerMask.NameToLayer("Ignore Raycast");
    SurfaceMagnetism surfaceMag = testObjects.solver as SurfaceMagnetism;

    var targetTransform = testObjects.target.transform;
    var cameraTransform = CameraCache.Main.transform;

    yield return (WaitForFrames(2));

    // Confirm that the surfacemagnetic cube is about on the wall straight ahead
    Assert.LessOrEqual(Vector3.Distance(targetTransform.position, wall.transform.position), DistanceThreshold);

    // Rotate the camera
    Vector3 cameraDir = Vector3.forward + Vector3.right;
    MixedRealityPlayspace.PerformTransformation(p =>
    {
        p.position = Vector3.zero;
        p.LookAt(cameraDir);
    });

    // Calculate where our camera hits the wall
    RaycastHit hitInfo;
    Assert.IsTrue(UnityEngine.Physics.Raycast(Vector3.zero, cameraDir, out hitInfo), "Raycast from camera did not hit wall");

    // Let SurfaceMagnetism update
    yield return (WaitForFrames(2));

    // Confirm that the surfacemagnetic cube is on the wall with camera rotated
    Assert.LessOrEqual(Vector3.Distance(targetTransform.position, hitInfo.point), DistanceThreshold);

    // Default orientation mode is TrackedTarget, test object should be facing camera
    Assert.IsTrue(Mathf.Approximately(-1.0f, Vector3.Dot(targetTransform.forward.normalized, cameraTransform.forward.normalized)));

    // Change default orientation mode to surface normal
    surfaceMag.CurrentOrientationMode = SurfaceMagnetism.OrientationMode.SurfaceNormal;

    yield return (WaitForFrames(2));

    // Test object should now be facing into the wall (i.e Z axis)
    Assert.IsTrue(Mathf.Approximately(1.0f, Vector3.Dot(targetTransform.forward.normalized, Vector3.forward)));
}
/// <summary>
/// Play-mode test: verifies that a Follow solver keeps its target within the
/// configured min/max distance from the head as the playspace moves, and that
/// VerticalMaxDistance clamps the vertical offset when the camera pitches up.
/// </summary>
public IEnumerator TestFollowDistance()
{
    // Reset view to origin
    TestUtilities.PlayspaceToOriginLookingForward();

    // Instantiate our test GameObject with solver.
    var testObjects = InstantiateTestSolver<Follow>();
    var followSolver = (Follow)testObjects.solver;
    followSolver.MoveToDefaultDistanceLerpTime = 0;
    testObjects.handler.TrackedTargetType = TrackedObjectType.Head;
    var targetTransform = testObjects.target.transform;

    yield return (new WaitForFixedUpdate());
    yield return (null);

    // Test distance remains within min/max bounds
    float distanceToHead = Vector3.Distance(targetTransform.position, CameraCache.Main.transform.position);
    Assert.LessOrEqual(distanceToHead, followSolver.MaxDistance, "Follow exceeded max distance");
    Assert.GreaterOrEqual(distanceToHead, followSolver.MinDistance, "Follow subceeded min distance");

    // Move the playspace backwards and re-check the bounds.
    MixedRealityPlayspace.PerformTransformation(p =>
    {
        p.position = Vector3.back * 2;
    });

    yield return (new WaitForFixedUpdate());
    yield return (null);

    distanceToHead = Vector3.Distance(targetTransform.position, CameraCache.Main.transform.position);
    Assert.LessOrEqual(distanceToHead, followSolver.MaxDistance, "Follow exceeded max distance");
    Assert.GreaterOrEqual(distanceToHead, followSolver.MinDistance, "Follow subceeded min distance");

    // Move the playspace forwards past the target and re-check the bounds.
    MixedRealityPlayspace.PerformTransformation(p =>
    {
        p.position = Vector3.forward * 4;
    });

    yield return (new WaitForFixedUpdate());
    yield return (null);

    distanceToHead = Vector3.Distance(targetTransform.position, CameraCache.Main.transform.position);
    Assert.LessOrEqual(distanceToHead, followSolver.MaxDistance, "Follow exceeded max distance");
    Assert.GreaterOrEqual(distanceToHead, followSolver.MinDistance, "Follow subceeded min distance");

    // Test VerticalMaxDistance
    followSolver.VerticalMaxDistance = 0.1f;
    targetTransform.position = Vector3.forward;
    targetTransform.rotation = Quaternion.identity;

    MixedRealityPlayspace.PerformTransformation(p =>
    {
        p.position = Vector3.zero;
        p.LookAt(Vector3.forward);
    });

    yield return (new WaitForFixedUpdate());
    yield return (null);

    // Pitch the camera upward; the target's vertical offset should clamp at
    // exactly VerticalMaxDistance.
    MixedRealityPlayspace.PerformTransformation(p =>
    {
        p.LookAt(Vector3.forward + Vector3.up);
    });

    yield return (new WaitForFixedUpdate());
    yield return (null);

    float yDistance = targetTransform.position.y - CameraCache.Main.transform.position.y;
    Assert.AreEqual(followSolver.VerticalMaxDistance, yDistance);

    followSolver.VerticalMaxDistance = 0f;
}
/// <summary>
/// Updates the hand mesh from the platform's hand-mesh observer and publishes it
/// to the input system. Tears the observer down when visualization is disabled.
/// </summary>
/// <param name="sourceState">The spatial interaction source state for this hand.</param>
public void UpdateHandMesh(SpatialInteractionSourceState sourceState)
{
    using (UpdateHandMeshPerfMarker.Auto())
    {
        MixedRealityHandTrackingProfile handTrackingProfile = null;
        MixedRealityInputSystemProfile inputSystemProfile = CoreServices.InputSystem?.InputSystemProfile;
        if (inputSystemProfile != null)
        {
            handTrackingProfile = inputSystemProfile.HandTrackingProfile;
        }

        if (handTrackingProfile == null || !handTrackingProfile.EnableHandMeshVisualization)
        {
            // If hand mesh visualization is disabled make sure to destroy our hand mesh observer if it has already been created
            if (handMeshObserver != null)
            {
                // Notify that hand mesh has been updated (cleared)
                HandMeshInfo handMeshInfo = new HandMeshInfo();
                CoreServices.InputSystem?.RaiseHandMeshUpdated(InputSource, Handedness, handMeshInfo);
                hasRequestedHandMeshObserver = false;
                handMeshObserver = null;
            }
            return;
        }

        HandPose handPose = sourceState.TryGetHandPose();

        // Accessing the hand mesh data involves copying quite a bit of data, so only do it if application requests it.
        if (handMeshObserver == null && !hasRequestedHandMeshObserver)
        {
            SetHandMeshObserver(sourceState);
            hasRequestedHandMeshObserver = true;
        }

        if (handMeshObserver != null && handPose != null)
        {
            // One-time setup: triangle indices, neutral-pose vertices and UVs are
            // fixed per observer, so compute them only once.
            if (handMeshTriangleIndices == null)
            {
                handMeshTriangleIndices = new ushort[handMeshObserver.TriangleIndexCount];
                handMeshTriangleIndicesUnity = new int[handMeshObserver.TriangleIndexCount];
                handMeshObserver.GetTriangleIndices(handMeshTriangleIndices);
                Array.Copy(handMeshTriangleIndices, handMeshTriangleIndicesUnity, (int)handMeshObserver.TriangleIndexCount);

                // Compute neutral pose
                Vector3[] neutralPoseVertices = new Vector3[handMeshObserver.VertexCount];
                HandPose neutralPose = handMeshObserver.NeutralPose;
                var neutralVertexAndNormals = new HandMeshVertex[handMeshObserver.VertexCount];
                HandMeshVertexState handMeshVertexState = handMeshObserver.GetVertexStateForPose(neutralPose);
                handMeshVertexState.GetVertices(neutralVertexAndNormals);

                Parallel.For(0, handMeshObserver.VertexCount, i =>
                {
                    neutralVertexAndNormals[i].Position.ConvertToUnityVector3(ref neutralPoseVertices[i]);
                });

                // Compute UV mapping
                InitializeUVs(neutralPoseVertices);
            }

            // Lazily allocate the reusable per-frame buffers.
            if (vertexAndNormals == null)
            {
                vertexAndNormals = new HandMeshVertex[handMeshObserver.VertexCount];
                handMeshVerticesUnity = new Vector3[handMeshObserver.VertexCount];
                handMeshNormalsUnity = new Vector3[handMeshObserver.VertexCount];
            }

            if (vertexAndNormals != null && handMeshTriangleIndices != null)
            {
                var handMeshVertexState = handMeshObserver.GetVertexStateForPose(handPose);
                handMeshVertexState.GetVertices(vertexAndNormals);

                var meshTransform = handMeshVertexState.CoordinateSystem.TryGetTransformTo(WindowsMixedRealityUtilities.SpatialCoordinateSystem);
                if (meshTransform.HasValue)
                {
                    System.Numerics.Matrix4x4.Decompose(meshTransform.Value, out System.Numerics.Vector3 scale, out System.Numerics.Quaternion rotation, out System.Numerics.Vector3 translation);

                    Parallel.For(0, handMeshObserver.VertexCount, i =>
                    {
                        vertexAndNormals[i].Position.ConvertToUnityVector3(ref handMeshVerticesUnity[i]);
                        vertexAndNormals[i].Normal.ConvertToUnityVector3(ref handMeshNormalsUnity[i]);
                    });

                    // Hands should follow the Playspace to accommodate teleporting, so fold in the Playspace transform.
                    Vector3 positionUnity = MixedRealityPlayspace.TransformPoint(translation.ToUnityVector3());
                    Quaternion rotationUnity = MixedRealityPlayspace.Rotation * rotation.ToUnityQuaternion();

                    HandMeshInfo handMeshInfo = new HandMeshInfo
                    {
                        vertices = handMeshVerticesUnity,
                        normals = handMeshNormalsUnity,
                        triangles = handMeshTriangleIndicesUnity,
                        uvs = handMeshUVsUnity,
                        position = positionUnity,
                        rotation = rotationUnity
                    };

                    CoreServices.InputSystem?.RaiseHandMeshUpdated(InputSource, Handedness, handMeshInfo);
                }
            }
        }
    }
}
/// <summary>
/// Attempts to instantiate and attach a visual model for this controller.
/// </summary>
/// <param name="controllerType">The concrete controller type, used to look up per-controller model overrides.</param>
/// <param name="inputSourceType">Whether this source is a motion controller or an articulated hand.</param>
/// <returns>
/// True when rendering succeeded OR rendering is intentionally disabled;
/// false when a model should have been rendered but none was available or attachable.
/// </returns>
protected virtual bool TryRenderControllerModel(Type controllerType, InputSourceType inputSourceType)
{
    // Cache the profile: the original called GetControllerVisualizationProfile()
    // repeatedly and re-checked it for null after the initial guard. Assumes the
    // getter returns the same profile instance for the duration of this call.
    var visualizationProfile = GetControllerVisualizationProfile();

    // Nothing to render when visualization is disabled or unconfigured; report
    // success so callers don't treat "intentionally not rendered" as a failure.
    if (visualizationProfile == null || !visualizationProfile.RenderMotionControllers)
    {
        return true;
    }

    GameObject controllerModel = null;

    // If a specific controller template wants to override the global model, assign that instead.
    if (IsControllerMappingEnabled() &&
        inputSourceType == InputSourceType.Controller &&
        !visualizationProfile.GetUseDefaultModelsOverride(controllerType, ControllerHandedness))
    {
        controllerModel = visualizationProfile.GetControllerModelOverride(controllerType, ControllerHandedness);
    }

    // Fall back to the global model for this source type and hand.
    if (controllerModel == null)
    {
        if (inputSourceType == InputSourceType.Controller)
        {
            if (ControllerHandedness == Handedness.Left && visualizationProfile.GlobalLeftHandModel != null)
            {
                controllerModel = visualizationProfile.GlobalLeftHandModel;
            }
            else if (ControllerHandedness == Handedness.Right && visualizationProfile.GlobalRightHandModel != null)
            {
                controllerModel = visualizationProfile.GlobalRightHandModel;
            }
        }
        else if (inputSourceType == InputSourceType.Hand)
        {
            if (ControllerHandedness == Handedness.Left && visualizationProfile.GlobalLeftHandVisualizer != null)
            {
                controllerModel = visualizationProfile.GlobalLeftHandVisualizer;
            }
            else if (ControllerHandedness == Handedness.Right && visualizationProfile.GlobalRightHandVisualizer != null)
            {
                controllerModel = visualizationProfile.GlobalRightHandVisualizer;
            }
        }
    }

    if (controllerModel == null)
    {
        // No controller model available.
        return false;
    }

    // If we've got a controller model prefab, then create it and place it in the scene.
    GameObject controllerObject = UnityEngine.Object.Instantiate(controllerModel);
    MixedRealityPlayspace.AddChild(controllerObject.transform);

    return TryAddControllerModelToSceneHierarchy(controllerObject);
}
/// <inheritdoc />
public override void OnInputChanged(InputEventData<Vector2> eventData)
{
    // Don't process input if we've got an active teleport request in progress.
    if (isTeleportRequestActive || CoreServices.TeleportSystem == null)
    {
        return;
    }

    // Only consume joystick input that belongs to this pointer's source/hand
    // and is bound to the teleport action.
    if (eventData.SourceId == InputSourceParent.SourceId &&
        eventData.Handedness == Handedness &&
        eventData.MixedRealityInputAction == teleportAction)
    {
        currentInputPosition = eventData.InputData;
    }

    if (currentInputPosition.sqrMagnitude > InputThresholdSquared)
    {
        // Get the angle of the pointer input
        float angle = Mathf.Atan2(currentInputPosition.x, currentInputPosition.y) * Mathf.Rad2Deg;

        // Offset the angle so it's 'forward' facing
        angle += angleOffset;
        PointerOrientation = angle;

        if (!TeleportRequestRaised)
        {
            float absoluteAngle = Mathf.Abs(angle);

            if (absoluteAngle < teleportActivationAngle)
            {
                TeleportRequestRaised = true;
                CoreServices.TeleportSystem?.RaiseTeleportRequest(this, TeleportHotSpot);
            }
            else if (canMove)
            {
                // wrap the angle value.
                if (absoluteAngle > 180f)
                {
                    absoluteAngle = Mathf.Abs(absoluteAngle - 360f);
                }

                // Calculate the offset rotation angle from the 90 degree mark.
                // Half the rotation activation angle amount to make sure the activation angle stays centered at 90.
                float offsetRotationAngle = 90f - rotateActivationAngle;

                // subtract it from our current angle reading
                offsetRotationAngle = absoluteAngle - offsetRotationAngle;

                // if it's less than zero, then we don't have activation
                if (offsetRotationAngle > 0)
                {
                    // check to make sure we're still under our activation threshold.
                    if (offsetRotationAngle < 2 * rotateActivationAngle)
                    {
                        canMove = false;

                        // Rotate the camera by the rotation amount. If our angle is positive then rotate in the positive direction, otherwise in the opposite direction.
                        MixedRealityPlayspace.RotateAround(CameraCache.Main.transform.position, Vector3.up, angle >= 0.0f ? rotationAmount : -rotationAmount);
                    }
                    else // We may be trying to strafe backwards.
                    {
                        // Calculate the offset rotation angle from the 180 degree mark.
                        // Half the strafe activation angle to make sure the activation angle stays centered at 180f
                        float offsetStrafeAngle = 180f - backStrafeActivationAngle;

                        // subtract it from our current angle reading
                        offsetStrafeAngle = absoluteAngle - offsetStrafeAngle;

                        // Check to make sure we're still under our activation threshold.
                        if (offsetStrafeAngle > 0 && offsetStrafeAngle <= backStrafeActivationAngle)
                        {
                            canMove = false;
                            var height = MixedRealityPlayspace.Position.y;
                            var newPosition = -CameraCache.Main.transform.forward * strafeAmount + MixedRealityPlayspace.Position;
                            newPosition.y = height;
                            MixedRealityPlayspace.Position = newPosition;
                        }
                    }
                }
            }
        }
    }
    else
    {
        if (!canTeleport && !TeleportRequestRaised)
        {
            // Reset the move flag when the user stops moving the joystick
            // but hasn't yet started teleport request.
            canMove = true;
        }

        if (canTeleport)
        {
            canTeleport = false;
            TeleportRequestRaised = false;

            if (TeleportSurfaceResult == TeleportSurfaceResult.Valid ||
                TeleportSurfaceResult == TeleportSurfaceResult.HotSpot)
            {
                CoreServices.TeleportSystem?.RaiseTeleportStarted(this, TeleportHotSpot);
            }
        }

        if (TeleportRequestRaised)
        {
            canTeleport = false;
            TeleportRequestRaised = false;
            CoreServices.TeleportSystem?.RaiseTeleportCanceled(this, TeleportHotSpot);
        }
    }

    // BUGFIX: parenthesize the surface-result check. Without parentheses this read
    // as (TeleportRequestRaised && Valid) || HotSpot because && binds tighter than
    // ||, so hovering a hotspot could set canTeleport while no teleport request was
    // ever raised, leading to a spurious RaiseTeleportStarted on joystick release.
    if (TeleportRequestRaised &&
        (TeleportSurfaceResult == TeleportSurfaceResult.Valid ||
         TeleportSurfaceResult == TeleportSurfaceResult.HotSpot))
    {
        canTeleport = true;
    }
}
/// <summary>
/// Compute the world direction corresponding to the input local direction in playspace.
/// </summary>
/// <param name="localDirection">The local direction.</param>
/// <returns>The world direction.</returns>
public static Vector3 DirectionRelativeToPlayspace(Vector3 localDirection) =>
    MixedRealityPlayspace.TransformDirection(localDirection);
/// <summary>
/// Manipulates the given testObject in a number of ways and records the output here
/// </summary>
/// <param name="testObject">An unrotated primitive cube at (0, 0, 1) with scale (0.2, 0.2, 0,2)</param>
public IEnumerator RecordTransformValues(GameObject testObject)
{
    TestUtilities.PlayspaceToOriginLookingForward();

    // 45 degrees about every axis, used both for hand rotation and for the playspace turn below.
    float testRotation = 45;
    Quaternion testQuaternion = Quaternion.Euler(testRotation, testRotation, testRotation);

    // Near positions sit roughly on the cube; far positions are closer to the head for ray interaction.
    Vector3 leftHandNearPos = new Vector3(-0.1f, 0, 1);
    Vector3 rightHandNearPos = new Vector3(0.1f, 0, 1);
    Vector3 leftHandFarPos = new Vector3(-0.06f, -0.1f, 0.5f);
    Vector3 rightHandFarPos = new Vector3(0.06f, -0.1f, 0.5f);
    TestHand leftHand = new TestHand(Handedness.Left);
    TestHand rightHand = new TestHand(Handedness.Right);

    // One hand rotate near: pinch the cube with the right hand and rotate the hand.
    yield return (rightHand.MoveTo(rightHandNearPos, numSteps));
    yield return (rightHand.SetGesture(ArticulatedHandPose.GestureId.Pinch));
    yield return (rightHand.SetRotation(testQuaternion, numSteps));
    RecordTransform(testObject.transform, "one hand rotate near");

    // Two hand rotate/scale near: both hands pinch and pull apart diagonally.
    yield return (rightHand.SetRotation(Quaternion.identity, numSteps));
    yield return (leftHand.MoveTo(leftHandNearPos, numSteps));
    yield return (leftHand.SetGesture(ArticulatedHandPose.GestureId.Pinch));
    yield return (rightHand.Move(new Vector3(0.2f, 0.2f, 0), numSteps));
    yield return (leftHand.Move(new Vector3(-0.2f, -0.2f, 0), numSteps));
    RecordTransform(testObject.transform, "two hand rotate/scale near");

    // Two hand rotate/scale far: release, move hands back near the head, re-pinch via far rays,
    // then pull apart diagonally again.
    yield return (rightHand.MoveTo(rightHandNearPos, numSteps));
    yield return (leftHand.MoveTo(leftHandNearPos, numSteps));
    yield return (rightHand.SetGesture(ArticulatedHandPose.GestureId.Open));
    yield return (leftHand.SetGesture(ArticulatedHandPose.GestureId.Open));
    yield return (rightHand.MoveTo(rightHandFarPos, numSteps));
    yield return (leftHand.MoveTo(leftHandFarPos, numSteps));
    yield return (rightHand.SetGesture(ArticulatedHandPose.GestureId.Pinch));
    yield return (leftHand.SetGesture(ArticulatedHandPose.GestureId.Pinch));
    yield return (rightHand.Move(new Vector3(0.2f, 0.2f, 0), numSteps));
    yield return (leftHand.Move(new Vector3(-0.2f, -0.2f, 0), numSteps));
    RecordTransform(testObject.transform, "two hand rotate/scale far");

    // One hand rotate far (original comment said "near" — the recorded label below is "far"):
    // hide the left hand, rotate the playspace by testRotation about Y, and move the right
    // hand to the equivalent rotated position so the far pinch is maintained.
    yield return (rightHand.MoveTo(rightHandFarPos, numSteps));
    yield return (leftHand.MoveTo(leftHandFarPos, numSteps));
    yield return (leftHand.SetGesture(ArticulatedHandPose.GestureId.Open));
    yield return (leftHand.Hide());
    MixedRealityPlayspace.PerformTransformation(
        p =>
    {
        p.position = MixedRealityPlayspace.Position;
        Vector3 rotatedFwd = Quaternion.AngleAxis(testRotation, Vector3.up) * Vector3.forward;
        p.LookAt(rotatedFwd);
    });
    yield return (null);
    Vector3 newHandPosition = Quaternion.AngleAxis(testRotation, Vector3.up) * rightHandFarPos;
    yield return (rightHand.MoveTo(newHandPosition, numSteps));
    RecordTransform(testObject.transform, "one hand rotate far");

    // Clean up: release and hide the remaining hand.
    yield return (rightHand.SetGesture(ArticulatedHandPose.GestureId.Open));
    yield return (rightHand.Hide());
}
/// <summary>
/// Play-mode test: grabs a cube with a far (ray) pinch, orbits the user (playspace) in a
/// full circle while moving the hand along, and verifies for every far one-hand rotation
/// mode that the grab point stays fixed relative to the object pivot.
/// </summary>
public IEnumerator ManipulationHandlerOneHandMoveFar()
{
    // set up cube with manipulation handler
    var testObject = GameObject.CreatePrimitive(PrimitiveType.Cube);
    testObject.transform.localScale = Vector3.one * 0.2f;
    Vector3 initialObjectPosition = new Vector3(0f, 0f, 1f);
    testObject.transform.position = initialObjectPosition;
    var manipHandler = testObject.AddComponent<ManipulationHandler>();
    manipHandler.HostTransform = testObject.transform;
    manipHandler.SmoothingActive = false; // no smoothing so positions are exact each frame
    manipHandler.ManipulationType = ManipulationHandler.HandMovementType.OneHandedOnly;

    // add near interaction grabbable to be able to grab the cube with the simulated articulated hand
    testObject.AddComponent<NearInteractionGrabbable>();
    yield return (new WaitForFixedUpdate());
    yield return (null);

    const int numCircleSteps = 10;
    const int numHandSteps = 3;
    Vector3 initialHandPosition = new Vector3(0.04f, -0.18f, 0.3f); // grab point on the lower center part of the cube
    TestHand hand = new TestHand(Handedness.Right);

    // do this test for every one hand rotation mode
    foreach (ManipulationHandler.RotateInOneHandType type in Enum.GetValues(typeof(ManipulationHandler.RotateInOneHandType)))
    {
        // TODO: grab point is moving in this test and has to be covered by a different test
        if (type == ManipulationHandler.RotateInOneHandType.MaintainOriginalRotation)
        {
            continue;
        }
        manipHandler.OneHandRotationModeFar = type;
        TestUtilities.PlayspaceToOriginLookingForward();
        yield return (hand.Show(initialHandPosition));
        yield return (null);

        // pinch and let go of the object again to make sure that any rotation adjustment we're doing is applied
        // at the beginning of our test and doesn't interfere with our grab position on the cubes surface while we're moving around
        yield return (hand.SetGesture(ArticulatedHandPose.GestureId.Pinch));
        yield return (new WaitForFixedUpdate());
        yield return (null);
        yield return (hand.SetGesture(ArticulatedHandPose.GestureId.Open));
        yield return (hand.SetGesture(ArticulatedHandPose.GestureId.Pinch));

        // save relative pos grab point to object - for far interaction we need to check the grab point where the pointer ray hits the manipulated object
        InputSimulationService simulationService = PlayModeTestUtilities.GetInputSimulationService();
        IMixedRealityController[] inputControllers = simulationService.GetActiveControllers();
        // assume hand is first controller and pointer for this test
        IMixedRealityController handController = inputControllers[0];
        IMixedRealityPointer handPointer = handController.InputSource.Pointers[0];
        Vector3 initialGrabPosition = handPointer.Result.Details.Point;
        // Offsets are compared in playspace-local coordinates so the playspace rotation below cancels out.
        Vector3 initialOffsetGrabToObjPivot = MixedRealityPlayspace.InverseTransformPoint(initialGrabPosition) - MixedRealityPlayspace.InverseTransformPoint(testObject.transform.position);

        // full circle
        const int degreeStep = 360 / numCircleSteps;

        // rotating the pointer in a circle around "the user"
        for (int i = 1; i <= numCircleSteps; ++i)
        {
            // rotate main camera (user)
            MixedRealityPlayspace.PerformTransformation(
                p =>
            {
                p.position = MixedRealityPlayspace.Position;
                Vector3 rotatedFwd = Quaternion.AngleAxis(degreeStep * i, Vector3.up) * Vector3.forward;
                p.LookAt(rotatedFwd);
            });
            yield return (null);

            // move hand with the camera
            Vector3 newHandPosition = Quaternion.AngleAxis(degreeStep * i, Vector3.up) * initialHandPosition;
            yield return (hand.MoveTo(newHandPosition, numHandSteps));
            yield return (new WaitForFixedUpdate());
            yield return (null);

            // make sure that the offset between grab point and object pivot hasn't changed while rotating
            Vector3 newGrabPosition = handPointer.Result.Details.Point;
            Vector3 offsetRotated = MixedRealityPlayspace.InverseTransformPoint(newGrabPosition) - MixedRealityPlayspace.InverseTransformPoint(testObject.transform.position);
            TestUtilities.AssertAboutEqual(offsetRotated, initialOffsetGrabToObjPivot, "Grab point on object changed during rotation");
        }

        // release and hide before the next rotation mode
        yield return (hand.SetGesture(ArticulatedHandPose.GestureId.Open));
        yield return (hand.Hide());
    }
}
/// <summary>
/// Play-mode test: grabs a cube with a near (sphere pointer) pinch, orbits the user
/// (playspace) in a full circle while moving the hand along, and verifies for every near
/// one-hand rotation mode that either the hand-to-center offset (RotateAboutObjectCenter)
/// or the grab point on the object surface (all other modes) stays fixed.
/// </summary>
public IEnumerator ManipulationHandlerOneHandMoveNear()
{
    // set up cube with manipulation handler
    var testObject = GameObject.CreatePrimitive(PrimitiveType.Cube);
    testObject.transform.localScale = Vector3.one * 0.2f;
    Vector3 initialObjectPosition = new Vector3(0f, 0f, 1f);
    testObject.transform.position = initialObjectPosition;
    var manipHandler = testObject.AddComponent<ManipulationHandler>();
    manipHandler.HostTransform = testObject.transform;
    manipHandler.SmoothingActive = false; // no smoothing so positions are exact each frame
    manipHandler.ManipulationType = ManipulationHandler.HandMovementType.OneHandedOnly;

    // add near interaction grabbable to be able to grab the cube with the simulated articulated hand
    testObject.AddComponent<NearInteractionGrabbable>();
    yield return (new WaitForFixedUpdate());
    yield return (null);

    const int numCircleSteps = 10;
    const int numHandSteps = 3;
    Vector3 initialHandPosition = new Vector3(0, 0, 0.5f);
    Vector3 initialGrabPosition = new Vector3(-0.1f, -0.1f, 1f); // grab the left bottom corner of the cube
    TestHand hand = new TestHand(Handedness.Right);

    // do this test for every one hand rotation mode
    foreach (ManipulationHandler.RotateInOneHandType type in Enum.GetValues(typeof(ManipulationHandler.RotateInOneHandType)))
    {
        manipHandler.OneHandRotationModeNear = type;
        TestUtilities.PlayspaceToOriginLookingForward();
        yield return (hand.Show(initialHandPosition));
        var pointer = hand.GetPointer<SpherePointer>();
        Assert.IsNotNull(pointer);
        yield return (hand.MoveTo(initialGrabPosition, numHandSteps));
        yield return (hand.SetGesture(ArticulatedHandPose.GestureId.Pinch));

        // save relative pos grab point to object
        Vector3 initialOffsetGrabToObjPivot = pointer.Position - testObject.transform.position;
        // grab point in object-local space, so it can be re-derived after the object rotates
        Vector3 initialGrabPointInObject = testObject.transform.InverseTransformPoint(manipHandler.GetPointerGrabPoint(pointer.PointerId));

        // full circle
        const int degreeStep = 360 / numCircleSteps;

        // rotating the pointer in a circle around "the user"
        for (int i = 1; i <= numCircleSteps; ++i)
        {
            // rotate main camera (user)
            MixedRealityPlayspace.PerformTransformation(
                p =>
            {
                p.position = MixedRealityPlayspace.Position;
                Vector3 rotatedFwd = Quaternion.AngleAxis(degreeStep * i, Vector3.up) * Vector3.forward;
                p.LookAt(rotatedFwd);
            });
            yield return (null);

            // move hand with the camera
            Vector3 newHandPosition = Quaternion.AngleAxis(degreeStep * i, Vector3.up) * initialGrabPosition;
            yield return (hand.MoveTo(newHandPosition, numHandSteps));

            if (type == ManipulationHandler.RotateInOneHandType.RotateAboutObjectCenter)
            {
                // make sure that the offset between hand and object centre hasn't changed while rotating
                Vector3 offsetRotated = pointer.Position - testObject.transform.position;
                TestUtilities.AssertAboutEqual(offsetRotated, initialOffsetGrabToObjPivot, $"Object offset changed during rotation using {type}");
            }
            else
            {
                // make sure that the offset between grab point and object pivot hasn't changed while rotating
                Vector3 grabPoint = manipHandler.GetPointerGrabPoint(pointer.PointerId);
                Vector3 cornerRotated = testObject.transform.TransformPoint(initialGrabPointInObject);
                TestUtilities.AssertAboutEqual(cornerRotated, grabPoint, $"Grab point on object changed during rotation using {type}");
            }
        }

        // release and hide before the next rotation mode
        yield return (hand.SetGesture(ArticulatedHandPose.GestureId.Open));
        yield return (hand.Hide());
    }
}
/// <summary>
/// Pushes the latest WebXR controller state into the interaction mappings: the controller
/// pose (transformed into world space via the playspace) feeds the spatial pointer and
/// spatial grip, while the select and squeeze states feed the digital mappings.
/// </summary>
/// <param name="controller">Platform-provided controller state (playspace-local pose).</param>
public void UpdateController(WebXRInputSource controller)
{
    if (!Enabled)
    {
        return;
    }

    // Fold the playspace transform into the controller pose to get a world-space pose.
    var worldPose = new MixedRealityPose(
        MixedRealityPlayspace.TransformPoint(controller.Position),
        MixedRealityPlayspace.Rotation * controller.Rotation);

    // `i < Interactions?.Length` is false when Interactions is null, so the loop is skipped safely.
    for (int i = 0; i < Interactions?.Length; i++)
    {
        var mapping = Interactions[i];
        switch (mapping.InputType)
        {
            // Pointer and grip share the same pose on this platform.
            case DeviceInputType.SpatialPointer:
            case DeviceInputType.SpatialGrip:
                mapping.PoseData = worldPose;
                if (mapping.Changed)
                {
                    CoreServices.InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, mapping.MixedRealityInputAction, worldPose);
                }
                break;

            // Select reads the trigger/select state; TriggerPress reads the squeeze state.
            case DeviceInputType.Select:
            case DeviceInputType.TriggerPress:
                mapping.BoolData = mapping.InputType == DeviceInputType.Select
                    ? controller.Selected
                    : controller.Squeezed;
                if (mapping.Changed)
                {
                    if (mapping.BoolData)
                    {
                        CoreServices.InputSystem?.RaiseOnInputDown(InputSource, ControllerHandedness, mapping.MixedRealityInputAction);
                    }
                    else
                    {
                        CoreServices.InputSystem?.RaiseOnInputUp(InputSource, ControllerHandedness, mapping.MixedRealityInputAction);
                    }
                }
                break;
        }
    }
}
/// <summary>
/// Per-frame update for the Oculus Quest articulated hand: raises tracking-state and
/// source-pose changes, updates the pointer/grip/pinch/index-finger interaction mappings,
/// and publishes the full hand-joint dictionary built from the OVR skeleton.
/// All poses are transformed from hand-local space into world space via the playspace.
/// </summary>
public void UpdateController()
{
    if (!Enabled)
    {
        return;
    }

    // hand pose: detect tracked/not-tracked transitions and raise the state change once.
    var lastState = TrackingState;
    TrackingState = (hand.IsTracked) ? TrackingState.Tracked : TrackingState.NotTracked;
    if (lastState != TrackingState)
    {
        CoreServices.InputSystem?.RaiseSourceTrackingStateChanged(InputSource, this, TrackingState);
    }
    if (TrackingState == TrackingState.Tracked)
    {
        var pose = new MixedRealityPose();
        pose.Position = MixedRealityPlayspace.TransformPoint(hand.transform.position);
        pose.Rotation = MixedRealityPlayspace.Rotation * hand.transform.rotation;
        CoreServices.InputSystem?.RaiseSourcePoseChanged(InputSource, this, pose);
    }

    // hand interaction
    if (Interactions == null)
    {
        Debug.LogError($"No interaction configuration for Oculus Quest Hand {ControllerHandedness} Source");
        Enabled = false;
        // NOTE(review): no `return;` here — execution continues and still runs the joint
        // update below. The interaction loop itself is safe because `i < Interactions?.Length`
        // is false for null, but confirm whether continuing to the joint update is intended.
    }
    if (TrackingState == TrackingState.Tracked)
    {
        for (int i = 0; i < Interactions?.Length; i++)
        {
            var interaction = Interactions[i];
            switch (interaction.InputType)
            {
                case DeviceInputType.None:
                    break;
                case DeviceInputType.SpatialPointer:
                    // hand pointer: OVR-provided pointer pose, folded into the playspace.
                    var pointer = new MixedRealityPose();
                    pointer.Position = MixedRealityPlayspace.TransformPoint(hand.PointerPose.position);
                    pointer.Rotation = MixedRealityPlayspace.Rotation * hand.PointerPose.rotation;
                    interaction.PoseData = pointer;
                    if (interaction.Changed)
                    {
                        CoreServices.InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, interaction.MixedRealityInputAction, pointer);
                    }
                    break;
                case DeviceInputType.SpatialGrip:
                    if (interaction.AxisType == AxisType.SixDof)
                    {
                        // grip uses the hand root transform as its pose
                        var grip = new MixedRealityPose();
                        grip.Position = MixedRealityPlayspace.TransformPoint(hand.transform.position);
                        grip.Rotation = MixedRealityPlayspace.Rotation * hand.transform.rotation;
                        interaction.PoseData = grip;
                        if (interaction.Changed)
                        {
                            CoreServices.InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, interaction.MixedRealityInputAction, grip);
                        }
                    }
                    break;
                case DeviceInputType.Select:
                case DeviceInputType.TriggerPress:
                    // Both select and trigger are driven by the index-finger pinch gesture.
                    interaction.BoolData = hand.GetFingerIsPinching(OVRHand.HandFinger.Index);
                    if (interaction.Changed)
                    {
                        if (interaction.BoolData)
                        {
                            CoreServices.InputSystem?.RaiseOnInputDown(InputSource, ControllerHandedness, interaction.MixedRealityInputAction);
                        }
                        else
                        {
                            CoreServices.InputSystem?.RaiseOnInputUp(InputSource, ControllerHandedness, interaction.MixedRealityInputAction);
                        }
                    }
                    break;
                case DeviceInputType.IndexFinger:
                    // Uses the joint dictionary filled in on previous frames; empty on the first pass.
                    if (jointPose.ContainsKey(TrackedHandJoint.IndexTip))
                    {
                        var indexFinger = jointPose[TrackedHandJoint.IndexTip];
                        interaction.PoseData = indexFinger;
                        if (interaction.Changed)
                        {
                            CoreServices.InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, interaction.MixedRealityInputAction, indexFinger);
                        }
                    }
                    break;
            }
        }
    }

    // hand joint: convert each OVR skeleton bone to an MRTK joint pose and publish the set.
    if (TrackingState == TrackingState.Tracked)
    {
        for (int i = 0; i < skeleton.Bones.Count; i++)
        {
            var bones = skeleton.Bones[i];
            var handJoint = convertBoneIdToTrackedHandJoint(bones.Id);
            var position = MixedRealityPlayspace.TransformPoint(bones.Transform.position);
            var rotation = MixedRealityPlayspace.Rotation * bones.Transform.rotation;
            if (jointPose.ContainsKey(handJoint))
            {
                jointPose[handJoint] = new MixedRealityPose(position, rotation);
            }
            else
            {
                jointPose.Add(handJoint, new MixedRealityPose(position, rotation));
            }
        }
        CoreServices.InputSystem?.RaiseHandJointsUpdated(InputSource, ControllerHandedness, jointPose);
    }
}
/// <summary>
/// Converts a position expressed in playspace-local coordinates into a world-space position.
/// </summary>
/// <param name="localPosition">Position in the playspace's local frame.</param>
/// <returns>The equivalent world-space position.</returns>
public static Vector3 PositionRelativeToPlayspace(Vector3 localPosition) =>
    MixedRealityPlayspace.TransformPoint(localPosition);
/// <summary>
/// Play-mode test: verifies that an Interactable marked IsGlobal receives a raised
/// select down/up even when it is out of view and has no focus — the click fires,
/// the visual target translates, and the interactable never gains focus.
/// </summary>
public IEnumerator TestSimulatedGlobalSelectInputOnPrefab()
{
    // Face the camera in the opposite direction so we don't focus on button
    MixedRealityPlayspace.PerformTransformation(
        p =>
    {
        p.position = Vector3.zero;
        p.LookAt(Vector3.back);
    });

    // Load interactable prefab
    GameObject interactableObject;
    Interactable interactable;
    Transform translateTargetObject;

    // Place out of the way of any pointers
    InstantiateDefaultInteractablePrefab(
        new Vector3(10f, 0.0f, 0.5f),
        new Vector3(-90f, 0f, 0f),
        out interactableObject,
        out interactable,
        out translateTargetObject);

    // Subscribe to interactable's on click so we know the click went through
    bool wasClicked = false;
    interactable.OnClick.AddListener(() => { wasClicked = true; });

    // Set interactable to global
    interactable.IsGlobal = true;
    Vector3 targetStartPosition = translateTargetObject.localPosition;
    yield return (null);

    // Find an input source to associate with the input event (doesn't matter which one)
    IMixedRealityInputSource defaultInputSource = MixedRealityToolkit.InputSystem.DetectedInputSources.FirstOrDefault();
    Assert.NotNull(defaultInputSource, "At least one input source must be present for this test to work.");

    // Add interactable as a global listener
    // This is only necessary if IsGlobal is being set manually. If it's set in the inspector, interactable will register itself in OnEnable automatically.
    // NOTE(review): the comment says "global listener" but the call below pushes a MODAL
    // input handler (and the cleanup pops it) — confirm whether this is intentional.
    MixedRealityToolkit.InputSystem.PushModalInputHandler(interactableObject);

    // Raise a select down input event, then wait for transition to take place
    // NOTE(review): the down event uses Handedness.None while the up event below uses
    // Handedness.Right — presumably handedness is ignored for this action, but verify.
    MixedRealityToolkit.InputSystem.RaiseOnInputDown(defaultInputSource, Handedness.None, interactable.InputAction);
    // Wait for at least one frame explicitly to ensure the input goes through
    yield return (new WaitForFixedUpdate());

    float pressStartTime = Time.time;
    bool wasTranslated = false;
    while (Time.time < pressStartTime + buttonPressAnimationDelay)
    {
        // If the transform is moved at any point during this interval, we were successful
        yield return (new WaitForFixedUpdate());
        wasTranslated |= targetStartPosition != translateTargetObject.localPosition;
    }

    // Raise a select up input event, then wait for transition to take place
    MixedRealityToolkit.InputSystem.RaiseOnInputUp(defaultInputSource, Handedness.Right, interactable.InputAction);
    // Wait for at least one frame explicitly to ensure the input goes through
    yield return (new WaitForFixedUpdate());
    yield return (new WaitForSeconds(buttonReleaseAnimationDelay));

    Assert.True(wasClicked, "Interactable was not clicked.");
    Assert.True(wasTranslated, "Transform target object was not translated by action.");
    Assert.False(interactable.HasFocus, "Interactable had focus");

    // Remove as global listener
    MixedRealityToolkit.InputSystem.PopModalInputHandler();
}
/// <summary>
/// Per-frame update for a WebXR articulated hand: rebuilds the joint-pose dictionary from
/// the platform hand joints, derives a spatial pointer pose (controller pose when position
/// tracking is available, otherwise a hand ray between thumb tip and index tip), raises
/// source-pose and joint updates, and feeds the interaction mappings.
/// </summary>
/// <param name="controller">Platform-provided input source (hand joints in world/playspace-local space).</param>
public void UpdateController(WebXRInputSource controller)
{
    if (!Enabled)
    {
        return;
    }

    IsPositionAvailable = IsRotationAvailable = controller.Hand.Available;

    // Wrist maps directly; the remaining WebXR joints map to MRTK joints offset by 2.
    jointPoses[TrackedHandJoint.Wrist] = GetJointMixedRealityPose(controller.Hand.Joints[WebXRHand.WRIST]);
    for (int i = WebXRHand.THUMB_METACARPAL; i < WebXRHand.JOINT_COUNT; i++)
    {
        var joint = controller.Hand.Joints[i];
        jointPoses[(TrackedHandJoint)(i + 2)] = GetJointMixedRealityPose(joint);
    }

    // NOTE(review): this averages MiddleMetacarpal with ITSELF, so the "/ 2" is a no-op and
    // the palm is just the middle metacarpal pose. Almost certainly one of the two operands
    // was meant to be a different joint (e.g. Wrist or MiddleProximal) — confirm intent.
    jointPoses[TrackedHandJoint.Palm] = new MixedRealityPose((jointPoses[TrackedHandJoint.MiddleMetacarpal].Position + jointPoses[TrackedHandJoint.MiddleMetacarpal].Position) / 2, jointPoses[TrackedHandJoint.MiddleMetacarpal].Rotation);

    var indexPose = jointPoses[TrackedHandJoint.IndexTip];
    bool isSelecting;
    MixedRealityPose spatialPointerPose;
    if (controller.IsPositionTracked)
    {
        isSelecting = controller.Selected;
        // Controller pose is playspace-local; fold in the playspace transform for world space.
        spatialPointerPose = new MixedRealityPose(MixedRealityPlayspace.TransformPoint(controller.Position), MixedRealityPlayspace.Rotation * controller.Rotation);
    }
    else
    {
        // Is selecting if thumb tip and index tip are close
        isSelecting = Vector3.Distance(controller.Hand.Joints[WebXRHand.THUMB_PHALANX_TIP].Position, controller.Hand.Joints[WebXRHand.INDEX_PHALANX_TIP].Position) < 0.04;
        // The hand ray starts from the middle of thumb tip and index tip
        HandRay.Update((controller.Hand.Joints[WebXRHand.THUMB_PHALANX_TIP].Position + controller.Hand.Joints[WebXRHand.INDEX_PHALANX_TIP].Position) / 2, new Vector3(0.3f, -0.4f, 0.9f), CameraCache.Main.transform, ControllerHandedness);
        Ray ray = HandRay.Ray;
        spatialPointerPose = new MixedRealityPose(ray.origin, Quaternion.LookRotation(ray.direction));
    }

    CoreServices.InputSystem?.RaiseSourcePoseChanged(InputSource, this, spatialPointerPose);
    CoreServices.InputSystem?.RaiseHandJointsUpdated(InputSource, ControllerHandedness, jointPoses);
    UpdateVelocity();

    // `i < Interactions?.Length` is false when Interactions is null, so the loop is skipped safely.
    for (int i = 0; i < Interactions?.Length; i++)
    {
        switch (Interactions[i].InputType)
        {
            case DeviceInputType.SpatialPointer:
                Interactions[i].PoseData = spatialPointerPose;
                if (Interactions[i].Changed)
                {
                    CoreServices.InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, Interactions[i].MixedRealityInputAction, Interactions[i].PoseData);
                }
                break;
            case DeviceInputType.SpatialGrip:
                // NOTE(review): grip uses the index-tip pose rather than a palm/grip pose —
                // looks deliberate for pinch-style grabbing on this platform, but verify.
                Interactions[i].PoseData = indexPose;
                if (Interactions[i].Changed)
                {
                    CoreServices.InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, Interactions[i].MixedRealityInputAction, Interactions[i].PoseData);
                }
                break;
            case DeviceInputType.Select:
                // Screen target-ray mode (touch input) always counts as selecting.
                Interactions[i].BoolData = isSelecting || controller.TargetRayMode == WebXRTargetRayModes.Screen;
                if (Interactions[i].Changed)
                {
                    if (Interactions[i].BoolData)
                    {
                        CoreServices.InputSystem?.RaiseOnInputDown(InputSource, ControllerHandedness, Interactions[i].MixedRealityInputAction);
                    }
                    else
                    {
                        CoreServices.InputSystem?.RaiseOnInputUp(InputSource, ControllerHandedness, Interactions[i].MixedRealityInputAction);
                    }
                }
                break;
            case DeviceInputType.TriggerPress:
                Interactions[i].BoolData = isSelecting;
                if (Interactions[i].Changed)
                {
                    if (Interactions[i].BoolData)
                    {
                        CoreServices.InputSystem?.RaiseOnInputDown(InputSource, ControllerHandedness, Interactions[i].MixedRealityInputAction);
                    }
                    else
                    {
                        CoreServices.InputSystem?.RaiseOnInputUp(InputSource, ControllerHandedness, Interactions[i].MixedRealityInputAction);
                    }
                }
                break;
            case DeviceInputType.IndexFinger:
                Interactions[i].PoseData = indexPose;
                if (Interactions[i].Changed)
                {
                    CoreServices.InputSystem?.RaisePoseInputChanged(InputSource, ControllerHandedness, Interactions[i].MixedRealityInputAction, Interactions[i].PoseData);
                }
                break;
            case DeviceInputType.ThumbStick:
                HandDefinition?.UpdateCurrentTeleportPose(Interactions[i]);
                break;
        }
    }
}
/// <summary>
/// Names the tether visuals root after its owner and parents it under the playspace
/// so it follows playspace transformations.
/// </summary>
public void Start()
{
    // Give the root a recognizable name derived from the owning object.
    visualsRoot.gameObject.name = $"{gameObject.name}_NearTetherVisualsRoot";

    // Attach it at the root of the playspace hierarchy.
    MixedRealityPlayspace.AddChild(visualsRoot.transform);
}
/// <summary>
/// Creates the container object for the boundary demo markers and parents it
/// under the playspace so the markers follow playspace transformations.
/// </summary>
private void Awake()
{
    // GameObject(string) sets the name at construction time.
    markerParent = new GameObject("Boundary Demo Markers");
    MixedRealityPlayspace.AddChild(markerParent.transform);
}
/// <summary>
/// Play-mode test for the Follow solver: rotates and translates the playspace and
/// asserts the solved object stays inside the configured horizontal/vertical view
/// cone, then checks the renderer-bounds and collider-bounds angular clamp modes
/// keep the object in front of the camera.
/// </summary>
public IEnumerator TestFollowDirection()
{
    // Instantiate our test GameObject with solver.
    var testObjects = InstantiateTestSolver<Follow>();
    var followSolver = (Follow)testObjects.solver;
    testObjects.handler.TrackedTargetType = TrackedObjectType.Head;
    var targetTransform = testObjects.target.transform;

    // variables and lambdas to test direction remains within bounds
    // (max view degrees are full-cone widths, so the per-side bound is half)
    var maxXAngle = followSolver.MaxViewHorizontalDegrees / 2;
    var maxYAngle = followSolver.MaxViewVerticalDegrees / 2;
    Vector3 directionToHead() => CameraCache.Main.transform.position - targetTransform.position;
    float xAngle() => (Mathf.Acos(Vector3.Dot(directionToHead(), targetTransform.right)) * Mathf.Rad2Deg) - 90;
    float yAngle() => 90 - (Mathf.Acos(Vector3.Dot(directionToHead(), targetTransform.up)) * Mathf.Rad2Deg);

    // Test without rotation
    TestUtilities.PlayspaceToOriginLookingForward();
    yield return (new WaitForFixedUpdate());
    yield return (null);
    Assert.LessOrEqual(Mathf.Abs(xAngle()), maxXAngle, "Follow exceeded the max horizontal angular bounds");
    Assert.LessOrEqual(Mathf.Abs(yAngle()), maxYAngle, "Follow exceeded the max vertical angular bounds");

    // Test y axis rotation
    MixedRealityPlayspace.PerformTransformation(p => p.Rotate(Vector3.up, 45));
    yield return (new WaitForFixedUpdate());
    yield return (null);
    Assert.LessOrEqual(Mathf.Abs(xAngle()), maxXAngle, "Follow exceeded the max horizontal angular bounds");
    Assert.LessOrEqual(Mathf.Abs(yAngle()), maxYAngle, "Follow exceeded the max vertical angular bounds");

    // Test x axis rotation
    MixedRealityPlayspace.PerformTransformation(p => p.Rotate(Vector3.right, 45));
    yield return (new WaitForFixedUpdate());
    yield return (null);
    Assert.LessOrEqual(Mathf.Abs(xAngle()), maxXAngle, "Follow exceeded the max horizontal angular bounds");
    Assert.LessOrEqual(Mathf.Abs(yAngle()), maxYAngle, "Follow exceeded the max vertical angular bounds");

    // Test translation
    MixedRealityPlayspace.PerformTransformation(p => p.Translate(Vector3.back, Space.World));
    yield return (new WaitForFixedUpdate());
    yield return (null);
    Assert.LessOrEqual(Mathf.Abs(xAngle()), maxXAngle, "Follow exceeded the max horizontal angular bounds");
    Assert.LessOrEqual(Mathf.Abs(yAngle()), maxYAngle, "Follow exceeded the max vertical angular bounds");

    // Test renderer bounds clamp mode: turn fully away and verify the solver clamps
    // the object back in front of the camera.
    followSolver.AngularClampMode = Follow.AngularClampType.RendererBounds;
    MixedRealityPlayspace.PerformTransformation(p => p.Rotate(Vector3.up, 180));
    yield return (new WaitForFixedUpdate());
    yield return (null);
    Assert.Greater(Vector3.Dot(targetTransform.position - CameraCache.Main.transform.position, CameraCache.Main.transform.forward), 0.0f, "Follow did not clamp angle when using AngularClampType.RendererBounds.");

    // Test collider bounds clamp mode.
    followSolver.AngularClampMode = Follow.AngularClampType.ColliderBounds;
    // NOTE(review): rotating by 0 degrees is a no-op — presumably intentional, re-testing at
    // the same (180-degree) pose with the different clamp type; confirm that's the intent.
    MixedRealityPlayspace.PerformTransformation(p => p.Rotate(Vector3.up, 0.0f));
    yield return (new WaitForFixedUpdate());
    yield return (null);
    Assert.Greater(Vector3.Dot(targetTransform.position - CameraCache.Main.transform.position, CameraCache.Main.transform.forward), 0.0f, "Follow did not clamp angle when using AngularClampType.ColliderBounds.");
}