// Checks if the user performed an input gesture since the last call to this method.
// Allows the main update loop to check for asynchronous changes to the user
// input state.
public SpatialInteractionSourceState CheckForInput()
{
    // The pressed state is written by input event handlers, which may run on a
    // different thread than the main update loop. The previous read-then-null
    // sequence could lose a press published between the two statements; swap
    // the field atomically instead so every reported state is consumed exactly once.
    return System.Threading.Interlocked.Exchange(ref this.sourceState, null);
}
// Returns whether the given touch-controller button is currently pressed in the
// supplied source state.
private bool IsButtonPressed(TouchControllerButton button, SpatialInteractionSourceState state)
{
    if (button == TouchControllerButton.Thumbstick)
    {
        return state.ControllerProperties.IsThumbstickPressed;
    }

    if (button == TouchControllerButton.Touchpad)
    {
        return state.ControllerProperties.IsTouchpadPressed;
    }

    if (button == TouchControllerButton.Trigger)
    {
        return state.IsSelectPressed;
    }

    if (button == TouchControllerButton.Grip)
    {
        return state.IsGrasped;
    }

    if (button == TouchControllerButton.Menu)
    {
        return state.IsMenuPressed;
    }

    // A/B (right controller) and X/Y (left controller) have no physical buttons
    // here; they are emulated by pressing the touchpad on one half, split along
    // the thumb axis' X value.
    if (state.ControllerProperties.IsTouchpadPressed)
    {
        if (hand == SpatialInteractionSourceHandedness.Right)
        {
            if (button == TouchControllerButton.A) return ThumbAxis.X >= 0.0f;
            if (button == TouchControllerButton.B) return ThumbAxis.X < 0.0f;
        }
        else if (hand == SpatialInteractionSourceHandedness.Left)
        {
            if (button == TouchControllerButton.X) return ThumbAxis.X < 0.0f;
            if (button == TouchControllerButton.Y) return ThumbAxis.X >= 0.0f;
        }
    }

    return false;
}
// Updates the grip pose mapping from the platform source state.
private void UpdateGripData(SpatialInteractionSourceState spatialInteractionSourceState, MixedRealityInteractionMapping interactionMapping)
{
    // Only six-degrees-of-freedom mappings carry grip pose data.
    if (interactionMapping.AxisType != AxisType.SixDof)
    {
        return;
    }

    // Keep the last known pose when the platform cannot resolve a full location.
    var sourceLocation = spatialInteractionSourceState.Properties.TryGetLocation(WindowsMixedRealityUtilities.SpatialCoordinateSystem);
    if (sourceLocation != null && sourceLocation.Position.HasValue && sourceLocation.Orientation.HasValue)
    {
        currentGripPosition = sourceLocation.Position.Value.ToUnity();
        currentGripRotation = sourceLocation.Orientation.Value.ToUnity();
    }

    // Re-base the pose onto the camera rig when a camera system is available;
    // otherwise publish the raw platform pose.
    if (MixedRealityToolkit.TryGetSystem<IMixedRealityCameraSystem>(out var cameraSystem))
    {
        var rigTransform = cameraSystem.MainCameraRig.RigTransform;
        currentGripPose.Position = rigTransform.TransformPoint(currentGripPosition);
        currentGripPose.Rotation = Quaternion.Euler(rigTransform.TransformDirection(currentGripRotation.eulerAngles));
    }
    else
    {
        currentGripPose.Position = currentGripPosition;
        currentGripPose.Rotation = currentGripRotation;
    }

    interactionMapping.PoseData = currentGripPose;
}
/// <summary>
/// Gets updated hand data for the current frame.
/// </summary>
/// <param name="spatialInteractionSourceState">Platform provided current input source state for the hand.</param>
/// <param name="includeMeshData">If set, hand mesh information will be included in <see cref="HandData.Mesh"/>.</param>
/// <param name="handData">The output <see cref="HandData"/>.</param>
/// <returns>True, if data conversion was a success.</returns>
public bool TryGetHandData(SpatialInteractionSourceState spatialInteractionSourceState, bool includeMeshData, out HandData handData)
{
    // Bail out early when the WMR system is not tracking the hand at all.
    var handPose = spatialInteractionSourceState.TryGetHandPose();
    if (handPose == null)
    {
        handData = default;
        return false;
    }

    // The hand is known to the system; query the joints to decide whether it
    // meets our confidence requirements to be considered tracked this frame.
    var platformJointPoses = new JointPose[jointIndices.Length];
    bool jointsAvailable = handPose.TryGetJoints(spatialCoordinateSystem, jointIndices, platformJointPoses);

    handData = new HandData
    {
        TrackingState = jointsAvailable ? TrackingState.Tracked : TrackingState.NotTracked,
        UpdatedAt = DateTimeOffset.UtcNow.Ticks
    };

    if (handData.TrackingState == TrackingState.Tracked)
    {
        // Tracked per requirements: fill in joint, root and pointer data.
        handData.RootPose = GetHandRootPose(platformJointPoses);
        handData.Joints = GetJointPoses(platformJointPoses, handData.RootPose);
        handData.PointerPose = GetPointerPose(spatialInteractionSourceState);

        if (includeMeshData && TryGetUpdatedHandMeshData(spatialInteractionSourceState, handPose, out HandMeshData meshData))
        {
            handData.Mesh = meshData;
        }
        else
        {
            // Mesh visualization is disabled or mesh data was unavailable; tear
            // down any mesh observer previously created for this hand.
            var handedness = spatialInteractionSourceState.Source.Handedness;
            if (handMeshObservers.ContainsKey(handedness))
            {
                if (handedness == SpatialInteractionSourceHandedness.Left)
                {
                    hasRequestedHandMeshObserverLeftHand = false;
                }
                else if (handedness == SpatialInteractionSourceHandedness.Right)
                {
                    hasRequestedHandMeshObserverRightHand = false;
                }

                handMeshObservers.Remove(handedness);
            }

            handData.Mesh = HandMeshData.Empty;
        }
    }

    // Even when confidence requirements were not met we return true, so the hand
    // controller and visualizers can react to tracking loss and keep the hand up
    // for a given time before destroying the controller.
    return true;
}
// Records the most recent pressed source state; the main loop consumes it via
// CheckForInput().
//
// TODO: In your app or game engine, rewrite this method to queue
// input events in your input class or event handler.
public void OnSourcePressed(SpatialInteractionManager sender, SpatialInteractionSourceEventArgs args)
{
    this.sourceState = args.State;
}
// Invalidates all cached device state when the source tracked for this hand is lost.
private void InteractionManager_SourceLost(SpatialInteractionManager sender, SpatialInteractionSourceEventArgs args)
{
    // Ignore sources belonging to the other hand.
    if (args.State.Source.Handedness != hand)
    {
        return;
    }

    internalState = DeviceState.Invalid;
    previousState = null;
    currentState = null;
}
/// <summary>
/// Update the hand data from the device.
/// </summary>
/// <param name="interactionSourceState">The InteractionSourceState retrieved from the platform.</param>
private void UpdateHandData(InteractionSourceState interactionSourceState)
{
#if WINDOWS_UWP || DOTNETWINRT_PRESENT
    using (UpdateHandDataPerfMarker.Auto())
    {
        // Articulated hand support requires the 18362 Windows SDK (V8 drop of the
        // Universal API Contract) or newer; the HandPose APIs do not exist below it.
        if (!articulatedHandApiAvailable)
        {
            return;
        }

        SpatialInteractionSourceState sourceState = interactionSourceState.source.GetSpatialInteractionSourceState();
        if (sourceState == null)
        {
            return;
        }

#if WINDOWS_UWP
        handMeshProvider?.UpdateHandMesh(sourceState);
#endif // WINDOWS_UWP

        HandPose handPose = sourceState.TryGetHandPose();
        if (handPose == null || !handPose.TryGetJoints(WindowsMixedRealityUtilities.SpatialCoordinateSystem, jointIndices, jointPoses))
        {
            return;
        }

        for (int jointIndex = 0; jointIndex < jointPoses.Length; jointIndex++)
        {
            Vector3 jointPosition = jointPoses[jointIndex].Position.ToUnityVector3();
            Quaternion jointRotation = jointPoses[jointIndex].Orientation.ToUnityQuaternion();

            // We want the joints to follow the playspace, so fold in the playspace
            // transform here to put the joint pose into world space.
            jointPosition = MixedRealityPlayspace.TransformPoint(jointPosition);
            jointRotation = MixedRealityPlayspace.Rotation * jointRotation;

            TrackedHandJoint trackedHandJoint = ConvertHandJointKindToTrackedHandJoint(jointIndices[jointIndex]);

            // Remember the index tip radius for consumers that need finger size.
            if (trackedHandJoint == TrackedHandJoint.IndexTip)
            {
                lastIndexTipRadius = jointPoses[jointIndex].Radius;
            }

            unityJointPoses[trackedHandJoint] = new MixedRealityPose(jointPosition, jointRotation);
        }

        handDefinition?.UpdateHandJoints(unityJointPoses);
    }
#endif // WINDOWS_UWP || DOTNETWINRT_PRESENT
}
// Updates thumbstick press / axis mappings from the platform source state.
private void UpdateThumbStickData(SpatialInteractionSourceState spatialInteractionSourceState, MixedRealityInteractionMapping interactionMapping)
{
    if (interactionMapping.InputType == DeviceInputType.ThumbStickPress)
    {
        interactionMapping.BoolData = spatialInteractionSourceState.ControllerProperties.IsThumbstickPressed;
    }
    else if (interactionMapping.InputType == DeviceInputType.ThumbStick)
    {
        // Platform axes are doubles; the mapping stores floats.
        interactionMapping.Vector2Data = new Vector2(
            (float)spatialInteractionSourceState.ControllerProperties.ThumbstickX,
            (float)spatialInteractionSourceState.ControllerProperties.ThumbstickY);
    }
}
// Creates (or refreshes) the hand mesh observer for the given source's hand and
// records that an observer has been requested for that hand.
private async void SetHandMeshObserver(SpatialInteractionSourceState sourceState)
{
    var handedness = sourceState.Source.Handedness;

    // The indexer adds the entry when missing, replacing the previous
    // ContainsKey/Add double lookup.
    handMeshObservers[handedness] = await sourceState.Source.TryCreateHandMeshObserverAsync();

    // Fix: only set the flag for the hand we just requested. The previous code
    // assigned BOTH flags from equality comparisons against the current
    // handedness, which cleared the other hand's flag on every call (e.g.
    // requesting the right hand reset hasRequestedHandMeshObserverLeftHand).
    if (handedness == SpatialInteractionSourceHandedness.Left)
    {
        hasRequestedHandMeshObserverLeftHand = true;
    }
    else if (handedness == SpatialInteractionSourceHandedness.Right)
    {
        hasRequestedHandMeshObserverRightHand = true;
    }
}
// Updates touchpad touch / press / axis mappings from the platform source state.
private void UpdateTouchPadData(SpatialInteractionSourceState spatialInteractionSourceState, MixedRealityInteractionMapping interactionMapping)
{
    if (interactionMapping.InputType == DeviceInputType.TouchpadTouch)
    {
        interactionMapping.BoolData = spatialInteractionSourceState.ControllerProperties.IsTouchpadTouched;
    }
    else if (interactionMapping.InputType == DeviceInputType.TouchpadPress)
    {
        interactionMapping.BoolData = spatialInteractionSourceState.ControllerProperties.IsTouchpadPressed;
    }
    else if (interactionMapping.InputType == DeviceInputType.Touchpad)
    {
        // Platform axes are doubles; the mapping stores floats.
        interactionMapping.Vector2Data = new Vector2(
            (float)spatialInteractionSourceState.ControllerProperties.TouchpadX,
            (float)spatialInteractionSourceState.ControllerProperties.TouchpadY);
    }
}
/// <summary>
/// Gets the hand's spatial pointer <see cref="MixedRealityPose"/> in the camera rig's local coordinate space.
/// </summary>
/// <param name="spatialInteractionSourceState">Current <see cref="SpatialInteractionSourceState"/> snapshot of the hand.</param>
/// <returns>The hand's <see cref="HandData.PointerPose"/> in the camera rig's local coordinate space.</returns>
private MixedRealityPose GetPointerPose(SpatialInteractionSourceState spatialInteractionSourceState)
{
    var spatialPointerPose = spatialInteractionSourceState.TryGetPointerPose(spatialCoordinateSystem);
    if (spatialPointerPose == null)
    {
        return MixedRealityPose.ZeroIdentity;
    }

    var interactionSourcePose = spatialPointerPose.TryGetInteractionSourcePose(spatialInteractionSourceState.Source);
    if (interactionSourcePose == null)
    {
        return MixedRealityPose.ZeroIdentity;
    }

    var pointerPosition = RigTransform.InverseTransformPoint(RigTransform.position + RigTransform.rotation * interactionSourcePose.Position.ToUnity());
    // NOTE(review): Inverse(rotation) * rotation cancels out mathematically,
    // leaving the platform orientation unchanged up to floating point error —
    // confirm this expression is intentional.
    var pointerRotation = Quaternion.Inverse(RigTransform.rotation) * RigTransform.rotation * interactionSourcePose.Orientation.ToUnity();

    return new MixedRealityPose(pointerPosition, pointerRotation);
}
// Updates the spatial-pointer pose mapping from the platform source state.
private void UpdatePointerData(SpatialInteractionSourceState spatialInteractionSourceState, MixedRealityInteractionMapping interactionMapping)
{
    var spatialPointerPose = spatialInteractionSourceState.TryGetPointerPose(WindowsMixedRealityUtilities.SpatialCoordinateSystem);
    if (spatialPointerPose != null)
    {
        var spatialInteractionSourcePose = spatialPointerPose.TryGetInteractionSourcePose(spatialInteractionSourceState.Source);
        if (spatialInteractionSourcePose != null)
        {
            // Fix: the fetched pose was previously written to
            // currentControllerPosition/currentControllerRotation, while
            // currentPointerPose below was built from the never-updated
            // currentPointerPosition/currentPointerRotation — so fresh pointer
            // data never reached the mapping. Write to the pointer fields the
            // pose is actually assembled from.
            currentPointerPosition = spatialInteractionSourcePose.Position.ToUnity();
            currentPointerRotation = spatialInteractionSourcePose.Orientation.ToUnity();
        }
    }

    // Publish the last known pointer pose even when no new pose was resolved.
    currentPointerPose.Position = currentPointerPosition;
    currentPointerPose.Rotation = currentPointerRotation;

    interactionMapping.PoseData = currentPointerPose;
}
// Updates trigger / select / grasp mappings from the platform source state.
private void UpdateTriggerData(SpatialInteractionSourceState spatialInteractionSourceState, MixedRealityInteractionMapping interactionMapping)
{
    if (interactionMapping.InputType == DeviceInputType.TriggerPress)
    {
        // NOTE(review): TriggerPress is fed from IsGrasped (the grip), not the
        // select trigger — looks deliberate for this mapping, but confirm.
        interactionMapping.BoolData = spatialInteractionSourceState.IsGrasped;
    }
    else if (interactionMapping.InputType == DeviceInputType.Select)
    {
        interactionMapping.BoolData = spatialInteractionSourceState.IsSelectPressed;
    }
    else if (interactionMapping.InputType == DeviceInputType.Trigger)
    {
        interactionMapping.FloatData = (float)spatialInteractionSourceState.SelectPressedValue;
    }
    else if (interactionMapping.InputType == DeviceInputType.TriggerTouch)
    {
        // Any non-zero analog value counts as a touch.
        interactionMapping.BoolData = spatialInteractionSourceState.SelectPressedValue > 0;
    }
}
// Refreshes the cached source state for this hand from the sources detected at
// the given timestamp.
public void Update(PerceptionTimestamp timeStamp, SpatialCoordinateSystem coordinateSystem)
{
    foreach (SpatialInteractionSourceState detectedState in interactionManager.GetDetectedSourcesAtTimestamp(timeStamp))
    {
        // Only the source matching this device's handedness is of interest.
        if (detectedState.Source.Handedness != hand)
        {
            continue;
        }

        // Update the cached location when the platform can resolve one.
        SpatialInteractionSourceLocation location = detectedState.Properties.TryGetLocation(coordinateSystem);
        if (location != null)
        {
            SetSpatialInteractionSourceLocation(location);
        }

        // Shift the state history; the device only reports Valid once two
        // consecutive states for this hand have been seen.
        previousState = currentState;
        currentState = detectedState;
        internalState = previousState != null ? DeviceState.Valid : DeviceState.Invalid;
    }
}
// Returns whether the given touch-controller button is currently touched in the
// supplied source state.
private bool IsButtonTouched(TouchControllerButton button, SpatialInteractionSourceState state)
{
    if (button == TouchControllerButton.Touchpad)
    {
        return state.ControllerProperties.IsTouchpadTouched;
    }

    // A/B (right controller) and X/Y (left controller) are emulated from
    // touchpad halves, split along the thumb axis' X value.
    // NOTE(review): these guards test IsTouchpadPressed even though this is the
    // "touched" query — possibly copied from IsButtonPressed; confirm intent.
    if (state.ControllerProperties.IsTouchpadPressed)
    {
        if (hand == SpatialInteractionSourceHandedness.Right)
        {
            if (button == TouchControllerButton.A) return ThumbAxis.X >= 0.0f;
            if (button == TouchControllerButton.B) return ThumbAxis.X < 0.0f;
        }
        else if (hand == SpatialInteractionSourceHandedness.Left)
        {
            if (button == TouchControllerButton.X) return ThumbAxis.X < 0.0f;
            if (button == TouchControllerButton.Y) return ThumbAxis.X >= 0.0f;
        }
    }

    return false;
}
/// <summary>
/// Updates the application state once per frame.
/// </summary>
/// <returns>The newly created <see cref="HolographicFrame"/> the caller uses to render and present.</returns>
public HolographicFrame Update()
{
    // Before doing the timer update, there is some work to do per-frame
    // to maintain holographic rendering. First, we will get information
    // about the current frame.

    // The HolographicFrame has information that the app needs in order
    // to update and render the current frame. The app begins each new
    // frame by calling CreateNextFrame.
    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Get a prediction of where holographic cameras will be when this frame
    // is presented.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers can change from frame to frame. Validate each buffer, and recreate
    // resource views and depth buffers as needed.
    deviceResources.EnsureCameraResources(holographicFrame, prediction);

#if DRAW_SAMPLE_CONTENT
    if (stationaryReferenceFrame != null)
    {
        // Check for new input state since the last frame.
        // Edge-detect the gamepad A button so only a transition from released
        // to pressed counts as a new pointer press.
        for (int i = 0; i < gamepads.Count; ++i)
        {
            bool buttonDownThisUpdate = (gamepads[i].gamepad.GetCurrentReading().Buttons & GamepadButtons.A) == GamepadButtons.A;
            if (buttonDownThisUpdate && !gamepads[i].buttonAWasPressedLastFrame)
            {
                pointerPressed = true;
            }
            gamepads[i].buttonAWasPressedLastFrame = buttonDownThisUpdate;
        }

        // Prefer the spatial input gesture's own pointer pose; fall back to the
        // head pose at the frame's predicted timestamp for gamepad presses.
        SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
        SpatialPointerPose pose = null;
        if (null != pointerState)
        {
            pose = pointerState.TryGetPointerPose(stationaryReferenceFrame.CoordinateSystem);
        }
        else if (pointerPressed)
        {
            pose = SpatialPointerPose.TryGetAtTimestamp(stationaryReferenceFrame.CoordinateSystem, prediction.Timestamp);
        }
        pointerPressed = false;

        // When a Pressed gesture is detected, the sample hologram will be repositioned
        // two meters in front of the user.
        spinningCubeRenderer.PositionHologram(pose);
    }
#endif

    timer.Tick(() =>
    {
        //
        // TODO: Update scene objects.
        //
        // Put time-based updates here. By default this code will run once per frame,
        // but if you change the StepTimer to use a fixed time step this code will
        // run as many times as needed to get to the current step.
        //
#if DRAW_SAMPLE_CONTENT
        spinningCubeRenderer.Update(timer);
#endif
    });

    if (!canCommitDirect3D11DepthBuffer)
    {
        // On versions of the platform that do not support the CommitDirect3D11DepthBuffer API, we can control
        // image stabilization by setting a focus point with optional plane normal and velocity.
        foreach (var cameraPose in prediction.CameraPoses)
        {
#if DRAW_SAMPLE_CONTENT
            // The HolographicCameraRenderingParameters class provides access to set
            // the image stabilization parameters.
            HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

            // SetFocusPoint informs the system about a specific point in your scene to
            // prioritize for image stabilization. The focus point is set independently
            // for each holographic camera. When setting the focus point, put it on or
            // near content that the user is looking at.
            // In this example, we put the focus point at the center of the sample hologram.
            // You can also set the relative velocity and facing of the stabilization
            // plane using overloads of this method.
            if (stationaryReferenceFrame != null)
            {
                renderingParameters.SetFocusPoint(
                    stationaryReferenceFrame.CoordinateSystem,
                    spinningCubeRenderer.Position
                );
            }
#endif
        }
    }

    // The holographic frame will be used to get up-to-date view and projection matrices and
    // to present the swap chain.
    return(holographicFrame);
}
/// <summary>
/// Updates the application state once per frame.
/// </summary>
/// <returns>The newly created <see cref="HolographicFrame"/> the caller uses to render and present.</returns>
public HolographicFrame Update()
{
    // Before doing the timer update, there is some work to do per-frame
    // to maintain holographic rendering. First, we will get information
    // about the current frame.

    // The HolographicFrame has information that the app needs in order
    // to update and render the current frame. The app begins each new
    // frame by calling CreateNextFrame.
    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Get a prediction of where holographic cameras will be when this frame
    // is presented.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers can change from frame to frame. Validate each buffer, and recreate
    // resource views and depth buffers as needed.
    deviceResources.EnsureCameraResources(holographicFrame, prediction);

    // Next, we get a coordinate system from the attached frame of reference that is
    // associated with the current frame. Later, this coordinate system is used for
    // creating the stereo view matrices when rendering the sample content.
    SpatialCoordinateSystem currentCoordinateSystem = referenceFrame.CoordinateSystem;

#if DRAW_SAMPLE_CONTENT
    // Check for new input state since the last frame.
    SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
    if (null != pointerState)
    {
        // When a Pressed gesture is detected, the sample hologram will be repositioned
        // two meters in front of the user.
        spinningCubeRenderer.PositionHologram(
            pointerState.TryGetPointerPose(currentCoordinateSystem)
        );
    }
#endif

    timer.Tick(() =>
    {
        //
        // TODO: Update scene objects.
        //
        // Put time-based updates here. By default this code will run once per frame,
        // but if you change the StepTimer to use a fixed time step this code will
        // run as many times as needed to get to the current step.
        //
#if DRAW_SAMPLE_CONTENT
        spinningCubeRenderer.Update(timer);
        _spatialSurfaceRenderer.Update();
#endif
    });

    // We complete the frame update by using information about our content positioning
    // to set the focus point.
    foreach (var cameraPose in prediction.CameraPoses)
    {
#if DRAW_SAMPLE_CONTENT
        // The HolographicCameraRenderingParameters class provides access to set
        // the image stabilization parameters.
        HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

        // SetFocusPoint informs the system about a specific point in your scene to
        // prioritize for image stabilization. The focus point is set independently
        // for each holographic camera.
        // You should set the focus point near the content that the user is looking at.
        // In this example, we put the focus point at the center of the sample hologram,
        // since that is the only hologram available for the user to focus on.
        // You can also set the relative velocity and facing of that content; the sample
        // hologram is at a fixed point so we only need to indicate its position.
        renderingParameters.SetFocusPoint(
            currentCoordinateSystem,
            spinningCubeRenderer.Position
        );
#endif
    }

    // The holographic frame will be used to get up-to-date view and projection matrices and
    // to present the swap chain.
    return(holographicFrame);
}
/// <summary>
/// Updates the application state once per frame.
/// </summary>
/// <param name="previousFrame">The frame produced by the previous update, used for frame synchronization on older OS versions; may be null on the first call.</param>
/// <returns>The newly created <see cref="HolographicFrame"/> the caller uses to render and present.</returns>
public HolographicFrame Update(HolographicFrame previousFrame)
{
    // TODO: Put CPU work that does not depend on the HolographicCameraPose here.

    // Apps should wait for the optimal time to begin pose-dependent work.
    // The platform will automatically adjust the wakeup time to get
    // the lowest possible latency at high frame rates. For manual
    // control over latency, use the WaitForNextFrameReadyWithHeadStart
    // API.
    // WaitForNextFrameReady and WaitForNextFrameReadyWithHeadStart are the
    // preferred frame synchronization APIs for Windows Mixed Reality. When
    // running on older versions of the OS that do not include support for
    // these APIs, your app can use the WaitForFrameToFinish API for similar
    // (but not as optimal) behavior.
    if (canUseWaitForNextFrameReadyAPI)
    {
        try
        {
            holographicSpace.WaitForNextFrameReady();
        }
        catch (NotImplementedException)
        {
            // Catch a specific case where WaitForNextFrameReady() is present but not implemented
            // and default back to WaitForFrameToFinish() in that case.
            canUseWaitForNextFrameReadyAPI = false;
        }
    }
    else if (previousFrame != null)
    {
        previousFrame.WaitForFrameToFinish();
    }

    // Before doing the timer update, there is some work to do per-frame
    // to maintain holographic rendering. First, we will get information
    // about the current frame.

    // The HolographicFrame has information that the app needs in order
    // to update and render the current frame. The app begins each new
    // frame by calling CreateNextFrame.
    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Get a prediction of where holographic cameras will be when this frame
    // is presented.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers can change from frame to frame. Validate each buffer, and recreate
    // resource views and depth buffers as needed.
    deviceResources.EnsureCameraResources(holographicFrame, prediction);

#if DRAW_SAMPLE_CONTENT
    if (stationaryReferenceFrame != null)
    {
        // Check for new input state since the last frame.
        // Edge-detect the gamepad A button so only a transition from released
        // to pressed counts as a new pointer press.
        for (int i = 0; i < gamepads.Count; ++i)
        {
            bool buttonDownThisUpdate = (gamepads[i].gamepad.GetCurrentReading().Buttons & GamepadButtons.A) == GamepadButtons.A;
            if (buttonDownThisUpdate && !gamepads[i].buttonAWasPressedLastFrame)
            {
                pointerPressed = true;
            }
            gamepads[i].buttonAWasPressedLastFrame = buttonDownThisUpdate;
        }

        // Prefer the spatial input gesture's own pointer pose; fall back to the
        // head pose at the frame's predicted timestamp for gamepad presses.
        SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
        SpatialPointerPose pose = null;
        if (null != pointerState)
        {
            pose = pointerState.TryGetPointerPose(stationaryReferenceFrame.CoordinateSystem);
        }
        else if (pointerPressed)
        {
            pose = SpatialPointerPose.TryGetAtTimestamp(stationaryReferenceFrame.CoordinateSystem, prediction.Timestamp);
        }
        pointerPressed = false;

        // When a Pressed gesture is detected, the sample hologram will be repositioned
        // two meters in front of the user.
        quadRendererR.PositionHologram(pose);
        quadRendererL.PositionHologram(pose);
    }
#endif

    timer.Tick(() =>
    {
        //
        // TODO: Update scene objects.
        //
        // Put time-based updates here. By default this code will run once per frame,
        // but if you change the StepTimer to use a fixed time step this code will
        // run as many times as needed to get to the current step.
        //
#if DRAW_SAMPLE_CONTENT
        quadRendererR.Update(timer);
        quadRendererL.Update(timer);
#endif
    });

    // On HoloLens 2, the platform can achieve better image stabilization results if it has
    // a stabilization plane and a depth buffer.
    // Note that the SetFocusPoint API includes an override which takes velocity as a
    // parameter. This is recommended for stabilizing holograms in motion.
    foreach (var cameraPose in prediction.CameraPoses)
    {
#if DRAW_SAMPLE_CONTENT
        // The HolographicCameraRenderingParameters class provides access to set
        // the image stabilization parameters.
        HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

        // SetFocusPoint informs the system about a specific point in your scene to
        // prioritize for image stabilization. The focus point is set independently
        // for each holographic camera. When setting the focus point, put it on or
        // near content that the user is looking at.
        // In this example, the focus point is placed at the world origin of the
        // stationary reference frame (not at the hologram's position).
        // You can also set the relative velocity and facing of the stabilization
        // plane using overloads of this method.
        if (stationaryReferenceFrame != null)
        {
            renderingParameters.SetFocusPoint(
                stationaryReferenceFrame.CoordinateSystem,
                new System.Numerics.Vector3(0, 0, 0)
            );
        }
#endif
    }

    // The holographic frame will be used to get up-to-date view and projection matrices and
    // to present the swap chain.
    return(holographicFrame);
}
/// <summary>
/// Updates the application state once per frame.
/// </summary>
/// <returns>The newly created <see cref="HolographicFrame"/> the caller uses to render and present.</returns>
public HolographicFrame Update()
{
    // Before doing the timer update, there is some work to do per-frame
    // to maintain holographic rendering. First, we will get information
    // about the current frame.

    // The HolographicFrame has information that the app needs in order
    // to update and render the current frame. The app begins each new
    // frame by calling CreateNextFrame.
    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Get a prediction of where holographic cameras will be when this frame
    // is presented.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers can change from frame to frame. Validate each buffer, and recreate
    // resource views and depth buffers as needed.
    deviceResources.EnsureCameraResources(holographicFrame, prediction);

    // Next, we get a coordinate system from the attached frame of reference that is
    // associated with the current frame. Later, this coordinate system is used for
    // creating the stereo view matrices when rendering the sample content.
    SpatialCoordinateSystem currentCoordinateSystem = attachreferenceFrame.GetStationaryCoordinateSystemAtTimestamp(prediction.Timestamp);//referenceFrame.CoordinateSystem;

#if DRAW_SAMPLE_CONTENT
    // Check for new input state since the last frame.
    SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
    if (null != pointerState)
    {
        // When a Pressed gesture is detected, the sample hologram will be repositioned
        // two meters in front of the user. Note that the head pose at the frame's
        // predicted timestamp is used here instead of the input source's own
        // pointer pose (the original call is left commented below).
        spinningCubeRenderer.PositionHologram(
            // pointerState.TryGetPointerPose(currentCoordinateSystem)
            SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp)
        );
    }

    //var downstate = Windows.UI.Core.CoreVirtualKeyStates.Down;
    //bool rightclick = (Windows.UI.Core.CoreWindow.GetForCurrentThread().GetKeyState(Windows.System.VirtualKey.Escape) & downstate) == downstate;
    //System.Diagnostics.Debug.WriteLine("Windows.System.VirtualKey.Escape " + Windows.UI.Core.CoreWindow.GetForCurrentThread().GetKeyState(Windows.System.VirtualKey.Escape).ToString() + " downstate" + downstate);
    //System.Diagnostics.Debug.WriteLine("Windows.System.VirtualKey.A " + Windows.UI.Core.CoreWindow.GetForCurrentThread().GetKeyState(Windows.System.VirtualKey.A).ToString() + " downstate" + downstate);
    //if (rightclick)
    //{
    //    Windows.UI.ViewManagement.ApplicationViewSwitcher.SwitchAsync(VideoGallery.mainId, VideoGallery.appId, Windows.UI.ViewManagement.ApplicationViewSwitchingOptions.ConsolidateViews);
    //}
#endif

    timer.Tick(() =>
    {
        //
        // TODO: Update scene objects.
        //
        // Put time-based updates here. By default this code will run once per frame,
        // but if you change the StepTimer to use a fixed time step this code will
        // run as many times as needed to get to the current step.
        //
#if DRAW_SAMPLE_CONTENT
        spinningCubeRenderer.Update(timer);
#endif
    });

    // We complete the frame update by using information about our content positioning
    // to set the focus point.
    foreach (var cameraPose in prediction.CameraPoses)
    {
#if DRAW_SAMPLE_CONTENT
        // The HolographicCameraRenderingParameters class provides access to set
        // the image stabilization parameters.
        HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

        // SetFocusPoint informs the system about a specific point in your scene to
        // prioritize for image stabilization. The focus point is set independently
        // for each holographic camera.
        // You should set the focus point near the content that the user is looking at.
        // Here a fixed point three meters in front of the origin is used instead of
        // the hologram's position (left commented in the call below).
        Vector3 position = new Vector3(0.0f, 0.0f, -3.0f);
        renderingParameters.SetFocusPoint(
            currentCoordinateSystem,
            position /*spinningCubeRenderer.Position*/
        );
#endif
    }

    // The holographic frame will be used to get up-to-date view and projection matrices and
    // to present the swap chain.
    return(holographicFrame);
}
/// <summary>
/// Update the spatial input state
/// </summary>
/// <param name="args">The source event arguments carrying the new interaction state</param>
private void UpdatedState(SpatialInteractionSourceEventArgs args)
{
    var coordinateSystem = this.CoordinateSystem;

    // Common data available for every spatial source kind.
    this.currentSpatialState.IsDetected = true;
    this.currentSpatialState.IsSelected = args.State.IsPressed;
    SpatialInteractionSource source = args.State.Source;
    this.currentSpatialState.Kind = (SpatialSource)source.Kind;
    this.currentSpatialState.SourceLossRisk = (float)args.State.Properties.SourceLossRisk;

    // Motion-controller-only data: buttons, trigger, axes and the pointer tip.
    if (this.isMixedRealityControllerAvailable && source.Kind == SpatialInteractionSourceKind.Controller)
    {
        SpatialInteractionSourceState sourceState = args.State;
        SpatialInteractionControllerProperties controllerProperties = sourceState.ControllerProperties;
        this.currentSpatialState.Handedness = (SpatialInteractionHandedness)source.Handedness;

        // Buttons
        this.currentSpatialState.IsThumbstickPressed = controllerProperties.IsThumbstickPressed;
        this.currentSpatialState.IsTouchpadPressed = controllerProperties.IsTouchpadPressed;
        this.currentSpatialState.IsTouchpadTouched = controllerProperties.IsTouchpadTouched;
        this.currentSpatialState.IsGraspPressed = sourceState.IsGrasped;
        this.currentSpatialState.IsMenuPressed = sourceState.IsMenuPressed;

        // Trigger (analog value is a double on the platform, float in our state)
        this.currentSpatialState.IsSelectTriggerPressed = sourceState.IsSelectPressed;
        this.currentSpatialState.SelectTriggerValue = (float)sourceState.SelectPressedValue;

        // Thumbstick
        Vector2 thumbstickValues = this.currentSpatialState.Thumbstick;
        thumbstickValues.X = (float)controllerProperties.ThumbstickX;
        thumbstickValues.Y = (float)controllerProperties.ThumbstickY;
        this.currentSpatialState.Thumbstick = thumbstickValues;

        // Touchpad
        Vector2 touchpadValues = this.currentSpatialState.Touchpad;
        touchpadValues.X = (float)controllerProperties.TouchpadX;
        touchpadValues.Y = (float)controllerProperties.TouchpadY;
        this.currentSpatialState.Touchpad = touchpadValues;

        // Tip of the controller
        // NOTE(review): this uses this.CoordinateSystem directly instead of the
        // coordinateSystem local captured at the top — confirm they cannot diverge.
        SpatialPointerInteractionSourcePose pointer = sourceState.TryGetPointerPose(this.CoordinateSystem)?.TryGetInteractionSourcePose(source);
        if (pointer != null)
        {
            pointer.Position.ToWave(out this.currentSpatialState.TipControllerPosition);
            pointer.ForwardDirection.ToWave(out this.currentSpatialState.TipControllerForward);
        }
    }

    // Source location: position / orientation / velocity, each only when resolved.
    var location = args.State.Properties.TryGetLocation(coordinateSystem);
    if (location != null)
    {
        if (location.Position.HasValue)
        {
            location.Position.Value.ToWave(out this.currentSpatialState.Position);
        }

        if (location.Orientation.HasValue)
        {
            location.Orientation.Value.ToWave(out this.currentSpatialState.Orientation);
        }

        if (location.Velocity.HasValue)
        {
            location.Velocity.Value.ToWave(out this.currentSpatialState.Velocity);
        }
    }

    this.UpdateGenericController();
}
/// <summary>
/// Copies the platform's menu button pressed state onto the given interaction mapping.
/// </summary>
/// <param name="spatialInteractionSourceState">Platform source state to read the menu button from.</param>
/// <param name="interactionMapping">Mapping that receives the boolean button state.</param>
private void UpdateMenuData(SpatialInteractionSourceState spatialInteractionSourceState, MixedRealityInteractionMapping interactionMapping)
{
    interactionMapping.BoolData = spatialInteractionSourceState.IsMenuPressed;
}
/// <summary>
/// Update the controller data from the provided platform state.
/// </summary>
/// <param name="interactionSourceState">The InteractionSourceState retrieved from the platform.</param>
public void UpdateController(SpatialInteractionSourceState interactionSourceState)
{
    if (!Enabled) { return; }

    base.UpdateController();
    UpdateControllerData(interactionSourceState);

    if (Interactions == null)
    {
        Debug.LogError($"No interaction configuration for {GetType().Name} {ControllerHandedness}");
        Enabled = false;
        // Fix: without an interaction configuration there is nothing to map; bail out
        // instead of falling through and recording a source state for a controller
        // that was just disabled.
        return;
    }

    // Dispatch each configured interaction to its specialized update routine.
    for (int i = 0; i < Interactions.Length; i++)
    {
        var interactionMapping = Interactions[i];
        switch (interactionMapping.InputType)
        {
            case DeviceInputType.None:
                break;
            case DeviceInputType.SpatialPointer:
                UpdatePointerData(interactionSourceState, interactionMapping);
                break;
            case DeviceInputType.Select:
            case DeviceInputType.Trigger:
            case DeviceInputType.TriggerTouch:
            case DeviceInputType.TriggerPress:
                UpdateTriggerData(interactionSourceState, interactionMapping);
                break;
            case DeviceInputType.SpatialGrip:
                UpdateGripData(interactionSourceState, interactionMapping);
                break;
            case DeviceInputType.ThumbStick:
            case DeviceInputType.ThumbStickPress:
                UpdateThumbStickData(interactionSourceState, interactionMapping);
                break;
            case DeviceInputType.Touchpad:
            case DeviceInputType.TouchpadTouch:
            case DeviceInputType.TouchpadPress:
                UpdateTouchPadData(interactionSourceState, interactionMapping);
                break;
            case DeviceInputType.Menu:
                UpdateMenuData(interactionSourceState, interactionMapping);
                break;
            default:
                // Unknown mapping: log and disable so the misconfiguration is visible.
                Debug.LogError($"Input [{interactionMapping.Description}.{interactionMapping.InputType}] is not handled for this controller [{GetType().Name}]");
                Enabled = false;
                break;
        }

        interactionMapping.RaiseInputAction(InputSource, ControllerHandedness);
    }

    // Remember the last state so deltas can be computed on the next update.
    LastSourceStateReading = interactionSourceState;
}
/// <summary>
/// Attempts to get updated hand mesh data.
/// </summary>
/// <param name="spatialInteractionSourceState">Platform provided current input source state for the hand.</param>
/// <param name="handPose">Hand pose information retrieved for joint conversion.</param>
/// <param name="data">Mesh information retrieved in case of success.</param>
/// <returns>True, if mesh data could be loaded.</returns>
private bool TryGetUpdatedHandMeshData(SpatialInteractionSourceState spatialInteractionSourceState, HandPose handPose, out HandMeshData data)
{
    // Fix: the out parameter must be definitely assigned on every return path
    // (CS0177); the original only assigned it on the success path.
    data = default;

    // Kick off an (async) observer request the first time we see this hand.
    if (!handMeshObservers.ContainsKey(spatialInteractionSourceState.Source.Handedness) &&
        !HasRequestedHandMeshObserver(spatialInteractionSourceState.Source.Handedness))
    {
        SetHandMeshObserver(spatialInteractionSourceState);
    }

    if (handMeshObservers.TryGetValue(spatialInteractionSourceState.Source.Handedness, out var handMeshObserver) &&
        handMeshTriangleIndices == null)
    {
        // One-time initialization: cache the (static) triangle indices and compute
        // UVs from the observer's neutral pose.
        var indexCount = handMeshObserver.TriangleIndexCount;
        var indices = new ushort[indexCount];
        handMeshObserver.GetTriangleIndices(indices);
        handMeshTriangleIndices = new int[indexCount];
        Array.Copy(indices, handMeshTriangleIndices, (int)indexCount);

        // Compute neutral pose
        var neutralPoseVertices = new Vector3[handMeshObserver.VertexCount];
        var neutralPose = handMeshObserver.NeutralPose;
        var vertexAndNormals = new HandMeshVertex[handMeshObserver.VertexCount];
        var handMeshVertexState = handMeshObserver.GetVertexStateForPose(neutralPose);
        handMeshVertexState.GetVertices(vertexAndNormals);

        for (int i = 0; i < handMeshObserver.VertexCount; i++)
        {
            neutralPoseVertices[i] = vertexAndNormals[i].Position.ToUnity();
        }

        // Compute UV mapping
        InitializeHandMeshUVs(neutralPoseVertices);
    }

    if (handMeshObserver != null && handMeshTriangleIndices != null)
    {
        // Per-frame path: fetch the vertices for the current pose and transform
        // them into the app's spatial coordinate system.
        var vertexAndNormals = new HandMeshVertex[handMeshObserver.VertexCount];
        var handMeshVertexState = handMeshObserver.GetVertexStateForPose(handPose);
        handMeshVertexState.GetVertices(vertexAndNormals);

        var meshTransform = handMeshVertexState.CoordinateSystem.TryGetTransformTo(spatialCoordinateSystem);
        if (meshTransform.HasValue)
        {
            System.Numerics.Matrix4x4.Decompose(meshTransform.Value, out var scale, out var rotation, out var translation);

            var handMeshVertices = new Vector3[handMeshObserver.VertexCount];
            var handMeshNormals = new Vector3[handMeshObserver.VertexCount];
            for (int i = 0; i < handMeshObserver.VertexCount; i++)
            {
                handMeshVertices[i] = vertexAndNormals[i].Position.ToUnity();
                handMeshNormals[i] = vertexAndNormals[i].Normal.ToUnity();
            }

            data = new HandMeshData(
                handMeshVertices,
                handMeshTriangleIndices,
                handMeshNormals,
                handMeshUVs);
            return true;
        }
    }

    return false;
}
/// <summary>
/// Requests a hand mesh observer from the platform for the given source and
/// stores it in <c>handMeshObserver</c> once the request completes.
/// Fire-and-forget: callers do not await this.
/// NOTE(review): as an async void method, an exception thrown here would be
/// unobservable — confirm TryCreateHandMeshObserverAsync cannot throw in practice.
/// </summary>
/// <param name="sourceState">Source state whose hand the observer is created for.</param>
private async void SetHandMeshObserver(SpatialInteractionSourceState sourceState) =>
    handMeshObserver = await sourceState.Source.TryCreateHandMeshObserverAsync();
/// <summary>
/// Updates (or clears) the rendered hand mesh from the given source state and
/// notifies the input system. Work is skipped entirely unless the hand tracking
/// profile enables hand mesh visualization.
/// </summary>
/// <param name="sourceState">Platform-provided current state of the hand.</param>
public void UpdateHandMesh(SpatialInteractionSourceState sourceState)
{
    using (UpdateHandMeshPerfMarker.Auto())
    {
        MixedRealityHandTrackingProfile handTrackingProfile = null;
        MixedRealityInputSystemProfile inputSystemProfile = CoreServices.InputSystem?.InputSystemProfile;
        if (inputSystemProfile != null)
        {
            handTrackingProfile = inputSystemProfile.HandTrackingProfile;
        }

        if (handTrackingProfile == null || !handTrackingProfile.EnableHandMeshVisualization)
        {
            // If hand mesh visualization is disabled make sure to destroy our hand mesh observer if it has already been created
            if (handMeshObserver != null)
            {
                // Notify that hand mesh has been updated (cleared) — an empty
                // HandMeshInfo signals listeners to drop the previous mesh.
                HandMeshInfo handMeshInfo = new HandMeshInfo();
                CoreServices.InputSystem?.RaiseHandMeshUpdated(InputSource, Handedness, handMeshInfo);
                hasRequestedHandMeshObserver = false;
                handMeshObserver = null;
            }
            return;
        }

        HandPose handPose = sourceState.TryGetHandPose();

        // Accessing the hand mesh data involves copying quite a bit of data, so only do it if application requests it.
        // The observer is created asynchronously; until it arrives the blocks below are skipped.
        if (handMeshObserver == null && !hasRequestedHandMeshObserver)
        {
            SetHandMeshObserver(sourceState);
            hasRequestedHandMeshObserver = true;
        }

        if (handMeshObserver != null && handPose != null)
        {
            if (handMeshTriangleIndices == null)
            {
                // One-time initialization: cache triangle indices (both raw and as
                // Unity int[]) and compute UVs from the neutral pose.
                handMeshTriangleIndices = new ushort[handMeshObserver.TriangleIndexCount];
                handMeshTriangleIndicesUnity = new int[handMeshObserver.TriangleIndexCount];
                handMeshObserver.GetTriangleIndices(handMeshTriangleIndices);
                Array.Copy(handMeshTriangleIndices, handMeshTriangleIndicesUnity, (int)handMeshObserver.TriangleIndexCount);

                // Compute neutral pose
                Vector3[] neutralPoseVertices = new Vector3[handMeshObserver.VertexCount];
                HandPose neutralPose = handMeshObserver.NeutralPose;
                var neutralVertexAndNormals = new HandMeshVertex[handMeshObserver.VertexCount];
                HandMeshVertexState handMeshVertexState = handMeshObserver.GetVertexStateForPose(neutralPose);
                handMeshVertexState.GetVertices(neutralVertexAndNormals);

                Parallel.For(0, handMeshObserver.VertexCount, i =>
                {
                    neutralVertexAndNormals[i].Position.ConvertToUnityVector3(ref neutralPoseVertices[i]);
                });

                // Compute UV mapping
                InitializeUVs(neutralPoseVertices);
            }

            // Lazily allocate the reusable per-frame buffers.
            if (vertexAndNormals == null)
            {
                vertexAndNormals = new HandMeshVertex[handMeshObserver.VertexCount];
                handMeshVerticesUnity = new Vector3[handMeshObserver.VertexCount];
                handMeshNormalsUnity = new Vector3[handMeshObserver.VertexCount];
            }

            if (vertexAndNormals != null && handMeshTriangleIndices != null)
            {
                var handMeshVertexState = handMeshObserver.GetVertexStateForPose(handPose);
                handMeshVertexState.GetVertices(vertexAndNormals);

                var meshTransform = handMeshVertexState.CoordinateSystem.TryGetTransformTo(WindowsMixedRealityUtilities.SpatialCoordinateSystem);
                if (meshTransform.HasValue)
                {
                    System.Numerics.Matrix4x4.Decompose(meshTransform.Value,
                        out System.Numerics.Vector3 scale,
                        out System.Numerics.Quaternion rotation,
                        out System.Numerics.Vector3 translation);

                    Parallel.For(0, handMeshObserver.VertexCount, i =>
                    {
                        vertexAndNormals[i].Position.ConvertToUnityVector3(ref handMeshVerticesUnity[i]);
                        vertexAndNormals[i].Normal.ConvertToUnityVector3(ref handMeshNormalsUnity[i]);
                    });

                    // Hands should follow the Playspace to accommodate teleporting, so fold in the Playspace transform.
                    Vector3 positionUnity = MixedRealityPlayspace.TransformPoint(translation.ToUnityVector3());
                    Quaternion rotationUnity = MixedRealityPlayspace.Rotation * rotation.ToUnityQuaternion();

                    HandMeshInfo handMeshInfo = new HandMeshInfo
                    {
                        vertices = handMeshVerticesUnity,
                        normals = handMeshNormalsUnity,
                        triangles = handMeshTriangleIndicesUnity,
                        uvs = handMeshUVsUnity,
                        position = positionUnity,
                        rotation = rotationUnity
                    };

                    CoreServices.InputSystem?.RaiseHandMeshUpdated(InputSource, Handedness, handMeshInfo);
                }
            }
        }
    }
}
/// <summary>
/// Updates the application state once per frame.
/// </summary>
/// <returns>The holographic frame created for this update, used later to obtain
/// view/projection matrices and to present the swap chain.</returns>
public HolographicFrame Update()
{
    // Before doing the timer update, there is some work to do per-frame
    // to maintain holographic rendering. The app begins each new frame by
    // calling CreateNextFrame; the returned HolographicFrame carries the
    // information needed to update and render the current frame.
    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Get a prediction of where holographic cameras will be when this frame
    // is presented.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers can change from frame to frame. Validate each buffer, and recreate
    // resource views and depth buffers as needed.
    deviceResources.EnsureCameraResources(holographicFrame, prediction);

    // Get a coordinate system from the attached frame of reference, used for
    // positioning content for this frame's timestamp.
    SpatialCoordinateSystem referenceFrameCoordinateSystem = attachedReferenceFrame.GetStationaryCoordinateSystemAtTimestamp(prediction.Timestamp);

    var previousNode = currentNode;
    currentNode = UpdateCurrentNode(referenceFrameCoordinateSystem, prediction.Timestamp);
    // Fix: removed an unused SpatialPointerPose lookup that ran when the current
    // node changed; its result was never read.

    if (targetNode != null)
    {
        RebuildTrailToTarget(referenceFrameCoordinateSystem, prediction.Timestamp, currentNode, targetNode);
    }

    // Asynchronous input gesture since the last frame selects a new target.
    SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
    if (null != pointerState)
    {
        Debug.WriteLine($"Setting target {nodes[1].Name}");
        targetNode = nodes[1];
    }

    timer.Tick(() =>
    {
        // Track how long the user has been at the current node.
        if (currentNode != previousNode)
        {
            dwellTimeAtCurrentNode = 0;
        }
        else
        {
            dwellTimeAtCurrentNode += timer.ElapsedSeconds;
        }

        // Iterate back-to-front so each entity can face its successor in the trail.
        for (var entityIndex = entities.Count - 1; entityIndex >= 0; entityIndex--)
        {
            var entity = entities[entityIndex];

            // Update rotation of the current entity to face the previous one,
            // using only the horizontal (XZ-plane) offset.
            if (entityIndex != entities.Count - 1)
            {
                var previousEntity = entities[entityIndex + 1];
                var previousEntityPosition = previousEntity.Node.TryGetTransformedPosition(referenceFrameCoordinateSystem);
                var currentEntityPosition = entity.Node.TryGetTransformedPosition(referenceFrameCoordinateSystem);
                if (previousEntityPosition.HasValue && currentEntityPosition.HasValue)
                {
                    var tV = previousEntityPosition.Value;
                    var sV = currentEntityPosition.Value;
                    tV.Y = sV.Y = 0;
                    var diff = sV - tV;
                    var yAngle = Math.Atan2(diff.X, diff.Z);
                    entity.EulerAngles = new Vector3(0, (float)(yAngle * (180 / Math.PI)), 0);
                }
            }

            entity.Update(timer, referenceFrameCoordinateSystem);
        }
    });

    // We complete the frame update by using information about our content positioning
    // to set the focus point.
    foreach (var cameraPose in prediction.CameraPoses)
    {
        // The HolographicCameraRenderingParameters class provides access to set
        // the image stabilization parameters. SetFocusPoint would be called here,
        // per camera, near the content the user is looking at.
        // (Removed a large block of commented-out SetFocusPoint sample code.)
        HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);
    }

    // The holographic frame will be used to get up-to-date view and projection
    // matrices and to present the swap chain.
    return holographicFrame;
}
/// <summary>
/// Updates the current hand mesh based on the passed in state of the hand and
/// raises a hand mesh updated event. Work is skipped unless the hand tracking
/// profile enables hand mesh visualization.
/// </summary>
/// <param name="sourceState">The current hand state.</param>
public void UpdateHandMesh(SpatialInteractionSourceState sourceState)
{
    MixedRealityHandTrackingProfile handTrackingProfile = null;
    MixedRealityInputSystemProfile inputSystemProfile = CoreServices.InputSystem?.InputSystemProfile;
    if (inputSystemProfile != null)
    {
        handTrackingProfile = inputSystemProfile.HandTrackingProfile;
    }

    if (handTrackingProfile == null || !handTrackingProfile.EnableHandMeshVisualization)
    {
        // If hand mesh visualization is disabled make sure to destroy our hand mesh observer if it has already been created
        if (handMeshObserver != null)
        {
            // Notify that hand mesh has been updated (cleared)
            HandMeshInfo handMeshInfo = new HandMeshInfo();
            CoreServices.InputSystem?.RaiseHandMeshUpdated(inputSource, handedness, handMeshInfo);
            hasRequestedHandMeshObserver = false;
            handMeshObserver = null;
        }
        return;
    }

    HandPose handPose = sourceState.TryGetHandPose();

    // Accessing the hand mesh data involves copying quite a bit of data, so only do it if application requests it.
    // The observer is created asynchronously; until it arrives the blocks below are skipped.
    if (handMeshObserver == null && !hasRequestedHandMeshObserver)
    {
        SetHandMeshObserver(sourceState);
        hasRequestedHandMeshObserver = true;
    }

    if (handMeshObserver != null && handMeshTriangleIndices == null)
    {
        // One-time initialization: cache the (static) triangle indices and compute
        // UVs from the observer's neutral pose.
        uint indexCount = handMeshObserver.TriangleIndexCount;
        ushort[] indices = new ushort[indexCount];
        handMeshObserver.GetTriangleIndices(indices);
        handMeshTriangleIndices = new int[indexCount];
        Array.Copy(indices, handMeshTriangleIndices, (int)indexCount);

        // Compute neutral pose
        Vector3[] neutralPoseVertices = new Vector3[handMeshObserver.VertexCount];
        HandPose neutralPose = handMeshObserver.NeutralPose;
        var vertexAndNormals = new HandMeshVertex[handMeshObserver.VertexCount];
        HandMeshVertexState handMeshVertexState = handMeshObserver.GetVertexStateForPose(neutralPose);
        handMeshVertexState.GetVertices(vertexAndNormals);

        for (int i = 0; i < handMeshObserver.VertexCount; i++)
        {
            neutralPoseVertices[i] = vertexAndNormals[i].Position.ToUnityVector3();
        }

        // Compute UV mapping
        InitializeUVs(neutralPoseVertices);
    }

    if (handPose != null && handMeshObserver != null && handMeshTriangleIndices != null)
    {
        var vertexAndNormals = new HandMeshVertex[handMeshObserver.VertexCount];
        var handMeshVertexState = handMeshObserver.GetVertexStateForPose(handPose);
        handMeshVertexState.GetVertices(vertexAndNormals);

        var meshTransform = handMeshVertexState.CoordinateSystem.TryGetTransformTo(WindowsMixedRealityUtilities.SpatialCoordinateSystem);
        if (meshTransform.HasValue)
        {
            // Idiom: inline out-variable declarations (used elsewhere in this file)
            // instead of pre-declared locals; the scale component is unused, so
            // discard it.
            System.Numerics.Matrix4x4.Decompose(meshTransform.Value,
                out _,
                out System.Numerics.Quaternion rotation,
                out System.Numerics.Vector3 translation);

            var handMeshVertices = new Vector3[handMeshObserver.VertexCount];
            var handMeshNormals = new Vector3[handMeshObserver.VertexCount];
            for (int i = 0; i < handMeshObserver.VertexCount; i++)
            {
                handMeshVertices[i] = vertexAndNormals[i].Position.ToUnityVector3();
                handMeshNormals[i] = vertexAndNormals[i].Normal.ToUnityVector3();
            }

            HandMeshInfo handMeshInfo = new HandMeshInfo
            {
                vertices = handMeshVertices,
                normals = handMeshNormals,
                triangles = handMeshTriangleIndices,
                uvs = handMeshUVs,
                position = translation.ToUnityVector3(),
                rotation = rotation.ToUnityQuaternion()
            };

            CoreServices.InputSystem?.RaiseHandMeshUpdated(inputSource, handedness, handMeshInfo);
        }
    }
}
/// <summary>
/// Reads tracking data (position/rotation) from the platform source state into
/// the current controller pose and raises the matching input system events
/// whenever tracking state or pose changed.
/// </summary>
/// <param name="spatialInteractionSourceState">Platform source state to read from.</param>
private void UpdateControllerData(SpatialInteractionSourceState spatialInteractionSourceState)
{
    var previousTrackingState = TrackingState;
    var kind = spatialInteractionSourceState.Source.Kind;

    // Remember the previous pose so events are only raised on actual changes.
    lastControllerPose = currentControllerPose;

    // Hands, and controllers that support pointing, can report a spatial location.
    bool supportsTracking =
        kind == SpatialInteractionSourceKind.Hand ||
        (kind == SpatialInteractionSourceKind.Controller && spatialInteractionSourceState.Source.IsPointingSupported);

    if (supportsTracking)
    {
        var location = spatialInteractionSourceState.Properties.TryGetLocation(WindowsMixedRealityUtilities.SpatialCoordinateSystem);

        IsPositionAvailable = location != null && location.Position.HasValue;
        if (IsPositionAvailable)
        {
            currentControllerPosition = location.Position.Value.ToUnity();
            IsPositionApproximate = location.PositionAccuracy == SpatialInteractionSourcePositionAccuracy.Approximate;
        }
        else
        {
            IsPositionApproximate = false;
        }

        IsRotationAvailable = location != null && location.Orientation.HasValue;
        if (IsRotationAvailable)
        {
            currentControllerRotation = location.Orientation.Value.ToUnity();
        }

        // A device counts as tracked when the sensors deliver position OR rotation.
        TrackingState = (IsPositionAvailable || IsRotationAvailable)
            ? TrackingState.Tracked
            : TrackingState.NotTracked;
    }
    else
    {
        // The input source does not support tracking.
        TrackingState = TrackingState.NotApplicable;
    }

    currentControllerPose.Position = currentControllerPosition;
    currentControllerPose.Rotation = currentControllerRotation;

    // Raise input system events if it is enabled.
    if (previousTrackingState != TrackingState)
    {
        InputSystem?.RaiseSourceTrackingStateChanged(InputSource, this, TrackingState);
    }

    if (TrackingState == TrackingState.Tracked && lastControllerPose != currentControllerPose)
    {
        // Prefer the full pose event; fall back to position-only or rotation-only.
        if (IsPositionAvailable)
        {
            if (IsRotationAvailable)
            {
                InputSystem?.RaiseSourcePoseChanged(InputSource, this, currentControllerPose);
            }
            else
            {
                InputSystem?.RaiseSourcePositionChanged(InputSource, this, currentControllerPosition);
            }
        }
        else if (IsRotationAvailable)
        {
            InputSystem?.RaiseSourceRotationChanged(InputSource, this, currentControllerRotation);
        }
    }
}