/// <summary>
/// Updates the application state once per frame.
/// </summary>
/// <returns>
/// The HolographicFrame created for this update; the caller uses it to obtain
/// up-to-date view/projection matrices and to present the swap chain.
/// </returns>
public HolographicFrame Update()
{
    // The HolographicFrame has information that the app needs in order
    // to update and render the current frame. The app begins each new
    // frame by calling CreateNextFrame.
    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Get a prediction of where holographic cameras will be when this frame
    // is presented.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers can change from frame to frame. Validate each buffer, and
    // recreate resource views and depth buffers as needed.
    deviceResources.EnsureCameraResources(holographicFrame, prediction);

    // Get a coordinate system from the attached frame of reference that is
    // associated with the current frame. It is used later for creating the
    // stereo view matrices when rendering the sample content.
    // NOTE(review): unused in this template body; kept because scene code is
    // expected to consume it and the property read is side-effect free.
    SpatialCoordinateSystem currentCoordinateSystem = referenceFrame.CoordinateSystem;

    timer.Tick(() =>
    {
        //
        // TODO: Update scene objects.
        //
        // Put time-based updates here. By default this code will run once per
        // frame, but if you change the StepTimer to use a fixed time step this
        // code will run as many times as needed to get to the current step.
        //
    });

    // Fix: the original enumerated prediction.CameraPoses with an empty body —
    // dead template leftover with no side effects, removed. Re-add a per-camera
    // loop here if you need GetRenderingParameters/SetFocusPoint.

    // The holographic frame will be used to get up-to-date view and projection
    // matrices and to present the swap chain.
    return holographicFrame;
}
/// <summary>
/// Updates the application state once per frame: processes the camera frame,
/// repositions the quad and shows a status message while no face has been
/// detected recently, and sets the image-stabilization focus point.
/// </summary>
public HolographicFrame Update()
{
    // Begin the new frame; the HolographicFrame carries the prediction and
    // rendering parameters needed for this update/render cycle.
    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Where will the holographic cameras be when this frame is presented?
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers may change between frames; (re)create camera resources as needed.
    deviceResources.EnsureCameraResources(holographicFrame, prediction);

    // Coordinate system used later for the stereo view matrices.
    SpatialCoordinateSystem coordinateSystem = referenceFrame.CoordinateSystem;

    // Head pose at the predicted presentation time; may be null when unavailable.
    SpatialPointerPose headPose = SpatialPointerPose.TryGetAtTimestamp(coordinateSystem, prediction.Timestamp);

    ProcessFrame(coordinateSystem);

    // When no face has been seen within the threshold, park the quad two meters
    // in front of the user's head and render the status text.
    if (Utils.GetCurrentUnixTimestampMillis() - lastFaceDetectedTimestamp > faceTimeThreshold)
    {
        if (headPose != null)
        {
            quadRenderer.TargetPosition = headPose.Head.Position + (2.0f * headPose.Head.ForwardDirection);
        }

        textRenderer.RenderTextOffscreen("No faces detected");
    }

    timer.Tick(() =>
    {
        // Time-based scene updates go here. With the default StepTimer this runs
        // once per frame; with a fixed time step it may run several times.
        // quadRenderer.Update(headPose, timer);
    });

    // Complete the frame update by telling the system where to stabilize the image.
    foreach (var cameraPose in prediction.CameraPoses)
    {
        // Image stabilization parameters are set independently per camera.
        HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

        // Only provide a focus point while a face was seen recently; it sits on
        // the quad (the content the user is looking at), with its velocity.
        if (Utils.GetCurrentUnixTimestampMillis() - lastFaceDetectedTimestamp <= faceTimeThreshold)
        {
            renderingParameters.SetFocusPoint(
                coordinateSystem,
                quadRenderer.Position,
                quadRenderer.Forward,
                quadRenderer.Velocity);
        }
    }

    // The caller uses the frame for view/projection matrices and to present.
    return holographicFrame;
}
/// <summary>
/// Updates the application state once per frame: polls gamepad/spatial input,
/// repositions the sample hologram on a press, advances scene objects, and —
/// on platforms without CommitDirect3D11DepthBuffer — sets a focus point.
/// </summary>
/// <returns>The new HolographicFrame, used later to render and present.</returns>
public HolographicFrame Update()
{
    // The HolographicFrame has information that the app needs in order
    // to update and render the current frame. The app begins each new
    // frame by calling CreateNextFrame.
    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Get a prediction of where holographic cameras will be when this frame
    // is presented.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers can change from frame to frame. Validate each buffer, and recreate
    // resource views and depth buffers as needed.
    deviceResources.EnsureCameraResources(holographicFrame, prediction);

#if DRAW_SAMPLE_CONTENT
    if (stationaryReferenceFrame != null)
    {
        // Edge-detect the gamepad A button: latch pointerPressed only on the
        // released -> pressed transition so a held button does not retrigger.
        for (int i = 0; i < gamepads.Count; ++i)
        {
            bool buttonDownThisUpdate = (gamepads[i].gamepad.GetCurrentReading().Buttons & GamepadButtons.A) == GamepadButtons.A;
            if (buttonDownThisUpdate && !gamepads[i].buttonAWasPressedLastFrame)
            {
                pointerPressed = true;
            }
            gamepads[i].buttonAWasPressedLastFrame = buttonDownThisUpdate;
        }

        // Check for new spatial input state since the last frame.
        SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
        SpatialPointerPose pose = null;
        if (null != pointerState)
        {
            // Prefer the pose carried by the spatial interaction itself.
            pose = pointerState.TryGetPointerPose(stationaryReferenceFrame.CoordinateSystem);
        }
        else if (pointerPressed)
        {
            // Gamepad press: fall back to the head pose at the predicted frame time.
            pose = SpatialPointerPose.TryGetAtTimestamp(stationaryReferenceFrame.CoordinateSystem, prediction.Timestamp);
        }
        // Consume the latched press regardless of whether a pose was obtained.
        pointerPressed = false;

        // When a Pressed gesture is detected, the sample hologram will be
        // repositioned two meters in front of the user. NOTE(review): pose may
        // be null here — presumably PositionHologram tolerates that; confirm
        // against its implementation.
        spinningCubeRenderer.PositionHologram(pose);
    }
#endif

    timer.Tick(() =>
    {
        //
        // TODO: Update scene objects.
        //
        // Put time-based updates here. By default this code will run once per frame,
        // but if you change the StepTimer to use a fixed time step this code will
        // run as many times as needed to get to the current step.
        //
#if DRAW_SAMPLE_CONTENT
        spinningCubeRenderer.Update(timer);
#endif
    });

    if (!canCommitDirect3D11DepthBuffer)
    {
        // On versions of the platform that do not support the
        // CommitDirect3D11DepthBuffer API, we can control image stabilization
        // by setting a focus point with optional plane normal and velocity.
        foreach (var cameraPose in prediction.CameraPoses)
        {
#if DRAW_SAMPLE_CONTENT
            // The HolographicCameraRenderingParameters class provides access to set
            // the image stabilization parameters.
            HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

            // SetFocusPoint informs the system about a specific point in your scene to
            // prioritize for image stabilization. The focus point is set independently
            // for each holographic camera. Put it on or near content the user is
            // looking at — here, the center of the sample hologram.
            if (stationaryReferenceFrame != null)
            {
                renderingParameters.SetFocusPoint(
                    stationaryReferenceFrame.CoordinateSystem,
                    spinningCubeRenderer.Position
                );
            }
#endif
        }
    }

    // The holographic frame will be used to get up-to-date view and projection
    // matrices and to present the swap chain.
    return(holographicFrame);
}
/// <summary>
/// Updates the application state once per frame: exits to 2D when playback
/// ends, maps spatial gestures to playback actions, advances the video
/// renderer, and sets the image-stabilization focus point on the video.
/// </summary>
/// <returns>The new HolographicFrame, used later to render and present.</returns>
public HolographicFrame Update()
{
    // When playback reaches the end of the stream, return to the 2D view.
    // NOTE(review): SwitchTo2DViewAsync is fire-and-forget (not awaited), so
    // failures are unobserved — confirm this is intentional.
    if (MediaPlayer.IsEndOfStream)
    {
        ViewManagement.SwitchTo2DViewAsync();
    }

    // The HolographicFrame has information that the app needs in order to
    // update and render the current frame; each frame begins with CreateNextFrame.
    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Get a prediction of where holographic cameras will be when this frame
    // is presented.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers can change from frame to frame. Validate each buffer, and
    // recreate resource views and depth buffers as needed.
    deviceResources.EnsureCameraResources(holographicFrame, prediction);

    // Coordinate system from the attached frame of reference; used later for
    // creating the stereo view matrices when rendering the content.
    SpatialCoordinateSystem currentCoordinateSystem = referenceFrame.CoordinateSystem;

    // Map the recognized gesture to a playback action. Fix: the original listed
    // every SpatialGestureSettings member with an empty "break;" body; those
    // dead cases were folded into the default arm — behavior is unchanged.
    var gestureStatus = spatialInputHandler.CheckGestureStatus();
    switch (gestureStatus)
    {
        case SpatialGestureSettings.Tap:
            videoRenderer.Pause();
            break;
        case SpatialGestureSettings.DoubleTap:
            // Stop playback and leave the holographic view.
            videoRenderer.Stop();
            ViewManagement.SwitchTo2DViewAsync();
            break;
        case SpatialGestureSettings.Hold:
            // Fast-forward proportionally to how long the hold has lasted.
            videoRenderer.FastForward(spatialInputHandler.HoldTotalTime.TotalSeconds);
            break;
        default:
            break;
    }

    timer.Tick(() =>
    {
        // Time-based scene updates. By default this runs once per frame; with a
        // fixed-step StepTimer it may run several times to catch up.
        videoRenderer.Update(timer);
    });

    // Complete the frame update by using the content position to set the focus point.
    foreach (var cameraPose in prediction.CameraPoses)
    {
        // Image stabilization parameters are set independently per camera.
        HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

        // Prioritize stabilization at the video surface — the only hologram the
        // user can focus on. It is static, so position alone suffices.
        renderingParameters.SetFocusPoint(
            currentCoordinateSystem,
            videoRenderer.Position
        );
    }

    // The holographic frame will be used to get up-to-date view and projection
    // matrices and to present the swap chain.
    return holographicFrame;
}
/// <summary>
/// Updates the application state once per frame: handles press input to
/// reposition the sample hologram, advances the cube and spatial-surface
/// renderers, and sets the image-stabilization focus point.
/// </summary>
public HolographicFrame Update()
{
    // Each frame starts by asking the holographic space for a new frame object,
    // which carries everything needed to update and render this frame.
    HolographicFrame frame = holographicSpace.CreateNextFrame();

    // Predicted camera locations at presentation time.
    HolographicFramePrediction prediction = frame.CurrentPrediction;

    // Back buffers can change between frames; revalidate views and depth buffers.
    deviceResources.EnsureCameraResources(frame, prediction);

    // Coordinate system of the attached frame of reference; used later to build
    // the stereo view matrices for the sample content.
    SpatialCoordinateSystem coordinateSystem = referenceFrame.CoordinateSystem;

#if DRAW_SAMPLE_CONTENT
    // Was there new input since the last frame?
    SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
    if (pointerState != null)
    {
        // A Pressed gesture repositions the sample hologram two meters in
        // front of the user.
        spinningCubeRenderer.PositionHologram(pointerState.TryGetPointerPose(coordinateSystem));
    }
#endif

    timer.Tick(() =>
    {
        // Time-based scene updates; runs once per frame by default, possibly
        // several times with a fixed-step StepTimer.
#if DRAW_SAMPLE_CONTENT
        spinningCubeRenderer.Update(timer);
        _spatialSurfaceRenderer.Update();
#endif
    });

    // Finish the update by telling the system where to prioritize stabilization.
    foreach (var cameraPose in prediction.CameraPoses)
    {
#if DRAW_SAMPLE_CONTENT
        // Image stabilization parameters are per-camera.
        HolographicCameraRenderingParameters renderingParameters = frame.GetRenderingParameters(cameraPose);

        // The cube is the only hologram available to focus on; it is stationary,
        // so its position alone is enough.
        renderingParameters.SetFocusPoint(coordinateSystem, spinningCubeRenderer.Position);
#endif
    }

    // The caller uses the frame for view/projection matrices and presenting.
    return frame;
}
/// <summary>
/// Updates the application state once per frame. Synchronizes to the optimal
/// frame start time, creates the next HolographicFrame, processes gamepad and
/// spatial input, advances the quad renderers, and sets a focus point.
/// </summary>
/// <param name="previousFrame">
/// The frame returned by the previous call, or null on the first frame; used
/// as a fallback synchronization point when WaitForNextFrameReady is
/// unavailable.
/// </param>
/// <returns>The new HolographicFrame, used later to render and present.</returns>
public HolographicFrame Update(HolographicFrame previousFrame)
{
    // TODO: Put CPU work that does not depend on the HolographicCameraPose here.

    // Apps should wait for the optimal time to begin pose-dependent work. The
    // platform will automatically adjust the wakeup time to get the lowest
    // possible latency at high frame rates. For manual control over latency,
    // use the WaitForNextFrameReadyWithHeadStart API.
    // WaitForNextFrameReady and WaitForNextFrameReadyWithHeadStart are the
    // preferred frame synchronization APIs for Windows Mixed Reality. When
    // running on older versions of the OS that do not include support for
    // these APIs, your app can use the WaitForFrameToFinish API for similar
    // (but not as optimal) behavior.
    if (canUseWaitForNextFrameReadyAPI)
    {
        try
        {
            holographicSpace.WaitForNextFrameReady();
        }
        catch (NotImplementedException)
        {
            // Catch a specific case where WaitForNextFrameReady() is present but
            // not implemented, and permanently default back to
            // WaitForFrameToFinish() in that case.
            canUseWaitForNextFrameReadyAPI = false;
        }
    }
    else if (previousFrame != null)
    {
        previousFrame.WaitForFrameToFinish();
    }

    // The HolographicFrame has information that the app needs in order
    // to update and render the current frame. The app begins each new
    // frame by calling CreateNextFrame.
    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Get a prediction of where holographic cameras will be when this frame
    // is presented.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers can change from frame to frame. Validate each buffer, and recreate
    // resource views and depth buffers as needed.
    deviceResources.EnsureCameraResources(holographicFrame, prediction);

#if DRAW_SAMPLE_CONTENT
    if (stationaryReferenceFrame != null)
    {
        // Edge-detect the gamepad A button: latch pointerPressed only on the
        // released -> pressed transition so a held button does not retrigger.
        for (int i = 0; i < gamepads.Count; ++i)
        {
            bool buttonDownThisUpdate = (gamepads[i].gamepad.GetCurrentReading().Buttons & GamepadButtons.A) == GamepadButtons.A;
            if (buttonDownThisUpdate && !gamepads[i].buttonAWasPressedLastFrame)
            {
                pointerPressed = true;
            }
            gamepads[i].buttonAWasPressedLastFrame = buttonDownThisUpdate;
        }

        // Check for new spatial input state since the last frame.
        SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
        SpatialPointerPose pose = null;
        if (null != pointerState)
        {
            // Prefer the pose carried by the spatial interaction itself.
            pose = pointerState.TryGetPointerPose(stationaryReferenceFrame.CoordinateSystem);
        }
        else if (pointerPressed)
        {
            // Gamepad press: fall back to the head pose at the predicted frame time.
            pose = SpatialPointerPose.TryGetAtTimestamp(stationaryReferenceFrame.CoordinateSystem, prediction.Timestamp);
        }
        // Consume the latched press regardless of whether a pose was obtained.
        pointerPressed = false;

        // When a Pressed gesture is detected, the sample holograms (left and
        // right quads) will be repositioned two meters in front of the user.
        // NOTE(review): pose may be null here — presumably PositionHologram
        // tolerates that; confirm against its implementation.
        quadRendererR.PositionHologram(pose);
        quadRendererL.PositionHologram(pose);
    }
#endif

    timer.Tick(() =>
    {
        //
        // TODO: Update scene objects.
        //
        // Put time-based updates here. By default this code will run once per frame,
        // but if you change the StepTimer to use a fixed time step this code will
        // run as many times as needed to get to the current step.
        //
#if DRAW_SAMPLE_CONTENT
        quadRendererR.Update(timer);
        quadRendererL.Update(timer);
#endif
    });

    // On HoloLens 2, the platform can achieve better image stabilization results
    // if it has a stabilization plane and a depth buffer.
    // Note that the SetFocusPoint API includes an override which takes velocity
    // as a parameter. This is recommended for stabilizing holograms in motion.
    foreach (var cameraPose in prediction.CameraPoses)
    {
#if DRAW_SAMPLE_CONTENT
        // The HolographicCameraRenderingParameters class provides access to set
        // the image stabilization parameters.
        HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

        // SetFocusPoint informs the system about a specific point in your scene to
        // prioritize for image stabilization. The focus point is set independently
        // for each holographic camera. When setting the focus point, put it on or
        // near content that the user is looking at — here the origin of the
        // stationary frame, where the sample content is centered.
        if (stationaryReferenceFrame != null)
        {
            renderingParameters.SetFocusPoint(
                stationaryReferenceFrame.CoordinateSystem,
                new System.Numerics.Vector3(0, 0, 0)
            );
        }
#endif
    }

    // The holographic frame will be used to get up-to-date view and projection
    // matrices and to present the swap chain.
    return(holographicFrame);
}
/// <summary>
/// Updates the application state once per frame: repositions the sample
/// hologram on press input, advances the cube renderer, and sets a fixed
/// image-stabilization focus point.
/// </summary>
/// <returns>The new HolographicFrame, used later to render and present.</returns>
public HolographicFrame Update()
{
    // The HolographicFrame has information that the app needs in order
    // to update and render the current frame. The app begins each new
    // frame by calling CreateNextFrame.
    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Get a prediction of where holographic cameras will be when this frame
    // is presented.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers can change from frame to frame. Validate each buffer, and recreate
    // resource views and depth buffers as needed.
    deviceResources.EnsureCameraResources(holographicFrame, prediction);

    // Coordinate system used later for creating the stereo view matrices.
    // NOTE(review): this deliberately uses the body-attached reference frame's
    // stationary coordinate system at the predicted timestamp instead of the
    // (commented-out) stationary referenceFrame — confirm which behavior is
    // intended. Field name "attachreferenceFrame" looks like a typo for
    // "attachedReferenceFrame" but cannot be renamed from this method alone.
    SpatialCoordinateSystem currentCoordinateSystem = attachreferenceFrame.GetStationaryCoordinateSystemAtTimestamp(prediction.Timestamp);//referenceFrame.CoordinateSystem;

#if DRAW_SAMPLE_CONTENT
    // Check for new input state since the last frame.
    SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
    if (null != pointerState)
    {
        // When a Pressed gesture is detected, the sample hologram will be
        // repositioned two meters in front of the user. The head pose at the
        // predicted timestamp is used rather than the interaction's own
        // pointer pose (original call kept commented for reference).
        spinningCubeRenderer.PositionHologram(
            // pointerState.TryGetPointerPose(currentCoordinateSystem)
            SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp)
        );
    }

    // NOTE(review): commented-out keyboard/view-switch experiment retained
    // from development; consider deleting once no longer needed.
    //var downstate = Windows.UI.Core.CoreVirtualKeyStates.Down;
    //bool rightclick = (Windows.UI.Core.CoreWindow.GetForCurrentThread().GetKeyState(Windows.System.VirtualKey.Escape) & downstate) == downstate;
    //System.Diagnostics.Debug.WriteLine("Windows.System.VirtualKey.Escape " + Windows.UI.Core.CoreWindow.GetForCurrentThread().GetKeyState(Windows.System.VirtualKey.Escape).ToString() + " downstate" + downstate);
    //System.Diagnostics.Debug.WriteLine("Windows.System.VirtualKey.A " + Windows.UI.Core.CoreWindow.GetForCurrentThread().GetKeyState(Windows.System.VirtualKey.A).ToString() + " downstate" + downstate);
    //if (rightclick)
    //{
    //    Windows.UI.ViewManagement.ApplicationViewSwitcher.SwitchAsync(VideoGallery.mainId, VideoGallery.appId, Windows.UI.ViewManagement.ApplicationViewSwitchingOptions.ConsolidateViews);
    //}
#endif

    timer.Tick(() =>
    {
        //
        // TODO: Update scene objects.
        //
        // Put time-based updates here. By default this code will run once per frame,
        // but if you change the StepTimer to use a fixed time step this code will
        // run as many times as needed to get to the current step.
        //
#if DRAW_SAMPLE_CONTENT
        spinningCubeRenderer.Update(timer);
#endif
    });

    // We complete the frame update by using information about our content
    // positioning to set the focus point.
    foreach (var cameraPose in prediction.CameraPoses)
    {
#if DRAW_SAMPLE_CONTENT
        // The HolographicCameraRenderingParameters class provides access to set
        // the image stabilization parameters.
        HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

        // SetFocusPoint informs the system about a specific point in your scene
        // to prioritize for image stabilization; it is set per camera.
        // NOTE(review): hard-coded 3 m in front of the frame origin instead of
        // the cube's position (kept commented) — presumably matches where the
        // content is placed; confirm.
        Vector3 position = new Vector3(0.0f, 0.0f, -3.0f);
        renderingParameters.SetFocusPoint(
            currentCoordinateSystem,
            position /*spinningCubeRenderer.Position*/
        );
#endif
    }

    // The holographic frame will be used to get up-to-date view and projection
    // matrices and to present the swap chain.
    return(holographicFrame);
}
/// <summary>
/// Updates the application state once per frame: tracks the node the user
/// resides in, samples head pose/gaze, services any pending sighting request,
/// maintains the guidance trail to the target, and advances trail entities.
/// </summary>
/// <returns>The new HolographicFrame, used later to render and present.</returns>
public HolographicFrame Update()
{
    // The HolographicFrame has the information the app needs to update and
    // render the current frame; each frame starts with CreateNextFrame.
    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Predicted camera poses at presentation time.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers can change from frame to frame; revalidate camera resources.
    deviceResources.EnsureCameraResources(holographicFrame, prediction);

    // Stationary coordinate system at this frame's timestamp; used for node
    // tracking and for creating the stereo view matrices.
    SpatialCoordinateSystem referenceFrameCoordinateSystem = referenceFrame.GetStationaryCoordinateSystemAtTimestamp(prediction.Timestamp);

    // Update which node the user currently resides in.
    // Fix: the original also saved "var previousNode = CurrentNode;" but never
    // read it — unused local removed.
    CurrentNode = UpdateCurrentNode(referenceFrameCoordinateSystem, prediction.Timestamp, NodeRadius);

    // ... and sample the current gaze.
    SpatialPointerPose pose = SpatialPointerPose.TryGetAtTimestamp(referenceFrameCoordinateSystem, prediction.Timestamp);

    // Fix: TryGetAtTimestamp can return null (e.g. tracking loss); the original
    // dereferenced pose.Head unconditionally and would throw. All gaze-dependent
    // work is now skipped on frames without a valid pose (the pending
    // requestedSightingTerm is retained so it is retried next frame).
    if (pose != null)
    {
        NodePosition = pose.Head.Position;
        GazeForward = pose.Head.ForwardDirection;
        GazeUp = pose.Head.UpDirection;

        // Re-express position and gaze in the current node's anchor space when
        // a transform is available.
        var mat = referenceFrameCoordinateSystem.TryGetTransformTo(CurrentNode.Anchor.CoordinateSystem);
        if (mat.HasValue)
        {
            NodePosition = Vector3.Transform(NodePosition, mat.Value);
            GazeForward = Vector3.TransformNormal(GazeForward, mat.Value);
            GazeUp = Vector3.TransformNormal(GazeUp, mat.Value);
        }

        // Service a pending "find my item" request (requires a valid pose).
        if (!string.IsNullOrEmpty(requestedSightingTerm))
        {
            var candidates = FindClosestNodesWithSightedItem(referenceFrameCoordinateSystem, pose, requestedSightingTerm);
            if (candidates != null && candidates.Count > 0)
            {
                // Take the closest candidate node and the sighting whose tokens
                // matched the requested term (case-insensitive).
                targetNode = candidates[0];
                targetSighting = candidates[0].Sightings.Where(
                    sighting => sighting.Tokens.Any(
                        token => token.Equals(requestedSightingTerm, StringComparison.OrdinalIgnoreCase))).First();
            }
            requestedSightingTerm = string.Empty;
        }
    }

    // Arrived at the target node: after dwelling at least 5 seconds, consider
    // the assisted search complete and clear the trail.
    if (CurrentNode == targetNode)
    {
        if (dwellTimeAtCurrentNode >= 5)
        {
            targetNode = null;
            targetSighting = null;
            entities.Clear();
            Debug.WriteLine("Well done! Assisted the user find their item");
        }
    }

    // While a target exists, keep the guidance trail up to date.
    if (targetNode != null)
    {
        RebuildTrailToTarget(referenceFrameCoordinateSystem, prediction.Timestamp, CurrentNode, targetNode);
    }

    ProcessNextFrame();

    timer.Tick(() =>
    {
        // Accumulate dwell time and advance every trail entity.
        dwellTimeAtCurrentNode += timer.ElapsedSeconds;
        for (var entityIndex = 0; entityIndex < entities.Count; entityIndex++)
        {
            var entity = entities[entityIndex];
            entity.Update(timer, referenceFrameCoordinateSystem);
        }
    });

    // Fix: the original looped over prediction.CameraPoses only to fetch
    // HolographicCameraRenderingParameters into an unused local — dead code,
    // removed. Restore the loop if a focus point or depth-buffer commit is added.

    // The holographic frame will be used to get up-to-date view and projection
    // matrices and to present the swap chain.
    return holographicFrame;
}
/// <summary>
/// Updates the application state once per frame: tracks the user's current
/// node, rebuilds the trail toward the target, lets a press select the demo
/// target, and orients/updates the trail entities.
/// </summary>
/// <returns>The new HolographicFrame, used later to render and present.</returns>
public HolographicFrame Update()
{
    // Begin the frame; the HolographicFrame carries prediction + rendering state.
    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Predicted camera poses at presentation time.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers can change from frame to frame; revalidate camera resources.
    deviceResources.EnsureCameraResources(holographicFrame, prediction);

    // Stationary coordinate system at this frame's timestamp.
    SpatialCoordinateSystem referenceFrameCoordinateSystem = attachedReferenceFrame.GetStationaryCoordinateSystemAtTimestamp(prediction.Timestamp);

    // Track node transitions; the timer callback below compares against
    // previousNode to reset the dwell timer when the user changes node.
    var previousNode = currentNode;
    currentNode = UpdateCurrentNode(referenceFrameCoordinateSystem, prediction.Timestamp);

    // Fix: on a node change the original fetched a SpatialPointerPose into a
    // local that was never used — dead code (TryGetAtTimestamp is a query with
    // no side effects), removed.

    // While a target exists, keep the guidance trail up to date.
    if (targetNode != null)
    {
        RebuildTrailToTarget(referenceFrameCoordinateSystem, prediction.Timestamp, currentNode, targetNode);
    }

    // Any press selects nodes[1] as the demo target.
    SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
    if (pointerState != null)
    {
        Debug.WriteLine($"Setting target {nodes[1].Name}");
        targetNode = nodes[1];
    }

    timer.Tick(() =>
    {
        // Reset the dwell timer when the user enters a new node; otherwise
        // accumulate elapsed time at the current node.
        if (currentNode != previousNode)
        {
            dwellTimeAtCurrentNode = 0;
        }
        else
        {
            dwellTimeAtCurrentNode += timer.ElapsedSeconds;
        }

        // Walk the trail back-to-front so each entity can face its predecessor.
        for (var entityIndex = entities.Count - 1; entityIndex >= 0; entityIndex--)
        {
            var entity = entities[entityIndex];

            // Every entity except the last is yawed to point along the trail
            // toward the previous entity; Y is flattened so the rotation stays
            // in the horizontal plane.
            if (entityIndex != entities.Count - 1)
            {
                var previousEntity = entities[entityIndex + 1];
                var previousEntityPosition = previousEntity.Node.TryGetTransformedPosition(referenceFrameCoordinateSystem);
                var currentEntityPosition = entity.Node.TryGetTransformedPosition(referenceFrameCoordinateSystem);
                if (previousEntityPosition.HasValue && currentEntityPosition.HasValue)
                {
                    var tV = previousEntityPosition.Value;
                    var sV = currentEntityPosition.Value;
                    tV.Y = sV.Y = 0;
                    var diff = sV - tV;
                    var yAngle = Math.Atan2(diff.X, diff.Z);
                    entity.EulerAngles = new Vector3(0, (float)(yAngle * (180 / Math.PI)), 0);
                }
            }

            entity.Update(timer, referenceFrameCoordinateSystem);
        }
    });

    // Fix: the original iterated prediction.CameraPoses only to store the
    // rendering parameters in an unused local beside fully commented-out
    // SetFocusPoint code — dead code, removed. Restore the loop if image
    // stabilization focus points are reintroduced.

    // The holographic frame will be used to get up-to-date view and projection
    // matrices and to present the swap chain.
    return holographicFrame;
}
/// <summary>
/// Updates the application state once per frame: advances the holographic
/// frame, dispatches any pending key press from the input thread, ticks both
/// step timers, and sets the image-stabilization focus point per camera.
/// </summary>
/// <returns>
/// The <see cref="HolographicFrame"/> for this frame, used later to obtain
/// up-to-date view/projection matrices and to present the swap chain.
/// </returns>
public HolographicFrame Update()
{
    // The HolographicFrame has information that the app needs in order to
    // update and render the current frame. The app begins each new frame by
    // calling CreateNextFrame.
    HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

    // Get a prediction of where holographic cameras will be when this frame
    // is presented.
    HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

    // Back buffers can change from frame to frame. Validate each buffer, and
    // recreate resource views and depth buffers as needed.
    deviceResources.EnsureCameraResources(holographicFrame, prediction);

    // Coordinate system from the attached frame of reference associated with
    // the current frame; later used for the stereo view matrices and the
    // focus point below.
    SpatialCoordinateSystem currentCoordinateSystem = referenceFrame.CoordinateSystem;

    // Check for new input state since the last frame: snapshot and reset the
    // pending key under the mutex so the producer thread never observes a
    // half-updated (key, count) pair. The try/finally guarantees the mutex is
    // released even if a property accessor throws — without it a single
    // exception here would deadlock the input thread forever.
    // NOTE(review): KeyCount is assumed to be int (this method assigns it the
    // literal 0 and compares it with 0) — confirm against its declaration.
    Windows.System.VirtualKey key;
    int count;
    mutex.WaitOne();
    try
    {
        key = mainView.VirtualKey;
        count = mainView.KeyCount;
        mainView.KeyCount = 0;
        mainView.VirtualKey = Windows.System.VirtualKey.None;
    }
    finally
    {
        mutex.ReleaseMutex();
    }

    // Dispatch the key press outside the critical section so OnKeyPressed
    // cannot stall (or re-enter) the lock.
    if (key != Windows.System.VirtualKey.None && count > 0)
    {
        mainView.LastKey = key;
        mainView.OnKeyPressed(key);
    }

    // Time-based update (fixed or variable step, per the timer's config).
    timer1.Tick(() =>
    {
        mainView.Update(timer1);
    });

    // Gaze-based update: feed the view the latest head pose (may be null if
    // no pose is available at this timestamp).
    timer2.Tick(() =>
    {
        mainView.Update(SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp));
    });

    // We complete the frame update by using information about our content
    // positioning to set the focus point.
    foreach (var cameraPose in prediction.CameraPoses)
    {
        // The HolographicCameraRenderingParameters class provides access to
        // set the image stabilization parameters.
        HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

        // SetFocusPoint informs the system about a specific point in your
        // scene to prioritize for image stabilization. The focus point is set
        // independently for each holographic camera, and should be placed
        // near the content the user is looking at — here, the first pointer.
        if (mainView.Pointers[0] != null)
        {
            renderingParameters.SetFocusPoint(currentCoordinateSystem, mainView.Pointers[0].Position);
        }
    }

    // The holographic frame will be used to get up-to-date view and
    // projection matrices and to present the swap chain.
    return holographicFrame;
}