public override void Update()
        {
#if WINDOWS_UWP
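            // Bail out until a spatial coordinate system exists and the OS exposes the
            // eye gaze API (SpatialPointerPose.Eyes); both are required below.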
            if (WindowsMixedRealityUtilities.SpatialCoordinateSystem == null || !WindowsApiChecker.UniversalApiContractV8_IsAvailable)
            {
                return;
            }

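            // Query the spatial pointer pose (head and eye gaze) for the current time.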
            SpatialPointerPose pointerPose = SpatialPointerPose.TryGetAtTimestamp(WindowsMixedRealityUtilities.SpatialCoordinateSystem, PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now));
            if (pointerPose != null)
            {
                var eyes = pointerPose.Eyes;
                if ((eyes != null) && (eyes.Gaze.HasValue))
                {
                    Ray newGaze = new Ray(WindowsMixedRealityUtilities.SystemVector3ToUnity(eyes.Gaze.Value.Origin), WindowsMixedRealityUtilities.SystemVector3ToUnity(eyes.Gaze.Value.Direction));

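                    // Optionally smooth the raw gaze ray to reduce jitter.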
                    if (SmoothEyeTracking)
                    {
                        newGaze = SmoothGaze(newGaze);
                    }

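                    // Report the gaze ray to the eye gaze provider, stamped with the
                    // time the sample was acquired rather than the current frame time.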
                    InputSystem?.EyeGazeProvider?.UpdateEyeGaze(this, newGaze, eyes.UpdateTimestamp.TargetTime.UtcDateTime);
                }
            }
#endif // WINDOWS_UWP
        }
Example #2
        /// <inheritdoc />
        public override void Update()
        {
#if (UNITY_WSA && DOTNETWINRT_PRESENT) || WINDOWS_UWP
            if (WindowsMixedRealityUtilities.SpatialCoordinateSystem == null || !eyesApiAvailable)
            {
                return;
            }

            SpatialPointerPose pointerPose = SpatialPointerPose.TryGetAtTimestamp(WindowsMixedRealityUtilities.SpatialCoordinateSystem, PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now));
            if (pointerPose != null)
            {
                var eyes = pointerPose.Eyes;
                if (eyes != null)
                {
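                    // Report calibration status even on frames without a gaze sample.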
                    InputSystem?.EyeGazeProvider?.UpdateEyeTrackingStatus(this, eyes.IsCalibrationValid);

                    if (eyes.Gaze.HasValue)
                    {
                        Ray newGaze = new Ray(eyes.Gaze.Value.Origin.ToUnityVector3(), eyes.Gaze.Value.Direction.ToUnityVector3());

                        if (SmoothEyeTracking)
                        {
                            newGaze = SmoothGaze(newGaze);
                        }

                        InputSystem?.EyeGazeProvider?.UpdateEyeGaze(this, newGaze, eyes.UpdateTimestamp.TargetTime.UtcDateTime);
                    }
                }
            }
#endif // (UNITY_WSA && DOTNETWINRT_PRESENT) || WINDOWS_UWP
        }
Example #3
        public override void Update()
        {
#if UNITY_WSA
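            // Reflection check: the Eyes property only exists on OS builds that ship
            // the eye gaze API, so its absence means eye tracking is unsupported here.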
            if (WindowsMixedRealityUtilities.SpatialCoordinateSystem == null || typeof(SpatialPointerPose).GetProperty("Eyes") == null)
            {
                return;
            }

            SpatialPointerPose pointerPose = SpatialPointerPose.TryGetAtTimestamp(WindowsMixedRealityUtilities.SpatialCoordinateSystem, PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now));
            if (pointerPose != null)
            {
                var eyes = pointerPose.Eyes;
                if (eyes != null)
                {
                    InputSystem?.EyeGazeProvider?.UpdateEyeTrackingStatus(this, eyes.IsCalibrationValid);

                    if (eyes.Gaze.HasValue)
                    {
                        Ray newGaze = new Ray(WindowsMixedRealityUtilities.SystemVector3ToUnity(eyes.Gaze.Value.Origin), WindowsMixedRealityUtilities.SystemVector3ToUnity(eyes.Gaze.Value.Direction));

                        if (SmoothEyeTracking)
                        {
                            newGaze = SmoothGaze(newGaze);
                        }

                        InputSystem?.EyeGazeProvider?.UpdateEyeGaze(this, newGaze, eyes.UpdateTimestamp.TargetTime.UtcDateTime);
                    }
                }
            }
#endif // UNITY_WSA
        }
Example #4
        private void sourceUpdate(SpatialInteractionManager manager, SpatialInteractionSourceEventArgs args)
        {
            SpatialCoordinateSystem          currentCoordinateSystem = referenceFrame.CoordinateSystem;
            SpatialInteractionSourceLocation pos = args.State.Properties.TryGetLocation(currentCoordinateSystem);

            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;
            // Get the gaze direction relative to the given coordinate system.
            // TryGetLocation and TryGetAtTimestamp can both return null (and the
            // location's Position is nullable), so guard before dereferencing.
            SpatialPointerPose pose = SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp);
            if (pos == null || !pos.Position.HasValue || pose == null)
            {
                return;
            }

            Vector3 headPosition = pos.Position.Value;

            SpatialInteractionSource source = args.State.Source;

            Vector3 headDirection = pose.Head.ForwardDirection;

            // The hologram is positioned a short distance (0.1 m) along the user's gaze direction.
            float   distanceFromUser = 0.1f; // meters
            Vector3 gazeTarget       = headPosition + (distanceFromUser * headDirection);

            // This will be used as the translation component of the hologram's
            // model transform.
            this.position = gazeTarget;
        }
Example #5
        /// <inheritdoc />
        public override void Update()
        {
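            // Scope the whole update in a profiler marker so it shows up in profiling captures.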
            using (UpdatePerfMarker.Auto())
            {
                if (WindowsMixedRealityUtilities.SpatialCoordinateSystem == null || !eyesApiAvailable)
                {
                    return;
                }

                SpatialPointerPose pointerPose = SpatialPointerPose.TryGetAtTimestamp(WindowsMixedRealityUtilities.SpatialCoordinateSystem, PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now));
                if (pointerPose != null)
                {
                    var eyes = pointerPose.Eyes;
                    if (eyes != null)
                    {
                        Service?.EyeGazeProvider?.UpdateEyeTrackingStatus(this, eyes.IsCalibrationValid);

                        if (eyes.Gaze.HasValue)
                        {
                            Ray newGaze = new Ray(eyes.Gaze.Value.Origin.ToUnityVector3(), eyes.Gaze.Value.Direction.ToUnityVector3());

                            if (SmoothEyeTracking)
                            {
                                newGaze = SmoothGaze(newGaze);
                            }

                            Service?.EyeGazeProvider?.UpdateEyeGaze(this, newGaze, eyes.UpdateTimestamp.TargetTime.UtcDateTime);
                        }
                    }
                }
            }
        }
Example #6
    // Update is called once per frame
    void Update()
    {
#if ENABLE_WINMD_SUPPORT
        if (!_isReadyToRender)
        {
            return;
        }

        // In a native app, each frame would begin by calling CreateNextFrame and
        // reading its camera-pose prediction. Unity drives the HolographicFrame
        // itself, so the original C++/CX template calls remain commented out:
        //HolographicFrame ^ holographicFrame = m_holographicSpace->CreateNextFrame();
        //HolographicFramePrediction prediction = holographicFrame->CurrentPrediction;

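        // Retrieve Unity's world-origin coordinate system from the native pointer
        // exposed by WorldManager, so WinRT perception APIs can be queried against it.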
        IntPtr spatialCoordinateSystemPtr               = WorldManager.GetNativeISpatialCoordinateSystemPtr();
        SpatialCoordinateSystem unityWorldOrigin        = Marshal.GetObjectForIUnknown(spatialCoordinateSystemPtr) as SpatialCoordinateSystem;
        SpatialCoordinateSystem currentCoordinateSystem = unityWorldOrigin;

        _isTrackingFaces = _faceTrackerProcessor.IsTrackingFaces();

        if (_isTrackingFaces)
        {
            MediaFrameReference frame = _videoFrameProcessor.GetLatestFrame();
            if (frame == null)
            {
                return;
            }
            var faces = _faceTrackerProcessor.GetLatestFaces();
            ProcessFaces(faces, frame, currentCoordinateSystem);


            TimeSpan currentTimeStamp = frame.SystemRelativeTime.Value.Duration();
            if (currentTimeStamp > _previousFrameTimestamp)
            {
                // TODO: copy to texture
                _previousFrameTimestamp = currentTimeStamp;
            }
        }

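        // Fetch the head pose for the current time; this excerpt does not use it
        // further, but it is available for gaze-dependent placement.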
        SpatialPointerPose pointerPose = SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now));
#endif
    }
Example #7
        /// <inheritdoc />
        public override void Update()
        {
            // Override gaze before base.Update() updates the controllers
            if (mixedRealityGazeProviderHeadOverride != null && mixedRealityGazeProviderHeadOverride.UseHeadGazeOverride && WindowsMixedRealityUtilities.SpatialCoordinateSystem != null)
            {
                SpatialPointerPose pointerPose = SpatialPointerPose.TryGetAtTimestamp(WindowsMixedRealityUtilities.SpatialCoordinateSystem, PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now));
                if (pointerPose != null)
                {
                    HeadPose head = pointerPose.Head;
                    if (head != null)
                    {
                        mixedRealityGazeProviderHeadOverride.OverrideHeadGaze(head.Position.ToUnityVector3(), head.ForwardDirection.ToUnityVector3());
                    }
                }
            }

            base.Update();
        }
Example #8
        Node AddNode(SpatialAnchor anchor, PerceptionTimestamp perceptionTimestamp)
        {
            var position = Vector3.Zero;
            var forward  = Vector3.Zero;

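            // Resolve the head pose in the anchor's coordinate system at the given
            // timestamp; this can fail (null) if the anchor cannot be located yet.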
            var anchorPose = SpatialPointerPose.TryGetAtTimestamp(anchor.CoordinateSystem, perceptionTimestamp);

            if (anchorPose != null)
            {
                position = anchorPose.Head.Position;
                forward  = anchorPose.Head.ForwardDirection;
            }

            var node = new Node(anchor, position, forward);

            nodes.Add(node);

            return(node);
        }
Example #9
        /// <inheritdoc/>
        public override void Update()
        {
            Profiler.BeginSample("[MRTK] WindowsMixedRealityDeviceManager.Update");

            base.Update();

#if (UNITY_WSA && DOTNETWINRT_PRESENT) || WINDOWS_UWP
            if (mixedRealityGazeProviderHeadOverride != null && mixedRealityGazeProviderHeadOverride.UseHeadGazeOverride)
            {
                SpatialPointerPose pointerPose = SpatialPointerPose.TryGetAtTimestamp(WindowsMixedRealityUtilities.SpatialCoordinateSystem, PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now));
                if (pointerPose != null)
                {
                    HeadPose head = pointerPose.Head;
                    if (head != null)
                    {
                        mixedRealityGazeProviderHeadOverride.OverrideHeadGaze(head.Position.ToUnityVector3(), head.ForwardDirection.ToUnityVector3());
                    }
                }
            }
#endif // (UNITY_WSA && DOTNETWINRT_PRESENT) || WINDOWS_UWP

            UpdateInteractionManagerReading();

            for (var i = 0; i < numInteractionManagerStates; i++)
            {
                // SourceDetected gets raised when a new controller is detected and, if previously present,
                // when OnEnable is called. Do not create a new controller here.
                var controller = GetOrAddController(interactionManagerStates[i].source, false);

                if (controller != null)
                {
                    controller.UpdateController(interactionManagerStates[i]);
                }
            }

            LastInteractionManagerStateReading = interactionManagerStates;

            Profiler.EndSample(); // Update
        }
Example #10
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update(HolographicFrame previousFrame)
        {
            // TODO: Put CPU work that does not depend on the HolographicCameraPose here.

            // Apps should wait for the optimal time to begin pose-dependent work.
            // The platform will automatically adjust the wakeup time to get
            // the lowest possible latency at high frame rates. For manual
            // control over latency, use the WaitForNextFrameReadyWithHeadStart
            // API.
            // WaitForNextFrameReady and WaitForNextFrameReadyWithHeadStart are the
            // preferred frame synchronization APIs for Windows Mixed Reality. When
            // running on older versions of the OS that do not include support for
            // these APIs, your app can use the WaitForFrameToFinish API for similar
            // (but not as optimal) behavior.
            if (canUseWaitForNextFrameReadyAPI)
            {
                try
                {
                    holographicSpace.WaitForNextFrameReady();
                }
                catch (NotImplementedException)
                {
                    // Catch a specific case where WaitForNextFrameReady() is present but not implemented
                    // and default back to WaitForFrameToFinish() in that case.
                    canUseWaitForNextFrameReadyAPI = false;
                }
            }
            else if (previousFrame != null)
            {
                previousFrame.WaitForFrameToFinish();
            }

            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

#if DRAW_SAMPLE_CONTENT
            if (stationaryReferenceFrame != null)
            {
                // Check for new input state since the last frame.
                for (int i = 0; i < gamepads.Count; ++i)
                {
                    bool buttonDownThisUpdate = (gamepads[i].gamepad.GetCurrentReading().Buttons & GamepadButtons.A) == GamepadButtons.A;
                    if (buttonDownThisUpdate && !gamepads[i].buttonAWasPressedLastFrame)
                    {
                        pointerPressed = true;
                    }
                    gamepads[i].buttonAWasPressedLastFrame = buttonDownThisUpdate;
                }

                SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
                SpatialPointerPose            pose         = null;
                if (null != pointerState)
                {
                    pose = pointerState.TryGetPointerPose(stationaryReferenceFrame.CoordinateSystem);
                }
                else if (pointerPressed)
                {
                    pose = SpatialPointerPose.TryGetAtTimestamp(stationaryReferenceFrame.CoordinateSystem, prediction.Timestamp);
                }
                pointerPressed = false;

                // When a Pressed gesture is detected, the sample hologram will be repositioned
                // two meters in front of the user.
                quadRendererR.PositionHologram(pose);
                quadRendererL.PositionHologram(pose);
            }
#endif

            timer.Tick(() =>
            {
                //
                // TODO: Update scene objects.
                //
                // Put time-based updates here. By default this code will run once per frame,
                // but if you change the StepTimer to use a fixed time step this code will
                // run as many times as needed to get to the current step.
                //

#if DRAW_SAMPLE_CONTENT
                quadRendererR.Update(timer);
                quadRendererL.Update(timer);
#endif
            });

            // On HoloLens 2, the platform can achieve better image stabilization results if it has
            // a stabilization plane and a depth buffer.
            // Note that the SetFocusPoint API includes an override which takes velocity as a
            // parameter. This is recommended for stabilizing holograms in motion.
            foreach (var cameraPose in prediction.CameraPoses)
            {
#if DRAW_SAMPLE_CONTENT
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera. When setting the focus point, put it on or
                // near content that the user is looking at.
                // In this example, we put the focus point at the center of the sample hologram.
                // You can also set the relative velocity and facing of the stabilization
                // plane using overloads of this method.
                if (stationaryReferenceFrame != null)
                {
                    renderingParameters.SetFocusPoint(
                        stationaryReferenceFrame.CoordinateSystem,
                        new System.Numerics.Vector3(0, 0, 0)
                        );
                }
#endif
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return(holographicFrame);
        }
Example #11
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used for
            // creating the stereo view matrices when rendering the sample content.
            SpatialCoordinateSystem currentCoordinateSystem = attachreferenceFrame.GetStationaryCoordinateSystemAtTimestamp(prediction.Timestamp);


#if DRAW_SAMPLE_CONTENT
            // Check for new input state since the last frame.
            SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
            if (null != pointerState)
            {
                // When a Pressed gesture is detected, the sample hologram will be repositioned
                // two meters in front of the user.
                spinningCubeRenderer.PositionHologram(
                    //  pointerState.TryGetPointerPose(currentCoordinateSystem)
                    SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp)
                    );
            }

#endif

            timer.Tick(() =>
            {
                //
                // TODO: Update scene objects.
                //
                // Put time-based updates here. By default this code will run once per frame,
                // but if you change the StepTimer to use a fixed time step this code will
                // run as many times as needed to get to the current step.
                //

#if DRAW_SAMPLE_CONTENT
                spinningCubeRenderer.Update(timer);
#endif
            });

            // We complete the frame update by using information about our content positioning
            // to set the focus point.
            foreach (var cameraPose in prediction.CameraPoses)
            {
#if DRAW_SAMPLE_CONTENT
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera.
                // You should set the focus point near the content that the user is looking at.
                // In this example, we put the focus point at the center of the sample hologram,
                // since that is the only hologram available for the user to focus on.
                // You can also set the relative velocity and facing of that content; the sample
                // hologram is at a fixed point so we only need to indicate its position.
                Vector3 position = new Vector3(0.0f, 0.0f, -3.0f);
                renderingParameters.SetFocusPoint(
                    currentCoordinateSystem, position
                    /*spinningCubeRenderer.Position*/
                    );
#endif
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return(holographicFrame);
        }
Example #12
        /// <summary>
        /// Renders the current frame to each holographic display, according to the
        /// current application and spatial positioning state.
        /// </summary>
        public void UpdateAndDraw()
        {
            HolographicFrame holographicFrame = this.holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            this.deviceResources.EnsureCameraResources(holographicFrame, prediction);

            this.UpdateEyeProperties();

            // Up-to-date frame predictions enhance the effectiveness of image stabilization and
            // allow more accurate positioning of holograms.
            holographicFrame.UpdateCurrentPrediction();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            prediction = holographicFrame.CurrentPrediction;

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used for
            // creating the stereo view matrices when rendering the sample content.
            SpatialCoordinateSystem currentCoordinateSystem = this.ReferenceFrame.CoordinateSystem;

            var eyeTexture = this.eyesProperties[0].Texture;

            this.deviceResources.UpdateCameraClipDistance(eyeTexture.NearPlane, eyeTexture.FarPlane);

            holographicFrame.UpdateCurrentPrediction();
            prediction = holographicFrame.CurrentPrediction;

            foreach (var cameraPose in prediction.CameraPoses)
            {
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera.
                // You should set the focus point near the content that the user is looking at.
                // In this example, we put the focus point at the center of the sample hologram,
                // since that is the only hologram available for the user to focus on.
                // You can also set the relative velocity and facing of that content; the sample
                // hologram is at a fixed point so we only need to indicate its position.
                if (this.mixedRealityService.FocusPosition.HasValue)
                {
                    var position = this.mixedRealityService.FocusPosition.Value;

                    if (!this.mixedRealityService.FocusNormal.HasValue)
                    {
                        renderingParameters.SetFocusPoint(currentCoordinateSystem, new System.Numerics.Vector3(position.X, position.Y, position.Z));
                    }
                    else
                    {
                        var normal = this.mixedRealityService.FocusNormal.Value;

                        if (!this.mixedRealityService.FocusVelocity.HasValue)
                        {
                            renderingParameters.SetFocusPoint(
                                currentCoordinateSystem,
                                new System.Numerics.Vector3(position.X, position.Y, position.Z),
                                new System.Numerics.Vector3(normal.X, normal.Y, normal.Z));
                        }
                        else
                        {
                            var velocity = this.mixedRealityService.FocusVelocity.Value;

                            renderingParameters.SetFocusPoint(
                                currentCoordinateSystem,
                                new System.Numerics.Vector3(position.X, position.Y, position.Z),
                                new System.Numerics.Vector3(normal.X, normal.Y, normal.Z),
                                new System.Numerics.Vector3(velocity.X, velocity.Y, velocity.Z));
                        }
                    }
                }

                var pointerPose = SpatialPointerPose.TryGetAtTimestamp(this.ReferenceFrame.CoordinateSystem, prediction.Timestamp);
                if (pointerPose != null)
                {
                    pointerPose.Head.Position.ToWave(out this.headRay.Position);
                    pointerPose.Head.ForwardDirection.ToWave(out this.headRay.Direction);
                }

                var viewTransform       = cameraPose.TryGetViewTransform(this.ReferenceFrame.CoordinateSystem);
                var projectionTransform = cameraPose.ProjectionTransform;

                if (viewTransform.HasValue)
                {
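                    // Invert each eye's view matrix to recover that eye's pose
                    // (position and orientation) in world space.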
                    for (int i = 0; i < 2; i++)
                    {
                        Matrix viewMatrix;
                        Matrix projectionMatrix;

                        if (i == (int)VREyeType.LeftEye)
                        {
                            viewTransform.Value.Left.ToWave(out viewMatrix);
                            projectionTransform.Left.ToWave(out projectionMatrix);
                        }
                        else
                        {
                            viewTransform.Value.Right.ToWave(out viewMatrix);
                            projectionTransform.Right.ToWave(out projectionMatrix);
                        }

                        Matrix view;
                        Matrix.Invert(ref viewMatrix, out view);

                        var eyeProperties = this.eyesProperties[i];
                        var eyePose       = eyeProperties.Pose;
                        eyePose.Position = view.Translation;
                        Quaternion.CreateFromRotationMatrix(ref view, out eyePose.Orientation);
                        eyeProperties.Pose       = eyePose;
                        eyeProperties.Projection = projectionMatrix;
                    }

                    var leftEyePose         = this.eyesProperties[(int)VREyeType.LeftEye].Pose;
                    var rightEyePose        = this.eyesProperties[(int)VREyeType.RightEye].Pose;
                    var centerEyeProperties = this.eyesProperties[(int)VREyeType.CenterEye];

                    var centerEyePose = centerEyeProperties.Pose;
                    centerEyePose.Position    = Vector3.Lerp(leftEyePose.Position, rightEyePose.Position, 0.5f);
                    centerEyePose.Orientation = Quaternion.Lerp(leftEyePose.Orientation, rightEyePose.Orientation, 0.5f);
                    centerEyeProperties.Pose  = centerEyePose;
                }
            }

            this.Render();

            this.deviceResources.Present(ref holographicFrame);
        }
Example #13
        Node UpdateCurrentNode(SpatialCoordinateSystem referenceFrameCoordinateSystem, PerceptionTimestamp perceptionTimestamp, float nodeRadius = 1.0f)
        {
            SpatialPointerPose pose = SpatialPointerPose.TryGetAtTimestamp(referenceFrameCoordinateSystem, perceptionTimestamp);

            if (pose == null)
            {
                return(currentNode);
            }

            if (currentNode == null)
            {
                // create current node
                var nodeAnchor = SpatialAnchor.TryCreateRelativeTo(referenceFrameCoordinateSystem, pose.Head.ForwardDirection * 0.1f);

                if (nodeAnchor == null)
                {
                    Debug.WriteLine($"WARN: Failed to create Anchor");
                    return(null);
                }

                Debug.WriteLine($"Creating new node Head position {pose.Head.Position} and direction {pose.Head.ForwardDirection}");

                AddNode(nodeAnchor, perceptionTimestamp);

                return(nodes[nodes.Count - 1]);
            }
            else
            {
                // outside the current node's threshold?
                var distance = currentNode.TryGetDistance(referenceFrameCoordinateSystem, pose.Head.Position);
                if (distance.HasValue && distance.Value > nodeRadius)
                {
                    // search for node
                    var closestNodes = GetClosestNodes(referenceFrameCoordinateSystem, pose, nodeRadius);
                    if (closestNodes != null && closestNodes.Count > 0)
                    {
                        foreach (var node in closestNodes)
                        {
                            if (node == currentNode)
                            {
                                continue;
                            }

                            return(node);
                        }
                    }

                    // no node exists... try to create one
                    // position of the current node with respect to the reference frame
                    var currentNodesPosition = currentNode.TryGetTransformedPosition(referenceFrameCoordinateSystem);
                    if (currentNodesPosition.HasValue)
                    {
                        var direction = Vector3.Normalize(
                            new Vector3(pose.Head.Position.X, 0f, pose.Head.Position.Z) -
                            new Vector3(currentNodesPosition.Value.X, 0f, currentNodesPosition.Value.Z)
                            );

                        var targetPosition   = currentNodesPosition.Value + direction * nodeRadius;
                        var distanceFromPose = (targetPosition - new Vector3(pose.Head.Position.X, 0f, pose.Head.Position.Z)).Length();

                        var nodeAnchor = SpatialAnchor.TryCreateRelativeTo(referenceFrameCoordinateSystem, (direction * distanceFromPose));

                        if (nodeAnchor != null)
                        {
                            var newNode = AddNode(nodeAnchor, perceptionTimestamp);

                            // create a new edge connecting the current node and this node
                            edges.Add(new Edge
                            {
                                NodeA = currentNode,
                                NodeB = newNode
                            });

                            Debug.WriteLine($"Creating new node ({newNode.Name}) Head position {pose.Head.Position} and direction {pose.Head.ForwardDirection}, direction from current node {direction}.. Edge created {currentNode.Name}");
                            return(nodes[nodes.Count - 1]);
                        }
                        else
                        {
                            Debug.WriteLine($"WARN: Failed to create Anchor");
                        }
                    }
                }
            }

            return(currentNode);
        }
Example #14
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used for
            // creating the stereo view matrices when rendering the sample content.

            SpatialCoordinateSystem referenceFrameCoordinateSystem = referenceFrame.GetStationaryCoordinateSystemAtTimestamp(prediction.Timestamp);

            // remember where we were (changed if the CurrentNode != previousNode)
            var previousNode = CurrentNode;

            // update current node the user resides in
            CurrentNode = UpdateCurrentNode(referenceFrameCoordinateSystem, prediction.Timestamp, NodeRadius);

            // ... and the current gaze. TryGetAtTimestamp can return null (e.g. when
            // tracking is lost), so skip the rest of this update in that case.
            SpatialPointerPose pose = SpatialPointerPose.TryGetAtTimestamp(referenceFrameCoordinateSystem, prediction.Timestamp);

            if (pose == null)
            {
                return holographicFrame;
            }

            NodePosition = pose.Head.Position;
            GazeForward  = pose.Head.ForwardDirection;
            GazeUp       = pose.Head.UpDirection;

            var mat = referenceFrameCoordinateSystem.TryGetTransformTo(CurrentNode.Anchor.CoordinateSystem);

            if (mat.HasValue)
            {
                NodePosition = Vector3.Transform(NodePosition, mat.Value);
                GazeForward  = Vector3.TransformNormal(GazeForward, mat.Value);
                GazeUp       = Vector3.TransformNormal(GazeUp, mat.Value);
            }

            if (!string.IsNullOrEmpty(requestedSightingTerm))
            {
                var candidates = FindClosestNodesWithSightedItem(referenceFrameCoordinateSystem, pose, requestedSightingTerm);

                if (candidates != null && candidates.Count > 0)
                {
                    targetNode     = candidates[0];
                    targetSighting = candidates[0].Sightings.Where(sighting => sighting.Tokens.Any(token => token.Equals(requestedSightingTerm, StringComparison.OrdinalIgnoreCase))).First();
                }

                requestedSightingTerm = string.Empty;
            }

            // currently at the target node?
            if (CurrentNode == targetNode)
            {
                if (dwellTimeAtCurrentNode >= 5)
                {
                    targetNode     = null;
                    targetSighting = null;
                    entities.Clear();
                    Debug.WriteLine("Well done! Assisted the user find their item");
                }
            }

            if (targetNode != null)
            {
                RebuildTrailToTarget(referenceFrameCoordinateSystem, prediction.Timestamp, CurrentNode, targetNode);
            }


            ProcessNextFrame();

            timer.Tick(() =>
            {
                dwellTimeAtCurrentNode += timer.ElapsedSeconds;

                for (var entityIndex = 0; entityIndex < entities.Count; entityIndex++)
                {
                    var entity = entities[entityIndex];
                    entity.Update(timer, referenceFrameCoordinateSystem);
                }
            });

            // We complete the frame update by using information about our content positioning
            // to set the focus point.
            foreach (var cameraPose in prediction.CameraPoses)
            {
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return(holographicFrame);
        }
Example #15
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used for
            // creating the stereo view matrices when rendering the sample content.

            SpatialCoordinateSystem referenceFrameCoordinateSystem = attachedReferenceFrame.GetStationaryCoordinateSystemAtTimestamp(prediction.Timestamp);

            var previousNode = currentNode;

            currentNode = UpdateCurrentNode(referenceFrameCoordinateSystem, prediction.Timestamp);

            if (currentNode != previousNode)
            {
                SpatialPointerPose pose = SpatialPointerPose.TryGetAtTimestamp(referenceFrameCoordinateSystem, prediction.Timestamp);
            }

            if (targetNode != null)
            {
                RebuildTrailToTarget(referenceFrameCoordinateSystem, prediction.Timestamp, currentNode, targetNode);
            }

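            // Poll for a new press; any input routes the user toward a fixed demo target node.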
            SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();

            if (null != pointerState)
            {
                Debug.WriteLine($"Setting target {nodes[1].Name}");
                targetNode = nodes[1];
            }

            timer.Tick(() =>
            {
                if (currentNode != previousNode)
                {
                    dwellTimeAtCurrentNode = 0;
                }
                else
                {
                    dwellTimeAtCurrentNode += timer.ElapsedSeconds;
                }

                for (var entityIndex = entities.Count - 1; entityIndex >= 0; entityIndex--)
                {
                    var entity = entities[entityIndex];

                    // update rotation of previous one
                    if (entityIndex != entities.Count - 1)
                    {
                        var previousEntity         = entities[entityIndex + 1];
                        var previousEntityPosition = previousEntity.Node.TryGetTransformedPosition(referenceFrameCoordinateSystem);
                        var currentEntityPosition  = entity.Node.TryGetTransformedPosition(referenceFrameCoordinateSystem);
                        if (previousEntityPosition.HasValue && currentEntityPosition.HasValue)
                        {
                            var tV   = previousEntityPosition.Value;
                            var sV   = currentEntityPosition.Value;
                            tV.Y     = sV.Y = 0;
                            var diff = sV - tV;

                            var yAngle = Math.Atan2(diff.X, diff.Z);

                            entity.EulerAngles = new Vector3(0, (float)(yAngle * (180 / Math.PI)), 0);
                        }
                    }

                    entity.Update(timer, referenceFrameCoordinateSystem);
                }
            });

            // We complete the frame update by using information about our content positioning
            // to set the focus point.
            foreach (var cameraPose in prediction.CameraPoses)
            {
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. This sample leaves the focus point
                // unset, but it could be anchored to the hologram's position here.
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return(holographicFrame);
        }
Example #16
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used for
            // creating the stereo view matrices when rendering the sample content.
            SpatialCoordinateSystem currentCoordinateSystem = referenceFrame.CoordinateSystem;

#if DRAW_SAMPLE_CONTENT
            // Check for new input state since the last frame.
            foreach (var gamepad in gamepads)
            {
                pointerPressed |= ((gamepad.GetCurrentReading().Buttons & GamepadButtons.A) == GamepadButtons.A);
            }

            SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
            SpatialPointerPose            pose         = null;
            if (null != pointerState)
            {
                pose = pointerState.TryGetPointerPose(currentCoordinateSystem);
            }
            else if (pointerPressed)
            {
                pose = SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp);
            }
            pointerPressed = false;

            // When a Pressed gesture is detected, the sample hologram will be repositioned
            // two meters in front of the user.
            spinningCubeRenderer.PositionHologram(pose);
#endif

            timer.Tick(() =>
            {
                //
                // TODO: Update scene objects.
                //
                // Put time-based updates here. By default this code will run once per frame,
                // but if you change the StepTimer to use a fixed time step this code will
                // run as many times as needed to get to the current step.
                //

#if DRAW_SAMPLE_CONTENT
                spinningCubeRenderer.Update(timer);
#endif
            });

            // We complete the frame update by using information about our content positioning
            // to set the focus point.
            foreach (var cameraPose in prediction.CameraPoses)
            {
#if DRAW_SAMPLE_CONTENT
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera.
                // You should set the focus point near the content that the user is looking at.
                // In this example, we put the focus point at the center of the sample hologram,
                // since that is the only hologram available for the user to focus on.
                // You can also set the relative velocity and facing of that content; the sample
                // hologram is at a fixed point so we only need to indicate its position.
                renderingParameters.SetFocusPoint(
                    currentCoordinateSystem,
                    spinningCubeRenderer.Position
                    );
#endif
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return(holographicFrame);
        }
Example #17
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

#if DRAW_SAMPLE_CONTENT
            if (stationaryReferenceFrame != null)
            {
                // Check for new input state since the last frame.
                for (int i = 0; i < gamepads.Count; ++i)
                {
                    bool buttonDownThisUpdate = (gamepads[i].gamepad.GetCurrentReading().Buttons & GamepadButtons.A) == GamepadButtons.A;
                    if (buttonDownThisUpdate && !gamepads[i].buttonAWasPressedLastFrame)
                    {
                        pointerPressed = true;
                    }
                    gamepads[i].buttonAWasPressedLastFrame = buttonDownThisUpdate;
                }

                SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
                SpatialPointerPose            pose         = null;
                if (null != pointerState)
                {
                    pose = pointerState.TryGetPointerPose(stationaryReferenceFrame.CoordinateSystem);
                }
                else if (pointerPressed)
                {
                    pose = SpatialPointerPose.TryGetAtTimestamp(stationaryReferenceFrame.CoordinateSystem, prediction.Timestamp);
                }
                pointerPressed = false;

                // When a Pressed gesture is detected, the sample hologram will be repositioned
                // two meters in front of the user.
                spinningCubeRenderer.PositionHologram(pose);
            }
#endif

            timer.Tick(() =>
            {
                //
                // TODO: Update scene objects.
                //
                // Put time-based updates here. By default this code will run once per frame,
                // but if you change the StepTimer to use a fixed time step this code will
                // run as many times as needed to get to the current step.
                //

#if DRAW_SAMPLE_CONTENT
                spinningCubeRenderer.Update(timer);
#endif
            });

            if (!canCommitDirect3D11DepthBuffer)
            {
                // On versions of the platform that do not support the CommitDirect3D11DepthBuffer API, we can control
                // image stabilization by setting a focus point with optional plane normal and velocity.
                foreach (var cameraPose in prediction.CameraPoses)
                {
#if DRAW_SAMPLE_CONTENT
                    // The HolographicCameraRenderingParameters class provides access to set
                    // the image stabilization parameters.
                    HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                    // SetFocusPoint informs the system about a specific point in your scene to
                    // prioritize for image stabilization. The focus point is set independently
                    // for each holographic camera. When setting the focus point, put it on or
                    // near content that the user is looking at.
                    // In this example, we put the focus point at the center of the sample hologram.
                    // You can also set the relative velocity and facing of the stabilization
                    // plane using overloads of this method.
                    if (stationaryReferenceFrame != null)
                    {
                        renderingParameters.SetFocusPoint(
                            stationaryReferenceFrame.CoordinateSystem,
                            spinningCubeRenderer.Position
                            );
                    }
#endif
                }
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return(holographicFrame);
        }
Example #18
        /// <summary>
        /// Function which checks for new eye tracking data and is called periodically by the timer
        /// </summary>
        /// <param name="source"></param>
        /// <param name="e"></param>
        private void CheckForEyeData(object source, ElapsedEventArgs e)
        {
            // Make sure the previous event isn't still running
            if (System.Threading.Interlocked.CompareExchange(ref fetchDataTimerIsBusy, 1, 0) == 1)
            {
                //Debug.LogError("Previous event still running!");
                return;
            }

            try {
#if (UNITY_WSA && DOTNETWINRT_PRESENT) || WINDOWS_UWP
                // Make sure we have the spatial coordinate system (which is cached every update) and the eyes API is available
                if (currentSpatialCoordinateSystem == null || !EyesApiAvailable)
                {
                    //Debug.Log("[UWPDataAccess] No currentSpatialCoordinateSystem or Eyes API not available!");
                    return;
                }

                // Try to get the new pointer data (which includes eye tracking)
                SpatialPointerPose pointerPose = SpatialPointerPose.TryGetAtTimestamp(currentSpatialCoordinateSystem, PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now));
                if (pointerPose != null)
                {
                    // Check if we actually got any eye tracking data
                    var eyes = pointerPose.Eyes;
                    if (eyes != null)
                    {
                        // Unix time stamp from when the eye tracking data we got was acquired
                        long targetTimeUnix = eyes.UpdateTimestamp.TargetTime.ToUnixTimeMilliseconds();

                        // Check if we have new data
                        if (lastEyeDataTimestamp != targetTimeUnix)
                        {
                            // Save new time stamp
                            lastEyeDataTimestamp = targetTimeUnix;

                            // Save the information whether the calibration is valid
                            IsGazeCalibrationValid = eyes.IsCalibrationValid;

                            // If we have gaze data, enqueue it; otherwise enqueue Vector3.zero as origin and direction
                            if (eyes.Gaze.HasValue)
                            {
                                dataQueue.Enqueue(new GazeAPIData()
                                {
                                    EyeDataTimestamp         = targetTimeUnix,
                                    EyeDataRelativeTimestamp = eyes.UpdateTimestamp.SystemRelativeTargetTime.TotalMilliseconds,
                                    IsCalibrationValid       = eyes.IsCalibrationValid,
                                    GazeHasValue             = eyes.Gaze.HasValue,
                                    GazeOrigin    = eyes.Gaze.Value.Origin.ToUnityVector3(),
                                    GazeDirection = eyes.Gaze.Value.Direction.ToUnityVector3()
                                });
                            }
                            else
                            {
                                dataQueue.Enqueue(new GazeAPIData()
                                {
                                    EyeDataTimestamp         = targetTimeUnix,
                                    EyeDataRelativeTimestamp = eyes.UpdateTimestamp.SystemRelativeTargetTime.TotalMilliseconds,
                                    IsCalibrationValid       = eyes.IsCalibrationValid,
                                    GazeHasValue             = eyes.Gaze.HasValue,
                                    GazeOrigin    = Vector3.zero,
                                    GazeDirection = Vector3.zero
                                });
                            }
                        }
                    }
                }
#else
                // On platforms other than UWP, log that eye tracking data is unavailable
                Debug.Log("[UWPDataAccess] Not on correct platform! Doing nothing!");
#endif
            }
            finally
            {
                fetchDataTimerIsBusy = 0;
            }
        }
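CheckForEyeData runs on a background timer thread, so the sample hands results to the main thread through dataQueue rather than touching Unity objects directly. A minimal sketch of the consuming side, assuming dataQueue is a System.Collections.Concurrent.ConcurrentQueue<GazeAPIData> drained once per frame (the OnGazeData event is illustrative, not part of the original source):

        // Unity main thread: drain all gaze samples that arrived since the last frame.
        private void Update()
        {
            while (dataQueue.TryDequeue(out GazeAPIData gazeData))
            {
                // Assumption: consumers subscribe to this event for new samples.
                OnGazeData?.Invoke(gazeData);
            }
        }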
Example #19
0
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used
            // for creating the stereo view matrices when rendering the sample content.
            SpatialCoordinateSystem currentCoordinateSystem = referenceFrame.CoordinateSystem;

            SpatialPointerPose pose = SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp);            

            ProcessFrame(currentCoordinateSystem);

            if (Utils.GetCurrentUnixTimestampMillis() - lastFaceDetectedTimestamp > faceTimeThreshold)
            {
                if (pose != null)
                {
                    var headPosition = pose.Head.Position;
                    var headForward = pose.Head.ForwardDirection;
                    quadRenderer.TargetPosition = headPosition + (2.0f * headForward);
                }

                textRenderer.RenderTextOffscreen("No faces detected");
            }

            timer.Tick(() =>
            {
                //
                // TODO: Update scene objects.
                //
                // Put time-based updates here. By default this code will run once per frame,
                // but if you change the StepTimer to use a fixed time step this code will
                // run as many times as needed to get to the current step.
                //

                quadRenderer.Update(pose, timer);
            });

            // We complete the frame update by using information about our content positioning
            // to set the focus point.
            foreach (var cameraPose in prediction.CameraPoses)
            {
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera.
                // You should set the focus point near the content that the user is looking at.
                // In this example, we put the focus point at the center of the sample hologram,
                // since that is the only hologram available for the user to focus on.
                // You can also set the relative velocity and facing of that content; the sample
                // hologram is at a fixed point so we only need to indicate its position.

                if (Utils.GetCurrentUnixTimestampMillis() - lastFaceDetectedTimestamp <= faceTimeThreshold)
                {
                    renderingParameters.SetFocusPoint(
                        currentCoordinateSystem,
                        quadRenderer.Position,
                        quadRenderer.Forward,
                        quadRenderer.Velocity
                    );
                }
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return holographicFrame;
        }
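The face-timeout checks above rely on a Utils.GetCurrentUnixTimestampMillis helper that is not shown in this example. A plausible one-line implementation, assuming lastFaceDetectedTimestamp is stamped with the same UTC epoch clock:

        public static class Utils
        {
            // Milliseconds since the Unix epoch (UTC); compared against faceTimeThreshold.
            public static long GetCurrentUnixTimestampMillis()
            {
                return DateTimeOffset.UtcNow.ToUnixTimeMilliseconds();
            }
        }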
Example #20
0
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used
            // for creating the stereo view matrices when rendering the sample content.
            SpatialCoordinateSystem currentCoordinateSystem = referenceFrame.CoordinateSystem;

            // Check for new input state since the last frame.
            //foreach (var gamepad in gamepads)
            //{
            //    pointerPressed |= ((gamepad.GetCurrentReading().Buttons & GamepadButtons.A) == GamepadButtons.A);
            //}

            //SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
            //SpatialPointerPose pose = null;
            //if (null != pointerState)
            //{
            //    pose = pointerState.TryGetPointerPose(currentCoordinateSystem);
            //}
            //else if (pointerPressed)
            //{
            //    pose = SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp);
            //}
            //pointerPressed = false;

            //if (null != pose)
            //{
            //    //var angle = Angle(pose.Head.ForwardDirection, new Vector3(0.0f, 0.0f, -1.0f), new Vector3(0.0f, 1.0f, 0.0f));
            //    //var rotator = Matrix4x4.CreateRotationY(-angle);
            //    //var mover = Matrix4x4.CreateTranslation(pose.Head.Position);
            //    //var transformer = rotator * mover;
            //}

            mutex.WaitOne();

            var key   = mainView.VirtualKey;
            var count = mainView.KeyCount;

            mainView.KeyCount   = 0;
            mainView.VirtualKey = Windows.System.VirtualKey.None;

            mutex.ReleaseMutex();

            if (key != Windows.System.VirtualKey.None && count > 0)
            {
                mainView.LastKey = key;
                mainView.OnKeyPressed(key);
            }

            timer1.Tick(() =>
            {
                mainView.Update(timer1);
            });

            timer2.Tick(() =>
            {
                mainView.Update(SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp));
            });

            // We complete the frame update by using information about our content positioning
            // to set the focus point.
            foreach (var cameraPose in prediction.CameraPoses)
            {
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera.
                // You should set the focus point near the content that the user is looking at.
                // In this example, we put the focus point at the center of the sample hologram,
                // since that is the only hologram available for the user to focus on.
                // You can also set the relative velocity and facing of that content; the sample
                // hologram is at a fixed point so we only need to indicate its position.

                if (mainView.Pointers[0] != null)
                {
                    renderingParameters.SetFocusPoint(currentCoordinateSystem, mainView.Pointers[0].Position);
                }
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return holographicFrame;
        }
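Only the consuming side of the mutex-guarded key handoff appears above; the producing side would be a keyboard handler on the UI thread taking the same mutex before writing. A rough sketch, assuming the handler is wired to CoreWindow.KeyDown and that VirtualKey and KeyCount are the same fields read in Update (the handler name is an assumption):

        // UI thread: record the most recent key press under the shared mutex.
        private void OnKeyDown(Windows.UI.Core.CoreWindow sender, Windows.UI.Core.KeyEventArgs args)
        {
            mutex.WaitOne();

            mainView.VirtualKey = args.VirtualKey;
            mainView.KeyCount  += 1;

            mutex.ReleaseMutex();
        }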