Example #1
 internal void Update(SpatialPointerPose pose)
 {
     foreach (var pointer in Pointers)
     {
         pointer?.Update(pose);
     }
 }
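
For context, a minimal sketch of how an Update like this is typically driven each frame, assuming a coordinate system and frame prediction like those in the frame-loop examples further down (the caller shown here is hypothetical, not from the source):

 // Hypothetical per-frame caller. TryGetAtTimestamp can return null (for
 // example before head tracking is established), so check before dispatching.
 internal void OnFrame(SpatialCoordinateSystem coordinateSystem, HolographicFramePrediction prediction)
 {
     SpatialPointerPose pose = SpatialPointerPose.TryGetAtTimestamp(coordinateSystem, prediction.Timestamp);
     if (pose != null)
     {
         Update(pose);
     }
 }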
Example #2
        private void sourceUpdate(SpatialInteractionManager manager, SpatialInteractionSourceEventArgs args)
        {
            SpatialCoordinateSystem          currentCoordinateSystem = referenceFrame.CoordinateSystem;
            SpatialInteractionSourceLocation location = args.State.Properties.TryGetLocation(currentCoordinateSystem);

            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            SpatialPointerPose pose = SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp);

            // TryGetLocation and TryGetAtTimestamp can both return null, and
            // Position is nullable, so guard before dereferencing.
            if (location?.Position == null || pose == null)
            {
                return;
            }

            Vector3 headPosition = location.Position.Value;

            SpatialInteractionSource source = args.State.Source;

            // Get the gaze direction relative to the given coordinate system.
            Vector3 headDirection = pose.Head.ForwardDirection;

            // The hologram is positioned a short distance (0.1 m) along the user's
            // gaze direction.
            float   distanceFromUser = 0.1f; // meters
            Vector3 gazeTarget       = headPosition + (distanceFromUser * headDirection);

            // This will be used as the translation component of the hologram's
            // model transform.
            this.position = gazeTarget;
        }
Example #3
        /// <inheritdoc />
        public override void Update()
        {
            using (UpdatePerfMarker.Auto())
            {
                if (WindowsMixedRealityUtilities.SpatialCoordinateSystem == null || !eyesApiAvailable)
                {
                    return;
                }

                SpatialPointerPose pointerPose = SpatialPointerPose.TryGetAtTimestamp(WindowsMixedRealityUtilities.SpatialCoordinateSystem, PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now));
                if (pointerPose != null)
                {
                    var eyes = pointerPose.Eyes;
                    if (eyes != null)
                    {
                        Service?.EyeGazeProvider?.UpdateEyeTrackingStatus(this, eyes.IsCalibrationValid);

                        if (eyes.Gaze.HasValue)
                        {
                            Ray newGaze = new Ray(eyes.Gaze.Value.Origin.ToUnityVector3(), eyes.Gaze.Value.Direction.ToUnityVector3());

                            if (SmoothEyeTracking)
                            {
                                newGaze = SmoothGaze(newGaze);
                            }

                            Service?.EyeGazeProvider?.UpdateEyeGaze(this, newGaze, eyes.UpdateTimestamp.TargetTime.UtcDateTime);
                        }
                    }
                }
            }
        }
Example #4
        public override void Update()
        {
#if WINDOWS_UWP
            if (WindowsMixedRealityUtilities.SpatialCoordinateSystem == null || !WindowsApiChecker.UniversalApiContractV8_IsAvailable)
            {
                return;
            }

            SpatialPointerPose pointerPose = SpatialPointerPose.TryGetAtTimestamp(WindowsMixedRealityUtilities.SpatialCoordinateSystem, PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now));
            if (pointerPose != null)
            {
                var eyes = pointerPose.Eyes;
                if ((eyes != null) && (eyes.Gaze.HasValue))
                {
                    Ray newGaze = new Ray(WindowsMixedRealityUtilities.SystemVector3ToUnity(eyes.Gaze.Value.Origin), WindowsMixedRealityUtilities.SystemVector3ToUnity(eyes.Gaze.Value.Direction));

                    if (SmoothEyeTracking)
                    {
                        newGaze = SmoothGaze(newGaze);
                    }

                    InputSystem?.EyeGazeProvider?.UpdateEyeGaze(this, newGaze, eyes.UpdateTimestamp.TargetTime.UtcDateTime);
                }
            }
#endif // WINDOWS_UWP
        }
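
Examples #3 and #5 gate on an eyesApiAvailable field whose initialization falls outside these excerpts. A plausible sketch, assuming the usual WinRT metadata check: SpatialPointerPose.Eyes shipped with Universal API Contract v8, the same contract Example #4 tests through WindowsApiChecker.

#if WINDOWS_UWP
        // Sketch (assumption): cache the contract check once instead of probing
        // every frame. SpatialPointerPose.Eyes requires UniversalApiContract v8.
        private readonly bool eyesApiAvailable =
            Windows.Foundation.Metadata.ApiInformation.IsApiContractPresent(
                "Windows.Foundation.UniversalApiContract", 8);
#endif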
Example #5
        /// <inheritdoc />
        public override void Update()
        {
#if (UNITY_WSA && DOTNETWINRT_PRESENT) || WINDOWS_UWP
            if (WindowsMixedRealityUtilities.SpatialCoordinateSystem == null || !eyesApiAvailable)
            {
                return;
            }

            SpatialPointerPose pointerPose = SpatialPointerPose.TryGetAtTimestamp(WindowsMixedRealityUtilities.SpatialCoordinateSystem, PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now));
            if (pointerPose != null)
            {
                var eyes = pointerPose.Eyes;
                if (eyes != null)
                {
                    InputSystem?.EyeGazeProvider?.UpdateEyeTrackingStatus(this, eyes.IsCalibrationValid);

                    if (eyes.Gaze.HasValue)
                    {
                        Ray newGaze = new Ray(eyes.Gaze.Value.Origin.ToUnityVector3(), eyes.Gaze.Value.Direction.ToUnityVector3());

                        if (SmoothEyeTracking)
                        {
                            newGaze = SmoothGaze(newGaze);
                        }

                        InputSystem?.EyeGazeProvider?.UpdateEyeGaze(this, newGaze, eyes.UpdateTimestamp.TargetTime.UtcDateTime);
                    }
                }
            }
#endif // (UNITY_WSA && DOTNETWINRT_PRESENT) || WINDOWS_UWP
        }
Example #6
        public override void Update()
        {
#if UNITY_WSA
            if (WindowsMixedRealityUtilities.SpatialCoordinateSystem == null || typeof(SpatialPointerPose).GetProperty("Eyes") == null)
            {
                return;
            }

            SpatialPointerPose pointerPose = SpatialPointerPose.TryGetAtTimestamp(WindowsMixedRealityUtilities.SpatialCoordinateSystem, PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now));
            if (pointerPose != null)
            {
                var eyes = pointerPose.Eyes;
                if (eyes != null)
                {
                    InputSystem?.EyeGazeProvider?.UpdateEyeTrackingStatus(this, eyes.IsCalibrationValid);

                    if (eyes.Gaze.HasValue)
                    {
                        Ray newGaze = new Ray(WindowsMixedRealityUtilities.SystemVector3ToUnity(eyes.Gaze.Value.Origin), WindowsMixedRealityUtilities.SystemVector3ToUnity(eyes.Gaze.Value.Direction));

                        if (SmoothEyeTracking)
                        {
                            newGaze = SmoothGaze(newGaze);
                        }

                        InputSystem?.EyeGazeProvider?.UpdateEyeGaze(this, newGaze, eyes.UpdateTimestamp.TargetTime.UtcDateTime);
                    }
                }
            }
#endif // UNITY_WSA
        }
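
Example #6 performs its Eyes capability probe with reflection on every Update. A cached WinRT metadata lookup is a cheaper equivalent (a sketch, not from the source):

#if UNITY_WSA
        // Sketch: one-time metadata check equivalent to the per-frame
        // reflection probe used above.
        private static readonly bool eyesApiPresent =
            Windows.Foundation.Metadata.ApiInformation.IsPropertyPresent(
                "Windows.UI.Input.Spatial.SpatialPointerPose", "Eyes");
#endif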
Example #7
 public void PositionHologram(SpatialPointerPose pointerPose)
 {
     if (pointerPose != null)
     {
         headPosition = pointerPose.Head.Position;
         headForward  = pointerPose.Head.ForwardDirection;
         position     = headPosition + 2.0f * headForward;
     }
 }
Example #8
        internal override void Update(SpatialPointerPose pose)
        {
            base.Update(pose);

            if (!Locked && pose != null)
            {
                view.DebugString =
                    view.Origo.ToString("0.00") + " "
                    + view.RotationAngle.ToString() + "° "
                    + Position.ToString("0.00");
            }
        }
Example #9
 public static void processInput(string input, SpatialPointerPose pointerPose)
 {
     if (null != pointerPose)
     {
         System.Numerics.Vector3 headPosition  = pointerPose.Head.Position / Spritesheet.positionScaleFactor;
         System.Numerics.Vector3 headDirection = pointerPose.Head.ForwardDirection;
         List<object>            message       = new List<object>()
         {
             input, headPosition, headDirection
         };
         sendMessage(JsonConvert.SerializeObject(message));
     }
 }
Example #10
 /// <summary>
 /// Called once per frame; advances the animation engine and updates each sprite.
 /// </summary>
 public void Update(StepTimer timer, SpatialPointerPose pose)
 {
     // Loading is asynchronous. Resources must be created before they can be updated.
     if (!loadingComplete)
     {
         return;
     }
     animationEngine.updateObjects();
     foreach (var sprite in animationEngine.getObjects())
     {
         sprite.Update(timer, deviceResources, pose);
     }
 }
Example #11
        public void PositionHologram(SpatialPointerPose pointerPose)
        {
            if (pointerPose == null)
            {
                return;
            }

            var headPosition  = pointerPose.Head.Position;
            var headDirection = pointerPose.Head.ForwardDirection;

            var distanceFromUser = 1.0f;

            Position = headPosition + distanceFromUser * headDirection;
        }
Example #12
        // Repositions the sample hologram.
        public void Update(SpatialPointerPose pointerPose, StepTimer timer)
        {
            float deltaTime     = (float)timer.ElapsedSeconds;
            float lerpDeltaTime = deltaTime * c_lerpRate;

            if (pointerPose != null)
            {
                // Get the gaze direction relative to the given coordinate system.
                var headPosition = pointerPose.Head.Position;
                var headForward  = pointerPose.Head.ForwardDirection;
                var headBack     = -headForward;
                var headUp       = pointerPose.Head.UpDirection;
                var headRight    = Vector3.Cross(headForward, headUp);

                Forward = headForward;
                Up      = headUp;
                Right   = headRight;

                var prevPosition = position;
                position = Vector3.Lerp(position, targetPosition, lerpDeltaTime);

                velocity = (position - prevPosition) / deltaTime;

                texCoordScale  = Vector2.Lerp(texCoordScale, targetTexCoordScale, lerpDeltaTime);
                texCoordOffset = Vector2.Lerp(texCoordOffset, targetTexCoordOffset, lerpDeltaTime);

                // Calculate our model to world matrix relative to the user's head.
                Matrix4x4 modelRotationTranslation = Matrix4x4.CreateWorld(position, Forward, Up);

                // Scale our 1m quad down to 20cm wide.
                Matrix4x4 modelScale = Matrix4x4.CreateScale(0.2f);

                Matrix4x4 modelTransform = modelScale * modelRotationTranslation;

                // The view and projection matrices are provided by the system; they are associated
                // with holographic cameras, and updated on a per-camera basis.
                // Here, we provide the model transform for the sample hologram. The model transform
                // matrix is transposed to prepare it for the shader.
                modelConstantBufferData.model          = Matrix4x4.Transpose(modelTransform);
                modelConstantBufferData.texCoordScale  = texCoordScale;
                modelConstantBufferData.texCoordOffset = texCoordOffset;

                // Use the D3D device context to update Direct3D device-based resources.
                var context = deviceResources.D3DDeviceContext;

                // Update the model transform buffer for the hologram.
                context.UpdateSubresource(ref this.modelConstantBufferData, this.modelConstantBuffer);
            }
        }
Example #13
        private Matrix4x4 RotateCursor(SpatialPointerPose pose)
        {
            var facingNormal = Vector3.Normalize(-Position);

            var xAxisRotation  = Vector3.Normalize(new Vector3(facingNormal.Z, 0.0f, -facingNormal.X));
            var yAxisRotation  = Vector3.Normalize(Vector3.Cross(facingNormal, xAxisRotation));
            // Rows are the rotation's basis vectors; in System.Numerics'
            // row-major layout the fourth column of a pure rotation must be
            // (0, 0, 0, 1), not (1, 1, 1, 1).
            var rotationMatrix = new Matrix4x4(
                xAxisRotation.X, xAxisRotation.Y, xAxisRotation.Z, 0.0f,
                yAxisRotation.X, yAxisRotation.Y, yAxisRotation.Z, 0.0f,
                facingNormal.X, facingNormal.Y, facingNormal.Z, 0.0f,
                0.0f, 0.0f, 0.0f, 1.0f
                );

            return(rotationMatrix);
        }
Example #14
        // This function uses a SpatialPointerPose to position the world-locked hologram
        // two meters in front of the user's heading.
        public void PositionHologram(SpatialPointerPose pointerPose)
        {
            if (null != pointerPose)
            {
                // Get the gaze direction relative to the given coordinate system.
                Vector3 headPosition  = pointerPose.Head.Position;
                Vector3 headDirection = pointerPose.Head.ForwardDirection;

                // The hologram is positioned two meters along the user's gaze direction.
                float   distanceFromUser = 2.0f;    // meters
                Vector3 gazeAtTwoMeters  = headPosition + (distanceFromUser * headDirection);

                // This will be used as the translation component of the hologram's
                // model transform.
                this.position = gazeAtTwoMeters;
            }
        }
Example #15
    // Update is called once per frame
    void Update()
    {
#if ENABLE_WINMD_SUPPORT
        if (!_isReadyToRender)
        {
            return;
        }

        // The HolographicFrame has information that the app needs in order
        // to update and render the current frame. The app begins each new
        // frame by calling CreateNextFrame.
        //HolographicFrame ^ holographicFrame = m_holographicSpace->CreateNextFrame();

        // Get a prediction of where holographic cameras will be when this frame
        // is presented.
        //HolographicFramePrediction prediction = holographicFrame->CurrentPrediction;

        IntPtr spatialCoordinateSystemPtr               = WorldManager.GetNativeISpatialCoordinateSystemPtr();
        SpatialCoordinateSystem unityWorldOrigin        = Marshal.GetObjectForIUnknown(spatialCoordinateSystemPtr) as SpatialCoordinateSystem;
        SpatialCoordinateSystem currentCoordinateSystem = unityWorldOrigin;

        _isTrackingFaces = _faceTrackerProcessor.IsTrackingFaces();

        if (_isTrackingFaces)
        {
            MediaFrameReference frame = _videoFrameProcessor.GetLatestFrame();
            if (frame == null)
            {
                return;
            }
            var faces = _faceTrackerProcessor.GetLatestFaces();
            ProcessFaces(faces, frame, currentCoordinateSystem);


            TimeSpan currentTimeStamp = frame.SystemRelativeTime.Value.Duration();
            if (currentTimeStamp > _previousFrameTimestamp)
            {
                // TODO: copy to texture
                _previousFrameTimestamp = frame.SystemRelativeTime.Value.Duration();
            }
        }

        SpatialPointerPose pointerPose = SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now));
#endif
    }
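
A caveat on the marshaling in Example #15: the raw pointer from GetNativeISpatialCoordinateSystemPtr carries a COM reference that the managed wrapper created by GetObjectForIUnknown does not take ownership of. A cautious variant releases it explicitly (a sketch; the ownership convention here is an assumption):

#if ENABLE_WINMD_SUPPORT
    // Sketch: release the IUnknown once the managed wrapper exists so the
    // native coordinate system object is not leaked.
    private static SpatialCoordinateSystem GetUnityWorldOrigin()
    {
        IntPtr spatialCoordinateSystemPtr = WorldManager.GetNativeISpatialCoordinateSystemPtr();
        var unityWorldOrigin = Marshal.GetObjectForIUnknown(spatialCoordinateSystemPtr) as SpatialCoordinateSystem;
        Marshal.Release(spatialCoordinateSystemPtr);
        return unityWorldOrigin;
    }
#endif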
Example #16
        /// <inheritdoc />
        public override void Update()
        {
            // Override gaze before base.Update() updates the controllers
            if (mixedRealityGazeProviderHeadOverride != null && mixedRealityGazeProviderHeadOverride.UseHeadGazeOverride && WindowsMixedRealityUtilities.SpatialCoordinateSystem != null)
            {
                SpatialPointerPose pointerPose = SpatialPointerPose.TryGetAtTimestamp(WindowsMixedRealityUtilities.SpatialCoordinateSystem, PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now));
                if (pointerPose != null)
                {
                    HeadPose head = pointerPose.Head;
                    if (head != null)
                    {
                        mixedRealityGazeProviderHeadOverride.OverrideHeadGaze(head.Position.ToUnityVector3(), head.ForwardDirection.ToUnityVector3());
                    }
                }
            }

            base.Update();
        }
Example #17
        public void Update(StepTimer timer, DeviceResources deviceResources, SpatialPointerPose pose)
        {
            if (pose != null)
            {
                cachedResources = deviceResources;
                var headPosition = pose.Head.Position;
                //Calculate the rotation for billboarding
                SharpDX.Vector3 facingNormal = new SharpDX.Vector3(headPosition.X - position.X, headPosition.Y - position.Y, headPosition.Z - position.Z);
                facingNormal.Normalize();

                SharpDX.Vector3 xAxisRotation = new SharpDX.Vector3(facingNormal.Z, 0, -facingNormal.X);
                xAxisRotation.Normalize();
                SharpDX.Vector3 yAxisRotation = SharpDX.Vector3.Cross(facingNormal, xAxisRotation);
                yAxisRotation.Normalize();

                Matrix4x4 modelRotation = new Matrix4x4(xAxisRotation.X, xAxisRotation.Y, xAxisRotation.Z, 0,
                                                        yAxisRotation.X, yAxisRotation.Y, yAxisRotation.Z, 0,
                                                        facingNormal.X, facingNormal.Y, facingNormal.Z, 0,
                                                        0, 0, 0, 1);

                // Position the cube.
                Matrix4x4 modelTranslation = Matrix4x4.CreateTranslation(position);


                // Multiply to get the transform matrix.
                // Note that this transform does not enforce a particular coordinate system. The calling
                // class is responsible for rendering this content in a consistent manner.
                Matrix4x4 modelTransform = modelRotation * modelTranslation;

                // The view and projection matrices are provided by the system; they are associated
                // with holographic cameras, and updated on a per-camera basis.
                // Here, we provide the model transform for the sample hologram. The model transform
                // matrix is transposed to prepare it for the shader.
                this.modelConstantBufferData.model = Matrix4x4.Transpose(modelTransform);


                // Use the D3D device context to update Direct3D device-based resources.
                var context = deviceResources.D3DDeviceContext;

                // Update the model transform buffer for the hologram.
                context.UpdateSubresource(ref this.modelConstantBufferData, this.modelConstantBuffer);
            }
        }
Example #18
        public void Update(SpatialPointerPose spatialPointerPose)
        {
            if (!_loadingComplete)
            {
                return;
            }
            // The head pose is not needed for the cursor transform; just guard
            // against a null pose instead of dereferencing it.
            if (spatialPointerPose == null)
            {
                return;
            }

            var modelTranslation = Matrix4x4.CreateTranslation(Position);
            var modelRotation    = RotateCursor(spatialPointerPose);

            var modelTransform = modelRotation * modelTranslation;

            _modelConstantBufferData.model = Matrix4x4.Transpose(modelTransform);

            var context = _deviceResources.D3DDeviceContext;

            context.UpdateSubresource(ref _modelConstantBufferData, _modelConstantBuffer);
        }
Example #19
        Node AddNode(SpatialAnchor anchor, PerceptionTimestamp perceptionTimestamp)
        {
            var position = Vector3.Zero;
            var forward  = Vector3.Zero;

            var anchorPose = SpatialPointerPose.TryGetAtTimestamp(anchor.CoordinateSystem, perceptionTimestamp);

            if (anchorPose != null)
            {
                position = anchorPose.Head.Position;
                forward  = anchorPose.Head.ForwardDirection;
            }

            var node = new Node(anchor, position, forward);

            nodes.Add(node);

            return(node);
        }
Example #20
        /// <inheritdoc/>
        public override void Update()
        {
            Profiler.BeginSample("[MRTK] WindowsMixedRealityDeviceManager.Update");

            base.Update();

#if (UNITY_WSA && DOTNETWINRT_PRESENT) || WINDOWS_UWP
            if (mixedRealityGazeProviderHeadOverride != null && mixedRealityGazeProviderHeadOverride.UseHeadGazeOverride)
            {
                SpatialPointerPose pointerPose = SpatialPointerPose.TryGetAtTimestamp(WindowsMixedRealityUtilities.SpatialCoordinateSystem, PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now));
                if (pointerPose != null)
                {
                    HeadPose head = pointerPose.Head;
                    if (head != null)
                    {
                        mixedRealityGazeProviderHeadOverride.OverrideHeadGaze(head.Position.ToUnityVector3(), head.ForwardDirection.ToUnityVector3());
                    }
                }
            }
#endif // (UNITY_WSA && DOTNETWINRT_PRESENT) || WINDOWS_UWP

            UpdateInteractionManagerReading();

            for (var i = 0; i < numInteractionManagerStates; i++)
            {
                // SourceDetected gets raised when a new controller is detected and, if previously present,
                // when OnEnable is called. Do not create a new controller here.
                var controller = GetOrAddController(interactionManagerStates[i].source, false);

                if (controller != null)
                {
                    controller.UpdateController(interactionManagerStates[i]);
                }
            }

            LastInteractionManagerStateReading = interactionManagerStates;

            Profiler.EndSample(); // Update
        }
Example #21
        public IList<Node> FindClosestNodesWithSightedItem(SpatialCoordinateSystem referenceFrameCoordinateSystem, SpatialPointerPose pose, string sightingItem)
        {
            var filteredNodes = nodes.Where(node =>
            {
                return(node.Sightings.Any(sighting =>
                {
                    return sighting.Tokens.Any(token => token.Equals(sightingItem, StringComparison.OrdinalIgnoreCase));
                }));
            });

            if (filteredNodes != null)
            {
                return(filteredNodes.OrderBy(node =>
                {
                    return node.TryGetDistance(referenceFrameCoordinateSystem, pose.Head.Position);
                }).ToList());
            }

            return(null);
        }
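
Both this example and Example #29 rely on Node.TryGetDistance, which is not part of this listing. A plausible implementation, assuming each Node keeps the SpatialAnchor it was constructed with in Example #19:

        // Sketch: locate the anchor relative to the reference frame and measure
        // from the anchor origin to the given head position. Returns a sentinel
        // when the anchor is not locatable this frame.
        public float TryGetDistance(SpatialCoordinateSystem referenceFrameCoordinateSystem, System.Numerics.Vector3 headPosition)
        {
            System.Numerics.Matrix4x4? transform = anchor.CoordinateSystem.TryGetTransformTo(referenceFrameCoordinateSystem);
            if (!transform.HasValue)
            {
                return float.MaxValue;
            }
            return System.Numerics.Vector3.Distance(transform.Value.Translation, headPosition);
        }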
Example #22
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used for
            // creating the stereo view matrices when rendering the sample content.
            SpatialCoordinateSystem currentCoordinateSystem = referenceFrame.CoordinateSystem;

            SpatialPointerPose pose = SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp);

            ProcessFrame(currentCoordinateSystem);

            if (Utils.GetCurrentUnixTimestampMillis() - lastFaceDetectedTimestamp > faceTimeThreshold)
            {
                if (pose != null)
                {
                    var headPosition = pose.Head.Position;
                    var headForward  = pose.Head.ForwardDirection;
                    quadRenderer.TargetPosition = headPosition + (2.0f * headForward);
                }

                textRenderer.RenderTextOffscreen("No faces detected");
            }

            timer.Tick(() =>
            {
                //
                // TODO: Update scene objects.
                //
                // Put time-based updates here. By default this code will run once per frame,
                // but if you change the StepTimer to use a fixed time step this code will
                // run as many times as needed to get to the current step.
                //

                quadRenderer.Update(pose, timer);
            });

            // We complete the frame update by using information about our content positioning
            // to set the focus point.
            foreach (var cameraPose in prediction.CameraPoses)
            {
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera.
                // You should set the focus point near the content that the user is looking at.
                // In this example, we put the focus point at the center of the sample hologram,
                // since that is the only hologram available for the user to focus on.
                // You can also set the relative velocity and facing of that content; the sample
                // hologram is at a fixed point so we only need to indicate its position.

                if(Utils.GetCurrentUnixTimestampMillis() - lastFaceDetectedTimestamp <= faceTimeThreshold)
                {
                    renderingParameters.SetFocusPoint(
                        currentCoordinateSystem,    
                        quadRenderer.Position,
                        quadRenderer.Forward,
                        quadRenderer.Velocity
                    );
                }
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return holographicFrame;
        }
Example #23
        /// <summary>
        /// Renders the current frame to each holographic display, according to the
        /// current application and spatial positioning state.
        /// </summary>
        public void UpdateAndDraw()
        {
            HolographicFrame holographicFrame = this.holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            this.deviceResources.EnsureCameraResources(holographicFrame, prediction);

            this.UpdateEyeProperties();

            // Up-to-date frame predictions enhance the effectiveness of image stabilization and
            // allow more accurate positioning of holograms.
            holographicFrame.UpdateCurrentPrediction();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            prediction = holographicFrame.CurrentPrediction;

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used for
            // creating the stereo view matrices when rendering the sample content.
            SpatialCoordinateSystem currentCoordinateSystem = this.ReferenceFrame.CoordinateSystem;

            var eyeTexture = this.eyesProperties[0].Texture;

            this.deviceResources.UpdateCameraClipDistance(eyeTexture.NearPlane, eyeTexture.FarPlane);

            holographicFrame.UpdateCurrentPrediction();
            prediction = holographicFrame.CurrentPrediction;

            foreach (var cameraPose in prediction.CameraPoses)
            {
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera.
                // You should set the focus point near the content that the user is looking at.
                // In this example, we put the focus point at the center of the sample hologram,
                // since that is the only hologram available for the user to focus on.
                // You can also set the relative velocity and facing of that content; the sample
                // hologram is at a fixed point so we only need to indicate its position.
                if (this.mixedRealityService.FocusPosition.HasValue)
                {
                    var position = this.mixedRealityService.FocusPosition.Value;

                    if (!this.mixedRealityService.FocusNormal.HasValue)
                    {
                        renderingParameters.SetFocusPoint(currentCoordinateSystem, new System.Numerics.Vector3(position.X, position.Y, position.Z));
                    }
                    else
                    {
                        var normal = this.mixedRealityService.FocusNormal.Value;

                        if (!this.mixedRealityService.FocusVelocity.HasValue)
                        {
                            renderingParameters.SetFocusPoint(
                                currentCoordinateSystem,
                                new System.Numerics.Vector3(position.X, position.Y, position.Z),
                                new System.Numerics.Vector3(normal.X, normal.Y, normal.Z));
                        }
                        else
                        {
                            var velocity = this.mixedRealityService.FocusVelocity.Value;

                            renderingParameters.SetFocusPoint(
                                currentCoordinateSystem,
                                new System.Numerics.Vector3(position.X, position.Y, position.Z),
                                new System.Numerics.Vector3(normal.X, normal.Y, normal.Z),
                                new System.Numerics.Vector3(velocity.X, velocity.Y, velocity.Z));
                        }
                    }
                }

                var pointerPose = SpatialPointerPose.TryGetAtTimestamp(this.ReferenceFrame.CoordinateSystem, prediction.Timestamp);
                if (pointerPose != null)
                {
                    pointerPose.Head.Position.ToWave(out this.headRay.Position);
                    pointerPose.Head.ForwardDirection.ToWave(out this.headRay.Direction);
                }

                var viewTransform       = cameraPose.TryGetViewTransform(this.ReferenceFrame.CoordinateSystem);
                var projectionTransform = cameraPose.ProjectionTransform;

                if (viewTransform.HasValue)
                {
                    for (int i = 0; i < 2; i++)
                    {
                        Matrix viewMatrix;
                        Matrix projectionMatrix;

                        if (i == (int)VREyeType.LeftEye)
                        {
                            viewTransform.Value.Left.ToWave(out viewMatrix);
                            projectionTransform.Left.ToWave(out projectionMatrix);
                        }
                        else
                        {
                            viewTransform.Value.Right.ToWave(out viewMatrix);
                            projectionTransform.Right.ToWave(out projectionMatrix);
                        }

                        Matrix view;
                        Matrix.Invert(ref viewMatrix, out view);

                        var eyeProperties = this.eyesProperties[i];
                        var eyePose       = eyeProperties.Pose;
                        eyePose.Position = view.Translation;
                        Quaternion.CreateFromRotationMatrix(ref view, out eyePose.Orientation);
                        eyeProperties.Pose       = eyePose;
                        eyeProperties.Projection = projectionMatrix;
                    }

                    var leftEyePose         = this.eyesProperties[(int)VREyeType.LeftEye].Pose;
                    var rightEyePose        = this.eyesProperties[(int)VREyeType.RightEye].Pose;
                    var centerEyeProperties = this.eyesProperties[(int)VREyeType.CenterEye];

                    var centerEyePose = centerEyeProperties.Pose;
                    centerEyePose.Position    = Vector3.Lerp(leftEyePose.Position, rightEyePose.Position, 0.5f);
                    centerEyePose.Orientation = Quaternion.Lerp(leftEyePose.Orientation, rightEyePose.Orientation, 0.5f);
                    centerEyeProperties.Pose  = centerEyePose;
                }
            }

            this.Render();

            this.deviceResources.Present(ref holographicFrame);
        }
Example #24
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

#if DRAW_SAMPLE_CONTENT
            if (stationaryReferenceFrame != null)
            {
                // Check for new input state since the last frame.
                for (int i = 0; i < gamepads.Count; ++i)
                {
                    bool buttonDownThisUpdate = (gamepads[i].gamepad.GetCurrentReading().Buttons & GamepadButtons.A) == GamepadButtons.A;
                    if (buttonDownThisUpdate && !gamepads[i].buttonAWasPressedLastFrame)
                    {
                        pointerPressed = true;
                    }
                    gamepads[i].buttonAWasPressedLastFrame = buttonDownThisUpdate;
                }

                SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
                SpatialPointerPose            pose         = null;
                if (null != pointerState)
                {
                    pose = pointerState.TryGetPointerPose(stationaryReferenceFrame.CoordinateSystem);
                }
                else if (pointerPressed)
                {
                    pose = SpatialPointerPose.TryGetAtTimestamp(stationaryReferenceFrame.CoordinateSystem, prediction.Timestamp);
                }
                pointerPressed = false;

                // When a Pressed gesture is detected, the sample hologram will be repositioned
                // two meters in front of the user.
                spinningCubeRenderer.PositionHologram(pose);
            }
#endif

            timer.Tick(() =>
            {
                //
                // TODO: Update scene objects.
                //
                // Put time-based updates here. By default this code will run once per frame,
                // but if you change the StepTimer to use a fixed time step this code will
                // run as many times as needed to get to the current step.
                //

#if DRAW_SAMPLE_CONTENT
                spinningCubeRenderer.Update(timer);
#endif
            });

            if (!canCommitDirect3D11DepthBuffer)
            {
                // On versions of the platform that do not support the CommitDirect3D11DepthBuffer API, we can control
                // image stabilization by setting a focus point with optional plane normal and velocity.
                foreach (var cameraPose in prediction.CameraPoses)
                {
#if DRAW_SAMPLE_CONTENT
                    // The HolographicCameraRenderingParameters class provides access to set
                    // the image stabilization parameters.
                    HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                    // SetFocusPoint informs the system about a specific point in your scene to
                    // prioritize for image stabilization. The focus point is set independently
                    // for each holographic camera. When setting the focus point, put it on or
                    // near content that the user is looking at.
                    // In this example, we put the focus point at the center of the sample hologram.
                    // You can also set the relative velocity and facing of the stabilization
                    // plane using overloads of this method.
                    if (stationaryReferenceFrame != null)
                    {
                        renderingParameters.SetFocusPoint(
                            stationaryReferenceFrame.CoordinateSystem,
                            spinningCubeRenderer.Position
                            );
                    }
#endif
                }
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return(holographicFrame);
        }
Example #25
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used for
            // creating the stereo view matrices when rendering the sample content.
            SpatialCoordinateSystem currentCoordinateSystem = referenceFrame.CoordinateSystem;

#if DRAW_SAMPLE_CONTENT
            // Check for new input state since the last frame.
            foreach (var gamepad in gamepads)
            {
                pointerPressed |= ((gamepad.GetCurrentReading().Buttons & GamepadButtons.A) == GamepadButtons.A);
            }

            SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
            SpatialPointerPose            pose         = null;
            if (null != pointerState)
            {
                pose = pointerState.TryGetPointerPose(currentCoordinateSystem);
            }
            else if (pointerPressed)
            {
                pose = SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp);
            }
            pointerPressed = false;

            // When a Pressed gesture is detected, the sample hologram will be repositioned
            // two meters in front of the user.
            spinningCubeRenderer.PositionHologram(pose);
#endif

            timer.Tick(() =>
            {
                //
                // TODO: Update scene objects.
                //
                // Put time-based updates here. By default this code will run once per frame,
                // but if you change the StepTimer to use a fixed time step this code will
                // run as many times as needed to get to the current step.
                //

#if DRAW_SAMPLE_CONTENT
                spinningCubeRenderer.Update(timer);
#endif
            });

            // We complete the frame update by using information about our content positioning
            // to set the focus point.
            foreach (var cameraPose in prediction.CameraPoses)
            {
#if DRAW_SAMPLE_CONTENT
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera.
                // You should set the focus point near the content that the user is looking at.
                // In this example, we put the focus point at the center of the sample hologram,
                // since that is the only hologram available for the user to focus on.
                // You can also set the relative velocity and facing of that content; the sample
                // hologram is at a fixed point so we only need to indicate its position.
                renderingParameters.SetFocusPoint(
                    currentCoordinateSystem,
                    spinningCubeRenderer.Position
                    );
#endif
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return(holographicFrame);
        }
Example #26
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used for
            // creating the stereo view matrices when rendering the sample content.
            SpatialCoordinateSystem currentCoordinateSystem = referenceFrame.CoordinateSystem;

            spatialInputHandler.setCoordinateSystem(currentCoordinateSystem);

            timer.Tick(() =>
            {
                //
                // TODO: Update scene objects.
                //
                // Put time-based updates here. By default this code will run once per frame,
                // but if you change the StepTimer to use a fixed time step this code will
                // run as many times as needed to get to the current step.
                //
                SpatialPointerPose playerPosition = Windows.UI.Input.Spatial.SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp);
                spritesRenderer.Update(timer, playerPosition);
                ClockworkSocket.processInput("move", playerPosition);
            });

            // We complete the frame update by using information about our content positioning
            // to set the focus point.
            foreach (var cameraPose in prediction.CameraPoses)
            {
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera.
                // You should set the focus point near the content that the user is looking at.
                // In this example, we put the focus point at the center of the sample hologram,
                // since that is the only hologram available for the user to focus on.
                // You can also set the relative velocity and facing of that content; the sample
                // hologram is at a fixed point so we only need to indicate its position.
                //renderingParameters.SetFocusPoint(
                //    currentCoordinateSystem,
                //    spritesRenderer.Position
                //    );
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return(holographicFrame);
        }
Example #27
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used for
            // creating the stereo view matrices when rendering the sample content.
            SpatialCoordinateSystem currentCoordinateSystem = attachreferenceFrame.GetStationaryCoordinateSystemAtTimestamp(prediction.Timestamp); // alternative: referenceFrame.CoordinateSystem


#if DRAW_SAMPLE_CONTENT
            // Check for new input state since the last frame.
            SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
            if (null != pointerState)
            {
                // When a Pressed gesture is detected, the sample hologram will be repositioned
                // two meters in front of the user.
                spinningCubeRenderer.PositionHologram(
                    //  pointerState.TryGetPointerPose(currentCoordinateSystem)
                    SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp)
                    );
            }

            //var downstate = Windows.UI.Core.CoreVirtualKeyStates.Down;
            //bool rightclick = (Windows.UI.Core.CoreWindow.GetForCurrentThread().GetKeyState(Windows.System.VirtualKey.Escape) & downstate) == downstate;
            //System.Diagnostics.Debug.WriteLine("Windows.System.VirtualKey.Escape  " + Windows.UI.Core.CoreWindow.GetForCurrentThread().GetKeyState(Windows.System.VirtualKey.Escape).ToString() + " downstate" + downstate);
            //System.Diagnostics.Debug.WriteLine("Windows.System.VirtualKey.A  " + Windows.UI.Core.CoreWindow.GetForCurrentThread().GetKeyState(Windows.System.VirtualKey.A).ToString() + " downstate" + downstate);
            //if (rightclick)
            //{
            //    Windows.UI.ViewManagement.ApplicationViewSwitcher.SwitchAsync(VideoGallery.mainId, VideoGallery.appId, Windows.UI.ViewManagement.ApplicationViewSwitchingOptions.ConsolidateViews);
            //}
#endif



            timer.Tick(() =>
            {
                //
                // TODO: Update scene objects.
                //
                // Put time-based updates here. By default this code will run once per frame,
                // but if you change the StepTimer to use a fixed time step this code will
                // run as many times as needed to get to the current step.
                //

#if DRAW_SAMPLE_CONTENT
                spinningCubeRenderer.Update(timer);
#endif
            });

            // We complete the frame update by using information about our content positioning
            // to set the focus point.
            foreach (var cameraPose in prediction.CameraPoses)
            {
#if DRAW_SAMPLE_CONTENT
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera.
                // You should set the focus point near the content that the user is looking at.
                // In this example, we put the focus point at the center of the sample hologram,
                // since that is the only hologram available for the user to focus on.
                // You can also set the relative velocity and facing of that content; the sample
                // hologram is at a fixed point so we only need to indicate its position.
                Vector3 position = new Vector3(0.0f, 0.0f, -3.0f);
                renderingParameters.SetFocusPoint(
                    currentCoordinateSystem, position
                    /*spinningCubeRenderer.Position*/
                    );
#endif
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return(holographicFrame);
        }
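
Example #27 reads its coordinate system from an attached frame of reference. How attachreferenceFrame is typically created (a sketch, since the initialization is outside this excerpt):

        // Sketch: an attached frame of reference follows the user's position
        // while keeping a fixed heading, so content stays body-locked.
        private void InitializeAttachedFrame()
        {
            SpatialLocator locator = SpatialLocator.GetDefault();
            attachreferenceFrame = locator.CreateAttachedFrameOfReferenceAtCurrentHeading();
        }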
Example #28
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update(HolographicFrame previousFrame)
        {
            // TODO: Put CPU work that does not depend on the HolographicCameraPose here.

            // Apps should wait for the optimal time to begin pose-dependent work.
            // The platform will automatically adjust the wakeup time to get
            // the lowest possible latency at high frame rates. For manual
            // control over latency, use the WaitForNextFrameReadyWithHeadStart
            // API.
            // WaitForNextFrameReady and WaitForNextFrameReadyWithHeadStart are the
            // preferred frame synchronization APIs for Windows Mixed Reality. When
            // running on older versions of the OS that do not include support for
            // these APIs, your app can use the WaitForFrameToFinish API for similar
            // (but not as optimal) behavior.
            if (canUseWaitForNextFrameReadyAPI)
            {
                try
                {
                    holographicSpace.WaitForNextFrameReady();
                }
                catch (NotImplementedException)
                {
                    // Catch a specific case where WaitForNextFrameReady() is present but not implemented
                    // and default back to WaitForFrameToFinish() in that case.
                    canUseWaitForNextFrameReadyAPI = false;
                }
            }
            else if (previousFrame != null)
            {
                previousFrame.WaitForFrameToFinish();
            }

            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

#if DRAW_SAMPLE_CONTENT
            if (stationaryReferenceFrame != null)
            {
                // Check for new input state since the last frame.
                for (int i = 0; i < gamepads.Count; ++i)
                {
                    bool buttonDownThisUpdate = (gamepads[i].gamepad.GetCurrentReading().Buttons & GamepadButtons.A) == GamepadButtons.A;
                    if (buttonDownThisUpdate && !gamepads[i].buttonAWasPressedLastFrame)
                    {
                        pointerPressed = true;
                    }
                    gamepads[i].buttonAWasPressedLastFrame = buttonDownThisUpdate;
                }

                SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
                SpatialPointerPose            pose         = null;
                if (null != pointerState)
                {
                    pose = pointerState.TryGetPointerPose(stationaryReferenceFrame.CoordinateSystem);
                }
                else if (pointerPressed)
                {
                    pose = SpatialPointerPose.TryGetAtTimestamp(stationaryReferenceFrame.CoordinateSystem, prediction.Timestamp);
                }
                pointerPressed = false;

                // When a Pressed gesture is detected, the sample hologram will be repositioned
                // two meters in front of the user.
                quadRendererR.PositionHologram(pose);
                quadRendererL.PositionHologram(pose);
            }
#endif

            timer.Tick(() =>
            {
                //
                // TODO: Update scene objects.
                //
                // Put time-based updates here. By default this code will run once per frame,
                // but if you change the StepTimer to use a fixed time step this code will
                // run as many times as needed to get to the current step.
                //

#if DRAW_SAMPLE_CONTENT
                quadRendererR.Update(timer);
                quadRendererL.Update(timer);
#endif
            });

            // On HoloLens 2, the platform can achieve better image stabilization results if it has
            // a stabilization plane and a depth buffer.
            // Note that the SetFocusPoint API includes an override which takes velocity as a
            // parameter. This is recommended for stabilizing holograms in motion.
            foreach (var cameraPose in prediction.CameraPoses)
            {
#if DRAW_SAMPLE_CONTENT
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera. When setting the focus point, put it on or
                // near content that the user is looking at.
                // In this example, we put the focus point at the center of the sample hologram.
                // You can also set the relative velocity and facing of the stabilization
                // plane using overloads of this method.
                if (stationaryReferenceFrame != null)
                {
                    renderingParameters.SetFocusPoint(
                        stationaryReferenceFrame.CoordinateSystem,
                        new System.Numerics.Vector3(0, 0, 0)
                        );
                }
#endif
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return(holographicFrame);
        }
Example #29
 public IList<Node> GetClosestNodes(SpatialCoordinateSystem referenceFrameCoordinateSystem, SpatialPointerPose pose, float nodeRadius = 0.5f)
 {
     return(nodes.OrderBy(node =>
     {
         return node.TryGetDistance(referenceFrameCoordinateSystem, pose.Head.Position);
     }).Where(node =>
     {
         return node.TryGetDistance(referenceFrameCoordinateSystem, pose.Head.Position) <= nodeRadius;
     }).ToList());
 }
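
Example #29 evaluates TryGetDistance twice per node, once in OrderBy and once in Where. A sketch of the same query computing the distance once per node (a hypothetical rewrite, not from the source):

 public IList<Node> GetClosestNodes(SpatialCoordinateSystem referenceFrameCoordinateSystem, SpatialPointerPose pose, float nodeRadius = 0.5f)
 {
     // Cache the distance per node, then filter and sort on the cached value.
     return nodes
         .Select(node => new { node, distance = node.TryGetDistance(referenceFrameCoordinateSystem, pose.Head.Position) })
         .Where(x => x.distance <= nodeRadius)
         .OrderBy(x => x.distance)
         .Select(x => x.node)
         .ToList();
 }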
Example #30
        /// <summary>
        /// Function which checks for new eye tracking data and is called periodically by the timer
        /// </summary>
        /// <param name="source"></param>
        /// <param name="e"></param>
        private void CheckForEyeData(object source, ElapsedEventArgs e)
        {
            // Make sure the previous event isn't still running
            if (System.Threading.Interlocked.CompareExchange(ref fetchDataTimerIsBusy, 1, 0) == 1)
            {
                //Debug.LogError("Previous event still running!");
                return;
            }

            try {
#if (UNITY_WSA && DOTNETWINRT_PRESENT) || WINDOWS_UWP
                // Make sure we have the spatial coordinate system (which is cached every update) and the eyes API is available
                if (currentSpatialCoordinateSystem == null || !EyesApiAvailable)
                {
                    //Debug.Log("[UWPDataAccess] No currentSpatialCoordinateSystem or Eyes API not available!");
                    return;
                }

                // Try to get the new pointer data (which includes eye tracking)
                SpatialPointerPose pointerPose = SpatialPointerPose.TryGetAtTimestamp(currentSpatialCoordinateSystem, PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now));
                if (pointerPose != null)
                {
                    // Check if we actually got any eye tracking data
                    var eyes = pointerPose.Eyes;
                    if (eyes != null)
                    {
                        // Unix time stamp from when the eye tracking data we got was acquired
                        long targetTimeUnix = eyes.UpdateTimestamp.TargetTime.ToUnixTimeMilliseconds();

                        // Check if we have new data
                        if (lastEyeDataTimestamp != targetTimeUnix)
                        {
                            // Save new time stamp
                            lastEyeDataTimestamp = targetTimeUnix;

                            // Save the information whether the calibration is valid
                            IsGazeCalibrationValid = eyes.IsCalibrationValid;

                            // If we have eye tracking data announce it in the event, otherwise simply announce Vector3.zero as origin and direction
                            if (eyes.Gaze.HasValue)
                            {
                                dataQueue.Enqueue(new GazeAPIData()
                                {
                                    EyeDataTimestamp         = targetTimeUnix,
                                    EyeDataRelativeTimestamp = eyes.UpdateTimestamp.SystemRelativeTargetTime.TotalMilliseconds,
                                    IsCalibrationValid       = eyes.IsCalibrationValid,
                                    GazeHasValue             = eyes.Gaze.HasValue,
                                    GazeOrigin    = eyes.Gaze.Value.Origin.ToUnityVector3(),
                                    GazeDirection = eyes.Gaze.Value.Direction.ToUnityVector3()
                                });
                            }
                            else
                            {
                                dataQueue.Enqueue(new GazeAPIData()
                                {
                                    EyeDataTimestamp         = targetTimeUnix,
                                    EyeDataRelativeTimestamp = eyes.UpdateTimestamp.SystemRelativeTargetTime.TotalMilliseconds,
                                    IsCalibrationValid       = eyes.IsCalibrationValid,
                                    GazeHasValue             = eyes.Gaze.HasValue,
                                    GazeOrigin    = Vector3.zero,
                                    GazeDirection = Vector3.zero
                                });
                            }
                        }
                    }
                }
#else
                // On non-UWP platforms, just log that nothing will be done.
                Debug.Log("[UWPDataAccess] Not on correct platform! Doing nothing!");
#endif
            }
            finally
            {
                fetchDataTimerIsBusy = 0;
            }
        }
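
The timer callback in Example #30 runs off Unity's main thread, which is why results are queued rather than applied directly. A consumer sketch, assuming dataQueue is a ConcurrentQueue<GazeAPIData> drained from a MonoBehaviour:

        // Sketch: drain the queue on the main thread, where UnityEngine APIs
        // are safe to call.
        private void Update()
        {
            while (dataQueue.TryDequeue(out GazeAPIData gazeData))
            {
                if (gazeData.GazeHasValue)
                {
                    Debug.DrawRay(gazeData.GazeOrigin, gazeData.GazeDirection, Color.green);
                }
            }
        }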