Example #1
    /// <summary>
    /// Modified from https://github.com/VulcanTechnologies/HoloLensCameraStream/blob/master/HoloLensCameraStream/Plugin%20Project/VideoCaptureSample.cs
    /// Customized for HoloLens 2. Unlike HoloLens 1, HoloLens 2 does not appear to expose the
    /// viewTransformGuid property, so the camera extrinsics are treated as identity.
    /// Note that the API to get the cameraCoordinateSystem is also different from HoloLens 1.
    /// </summary>
    /// <param name="matrix">The transform matrix used to convert between coordinate spaces.
    /// The matrix will have to be converted to a Unity matrix before it can be used by methods in the UnityEngine namespace.
    /// See https://forum.unity3d.com/threads/locatable-camera-in-unity.398803/ for details.</param>
    public bool HL2TryGetCameraToWorldMatrix(MediaFrameReference frameReference, out float[] outMatrix)
    {
        if (worldOrigin == null)
        {
            outMatrix = GetIdentityMatrixFloatArray();
            return false;
        }

        SpatialCoordinateSystem cameraCoordinateSystem = frameReference.CoordinateSystem;

        if (cameraCoordinateSystem == null)
        {
            outMatrix = GetIdentityMatrixFloatArray();
            return false;
        }

        System.Numerics.Matrix4x4? cameraCoordsToUnityCoordsMatrix = cameraCoordinateSystem.TryGetTransformTo(worldOrigin);
        if (cameraCoordsToUnityCoordsMatrix == null)
        {
            outMatrix = GetIdentityMatrixFloatArray();
            return false;
        }

        System.Numerics.Matrix4x4 cameraCoordsToUnityCoords = System.Numerics.Matrix4x4.Transpose(cameraCoordsToUnityCoordsMatrix.Value);

        // Change from right handed coordinate system to left handed UnityEngine
        cameraCoordsToUnityCoords.M31 *= -1f;
        cameraCoordsToUnityCoords.M32 *= -1f;
        cameraCoordsToUnityCoords.M33 *= -1f;
        cameraCoordsToUnityCoords.M34 *= -1f;

        outMatrix = ConvertMatrixToFloatArray(cameraCoordsToUnityCoords);

        return true;
    }
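
Example #1 assumes two helpers that are not shown. A minimal sketch of plausible implementations, assuming the row-major float[16] layout used by the upstream HoloLensCameraStream plugin:

    static float[] ConvertMatrixToFloatArray(System.Numerics.Matrix4x4 m)
    {
        // Flatten row by row: m11, m12, ..., m44.
        return new float[] {
            m.M11, m.M12, m.M13, m.M14,
            m.M21, m.M22, m.M23, m.M24,
            m.M31, m.M32, m.M33, m.M34,
            m.M41, m.M42, m.M43, m.M44
        };
    }

    static float[] GetIdentityMatrixFloatArray()
    {
        return ConvertMatrixToFloatArray(System.Numerics.Matrix4x4.Identity);
    }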
Example #2
        private System.Numerics.Matrix4x4 GetSceneToWorldTransform()
        {
            var result = System.Numerics.Matrix4x4.Identity;

            if (Application.isEditor && !isRemoting)
            {
                return result;
            }

            SpatialCoordinateSystem sceneOrigin = SpatialGraphInteropPreview.CreateCoordinateSystemForNode(sceneOriginId);

            var nativePtr = UnityEngine.XR.WSA.WorldManager.GetNativeISpatialCoordinateSystemPtr();
            SpatialCoordinateSystem worldOrigin = SpatialCoordinateSystem.FromNativePtr(nativePtr);

            var sceneToWorld = sceneOrigin.TryGetTransformTo(worldOrigin);

            if (sceneToWorld.HasValue)
            {
                result = sceneToWorld.Value; // numerics
            }
            else
            {
                Debug.LogError("Getting coordinate system failed!");
            }

            return result;
        }
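
The method above returns a System.Numerics matrix. Before it can drive a Unity transform it must be converted; a hedged sketch of one plausible conversion, assuming the same transpose-plus-third-row-negation convention Example #1 uses:

    static UnityEngine.Matrix4x4 ToUnityMatrix(System.Numerics.Matrix4x4 m)
    {
        m = System.Numerics.Matrix4x4.Transpose(m);             // row-vector to column-vector convention
        m.M31 *= -1f; m.M32 *= -1f; m.M33 *= -1f; m.M34 *= -1f; // right-handed to left-handed
        var u = new UnityEngine.Matrix4x4();
        u.SetRow(0, new UnityEngine.Vector4(m.M11, m.M12, m.M13, m.M14));
        u.SetRow(1, new UnityEngine.Vector4(m.M21, m.M22, m.M23, m.M24));
        u.SetRow(2, new UnityEngine.Vector4(m.M31, m.M32, m.M33, m.M34));
        u.SetRow(3, new UnityEngine.Vector4(m.M41, m.M42, m.M43, m.M44));
        return u;
    }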
Example #3
        /// <summary>
        /// Takes in a spatial node id and returns the transformation matrix that specifies the transformation from the spatial node to the Unity world.
        /// </summary>
        /// <param name="nodeId">Id of the spatial node.</param>
        /// <param name="runOnDevice">True if the application is running on a hololens device</param>
        /// <returns>Transformation matrix from the spatial node to the Unity world.</returns>
        public static System.Numerics.Matrix4x4? GetSceneToUnityTransform(Guid nodeId, bool runOnDevice)
        {
            System.Numerics.Matrix4x4? sceneToUnityTransform = System.Numerics.Matrix4x4.Identity;

#if WINDOWS_UWP
            // Only get the spatial coordinate if we are running on device
            if (runOnDevice)
            {
                Logger.Log("TransformUtils.GetSceneToUnityTransform: About to create a coordinate system for node id: " + nodeId);
                SpatialCoordinateSystem sceneSpatialCoordinateSystem = Windows.Perception.Spatial.Preview.SpatialGraphInteropPreview.CreateCoordinateSystemForNode(nodeId);
                SpatialCoordinateSystem unitySpatialCoordinateSystem = (SpatialCoordinateSystem)System.Runtime.InteropServices.Marshal.GetObjectForIUnknown(
                    UnityEngine.XR.WSA.WorldManager.GetNativeISpatialCoordinateSystemPtr());

                sceneToUnityTransform = sceneSpatialCoordinateSystem.TryGetTransformTo(unitySpatialCoordinateSystem);

                if (sceneToUnityTransform != null)
                {
                    sceneToUnityTransform = TransformUtils.ConvertRightHandedMatrix4x4ToLeftHanded(sceneToUnityTransform.Value);
                }
                else
                {
                    Logger.LogWarning("TransformUtils.GetSceneToUnityTransform: Scene to Unity transform is null. Not good.");
                }
            }
#endif
            return sceneToUnityTransform;
        }
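
TransformUtils.ConvertRightHandedMatrix4x4ToLeftHanded is not shown here. Judging from the identical sign flips in the QR-code examples below (#17, #18, #19), it most likely negates the third row and third column (M33 is flipped twice, so it is unchanged); a hedged sketch:

    public static System.Numerics.Matrix4x4 ConvertRightHandedMatrix4x4ToLeftHanded(System.Numerics.Matrix4x4 m)
    {
        m.M13 = -m.M13; m.M23 = -m.M23; m.M43 = -m.M43; // third column
        m.M31 = -m.M31; m.M32 = -m.M32; m.M34 = -m.M34; // third row
        return m;
    }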
Example #4
 internal void UpdateFromColor(SpatialCoordinateSystem colorSystem)
 {
     _colorSystem = colorSystem;
     if (_depthSystem != null)
     {
         _depthToColor = _depthSystem.TryGetTransformTo(_colorSystem);
         _colorToDepth = _colorSystem.TryGetTransformTo(_depthSystem);
     }
 }
        /// <summary>
        /// This returns the transform matrix at the time the photo was captured, if location data is available.
        /// If it's not, that is probably an indication that the HoloLens is not tracking and its location is not known.
        /// It could also mean the VideoCapture stream is not running.
        /// If location data is unavailable then the camera to world matrix will be set to the identity matrix.
        /// </summary>
        /// <param name="matrix">The transform matrix used to convert between coordinate spaces.
        /// The matrix will have to be converted to a Unity matrix before it can be used by methods in the UnityEngine namespace.
        /// See https://forum.unity3d.com/threads/locatable-camera-in-unity.398803/ for details.</param>
        public bool TryGetCameraToWorldMatrix(out float[] outMatrix)
        {
            if (frameReference.Properties.ContainsKey(viewTransformGuid) == false)
            {
                outMatrix = GetIdentityMatrixFloatArray();
                return false;
            }

            if (worldOrigin == null)
            {
                outMatrix = GetIdentityMatrixFloatArray();
                return false;
            }

            // Matrix4x4 is a struct, so a null check on the converted value can never fire;
            // check the raw property bytes instead.
            byte[] viewTransformBytes = frameReference.Properties[viewTransformGuid] as byte[];

            if (viewTransformBytes == null)
            {
                outMatrix = GetIdentityMatrixFloatArray();
                return false;
            }

            Matrix4x4 cameraViewTransform = ConvertByteArrayToMatrix4x4(viewTransformBytes);

            SpatialCoordinateSystem cameraCoordinateSystem = frameReference.Properties[cameraCoordinateSystemGuid] as SpatialCoordinateSystem;

            if (cameraCoordinateSystem == null)
            {
                outMatrix = GetIdentityMatrixFloatArray();
                return false;
            }

            Matrix4x4? cameraCoordsToUnityCoordsMatrix = cameraCoordinateSystem.TryGetTransformTo(worldOrigin);

            if (cameraCoordsToUnityCoordsMatrix == null)
            {
                outMatrix = GetIdentityMatrixFloatArray();
                return false;
            }

            // Transpose the matrices to obtain a proper transform matrix
            cameraViewTransform = Matrix4x4.Transpose(cameraViewTransform);
            Matrix4x4 cameraCoordsToUnityCoords = Matrix4x4.Transpose(cameraCoordsToUnityCoordsMatrix.Value);

            Matrix4x4 viewToWorldInCameraCoordsMatrix;

            Matrix4x4.Invert(cameraViewTransform, out viewToWorldInCameraCoordsMatrix);
            Matrix4x4 viewToWorldInUnityCoordsMatrix = Matrix4x4.Multiply(cameraCoordsToUnityCoords, viewToWorldInCameraCoordsMatrix);

            // Change from right handed coordinate system to left handed UnityEngine
            viewToWorldInUnityCoordsMatrix.M31 *= -1f;
            viewToWorldInUnityCoordsMatrix.M32 *= -1f;
            viewToWorldInUnityCoordsMatrix.M33 *= -1f;
            viewToWorldInUnityCoordsMatrix.M34 *= -1f;

            outMatrix = ConvertMatrixToFloatArray(viewToWorldInUnityCoordsMatrix);

            return true;
        }
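
As the doc comment notes, the float[] result still has to be converted to a UnityEngine.Matrix4x4 before use. Assuming the row-major layout sketched under Example #1, one plausible conversion:

    static UnityEngine.Matrix4x4 FloatArrayToUnityMatrix(float[] m)
    {
        var u = new UnityEngine.Matrix4x4();
        for (int row = 0; row < 4; row++)
            for (int col = 0; col < 4; col++)
                u[row, col] = m[row * 4 + col]; // Unity's indexer is [row, column]
        return u;
    }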
        /// <summary>
        /// This returns the transform matrix at the time the photo was captured, if location data is available.
        /// If it's not, that is probably an indication that the HoloLens is not tracking and its location is not known.
        /// It could also mean the VideoCapture stream is not running.
        /// If location data is unavailable then the camera to world matrix will be set to the identity matrix.
        /// </summary>
        /// <param name="matrix">The transform matrix used to convert between coordinate spaces.
        /// The matrix will have to be converted to a Unity matrix before it can be used by methods in the UnityEngine namespace.
        /// See https://forum.unity3d.com/threads/locatable-camera-in-unity.398803/ for details.</param>
        public bool TryGetCameraToWorldMatrix(out float[] outMatrix)
        {
            if (frameReference.Properties.ContainsKey(viewTransformGuid) == false)
            {
                outMatrix = GetIdentityMatrixFloatArray();
                return false;
            }

            if (worldOrigin == null)
            {
                outMatrix = GetIdentityMatrixFloatArray();
                return false;
            }

            // Matrix4x4 is a struct, so a null check on the converted value can never fire;
            // check the raw property bytes instead.
            byte[] viewTransformBytes = frameReference.Properties[viewTransformGuid] as byte[];

            if (viewTransformBytes == null)
            {
                outMatrix = GetIdentityMatrixFloatArray();
                return false;
            }

            Matrix4x4 cameraViewTransform = ConvertByteArrayToMatrix4x4(viewTransformBytes);

            SpatialCoordinateSystem cameraCoordinateSystem = frameReference.Properties[cameraCoordinateSystemGuid] as SpatialCoordinateSystem;

            if (cameraCoordinateSystem == null)
            {
                outMatrix = GetIdentityMatrixFloatArray();
                return false;
            }

            Matrix4x4? cameraCoordsToUnityCoordsMatrix = cameraCoordinateSystem.TryGetTransformTo(worldOrigin);

            if (cameraCoordsToUnityCoordsMatrix == null)
            {
                outMatrix = GetIdentityMatrixFloatArray();
                return false;
            }

            Matrix4x4 worldToViewInCameraCoordsMatrix;

            Matrix4x4.Invert(cameraViewTransform, out worldToViewInCameraCoordsMatrix);
            Matrix4x4 worldToViewInUnityCoordsMatrix  = Matrix4x4.Multiply(cameraCoordsToUnityCoordsMatrix.Value, worldToViewInCameraCoordsMatrix);
            Matrix4x4 viewToWorldInCameraCoordsMatrix = Matrix4x4.Transpose(worldToViewInUnityCoordsMatrix);

            viewToWorldInCameraCoordsMatrix.M31 *= -1f;
            viewToWorldInCameraCoordsMatrix.M32 *= -1f;
            viewToWorldInCameraCoordsMatrix.M33 *= -1f;
            viewToWorldInCameraCoordsMatrix.M34 *= -1f;

            outMatrix = ConvertMatrixToFloatArray(viewToWorldInCameraCoordsMatrix);
            return true;
        }
Example #7
        /// <summary>
        /// Attempt to retrieve the current transform matrix.
        /// </summary>
        /// <returns>Non-null matrix on success.</returns>
        private System.Numerics.Matrix4x4? GetNewMatrix()
        {
            Debug.Assert(rootCoordinateSystem != null);

            // Get the relative transform from the unity origin
            System.Numerics.Matrix4x4? newMatrix = coordinateSystem.TryGetTransformTo(rootCoordinateSystem);

            SimpleConsole.AddLine(trace, $"Got new matrix {(newMatrix == null ? "null" : newMatrix.ToString())}");
            if (newMatrix == null)
            {
                SimpleConsole.AddLine(log, "Coord: Got null newMatrix");
            }
            return newMatrix;
        }
Example #8
 public Matrix4x4 GetWorldToCameraMatrix(SpatialCoordinateSystem originCoordinateSystem)
 {
     lock (TransformLock)
     {
         Forward = new Vector4(-Vector3.UnitZ, 0.0f);
         if (CoordinateSystem == null)
         {
             return Matrix4x4.Identity;
         }
         var transform = originCoordinateSystem.TryGetTransformTo(CoordinateSystem) ?? Matrix4x4.Identity;
         Matrix4x4.Invert(transform * ViewMatrix, out var inverseMatrix);
         Forward = Vector4.Transform(Forward, inverseMatrix);
         return transform * ViewMatrix * ProjectionMatrix;
     }
 }
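
Despite its name, the method returns a combined world-to-clip matrix (transform * ViewMatrix * ProjectionMatrix). A hedged usage sketch, with camera and worldPoint as illustrative names; System.Numerics multiplies row vectors on the left via Vector4.Transform:

    Matrix4x4 worldToClip = camera.GetWorldToCameraMatrix(originCoordinateSystem);
    Vector4 clip = Vector4.Transform(new Vector4(worldPoint, 1.0f), worldToClip);
    Vector3 ndc = new Vector3(clip.X, clip.Y, clip.Z) / clip.W; // perspective divide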
Example #9
    // Get transform matrices from the MediaFrameReference
    public static Tuple<Matrix4x4, Matrix4x4, Matrix4x4> GetTransforms(MediaFrameReference colorFrameRef, SpatialCoordinateSystem unityWorldCoordinateSystem)
    {
        SpatialCoordinateSystem spatialCoordinateSystem = null;
        Matrix4x4 projectionTransform = Matrix4x4.Identity; // intrinsics; does not change
        Matrix4x4 viewTransform       = Matrix4x4.Identity; // extrinsics; changes per frame

        // TODO: Unity has CameraToWorldMatrix provided by PhotoCaptureFrame class... Cam space -> world space
        // also has worldToCameraMatrix, can it replace cameraCoordinateSystem transforms?
        // UnityEngine.Matrix4x4 camToWorld = UnityEngine.Camera.main.cameraToWorldMatrix;

        object value;

        if (colorFrameRef.Properties.TryGetValue(MFSampleExtension_Spatial_CameraCoordinateSystem, out value))
        {
            spatialCoordinateSystem = value as SpatialCoordinateSystem;
        }
        if (colorFrameRef.Properties.TryGetValue(MFSampleExtension_Spatial_CameraProjectionTransform, out value))
        {
            projectionTransform = ByteArrayToMatrix(value as byte[]);
        }
        if (colorFrameRef.Properties.TryGetValue(MFSampleExtension_Spatial_CameraViewTransform, out value))
        {
            viewTransform = ByteArrayToMatrix(value as byte[]);
        }

        // The coordinate-system property may be absent, in which case spatialCoordinateSystem is still null.
        if (spatialCoordinateSystem == null)
        {
            return null;
        }

        // Transform: Camera Coord System -> Unity world coord
        // See https://github.com/Microsoft/MixedRealityToolkit-Unity/blob/96cc9ab8998280edcd6871f41e89584030ee4f26/Assets/HoloToolkit-Preview/QRTracker/Scripts/SpatialGraphCoordinateSystem.cs#L94
        var cameraRGBToWorldTransform = spatialCoordinateSystem.TryGetTransformTo(unityWorldCoordinateSystem);

        if (cameraRGBToWorldTransform == null)
        {
            return null;
        }
        Matrix4x4 frameToOrigin = cameraRGBToWorldTransform.Value;

        return Tuple.Create(frameToOrigin, projectionTransform, viewTransform);
    }
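
A short usage sketch for GetTransforms, unpacking the returned tuple (the caller names are illustrative):

    var transforms = GetTransforms(colorFrameRef, unityWorldCoordinateSystem);
    if (transforms != null)
    {
        Matrix4x4 frameToOrigin       = transforms.Item1; // camera coords -> Unity world
        Matrix4x4 projectionTransform = transforms.Item2; // intrinsics
        Matrix4x4 viewTransform       = transforms.Item3; // extrinsics
    }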
        private CameraExtrinsics GetExtrinsics(SpatialCoordinateSystem frameCoordinateSystem)
        {
            if (frameCoordinateSystem == null)
            {
                return null;
            }

            CameraExtrinsics extrinsics = null;

            if (rootCoordinateSystem == null)
            {
                return null;
            }

            System.Numerics.Matrix4x4? worldMatrix = frameCoordinateSystem.TryGetTransformTo(rootCoordinateSystem);

            if (worldMatrix.HasValue)
            {
                WindowsVector3    position;
                WindowsVector3    scale;
                WindowsQuaternion rotation;
                WindowsMatrix4x4.Decompose(worldMatrix.Value, out scale, out rotation, out position);

                WindowsVector3 forward = WindowsVector3.Transform(-WindowsVector3.UnitZ, rotation);
                WindowsVector3 up      = WindowsVector3.Transform(WindowsVector3.UnitY, rotation);

                Matrix4x4 unityWorldMatrix = Matrix4x4.TRS(WindowsVectorToUnityVector(position), Quaternion.LookRotation(WindowsVectorToUnityVector(forward), WindowsVectorToUnityVector(up)), Vector3.one);

                extrinsics = new CameraExtrinsics()
                {
                    ViewFromWorld = unityWorldMatrix
                };
            }

            return extrinsics;
        }
        void ProcessFrame(SpatialCoordinateSystem worldCoordinateSystem)
        {
            if (!IsInValidateStateToProcessFrame())
            {
                return;
            }

            // obtain the details of the last frame captured 
            FrameGrabber.Frame frame = frameGrabber.LastFrame;

            if (frame.mediaFrameReference == null)
            {
                return;
            }

            MediaFrameReference mediaFrameReference = frame.mediaFrameReference;

            SpatialCoordinateSystem cameraCoordinateSystem = mediaFrameReference.CoordinateSystem;
            CameraIntrinsics cameraIntrinsics = mediaFrameReference.VideoMediaFrame.CameraIntrinsics;

            Matrix4x4? cameraToWorld = cameraCoordinateSystem.TryGetTransformTo(worldCoordinateSystem);

            if (!cameraToWorld.HasValue)
            {
                return;
            }

            // padding 
            float averageFaceWidthInMeters = 0.15f;

            float pixelsPerMeterAlongX = cameraIntrinsics.FocalLength.X;
            float averagePixelsForFaceAt1Meter = pixelsPerMeterAlongX * averageFaceWidthInMeters;

            // Place the label 25cm above the center of the face.
            Vector3 labelOffsetInWorldSpace = new Vector3(0.0f, 0.25f, 0.0f);            

            frameAnalyzer.AnalyzeFrame(frame.mediaFrameReference, (status, detectedPersons) =>
            {
                if (status > 0 && detectedPersons.Count > 0)
                {
                    FrameAnalyzer.Bounds? bestRect = null;
                    Vector3 bestRectPositionInCameraSpace = Vector3.Zero;
                    float bestDotProduct = -1.0f;
                    FrameAnalyzer.DetectedPerson bestPerson = null; 

                    foreach (var dp in detectedPersons)
                    {
                        Debug.WriteLine($"Detected person: {dp.ToString()}");

                        Point faceRectCenterPoint = new Point(
                            dp.bounds.left + dp.bounds.width / 2,
                            dp.bounds.top + dp.bounds.height / 2
                            );

                        // Calculate the vector towards the face at 1 meter.
                        Vector2 centerOfFace = cameraIntrinsics.UnprojectAtUnitDepth(faceRectCenterPoint);

                        // Add the Z component and normalize.
                        Vector3 vectorTowardsFace = Vector3.Normalize(new Vector3(centerOfFace.X, centerOfFace.Y, -1.0f));

                        // Get the dot product between the vector towards the face and the gaze vector.
                        // The closer the dot product is to 1.0, the closer the face is to the middle of the video image.
                        float dotFaceWithGaze = Vector3.Dot(vectorTowardsFace, -Vector3.UnitZ);                        

                        // Pick the faceRect that best matches the user's gaze.
                        if (dotFaceWithGaze > bestDotProduct)
                        {
                            // Estimate depth using the ratio of the current faceRect width with the average faceRect width at 1 meter.
                            float estimatedFaceDepth = averagePixelsForFaceAt1Meter / (float)dp.bounds.width;

                            // Scale the vector towards the face by the depth, and add an offset for the label.
                            Vector3 targetPositionInCameraSpace = vectorTowardsFace * estimatedFaceDepth;

                            bestDotProduct = dotFaceWithGaze;
                            bestRect = dp.bounds;
                            bestRectPositionInCameraSpace = targetPositionInCameraSpace;
                            bestPerson = dp; 
                        }                         
                    }

                    if (bestRect.HasValue)
                    {
                        // Transform the cube from Camera space to World space.
                        Vector3 bestRectPositionInWorldspace = Vector3.Transform(bestRectPositionInCameraSpace, cameraToWorld.Value);
                        Vector3 labelPosition = bestRectPositionInWorldspace + labelOffsetInWorldSpace;                          

                        quadRenderer.TargetPosition = labelPosition;
                        textRenderer.RenderTextOffscreen($"{bestPerson.name}, {bestPerson.gender}, Age: {bestPerson.age}");

                        lastFaceDetectedTimestamp = Utils.GetCurrentUnixTimestampMillis();
                    }               
                }
            }); 
        }
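
The depth estimate above (and in Example #13 below) is a pinhole-camera ratio: a face of real width W meters seen by a camera with focal length f pixels spans roughly f * W pixels at 1 meter, so depth ≈ f * W / widthInPixels. A quick worked example with assumed numbers:

    float focalLengthX = 1000f;             // cameraIntrinsics.FocalLength.X, assumed value
    float averageFaceWidthInMeters = 0.15f; // constant from the example
    float faceWidthInPixels = 300f;         // detected rect width, assumed value
    float estimatedDepth = focalLengthX * averageFaceWidthInMeters / faceWidthInPixels;
    // = 1000 * 0.15 / 300 = 0.5 meters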
Example #12
    public async Task EvaluateVideoFrameAsync(VideoFrame frame, VideoMediaFrame VideoFrame, SpatialCoordinateSystem worldCoordinateSystem, SpatialCoordinateSystem cameraCoordinateSystem) // <-- 2
    {
        if (frame != null)
        {
            try
            {
                TimeRecorder.Restart();

                // A matrix to transform camera coordinate system to world coordinate system;
                // the cast throws when the transform is unavailable, which the catch below handles
                Matrix4x4 cameraToWorld = (Matrix4x4)cameraCoordinateSystem.TryGetTransformTo(worldCoordinateSystem);

                // Internal orientation of camera
                CameraIntrinsics cameraIntrinsics = VideoFrame.CameraIntrinsics;

                // The frame of depth camera
                DepthMediaFrame depthFrame = VideoFrame.DepthMediaFrame;

                // not working, cause error
                // DepthCorrelatedCoordinateMapper depthFrameMapper = depthFrame.TryCreateCoordinateMapper(cameraIntrinsics, cameraCoordinateSystem);

                ONNXModelInput inputData = new ONNXModelInput();
                inputData.Data = frame;
                var output = await Model.EvaluateAsync(inputData).ConfigureAwait(false); // <-- 3

                TimeRecorder.Stop();

                string timeStamp = $"({DateTime.Now})";
                // $" Evaluation took {TimeRecorder.ElapsedMilliseconds}ms\n";

                int count = 0;

                foreach (var prediction in output)
                {
                    var product = prediction.TagName;     // <-- 4
                    var loss    = prediction.Probability; // <-- 5

                    if (loss > 0.5f)
                    {
                        float left   = prediction.BoundingBox.Left;
                        float top    = prediction.BoundingBox.Top;
                        float right  = prediction.BoundingBox.Left + prediction.BoundingBox.Width;
                        float bottom = prediction.BoundingBox.Top + prediction.BoundingBox.Height;
                        float x      = prediction.BoundingBox.Left + prediction.BoundingBox.Width / 2;
                        float y      = prediction.BoundingBox.Top + prediction.BoundingBox.Height / 2;

                        Direct3DSurfaceDescription pixelData = frame.Direct3DSurface.Description;
                        int height = pixelData.Height;
                        int width  = pixelData.Width;

                        Vector3 ImageToWorld(float X, float Y)
                        {
                            // remove image distortion
                            // Point objectCenterPoint = cameraIntrinsics.UndistortPoint(new Point(x, y));
                            // screen space -> camera space
                            // unproject pixel coordinate of object center towards a plane that is one meter from the camera
                            Vector2 objectCenter = cameraIntrinsics.UnprojectAtUnitDepth(new Point(X * width, Y * height));

                            // construct a ray towards object
                            Vector3 vectorTowardsObject = Vector3.Normalize(new Vector3(objectCenter.X, objectCenter.Y, -1.0f));

                            // estimate the vending machine distance by its width
                            // less accurate than use depth frame
                            // magic number 940 pixels in width for an average vending machine at 2m
                            // float estimatedVendingMachineDepth = (0.94f / prediction.BoundingBox.Width) * 2;
                            float estimatedVendingMachineDepth = (0.3f / prediction.BoundingBox.Width) * 1;

                            // times the vector towards object by the distance to get object's vector in camera coordinate system
                            Vector3 vectorToObject = vectorTowardsObject * estimatedVendingMachineDepth;

                            // camera space -> world space
                            // transform the object position from camera coordinate system to world coordinate system
                            Vector3 targetPositionInWorldSpace = Vector3.Transform(vectorToObject, cameraToWorld);

                            return targetPositionInWorldSpace;
                        }


                        Vector3 objectCenterInWorld = ImageToWorld(x, y);
                        Vector3 objectTopLeft       = ImageToWorld(left, top);
                        Vector3 objectTopRight      = ImageToWorld(right, top);
                        Vector3 objectBotLeft       = ImageToWorld(left, bottom);
                        float   widthInWorld        = Vector3.Distance(objectTopLeft, objectTopRight);
                        float   heightInWorld       = widthInWorld / (width * prediction.BoundingBox.Width) * (height * prediction.BoundingBox.Height);
                        var     lossStr             = (loss * 100.0f).ToString("#0.00") + "%";
                        // lossStr = $"{prediction.BoundingBox.Width*width}X{prediction.BoundingBox.Height*height}";
                        UnityApp.StoreNetworkResult(timeStamp, product, lossStr, objectCenterInWorld.X, objectCenterInWorld.Y, objectCenterInWorld.Z, widthInWorld, heightInWorld);
                    }
                }
            }
            catch (Exception ex)
            {
                var err_message = $"{ex.Message}";
                ModifyText(err_message);
            }
        }
    }
Example #13
    private void ProcessFaces(List<BitmapBounds> faces, MediaFrameReference frame, SpatialCoordinateSystem worldCoordSystem)
    {
        VideoMediaFrameFormat   videoFormat            = frame.VideoMediaFrame.VideoFormat;
        SpatialCoordinateSystem cameraCoordinateSystem = frame.CoordinateSystem;
        CameraIntrinsics        cameraIntrinsics       = frame.VideoMediaFrame.CameraIntrinsics;

        System.Numerics.Matrix4x4? cameraToWorld = cameraCoordinateSystem.TryGetTransformTo(worldCoordSystem);

        // If we can't locate the world, this transform will be null.
        if (!cameraToWorld.HasValue)
        {
            return;
        }

        float textureWidthInv  = 1.0f / videoFormat.Width;
        float textureHeightInv = 1.0f / videoFormat.Height;

        // The face analysis returns very "tight fitting" rectangles.
        // We add some padding to make the visuals more appealing.
        int   paddingForFaceRect       = 24;
        float averageFaceWidthInMeters = 0.15f;

        float pixelsPerMeterAlongX         = cameraIntrinsics.FocalLength.X;
        float averagePixelsForFaceAt1Meter = pixelsPerMeterAlongX * averageFaceWidthInMeters;

        // Place the cube 25cm above the center of the face.
        System.Numerics.Vector3 cubeOffsetInWorldSpace = new System.Numerics.Vector3(0.0f, 0.25f, 0.0f);
        BitmapBounds            bestRect = new BitmapBounds();

        System.Numerics.Vector3 bestRectPositionInCameraSpace = System.Numerics.Vector3.Zero;
        float bestDotProduct = -1.0f;

        foreach (BitmapBounds faceRect in faces)
        {
            Point faceRectCenterPoint = new Point(faceRect.X + faceRect.Width / 2u, faceRect.Y + faceRect.Height / 2u);

            // Calculate the vector towards the face at 1 meter.
            System.Numerics.Vector2 centerOfFace = cameraIntrinsics.UnprojectAtUnitDepth(faceRectCenterPoint);

            // Add the Z component and normalize.
            System.Numerics.Vector3 vectorTowardsFace = System.Numerics.Vector3.Normalize(new System.Numerics.Vector3(centerOfFace.X, centerOfFace.Y, -1.0f));

            // Estimate depth using the ratio of the current faceRect width with the average faceRect width at 1 meter.
            float estimatedFaceDepth = averagePixelsForFaceAt1Meter / faceRect.Width;

            // Get the dot product between the vector towards the face and the gaze vector.
            // The closer the dot product is to 1.0, the closer the face is to the middle of the video image.
            float dotFaceWithGaze = System.Numerics.Vector3.Dot(vectorTowardsFace, -System.Numerics.Vector3.UnitZ);

            // Scale the vector towards the face by the depth, and add an offset for the cube.
            System.Numerics.Vector3 targetPositionInCameraSpace = vectorTowardsFace * estimatedFaceDepth;

            // Pick the faceRect that best matches the user's gaze.
            if (dotFaceWithGaze > bestDotProduct)
            {
                bestDotProduct = dotFaceWithGaze;
                bestRect       = faceRect;
                bestRectPositionInCameraSpace = targetPositionInCameraSpace;
            }
        }

        // Transform the cube from Camera space to World space.
        System.Numerics.Vector3 bestRectPositionInWorldspace = System.Numerics.Vector3.Transform(bestRectPositionInCameraSpace, cameraToWorld.Value);

        cubeRenderer.SetTargetPosition(bestRectPositionInWorldspace + cubeOffsetInWorldSpace);

        // Texture Coordinates are [0,1], but our FaceRect is [0,Width] and [0,Height], so we need to normalize these coordinates
        // We also add padding for the faceRects to make it more visually appealing.
        float normalizedWidth  = (bestRect.Width + paddingForFaceRect * 2u) * textureWidthInv;
        float normalizedHeight = (bestRect.Height + paddingForFaceRect * 2u) * textureHeightInv;
        float normalizedX      = (bestRect.X - paddingForFaceRect) * textureWidthInv;
        float normalizedY      = (bestRect.Y - paddingForFaceRect) * textureHeightInv;
    }
        private void UpdateLocation()
        {
            if (CurrentState == UnityEngine.XR.WSA.PositionalLocatorState.Active)
            {
#if WINDOWS_UWP
                if (CoordinateSystem == null)
                {
                    CoordinateSystem = Windows.Perception.Spatial.Preview.SpatialGraphInteropPreview.CreateCoordinateSystemForNode(id);
                }

                if (CoordinateSystem != null)
                {
                    Quaternion rotation    = Quaternion.identity;
                    Vector3    translation = new Vector3(0.0f, 0.0f, 0.0f);

                    SpatialCoordinateSystem rootSpatialCoordinateSystem = (SpatialCoordinateSystem)System.Runtime.InteropServices.Marshal.GetObjectForIUnknown(UnityEngine.XR.WSA.WorldManager.GetNativeISpatialCoordinateSystemPtr());

                    // Get the relative transform from the unity origin
                    System.Numerics.Matrix4x4? relativePose = CoordinateSystem.TryGetTransformTo(rootSpatialCoordinateSystem);

                    if (relativePose != null)
                    {
                        System.Numerics.Vector3    scale;
                        System.Numerics.Quaternion rotation1;
                        System.Numerics.Vector3    translation1;

                        System.Numerics.Matrix4x4 newMatrix = relativePose.Value;

                        // Platform coordinates are all right handed and unity uses left handed matrices. so we convert the matrix
                        // from rhs-rhs to lhs-lhs
                        // Convert from right to left coordinate system
                        newMatrix.M13 = -newMatrix.M13;
                        newMatrix.M23 = -newMatrix.M23;
                        newMatrix.M43 = -newMatrix.M43;

                        newMatrix.M31 = -newMatrix.M31;
                        newMatrix.M32 = -newMatrix.M32;
                        newMatrix.M34 = -newMatrix.M34;

                        System.Numerics.Matrix4x4.Decompose(newMatrix, out scale, out rotation1, out translation1);
                        translation = new Vector3(translation1.X, translation1.Y, translation1.Z);
                        rotation    = new Quaternion(rotation1.X, rotation1.Y, rotation1.Z, rotation1.W);
                        Pose pose = new Pose(translation, rotation);

                        // If there is a parent to the camera that means we are using teleport and we should not apply the teleport
                        // to these objects so apply the inverse
                        if (CameraCache.Main.transform.parent != null)
                        {
                            pose = pose.GetTransformedBy(CameraCache.Main.transform.parent);
                        }

                        gameObject.transform.SetPositionAndRotation(pose.position, pose.rotation);
                        //Debug.Log("Id= " + id + " QRPose = " +  pose.position.ToString("F7") + " QRRot = "  +  pose.rotation.ToString("F7"));
                    }
                }
                else
                {
                    gameObject.SetActive(false);
                }
#endif // WINDOWS_UWP
            }
        }
Example #15
File: Operators.cs Project: sandrist/psi
        /// <summary>
        /// Converts a <see cref="SpatialCoordinateSystem"/> in HoloLens basis to a <see cref="CoordinateSystem"/> in \psi basis.
        /// </summary>
        /// <param name="spatialCoordinateSystem">The <see cref="SpatialCoordinateSystem"/>.</param>
        /// <returns>The <see cref="CoordinateSystem"/>.</returns>
        public static CoordinateSystem TryConvertSpatialCoordinateSystemToPsiCoordinateSystem(this SpatialCoordinateSystem spatialCoordinateSystem)
        {
            var worldPose = spatialCoordinateSystem.TryGetTransformTo(MixedReality.WorldSpatialCoordinateSystem);

            return worldPose.HasValue ? worldPose.Value.RebaseToMathNetCoordinateSystem() : null;
        }
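
Usage is a one-liner; the result is null when the HoloLens cannot locate the coordinate system relative to the world origin (a hedged sketch):

    CoordinateSystem psiPose = spatialCoordinateSystem.TryConvertSpatialCoordinateSystemToPsiCoordinateSystem();
    if (psiPose == null)
    {
        // The coordinate system could not be located; try again on a later frame.
    }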
Example #16
        public void OnHandUpdate(CameraParameter cameraParam, SpatialCoordinateSystem CoordinateSystem, IList<Hand> hands)
        {
            lock (this)
            {
                if (m_spatialCoordinateSystem != null)
                {
                    //Start a new frame
                    foreach (HandDetected hand in m_handsDetected)
                    {
                        hand.NewDetection = true;
                    }

                    //For each detected hand
                    foreach (Hand hand in hands)
                    {
                        //Get the needed transformation matrices to convert hand in image space to camera and world space
                        //(no .Value here: taking it immediately would throw on null and defeat the check below)
                        System.Numerics.Matrix4x4? cameraToWorld = CoordinateSystem.TryGetTransformTo(m_spatialCoordinateSystem);
                        System.Numerics.Matrix4x4 viewToCamera;
                        System.Numerics.Matrix4x4.Invert(cameraParam.CameraViewTransform, out viewToCamera);
                        if (cameraToWorld == null)
                        {
                            cameraToWorld = System.Numerics.Matrix4x4.Identity;
                        }

                        //Hand in camera space
                        System.Numerics.Vector4 handVecCamera = System.Numerics.Vector4.Transform(new System.Numerics.Vector4(hand.PalmX, hand.PalmY, hand.PalmZ, 1.0f), viewToCamera);
                        Vector3 unityHandCamera = new Vector3(handVecCamera.X, handVecCamera.Y, handVecCamera.Z) / handVecCamera.W;

                        //Wrist in camera space
                        System.Numerics.Vector4 wristVecCamera = System.Numerics.Vector4.Transform(new System.Numerics.Vector4(hand.WristX, hand.WristY, hand.WristZ, 1.0f), viewToCamera);
                        Vector3 unityWristCamera = new Vector3(wristVecCamera.X, wristVecCamera.Y, wristVecCamera.Z) / wristVecCamera.W;

                        //Add offsets in the ROI
                        float[] roi = new float[4];
                        roi[0] = hand.WristROIMinX - 10;
                        roi[1] = hand.WristROIMinY - 10;
                        roi[2] = hand.WristROIMaxX + 10;
                        roi[3] = hand.WristROIMaxY + 10;

                        //check if we already know it
                        bool         created      = false;
                        HandDetected handDetected = null;
                        foreach (HandDetected hd in m_handsDetected)
                        {
                            if (!hd.IsDetected && hd.HandCollision(roi) && (hd.CameraSpacePosition - unityHandCamera).magnitude <= 0.10) //Test the ROI and the magnitude in the position (no more than 10 cm)
                            {
                                handDetected = hd;
                                break;
                            }
                        }

                        //If not, this is a new hand!
                        if (handDetected == null)
                        {
                            handDetected = new HandDetected();
                            handDetected.NewDetection = true;
                            m_handsDetected.Add(handDetected);
                            created = true;
                        }

                        float smoothness = m_smoothness;
                        if (created == true)
                        {
                            smoothness = 0.0f;
                        }

                        //Smooth the hand
                        Vector3 smoothPosCamera         = unityHandCamera * (1.0f - smoothness) + handDetected.CameraSpacePosition * smoothness; //Smooth the position
                        System.Numerics.Vector4 handVec = System.Numerics.Vector4.Transform(new System.Numerics.Vector4(smoothPosCamera.x, smoothPosCamera.y, smoothPosCamera.z, 1.0f), cameraToWorld.Value);
                        Vector3 unityHandVec            = new Vector3(handVec.X, handVec.Y, -handVec.Z) / handVec.W;

                        //Smooth the wrist
                        Vector3 smoothWristCamera        = unityWristCamera * (1.0f - smoothness) + handDetected.CameraSpaceWristPosition * smoothness; //Smooth the position
                        System.Numerics.Vector4 wristVec = System.Numerics.Vector4.Transform(new System.Numerics.Vector4(smoothWristCamera.x, smoothWristCamera.y, smoothWristCamera.z, 1.0f), cameraToWorld.Value);
                        Vector3 unityWristVec            = new Vector3(wristVec.X, wristVec.Y, -wristVec.Z) / wristVec.W;

                        handDetected.PushPosition(unityHandVec, unityWristVec, smoothPosCamera, smoothWristCamera, roi);

                        //Remember the previous uppest finger before clearing it
                        FingerDetected formerFinger = handDetected.UppestFinger;

                        //Clear fingers information
                        handDetected.Fingers.Clear();
                        handDetected.UppestFinger = null;

                        if (hand.Fingers.Count > 0)
                        {
                            //Convert each detected finger
                            foreach (Finger f in hand.Fingers)
                            {
                                //Register the finger position
                                System.Numerics.Vector4 fingerVec = System.Numerics.Vector4.Transform(new System.Numerics.Vector4(f.TipX, f.TipY, f.TipZ, 1.0f), viewToCamera);
                                fingerVec = System.Numerics.Vector4.Transform(fingerVec, cameraToWorld.Value);
                                Vector3 unityFingerVec = new Vector3(fingerVec.X, fingerVec.Y, -fingerVec.Z) / fingerVec.W;
                                handDetected.Fingers.Add(new FingerDetected(unityFingerVec));
                            }

                            //Detect the uppest finger
                            float minFY = hand.Fingers[0].TipY;
                            handDetected.UppestFinger = handDetected.Fingers[0];

                            for (int j = 1; j < handDetected.Fingers.Count; j++)
                            {
                                if (minFY > hand.Fingers[j].TipY)
                                {
                                    minFY = hand.Fingers[j].TipY;
                                    handDetected.UppestFinger = handDetected.Fingers[j];
                                }
                            }
                        }
                    }
                }

                int i = 0;
                while (i < m_handsDetected.Count)
                {
                    HandDetected hd = m_handsDetected[i];
                    //Handle non detected hands
                    if (!hd.IsDetected)
                    {
                        hd.PushUndetection();

                        //Delete the non valid hands
                        if (!hd.IsValid)
                        {
                            m_handsDetected.RemoveAt(i);
                            continue;
                        }
                    }
                    i++;
                }
            }
        }
Example #17
        /// <summary>
        /// Tries to obtain the QRCode location in Unity Space.
        /// The position component of the location matrix will be at the top left of the QRCode
        /// The orientation of the location matrix will reflect the following axes:
        /// x axis: horizontal with the QRCode.
        /// y axis: positive direction down the QRCode.
        /// z axis: positive direction outward from the QRCode.
        /// Note: This function should be called from the main thread.
        /// </summary>
        /// <param name="coordinateSystem">QRCode SpatialCoordinateSystem</param>
        /// <param name="location">Output location for the QRCode in Unity Space</param>
        /// <returns>returns true if the QRCode was located</returns>
        public bool TryGetLocationForQRCode(SpatialCoordinateSystem coordinateSystem, out Matrix4x4 location)
        {
            location = Matrix4x4.identity;
            if (coordinateSystem != null)
            {
                try
                {
                    var appSpatialCoordinateSystem = (SpatialCoordinateSystem)System.Runtime.InteropServices.Marshal.GetObjectForIUnknown(UnityEngine.XR.WSA.WorldManager.GetNativeISpatialCoordinateSystemPtr());
                    if (appSpatialCoordinateSystem != null)
                    {
                        // Get the relative transform from the unity origin
                        System.Numerics.Matrix4x4? relativePose = coordinateSystem.TryGetTransformTo(appSpatialCoordinateSystem);
                        if (relativePose != null)
                        {
                            System.Numerics.Matrix4x4 newMatrix = relativePose.Value;

                            // Platform coordinates are all right handed and unity uses left handed matrices. so we convert the matrix
                            // from rhs-rhs to lhs-lhs
                            // Convert from right to left coordinate system
                            newMatrix.M13 = -newMatrix.M13;
                            newMatrix.M23 = -newMatrix.M23;
                            newMatrix.M43 = -newMatrix.M43;

                            newMatrix.M31 = -newMatrix.M31;
                            newMatrix.M32 = -newMatrix.M32;
                            newMatrix.M34 = -newMatrix.M34;

                            System.Numerics.Vector3    winrtScale;
                            System.Numerics.Quaternion winrtRotation;
                            System.Numerics.Vector3    winrtTranslation;
                            System.Numerics.Matrix4x4.Decompose(newMatrix, out winrtScale, out winrtRotation, out winrtTranslation);

                            var translation = new Vector3(winrtTranslation.X, winrtTranslation.Y, winrtTranslation.Z);
                            var rotation    = new Quaternion(winrtRotation.X, winrtRotation.Y, winrtRotation.Z, winrtRotation.W);
                            location = Matrix4x4.TRS(translation, rotation, Vector3.one);

                            return true;
                        }
                        else
                        {
                            Debug.LogWarning("QRCode location unknown or not yet available.");
                            return false;
                        }
                    }
                    else
                    {
                        Debug.LogWarning("Failed to obtain coordinate system for application");
                        return false;
                    }
                }
                catch (Exception e)
                {
                    Debug.LogWarning($"Note: TryGetLocationForQRCode needs to be called from main thread: {e}");
                    return false;
                }
            }
            else
            {
                Debug.LogWarning("Failed to obtain coordinate system for QRCode");
                return false;
            }
        }
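
A hedged usage sketch for TryGetLocationForQRCode; qrLocator and transform are illustrative names:

    if (qrLocator.TryGetLocationForQRCode(coordinateSystem, out Matrix4x4 location))
    {
        // The matrix was built with Matrix4x4.TRS, so position and rotation decompose cleanly.
        Vector3 position = location.GetColumn(3); // translation lives in the fourth column
        Quaternion rotation = location.rotation;  // Unity 2017.2+ accessor
        transform.SetPositionAndRotation(position, rotation);
    }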
Example #18
        private void UpdateLocation(Guid spatialGraphNodeId, float physicalSideLength)
        {
            // if (CurrentState != PositionalLocatorState.Active)
            // {
            //     PositionAcquisitionFailed?.Invoke(this, null);
            //     return;
            // }

            System.Numerics.Matrix4x4? relativePose = System.Numerics.Matrix4x4.Identity;
#if WINDOWS_UWP
            SpatialCoordinateSystem coordinateSystem = Microsoft.Windows.Perception.Spatial.Preview.SpatialGraphInteropPreview.CreateCoordinateSystemForNode(spatialGraphNodeId);

            if (coordinateSystem == null)
            {
                PositionAcquisitionFailed?.Invoke(this, null);
                return;
            }

            SpatialCoordinateSystem rootSpatialCoordinateSystem = Microsoft.Windows.Perception.Spatial.SpatialCoordinateSystem.FromNativePtr(UnityEngine.XR.WindowsMR.WindowsMREnvironment.OriginSpatialCoordinateSystem);

            // Get the relative transform from the unity origin
            relativePose = coordinateSystem.TryGetTransformTo(rootSpatialCoordinateSystem);
#endif

            if (relativePose == null)
            {
                PositionAcquisitionFailed?.Invoke(this, null);
                return;
            }

            System.Numerics.Matrix4x4 newMatrix = relativePose.Value;

            // Platform coordinates are all right handed and unity uses left handed matrices. so we convert the matrix
            // from rhs-rhs to lhs-lhs
            // Convert from right to left coordinate system
            newMatrix.M13 = -newMatrix.M13;
            newMatrix.M23 = -newMatrix.M23;
            newMatrix.M43 = -newMatrix.M43;

            newMatrix.M31 = -newMatrix.M31;
            newMatrix.M32 = -newMatrix.M32;
            newMatrix.M34 = -newMatrix.M34;

            System.Numerics.Vector3    scale;
            System.Numerics.Quaternion rotation1;
            System.Numerics.Vector3    translation1;

            System.Numerics.Matrix4x4.Decompose(newMatrix, out scale, out rotation1, out translation1);
            var translation = new Vector3(translation1.X, translation1.Y, translation1.Z);
            var rotation    = new Quaternion(rotation1.X, rotation1.Y, rotation1.Z, rotation1.W);
            var pose        = new Pose(translation, rotation);

            // If there is a parent to the camera that means we are using teleport and we should not apply the teleport
            // to these objects so apply the inverse
            if (CameraCache.Main.transform.parent != null)
            {
                pose = pose.GetTransformedBy(CameraCache.Main.transform.parent);
            }
            // Rotate 90 degrees 'forward' over 'right' so 'up' is pointing straight up from the QR code
            pose.rotation *= Quaternion.Euler(90, 0, 0);

            // Move the anchor point to the *center* of the QR code
            var deltaToCenter = physicalSideLength * 0.5f;
            pose.position += (pose.rotation * (deltaToCenter * Vector3.right) -
                              pose.rotation * (deltaToCenter * Vector3.forward));
            gameObject.transform.SetPositionAndRotation(pose.position, pose.rotation);
            PositionAcquired?.Invoke(this, pose);
        }
Example #19
        private bool GetPoseFromSpatialNode(System.Guid nodeId, out Pose pose)
        {
            bool found = false;

            pose = Pose.identity;

#if WINDOWS_UWP
            CoordinateSystem = Windows.Perception.Spatial.Preview.SpatialGraphInteropPreview.CreateCoordinateSystemForNode(nodeId);


            if (CoordinateSystem != null)
            {
                info.text += "\ngot coordinate";
                Quaternion rotation    = Quaternion.identity;
                Vector3    translation = new Vector3(0.0f, 0.0f, 0.0f);

                SpatialCoordinateSystem rootSpatialCoordinateSystem = (SpatialCoordinateSystem)System.Runtime.InteropServices.Marshal.GetObjectForIUnknown(UnityEngine.XR.WSA.WorldManager.GetNativeISpatialCoordinateSystemPtr());

                // Get the relative transform from the unity origin
                System.Numerics.Matrix4x4? relativePose = CoordinateSystem.TryGetTransformTo(rootSpatialCoordinateSystem);

                if (relativePose != null)
                {
                    info.text += "\n got relative pose";
                    System.Numerics.Vector3    scale;
                    System.Numerics.Quaternion rotation1;
                    System.Numerics.Vector3    translation1;

                    System.Numerics.Matrix4x4 newMatrix = relativePose.Value;

                    // Platform coordinates are all right handed and unity uses left handed matrices. so we convert the matrix
                    // from rhs-rhs to lhs-lhs
                    // Convert from right to left coordinate system
                    newMatrix.M13 = -newMatrix.M13;
                    newMatrix.M23 = -newMatrix.M23;
                    newMatrix.M43 = -newMatrix.M43;

                    newMatrix.M31 = -newMatrix.M31;
                    newMatrix.M32 = -newMatrix.M32;
                    newMatrix.M34 = -newMatrix.M34;

                    System.Numerics.Matrix4x4.Decompose(newMatrix, out scale, out rotation1, out translation1);
                    translation = new Vector3(translation1.X, translation1.Y, translation1.Z);
                    rotation    = new Quaternion(rotation1.X, rotation1.Y, rotation1.Z, rotation1.W);
                    pose        = new Pose(translation, rotation);
                    found       = true;


                    // can be used later using gameObject.transform.SetPositionAndRotation(pose.position, pose.rotation);
                    //Debug.Log("Id= " + id + " QRPose = " +  pose.position.ToString("F7") + " QRRot = "  +  pose.rotation.ToString("F7"));
                }
                else
                {
                    info.text += "\nrelative pos NULL";
                }
            }
            else
            {
                info.text += "\ncannot retrieve coordinate";
            }
#endif
            return found;
        }
Example #20
        public void OnHandUpdate(CameraParameter cameraParam, SpatialCoordinateSystem CoordinateSystem, IList<Hand> hands)
        {
            lock (this)
            {
                if (m_spatialCoordinateSystem != null)
                {
                    //Start a new frame
                    foreach (HandDetected hand in m_handsDetected)
                    {
                        hand.NewDetection = true;
                    }

                    //For each detected hand
                    foreach (Hand hand in hands)
                    {
                        //Add offsets in the ROI
                        float[] roi = new float[4];
                        roi[0] = hand.WristROIMinX - 10;
                        roi[1] = hand.WristROIMinY - 10;
                        roi[2] = hand.WristROIMaxX + 10;
                        roi[3] = hand.WristROIMaxY + 10;

                        //check if we already know it
                        HandDetected handDetected = null;
                        foreach (HandDetected hd in m_handsDetected)
                        {
                            if (!hd.IsDetected && hd.HandCollision(roi))
                            {
                                handDetected = hd;
                                break;
                            }
                        }

                        //If not, this is a new hand!
                        if (handDetected == null)
                        {
                            handDetected = new HandDetected(m_smoothness);
                            handDetected.NewDetection = true;
                            m_handsDetected.Add(handDetected);
                        }

                        //Compute the hand 3D position in the left-handed coordinate system
                        //(no .Value here: taking it immediately would throw on null and defeat the check below)
                        System.Numerics.Matrix4x4? cameraToWorld = CoordinateSystem.TryGetTransformTo(m_spatialCoordinateSystem);
                        System.Numerics.Matrix4x4 viewToCamera;
                        System.Numerics.Matrix4x4.Invert(cameraParam.CameraViewTransform, out viewToCamera);
                        if (cameraToWorld == null)
                        {
                            cameraToWorld = System.Numerics.Matrix4x4.Identity;
                        }

                        System.Numerics.Vector4 handVec = System.Numerics.Vector4.Transform(new System.Numerics.Vector4(hand.PalmX, hand.PalmY, hand.PalmZ, 1.0f), viewToCamera);
                        handVec = System.Numerics.Vector4.Transform(handVec, cameraToWorld.Value);
                        Vector3 unityHandVec = new Vector3(handVec.X, handVec.Y, -handVec.Z) / handVec.W;

                        System.Numerics.Vector4 wristVec = System.Numerics.Vector4.Transform(new System.Numerics.Vector4(hand.WristX, hand.WristY, hand.WristZ, 1.0f), viewToCamera);
                        wristVec = System.Numerics.Vector4.Transform(wristVec, cameraToWorld.Value);
                        Vector3 unityWristVec = new Vector3(wristVec.X, wristVec.Y, -wristVec.Z) / wristVec.W;

                        handDetected.PushPosition(unityHandVec, unityWristVec, roi);

                        //Remember the previous uppest finger before clearing it, so the smoothing below can use it
                        FingerDetected formerFinger = handDetected.UppestFinger;

                        //Clear fingers information
                        handDetected.Fingers.Clear();
                        handDetected.UppestFinger = null;

                        if (hand.Fingers.Count > 0)
                        {
                            //Convert each detected finger
                            foreach (Finger f in hand.Fingers)
                            {
                                //Register the finger position
                                System.Numerics.Vector4 fingerVec = System.Numerics.Vector4.Transform(new System.Numerics.Vector4(f.TipX, f.TipY, f.TipZ, 1.0f), viewToCamera);
                                fingerVec = System.Numerics.Vector4.Transform(fingerVec, cameraToWorld.Value);
                                Vector3 unityFingerVec = new Vector3(fingerVec.X, fingerVec.Y, -fingerVec.Z) / fingerVec.W;
                                handDetected.Fingers.Add(new FingerDetected(unityFingerVec));
                            }

                            //Detect the uppest finger
                            float minFY = hand.Fingers[0].TipY;
                            handDetected.UppestFinger = handDetected.Fingers[0];

                            for (int i = 1; i < handDetected.Fingers.Count; i++)
                            {
                                if (minFY > hand.Fingers[i].TipY)
                                {
                                    minFY = hand.Fingers[i].TipY;
                                    handDetected.UppestFinger = handDetected.Fingers[i];
                                }
                            }

                            //Apply smoothness on this particular finger
                            if (formerFinger != null)
                            {
                                handDetected.UppestFinger.Position = (1.0f - m_smoothness) * handDetected.UppestFinger.Position + m_smoothness * formerFinger.Position;
                            }
                        }
                    }
                }

                for (int i = 0; i < m_handsDetected.Count; i++)
                {
                    HandDetected hd = m_handsDetected[i];
                    //Handle non detected hands
                    if (!hd.IsDetected)
                    {
                        hd.PushUndetection();

                        //Delete the non valid hands
                        if (!hd.IsValid)
                        {
                            m_handsDetected.RemoveAt(i);
                            i--;
                            continue;
                        }
                    }
                }
            }
        }
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used
            // for creating the stereo view matrices when rendering the sample content.

            SpatialCoordinateSystem referenceFrameCoordinateSystem = referenceFrame.GetStationaryCoordinateSystemAtTimestamp(prediction.Timestamp);

            // remember where we were (changed if the CurrentNode != previousNode)
            var previousNode = CurrentNode;

            // update current node the user resides in
            CurrentNode = UpdateCurrentNode(referenceFrameCoordinateSystem, prediction.Timestamp, NodeRadius);

            // .. and current gaze
            SpatialPointerPose pose = SpatialPointerPose.TryGetAtTimestamp(referenceFrameCoordinateSystem, prediction.Timestamp);

            NodePosition = pose.Head.Position;
            GazeForward  = pose.Head.ForwardDirection;
            GazeUp       = pose.Head.UpDirection;

            var mat = referenceFrameCoordinateSystem.TryGetTransformTo(CurrentNode.Anchor.CoordinateSystem);

            if (mat.HasValue)
            {
                NodePosition = Vector3.Transform(NodePosition, mat.Value);
                GazeForward  = Vector3.TransformNormal(GazeForward, mat.Value);
                GazeUp       = Vector3.TransformNormal(GazeUp, mat.Value);
            }

            if (!string.IsNullOrEmpty(requestedSightingTerm))
            {
                var candidates = FindClosestNodesWithSightedItem(referenceFrameCoordinateSystem, pose, requestedSightingTerm);

                if (candidates != null && candidates.Count > 0)
                {
                    targetNode     = candidates[0];
                    targetSighting = candidates[0].Sightings.Where(sighting => sighting.Tokens.Any(token => token.Equals(requestedSightingTerm, StringComparison.OrdinalIgnoreCase))).First();
                }

                requestedSightingTerm = string.Empty;
            }

            // currently at position
            if (CurrentNode == targetNode)
            {
                if (dwellTimeAtCurrentNode >= 5)
                {
                    targetNode     = null;
                    targetSighting = null;
                    entities.Clear();
                    Debug.WriteLine("Well done! Assisted the user find their item");
                }
            }

            if (targetNode != null)
            {
                RebuildTrailToTarget(referenceFrameCoordinateSystem, prediction.Timestamp, CurrentNode, targetNode);
            }


            ProcessNextFrame();

            timer.Tick(() =>
            {
                dwellTimeAtCurrentNode += timer.ElapsedSeconds;

                for (var entityIndex = 0; entityIndex < entities.Count; entityIndex++)
                {
                    var entity = entities[entityIndex];
                    entity.Update(timer, referenceFrameCoordinateSystem);
                }
            });

            // We complete the frame update by using information about our content positioning
            // to set the focus point.
            foreach (var cameraPose in prediction.CameraPoses)
            {
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return holographicFrame;
        }