internal VideoCaptureSample(MediaFrameReference frameReference, SpatialCoordinateSystem worldOrigin)
        {
            if (frameReference == null)
            {
                throw new ArgumentNullException("frameReference.");
            }

            this.frameReference = frameReference;
            this.worldOrigin    = worldOrigin;

            bitmap = frameReference.VideoMediaFrame.SoftwareBitmap;
        }
        /// <summary>
        /// Returns the camera-to-world transform matrix at the time the frame was captured, if location data is available.
        /// If it's not, that is probably an indication that the HoloLens is not tracking and its location is not known.
        /// It could also mean the VideoCapture stream is not running.
        /// If location data is unavailable, the camera-to-world matrix will be set to the identity matrix.
        /// </summary>
        /// <param name="outMatrix">The transform matrix used to convert between coordinate spaces.
        /// The matrix will have to be converted to a Unity matrix before it can be used by methods in the UnityEngine namespace.
        /// See https://forum.unity3d.com/threads/locatable-camera-in-unity.398803/ for details.</param>
        /// <returns>True if the camera-to-world matrix was resolved; false if the identity matrix was returned instead.</returns>
        public bool TryGetCameraToWorldMatrix(out float[] outMatrix)
        {
            if (frameReference.Properties.ContainsKey(viewTransformGuid) == false)
            {
                outMatrix = GetIdentityMatrixFloatArray();
                return(false);
            }

            if (worldOrigin == null)
            {
                outMatrix = GetIdentityMatrixFloatArray();
                return(false);
            }

            byte[] viewTransformBytes = frameReference.Properties[viewTransformGuid] as byte[];

            // Matrix4x4 is a value type, so a null check on the converted matrix can never
            // fire; guard the raw byte[] instead.
            if (viewTransformBytes == null)
            {
                outMatrix = GetIdentityMatrixFloatArray();
                return(false);
            }

            Matrix4x4 cameraViewTransform = ConvertByteArrayToMatrix4x4(viewTransformBytes);

            SpatialCoordinateSystem cameraCoordinateSystem = frameReference.Properties[cameraCoordinateSystemGuid] as SpatialCoordinateSystem;

            if (cameraCoordinateSystem == null)
            {
                outMatrix = GetIdentityMatrixFloatArray();
                return(false);
            }

            Matrix4x4? cameraCoordsToUnityCoordsMatrix = cameraCoordinateSystem.TryGetTransformTo(worldOrigin);

            if (cameraCoordsToUnityCoordsMatrix == null)
            {
                outMatrix = GetIdentityMatrixFloatArray();
                return(false);
            }

            Matrix4x4 worldToViewInCameraCoordsMatrix;

            Matrix4x4.Invert(cameraViewTransform, out worldToViewInCameraCoordsMatrix);
            Matrix4x4 worldToViewInUnityCoordsMatrix  = Matrix4x4.Multiply(cameraCoordsToUnityCoordsMatrix.Value, worldToViewInCameraCoordsMatrix);
            Matrix4x4 viewToWorldInCameraCoordsMatrix = Matrix4x4.Transpose(worldToViewInUnityCoordsMatrix);

            viewToWorldInCameraCoordsMatrix.M31 *= -1f;
            viewToWorldInCameraCoordsMatrix.M32 *= -1f;
            viewToWorldInCameraCoordsMatrix.M33 *= -1f;
            viewToWorldInCameraCoordsMatrix.M34 *= -1f;

            outMatrix = ConvertMatrixToFloatArray(viewToWorldInCameraCoordsMatrix);
            return(true);
        }
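        // Hedged helper sketch (not part of the original sample): the float[] produced above
        // still has to be converted into a UnityEngine.Matrix4x4 before Unity APIs can use it
        // (see the forum link in the summary). This assumes ConvertMatrixToFloatArray emits the
        // 16 components in row-major order (M11..M44); the helper name is hypothetical.
        static UnityEngine.Matrix4x4 FloatArrayToUnityMatrix(float[] m)
        {
            var unityMatrix = new UnityEngine.Matrix4x4();

            for (int row = 0; row < 4; row++)
            {
                for (int col = 0; col < 4; col++)
                {
                    unityMatrix[row, col] = m[row * 4 + col];
                }
            }
            return unityMatrix;
        }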
        // Use this for initialization
        void Start()
        {
#if WINDOWS_UWP
            if (CoordinateSystem == null)
            {
                CoordinateSystem = Windows.Perception.Spatial.Preview.SpatialGraphInteropPreview.CreateCoordinateSystemForNode(id);
                if (CoordinateSystem == null)
                {
                    Debug.Log("Id= " + id + " Failed to acquire coordinate system");
                }
            }
#endif
        }
        private void Start()
        {
#if UNITY_EDITOR || UNITY_WSA
            UnityEngine.XR.WSA.WorldManager.OnPositionalLocatorStateChanged += WorldManager_OnPositionalLocatorStateChanged;
            CurrentState = UnityEngine.XR.WSA.WorldManager.state;

#if WINDOWS_UWP
            if (CoordinateSystem == null)
            {
                CoordinateSystem = Windows.Perception.Spatial.Preview.SpatialGraphInteropPreview.CreateCoordinateSystemForNode(id);
            }
#endif // WINDOWS_UWP
#endif // UNITY_EDITOR || UNITY_WSA
        }
Example #5
        private static SpatialCoordinateSystem RetrieveWorldOriginFromPointer(IntPtr worldOriginPtr)
        {
            if (worldOriginPtr == IntPtr.Zero)
            {
                throw new ArgumentException("World origin pointer is zero");
            }
            SpatialCoordinateSystem spatialCoordinateSystem = Marshal.GetObjectForIUnknown(worldOriginPtr) as SpatialCoordinateSystem;

            if (spatialCoordinateSystem == null)
            {
                throw new InvalidCastException("Failed to retrieve world origin from pointer");
            }
            return(spatialCoordinateSystem);
        }
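        // Possible call site (a sketch, assuming Unity's UnityEngine.XR.WSA.WorldManager is
        // available): GetObjectForIUnknown adds a COM reference to the underlying object, so
        // the raw pointer handed out by Unity is released once the managed wrapper exists.
        private static SpatialCoordinateSystem GetWorldOriginFromUnity()
        {
            IntPtr worldOriginPtr = UnityEngine.XR.WSA.WorldManager.GetNativeISpatialCoordinateSystemPtr();

            try
            {
                return RetrieveWorldOriginFromPointer(worldOriginPtr);
            }
            finally
            {
                Marshal.Release(worldOriginPtr);
            }
        }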
Example #6
 public Matrix4x4 GetWorldToCameraMatrix(SpatialCoordinateSystem originCoordinateSystem)
 {
     lock (TransformLock)
     {
         Forward = new Vector4(-Vector3.UnitZ, 0.0f);
         if (CoordinateSystem == null)
         {
             return(Matrix4x4.Identity);
         }
         var transform = originCoordinateSystem.TryGetTransformTo(CoordinateSystem) ?? Matrix4x4.Identity;
         Matrix4x4.Invert(transform * ViewMatrix, out var inverseMatrix);
         Forward = Vector4.Transform(Forward, inverseMatrix);
         return(transform * ViewMatrix * ProjectionMatrix);
     }
 }
        /// <summary>
        /// Use node 0 as the relative reference point for y.
        /// </summary>
        /// <param name="referenceFrameCoordinateSystem">The coordinate system the entity transform is resolved against.</param>
        /// <returns>The reference entity, or null if no nodes exist.</returns>
        Entity GetReferenceEntitySpatialCoordinateSystem(SpatialCoordinateSystem referenceFrameCoordinateSystem)
        {
            if (nodes.Count == 0)
            {
                return(null);
            }

            var entity = new Entity($"base_node");

            entity.Node     = nodes[0];
            entity.Renderer = nodeRenderer;

            entity.UpdateTransform(referenceFrameCoordinateSystem);

            return(entity);
        }
        public HoloLensCamera(CaptureMode captureMode, PixelFormat pixelFormat = PixelFormat.BGRA8)
        {
            desiredPixelFormat = pixelFormat;
            CameraType         = CameraType.Invalid;
            CaptureMode        = captureMode;

#if CAN_USE_UNITY_TYPES && UNITY_WSA && CAN_USE_UWP_TYPES
            IntPtr coordinateSystemPtr;
            // This must be done from the main thread, so it is done here in the constructor.
            coordinateSystemPtr = UnityEngine.XR.WSA.WorldManager.GetNativeISpatialCoordinateSystemPtr();
            if (coordinateSystemPtr != IntPtr.Zero)
            {
                rootCoordinateSystem = WinRTExtensions.GetSpatialCoordinateSystem(coordinateSystemPtr);
            }
#endif
        }
        /// <summary>
        /// We position the entities relative to the first node to keep the entity
        /// positions consistent.
        /// </summary>
        /// <param name="referenceFrameCoordinateSystem">The coordinate system the entity transform is resolved against.</param>
        /// <returns>The root entity, or null if no nodes exist.</returns>
        Entity GetRootEntity(SpatialCoordinateSystem referenceFrameCoordinateSystem)
        {
            if (nodes.Count == 0)
            {
                return(null);
            }

            var entity = new Entity($"node_root");

            entity.Node     = nodes[0];
            entity.Renderer = nodeRenderer;
            entity.Position = new Vector3(0, EntityOffsetY, 0);
            entity.UpdateTransform(referenceFrameCoordinateSystem);

            return(entity);
        }
    public void StartPullCameraFrames()
    {
        System.Diagnostics.Debug.WriteLine("StartPullCameraFrames");
        Task.Run(async () =>
        {
            var ModelHelper = new ONNXModelHelper(UnityApp);
            System.Diagnostics.Debug.WriteLine("model inited");
            for (; ;)  // Forever = While the app runs
            {
                FramesCaptured++;
                await Task.Delay(PredictionFrequency);
                using (var frameReference = CameraFrameReader.TryAcquireLatestFrame())
                    using (var videoFrame = frameReference?.VideoMediaFrame?.GetVideoFrame())
                    {
                        if (videoFrame == null)
                        {
                            System.Diagnostics.Debug.WriteLine("frame is null");
                            continue; //ignoring frame
                        }
                        if (videoFrame.Direct3DSurface == null)
                        {
                            System.Diagnostics.Debug.WriteLine("d3d surface is null");
                            continue; // ignoring frame; the using block disposes it
                        }
                        try
                        {
                            System.Diagnostics.Debug.WriteLine("trying to evaluate");
                            SpatialCoordinateSystem worldCoordinateSystem = m_referenceFrame.CoordinateSystem;
                            // Note: this cast throws if TryGetTransformTo returns null; the exception is caught below.
                            Matrix4x4 cameraToWorld           = (Matrix4x4)frameReference.CoordinateSystem.TryGetTransformTo(worldCoordinateSystem);
                            CameraIntrinsics cameraIntrinsics = frameReference.VideoMediaFrame.CameraIntrinsics;
                            DepthMediaFrame depthFrame        = frameReference.VideoMediaFrame.DepthMediaFrame;

                            await ModelHelper.EvaluateVideoFrameAsync(videoFrame, frameReference.VideoMediaFrame, worldCoordinateSystem, frameReference.CoordinateSystem).ConfigureAwait(false);
                        }
                        catch (Exception ex)
                        {
                            System.Diagnostics.Debug.WriteLine(ex.Message);
                        }
                    }
            }
        });
    }
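    // Sketch of how CameraFrameReader might be created before StartPullCameraFrames runs.
    // CameraFrameReader comes from the snippet above; the frame-source selection is an
    // assumption (first color source found). Requires System.Linq and Windows.Media.Capture.Frames.
    public async Task InitializeCameraFrameReaderAsync()
    {
        var mediaCapture = new MediaCapture();
        await mediaCapture.InitializeAsync();

        // Pick the first color frame source exposed by the device (assumption).
        var frameSource = mediaCapture.FrameSources.Values
                          .First(source => source.Info.SourceKind == MediaFrameSourceKind.Color);

        CameraFrameReader = await mediaCapture.CreateFrameReaderAsync(frameSource);
        await CameraFrameReader.StartAsync();
    }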
        public SpatialSurfaceRenderer(DeviceResources deviceResources, SpatialCoordinateSystem spatialCoordinateSystem)
        {
            _deviceResources         = deviceResources;
            _spatialCoordinateSystem = spatialCoordinateSystem;
            CheckAccess();

            var desc = RasterizerStateDescription.Default();

            desc.FillMode = FillMode.Wireframe;

            _state = new RasterizerState(deviceResources.D3DDevice, desc);

            SurfaceMeshList.CoordinateSystem = _spatialCoordinateSystem;
            _surfaceMeshList.DirectXDevice   = deviceResources.D3DDevice;

            // Fire-and-forget: device-dependent resources are created asynchronously.
            CreateDeviceDependentResourcesAsync();
        }
Example #12
    // Update is called once per frame
    void Update()
    {
#if ENABLE_WINMD_SUPPORT
        if (!_isReadyToRender)
        {
            return;
        }

        // The HolographicFrame has information that the app needs in order
        // to update and render the current frame. The app begins each new
        // frame by calling CreateNextFrame.
        //HolographicFrame ^ holographicFrame = m_holographicSpace->CreateNextFrame();

        // Get a prediction of where holographic cameras will be when this frame
        // is presented.
        //HolographicFramePrediction prediction = holographicFrame->CurrentPrediction;

        IntPtr spatialCoordinateSystemPtr               = WorldManager.GetNativeISpatialCoordinateSystemPtr();
        SpatialCoordinateSystem unityWorldOrigin        = Marshal.GetObjectForIUnknown(spatialCoordinateSystemPtr) as SpatialCoordinateSystem;
        SpatialCoordinateSystem currentCoordinateSystem = unityWorldOrigin;

        _isTrackingFaces = _faceTrackerProcessor.IsTrackingFaces();

        if (_isTrackingFaces)
        {
            MediaFrameReference frame = _videoFrameProcessor.GetLatestFrame();
            if (frame == null)
            {
                return;
            }
            var faces = _faceTrackerProcessor.GetLatestFaces();
            ProcessFaces(faces, frame, currentCoordinateSystem);


            TimeSpan currentTimeStamp = frame.SystemRelativeTime.Value.Duration();
            if (currentTimeStamp > _previousFrameTimestamp)
            {
                // TODO: copy to texture
                _previousFrameTimestamp = frame.SystemRelativeTime.Value.Duration();
            }
        }

        SpatialPointerPose pointerPose = SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, PerceptionTimestampHelper.FromHistoricalTargetTime(DateTimeOffset.Now));
#endif
    }
Example #13
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used
            // for creating the stereo view matrices when rendering the sample content.
            SpatialCoordinateSystem currentCoordinateSystem = referenceFrame.CoordinateSystem;

            timer.Tick(() =>
            {
                //
                // TODO: Update scene objects.
                //
                // Put time-based updates here. By default this code will run once per frame,
                // but if you change the StepTimer to use a fixed time step this code will
                // run as many times as needed to get to the current step.
                //
            });

            // We complete the frame update by using information about our content positioning
            // to set the focus point.
            foreach (var cameraPose in prediction.CameraPoses)
            {
                // TODO: get this camera's rendering parameters via
                // holographicFrame.GetRenderingParameters(cameraPose) and call SetFocusPoint.
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return(holographicFrame);
        }
        int RebuildTrailToTarget(SpatialCoordinateSystem referenceFrameCoordinateSystem, PerceptionTimestamp perceptionTimestamp, Node startNode, Node endNode, int lookAhead = 3)
        {
            Debug.WriteLine($"RebuildTrailToTarget {startNode.Name} -> {endNode.Name}");

            entities.Clear();

            var trail = new List <Node>();

            if (startNode == endNode)
            {
                trail.Add(startNode);
            }
            else
            {
                BuildPath(endNode, startNode, trail, new List <Node>());
            }

            if (trail.Count == 0)
            {
                Debug.WriteLine($"Unable to find Path for startNode {startNode.Name} and {endNode.Name}");
                return(-1);
            }

            Debug.WriteLine($"Creating trials {trail.ToArray()}");

            var baseEntity = GetReferenceEntitySpatialCoordinateSystem(referenceFrameCoordinateSystem);

            for (var i = 0; i < Math.Min(trail.Count, lookAhead); i++)
            {
                var node = trail[i];

                var entity = new Entity($"node_{i}");
                entity.Node     = node;
                entity.Renderer = nodeRenderer;

                entity.UpdateTransform(referenceFrameCoordinateSystem);
                // offset from baseEntity (to keep the y positions uniform and consistent)
                var targetPosition = baseEntity.Transform.Translation;
                entity.Position = new Vector3(0, (targetPosition - entity.Transform.Translation).Y, 0f);

                entities.Add(entity);
            }

            return(1);
        }
        public IList <Node> FindClosestNodesWithSightedItem(SpatialCoordinateSystem referenceFrameCoordinateSystem, SpatialPointerPose pose, string sightingItem)
        {
            var filteredNodes = nodes.Where(node =>
            {
                return(node.Sightings.Any(sighting =>
                {
                    return sighting.Tokens.Any(token => token.Equals(sightingItem, StringComparison.OrdinalIgnoreCase));
                }));
            });

            // Enumerable.Where never returns null, so order the (possibly empty) result
            // by distance from the user's head position.
            return(filteredNodes.OrderBy(node =>
            {
                return node.TryGetDistance(referenceFrameCoordinateSystem, pose.Head.Position);
            }).ToList());
        }
Example #16
        public async void Initialize(SpatialCoordinateSystem coordinateSystem)
        {
            CoordinateSystem = coordinateSystem;
            var requestStatus = await SpatialSurfaceObserver.RequestAccessAsync();

            if (requestStatus == SpatialPerceptionAccessStatus.Allowed)
            {
                SurfaceObserver = new SpatialSurfaceObserver();
                var boundingBox = new SpatialBoundingBox()
                {
                    Center  = Vector3.Zero,
                    Extents = new Vector3(10.0f, 10.0f, 2.5f)
                };
                SurfaceObserver.SetBoundingVolume(SpatialBoundingVolume.FromBox(coordinateSystem, boundingBox));
                await CreateDeviceDenpendantResources();

                SurfaceObserver.ObservedSurfacesChanged += OnObservedSurfacesChanged;
                Active = true;
            }
        }
Example #17
        void IRenderLoopHost.OnRenderLoop_PrepareRendering(EngineDevice device)
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = m_holoSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used
            // for creating the stereo view matrices when rendering the sample content.
            SpatialCoordinateSystem currentCoordinateSystem = m_referenceFrame.CoordinateSystem;
        }
Example #18
        internal async void setUpSpatialMapping(SpatialCoordinateSystem coordinateSystem)
        {
            // If the v4 API contract (which introduced SpatialSurfaceObserver.IsSupported) is
            // not present, fall through and attempt the request; otherwise check support first.
            if (!Windows.Foundation.Metadata.ApiInformation.IsApiContractPresent("Windows.Foundation.UniversalApiContract", 4) || SpatialSurfaceObserver.IsSupported())
            {
                SpatialPerceptionAccessStatus status = await SpatialSurfaceObserver.RequestAccessAsync();

                if (status == SpatialPerceptionAccessStatus.Allowed)
                {
                    SpatialSurfaceObserver observer    = new SpatialSurfaceObserver();
                    SpatialBoundingBox     boundingBox = new SpatialBoundingBox()
                    {
                        Center  = new System.Numerics.Vector3(0, 0, 0),
                        Extents = new System.Numerics.Vector3(40, 40, 5)
                    };
                    SpatialBoundingVolume bounds = SpatialBoundingVolume.FromBox(coordinateSystem, boundingBox);
                    observer.SetBoundingVolume(bounds);
                    observer.ObservedSurfacesChanged += new TypedEventHandler <SpatialSurfaceObserver, object>(ClockworkSocket.SurfacesChanged);
                }
            }
        }
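        // Minimal handler sketch for the ObservedSurfacesChanged subscription above. The real
        // ClockworkSocket.SurfacesChanged body is not shown in the snippet, so this is an
        // assumption: enumerate the observed surfaces and optionally request their meshes.
        internal static void SurfacesChanged(SpatialSurfaceObserver sender, object args)
        {
            foreach (var pair in sender.GetObservedSurfaces())
            {
                SpatialSurfaceInfo surfaceInfo = pair.Value;
                // Meshes can then be requested asynchronously, e.g.:
                // SpatialSurfaceMesh mesh = await surfaceInfo.TryComputeLatestMeshAsync(1000.0);
            }
        }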
        void CreateEntitiesForAllNodes(SpatialCoordinateSystem referenceFrameCoordinateSystem)
        {
            entities.Clear();

            var rootEntity = GetRootEntity(referenceFrameCoordinateSystem);

            var i = 0;

            foreach (var node in this.nodes)
            {
                var entity = new Entity($"node_{i}");
                entity.Node     = node;
                entity.Renderer = nodeRenderer;
                entity.UpdateTransform(referenceFrameCoordinateSystem);
                var targetPosition = rootEntity.Transform.Translation;
                entity.Position = new Vector3(0, (targetPosition - entity.Transform.Translation).Y, 0f);
                entities.Add(entity);

                i += 1;
            }
        }
        internal VideoCaptureSample(MediaFrameReference frameReference, SpatialCoordinateSystem worldOrigin)
        {
            if (frameReference == null)
            {
                throw new ArgumentNullException("frameReference.");
            }

            this.frameReference = frameReference;
            this.worldOrigin    = worldOrigin;

            // When Windows.Media.Devices.Core.CameraIntrinsics is out of prerelease, use this instead
            //cameraIntrinsics = new CameraIntrinsics(frameReference.VideoMediaFrame.CameraIntrinsics);

            byte[]  rawIntrinsics  = frameReference.Properties[cameraIntrinsicsGuid] as byte[];
            float[] intrinsicArray = ConvertByteArrayToFloatArray(rawIntrinsics);
            cameraIntrinsics = new CameraIntrinsics(intrinsicArray);

            bitmap      = frameReference.VideoMediaFrame.SoftwareBitmap;
            FrameWidth  = bitmap.PixelWidth;
            FrameHeight = bitmap.PixelHeight;
        }
Example #21
        public void UpdateTransform(SpatialCoordinateSystem referenceFrameCoordinateSystem)
        {
            if (!Enabled)
            {
                return;
            }

            var trans = Node.GetTransform(referenceFrameCoordinateSystem);

            if (trans.HasValue)
            {
                Matrix4x4 modelTranslation = Matrix4x4.CreateTranslation(Position);
                Matrix4x4 modelRotation    = Matrix4x4.CreateFromYawPitchRoll(
                    DegreeToRadian(EulerAngles.Y),
                    DegreeToRadian(EulerAngles.X),
                    DegreeToRadian(EulerAngles.Z));
                Matrix4x4 modelScale = Matrix4x4.CreateScale(Scale);

                transform = (modelScale * modelRotation * modelTranslation) * trans.Value;
            }
        }
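        // Hypothetical usage of UpdateTransform: the model matrix composes as
        // scale * rotation * translation, then the node transform carries the entity into the
        // reference frame. Property names mirror those used above; someNode is assumed.
        //
        // var entity = new Entity("marker");
        // entity.Node        = someNode;
        // entity.Position    = new Vector3(0, 0.1f, 0);   // 10 cm above the node
        // entity.EulerAngles = new Vector3(0, 90, 0);     // 90 degree yaw
        // entity.Scale       = Vector3.One;
        // entity.UpdateTransform(referenceFrameCoordinateSystem);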
Example #22
        /// <summary>
        /// Cache the coordinate system for the QR code's spatial node, and the root.
        /// </summary>
        /// <returns></returns>
        private bool CheckCoordinateSystem()
        {
#if WINDOWS_UWP
            if (coordinateSystem == null)
            {
                SimpleConsole.AddLine(trace, $"Creating coord for {spatialNodeId}");
                coordinateSystem = global::Windows.Perception.Spatial.Preview.SpatialGraphInteropPreview.CreateCoordinateSystemForNode(SpatialNodeId);
            }

            if (rootCoordinateSystem == null)
            {
                rootCoordinateSystem = System.Runtime.InteropServices.Marshal.GetObjectForIUnknown(
                    UnityEngine.XR.WSA.WorldManager.GetNativeISpatialCoordinateSystemPtr()
                    ) as SpatialCoordinateSystem;
                SimpleConsole.AddLine(trace, $"Getting root coordinate system {(rootCoordinateSystem == null ? "null" : "succeeded")}");
            }

            return(coordinateSystem != null);
#else // WINDOWS_UWP
            return(false);
#endif // WINDOWS_UWP
        }
        public void Update(PerceptionTimestamp timeStamp, SpatialCoordinateSystem coordinateSystem)
        {
            var states = interactionManager.GetDetectedSourcesAtTimestamp(timeStamp);

            foreach (SpatialInteractionSourceState state in states)
            {
                if (state.Source.Handedness == hand)
                {
                    SpatialInteractionSourceLocation location = state.Properties.TryGetLocation(coordinateSystem);

                    if (location != null)
                    {
                        SetSpatialInteractionSourceLocation(location);
                    }

                    previousState = currentState;
                    currentState  = state;

                    internalState = previousState != null ? DeviceState.Valid : DeviceState.Invalid;
                }
            }
        }
        int RebuildTrailToTarget(SpatialCoordinateSystem referenceFrameCoordinateSystem, PerceptionTimestamp perceptionTimestamp, Node startNode, Node endNode,
                                 int lookAhead = 100)
        {
            Debug.WriteLine($"RebuildTrailToTarget {startNode.Name} -> {endNode.Name}");

            entities.Clear();

            Stack <Node> trail = new Stack <Node>();

            BuildPath(endNode, startNode, trail);

            if (trail.Count == 0)
            {
                Debug.WriteLine($"Unable to find Path for startNode {startNode.Name} and {endNode.Name}");
                return(-1);
            }

            var rootEntity = GetRootEntity(referenceFrameCoordinateSystem);

            int i = 0;

            while (i < lookAhead && trail.Count > 0)
            {
                var node = trail.Pop();

                var entity = new Entity($"node_{i}");
                entity.Node     = node;
                entity.Renderer = entity.Node == targetNode ? targetNodeRenderer : nodeRenderer;
                entity.UpdateTransform(referenceFrameCoordinateSystem);
                var targetPosition = rootEntity.Transform.Translation;
                entity.Position = new Vector3(0, (targetPosition - entity.Transform.Translation).Y, 0f);
                entities.Add(entity);

                i += 1;
            }

            return(1);
        }
Example #25
    // Get transform matrices from the MediaFrameReference
    public static Tuple <Matrix4x4, Matrix4x4, Matrix4x4> GetTransforms(MediaFrameReference colorFrameRef, SpatialCoordinateSystem unityWorldCoordinateSystem)
    {
        SpatialCoordinateSystem spatialCoordinateSystem = null;
        Matrix4x4 projectionTransform = Matrix4x4.Identity; // intrinsics; does not change
        Matrix4x4 viewTransform       = Matrix4x4.Identity; // extrinsics; changes per frame

        // TODO: Unity has CameraToWorldMatrix provided by PhotoCaptureFrame class... Cam space -> world space
        // also has worldToCameraMatrix, can it replace cameraCoordinateSystem transforms?
        // UnityEngine.Matrix4x4 camToWorld = UnityEngine.Camera.main.cameraToWorldMatrix;

        object value;

        if (colorFrameRef.Properties.TryGetValue(MFSampleExtension_Spatial_CameraCoordinateSystem, out value))
        {
            spatialCoordinateSystem = value as SpatialCoordinateSystem;
        }
        if (colorFrameRef.Properties.TryGetValue(MFSampleExtension_Spatial_CameraProjectionTransform, out value))
        {
            projectionTransform = ByteArrayToMatrix(value as byte[]);
        }
        if (colorFrameRef.Properties.TryGetValue(MFSampleExtension_Spatial_CameraViewTransform, out value))
        {
            viewTransform = ByteArrayToMatrix(value as byte[]);
        }

        // Transform: Camera Coord System -> Unity world coord
        // See https://github.com/Microsoft/MixedRealityToolkit-Unity/blob/96cc9ab8998280edcd6871f41e89584030ee4f26/Assets/HoloToolkit-Preview/QRTracker/Scripts/SpatialGraphCoordinateSystem.cs#L94
        if (spatialCoordinateSystem == null)
        {
            // The camera coordinate system attribute was missing from this frame.
            return(null);
        }
        var cameraRGBToWorldTransform = spatialCoordinateSystem.TryGetTransformTo(unityWorldCoordinateSystem);

        if (cameraRGBToWorldTransform == null)
        {
            return(null);
        }
        Matrix4x4 frameToOrigin = cameraRGBToWorldTransform.Value;

        return(Tuple.Create(frameToOrigin, projectionTransform, viewTransform));
    }
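    // The MFSampleExtension_Spatial_* identifiers used above are media-sample attribute GUIDs
    // declared in mfapi.h; the values below are believed correct but should be verified against
    // the header. ByteArrayToMatrix is a plausible sketch, assuming the attribute blob is a
    // packed System.Numerics.Matrix4x4 (16 floats, 64 bytes); requires System.Runtime.InteropServices.
    static readonly Guid MFSampleExtension_Spatial_CameraCoordinateSystem =
        new Guid("9D13C82F-2199-4E67-91CD-D1A4181F2534");
    static readonly Guid MFSampleExtension_Spatial_CameraViewTransform =
        new Guid("4E251FA4-830F-4770-859A-4B8D99AA809B");
    static readonly Guid MFSampleExtension_Spatial_CameraProjectionTransform =
        new Guid("47F9FCB5-2A02-4F26-A477-792FDF95886A");

    static Matrix4x4 ByteArrayToMatrix(byte[] bytes)
    {
        // Pin the buffer and reinterpret it as a Matrix4x4 structure.
        GCHandle handle = GCHandle.Alloc(bytes, GCHandleType.Pinned);

        try
        {
            return Marshal.PtrToStructure<Matrix4x4>(handle.AddrOfPinnedObject());
        }
        finally
        {
            handle.Free();
        }
    }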
        private CameraExtrinsics GetExtrinsics(SpatialCoordinateSystem frameCoordinateSystem)
        {
            if (frameCoordinateSystem == null)
            {
                return(null);
            }

            CameraExtrinsics extrinsics = null;

            if (rootCoordinateSystem == null)
            {
                return(null);
            }

            System.Numerics.Matrix4x4? worldMatrix = frameCoordinateSystem.TryGetTransformTo(rootCoordinateSystem);

            if (worldMatrix.HasValue)
            {
                WindowsVector3    position;
                WindowsVector3    scale;
                WindowsQuaternion rotation;
                WindowsMatrix4x4.Decompose(worldMatrix.Value, out scale, out rotation, out position);

                WindowsVector3 forward = WindowsVector3.Transform(-WindowsVector3.UnitZ, rotation);
                WindowsVector3 up      = WindowsVector3.Transform(WindowsVector3.UnitY, rotation);

                Matrix4x4 unityWorldMatrix = Matrix4x4.TRS(WindowsVectorToUnityVector(position), Quaternion.LookRotation(WindowsVectorToUnityVector(forward), WindowsVectorToUnityVector(up)), Vector3.one);

                extrinsics = new CameraExtrinsics()
                {
                    ViewFromWorld = unityWorldMatrix
                };
            }

            return(extrinsics);
        }
        void ProcessFrame(SpatialCoordinateSystem worldCoordinateSystem)
        {
            if (!IsInValidateStateToProcessFrame())
            {
                return;
            }

            // obtain the details of the last frame captured 
            FrameGrabber.Frame frame = frameGrabber.LastFrame;

            if (frame.mediaFrameReference == null)
            {
                return;
            }

            MediaFrameReference mediaFrameReference = frame.mediaFrameReference;

            SpatialCoordinateSystem cameraCoordinateSystem = mediaFrameReference.CoordinateSystem;
            CameraIntrinsics cameraIntrinsics = mediaFrameReference.VideoMediaFrame.CameraIntrinsics;

            Matrix4x4? cameraToWorld = cameraCoordinateSystem.TryGetTransformTo(worldCoordinateSystem);

            if (!cameraToWorld.HasValue)
            {
                return;
            }

            // Assumed average face width; combined with the focal length below to estimate depth.
            float averageFaceWidthInMeters = 0.15f;

            float pixelsPerMeterAlongX = cameraIntrinsics.FocalLength.X;
            float averagePixelsForFaceAt1Meter = pixelsPerMeterAlongX * averageFaceWidthInMeters;

            // Place the label 25cm above the center of the face.
            Vector3 labelOffsetInWorldSpace = new Vector3(0.0f, 0.25f, 0.0f);            

            frameAnalyzer.AnalyzeFrame(frame.mediaFrameReference, (status, detectedPersons) =>
            {
                if(status > 0 && detectedPersons.Count > 0)
                {
                    FrameAnalyzer.Bounds? bestRect = null;
                    Vector3 bestRectPositionInCameraSpace = Vector3.Zero;
                    float bestDotProduct = -1.0f;
                    FrameAnalyzer.DetectedPerson bestPerson = null; 

                    foreach (var dp in detectedPersons)
                    {
                        Debug.WriteLine($"Detected person: {dp.ToString()}");

                        Point faceRectCenterPoint = new Point(
                            dp.bounds.left + dp.bounds.width /2, 
                            dp.bounds.top + dp.bounds.height / 2
                            );

                        // Calculate the vector towards the face at 1 meter.
                        Vector2 centerOfFace = cameraIntrinsics.UnprojectAtUnitDepth(faceRectCenterPoint);

                        // Add the Z component and normalize.
                        Vector3 vectorTowardsFace = Vector3.Normalize(new Vector3(centerOfFace.X, centerOfFace.Y, -1.0f));

                        // Get the dot product between the vector towards the face and the gaze vector.
                        // The closer the dot product is to 1.0, the closer the face is to the middle of the video image.
                        float dotFaceWithGaze = Vector3.Dot(vectorTowardsFace, -Vector3.UnitZ);                        

                        // Pick the faceRect that best matches the users gaze.
                        if (dotFaceWithGaze > bestDotProduct)
                        {
                            // Estimate depth using the ratio of the current faceRect width with the average faceRect width at 1 meter.
                            float estimatedFaceDepth = averagePixelsForFaceAt1Meter / (float)dp.bounds.width;

                            // Scale the vector towards the face by the depth, and add an offset for the label.
                            Vector3 targetPositionInCameraSpace = vectorTowardsFace * estimatedFaceDepth;

                            bestDotProduct = dotFaceWithGaze;
                            bestRect = dp.bounds;
                            bestRectPositionInCameraSpace = targetPositionInCameraSpace;
                            bestPerson = dp; 
                        }                         
                    }

                    if (bestRect.HasValue)
                    {
                        // Transform the cube from Camera space to World space.
                        Vector3 bestRectPositionInWorldspace = Vector3.Transform(bestRectPositionInCameraSpace, cameraToWorld.Value);
                        Vector3 labelPosition = bestRectPositionInWorldspace + labelOffsetInWorldSpace;                          

                        quadRenderer.TargetPosition = labelPosition;
                        textRenderer.RenderTextOffscreen($"{bestPerson.name}, {bestPerson.gender}, Age: {bestPerson.age}");

                        lastFaceDetectedTimestamp = Utils.GetCurrentUnixTimestampMillis();
                    }               
                }
            }); 
        }
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used
            // for creating the stereo view matrices when rendering the sample content.
            SpatialCoordinateSystem currentCoordinateSystem = referenceFrame.CoordinateSystem;

            SpatialPointerPose pose = SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp);            

            ProcessFrame(currentCoordinateSystem);

            if (Utils.GetCurrentUnixTimestampMillis() - lastFaceDetectedTimestamp > faceTimeThreshold)
            {
                if(pose != null)
                {
                    var headPosition = pose.Head.Position;
                    var headForward = pose.Head.ForwardDirection;
                    quadRenderer.TargetPosition = headPosition + (2.0f * headForward);
                }
                                
                textRenderer.RenderTextOffscreen("No faces detected");
            }

            timer.Tick(() =>
            {
                //
                // TODO: Update scene objects.
                //
                // Put time-based updates here. By default this code will run once per frame,
                // but if you change the StepTimer to use a fixed time step this code will
                // run as many times as needed to get to the current step.
                //

                quadRenderer.Update(pose, timer);
            });

            // We complete the frame update by using information about our content positioning
            // to set the focus point.
            foreach (var cameraPose in prediction.CameraPoses)
            {
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera.
                // You should set the focus point near the content that the user is looking at.
                // In this example, we put the focus point at the center of the sample hologram,
                // since that is the only hologram available for the user to focus on.
                // You can also set the relative velocity and facing of that content; the sample
                // hologram is at a fixed point so we only need to indicate its position.

                if(Utils.GetCurrentUnixTimestampMillis() - lastFaceDetectedTimestamp <= faceTimeThreshold)
                {
                    renderingParameters.SetFocusPoint(
                        currentCoordinateSystem,    
                        quadRenderer.Position,
                        quadRenderer.Forward,
                        quadRenderer.Velocity
                    );
                }
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return holographicFrame;
        }
Example #29
 private static extern void GetSpatialCoordinateSystem(IntPtr nativePtr, out SpatialCoordinateSystem coordinateSystem);
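 // An extern method like the one above must be paired with a DllImport attribute naming the
 // native library that exports it; the library name in this sketch is purely illustrative.
 // [DllImport("SpatialInteropNative.dll")]
 // private static extern void GetSpatialCoordinateSystem(IntPtr nativePtr, out SpatialCoordinateSystem coordinateSystem);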
Example #30
        /// <summary>
        /// Updates the constant buffer for the display with view and projection
        /// matrices for the current frame.
        /// </summary>
        public void UpdateViewProjectionBuffer(
            DeviceResources deviceResources,
            HolographicCameraPose cameraPose,
            SpatialCoordinateSystem coordinateSystem
            )
        {
            // The system changes the viewport on a per-frame basis for system optimizations.
            _d3DViewport.X        = (float)cameraPose.Viewport.Left;
            _d3DViewport.Y        = (float)cameraPose.Viewport.Top;
            _d3DViewport.Width    = (float)cameraPose.Viewport.Width;
            _d3DViewport.Height   = (float)cameraPose.Viewport.Height;
            _d3DViewport.MinDepth = 0;
            _d3DViewport.MaxDepth = 1;

            // The projection transform for each frame is provided by the HolographicCameraPose.
            var cameraProjectionTransform = cameraPose.ProjectionTransform;

            // Get a container object with the view and projection matrices for the given
            // pose in the given coordinate system.
            var viewTransformContainer = cameraPose.TryGetViewTransform(coordinateSystem);

            // If TryGetViewTransform returns null, that means the pose and coordinate system
            // cannot be understood relative to one another; content cannot be rendered in this
            // coordinate system for the duration of the current frame.
            // This usually means that positional tracking is not active for the current frame, in
            // which case it is possible to use a SpatialLocatorAttachedFrameOfReference to render
            // content that is not world-locked instead.
            var viewProjectionConstantBufferData = new ViewProjectionConstantBuffer();
            var viewTransformAcquired            = viewTransformContainer.HasValue;

            if (viewTransformAcquired)
            {
                // Otherwise, the set of view transforms can be retrieved.
                var viewCoordinateSystemTransform = viewTransformContainer.Value;

                // Update the view matrices. Holographic cameras (such as Microsoft HoloLens) are
                // constantly moving relative to the world. The view matrices need to be updated
                // every frame.
                viewProjectionConstantBufferData.ViewProjectionLeft = Matrix4x4.Transpose(
                    viewCoordinateSystemTransform.Left * cameraProjectionTransform.Left
                    );
                viewProjectionConstantBufferData.ViewProjectionRight = Matrix4x4.Transpose(
                    viewCoordinateSystemTransform.Right * cameraProjectionTransform.Right
                    );
            }

            // Use the D3D device context to update Direct3D device-based resources.
            var context = deviceResources.D3DDeviceContext;

            // Loading is asynchronous. Resources must be created before they can be updated.
            if (context == null || _viewProjectionConstantBuffer == null || !viewTransformAcquired)
            {
                _framePending = false;
            }
            else
            {
                // Update the view and projection matrices.
                context.UpdateSubresource(ref viewProjectionConstantBufferData, _viewProjectionConstantBuffer);

                _framePending = true;
            }
        }
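        // Hypothetical per-frame usage, mirroring the Update() examples earlier on this page:
        // call once per predicted camera pose with the frame's coordinate system.
        //
        // foreach (HolographicCameraPose cameraPose in prediction.CameraPoses)
        // {
        //     display.UpdateViewProjectionBuffer(deviceResources, cameraPose, referenceFrame.CoordinateSystem);
        // }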