        /// <summary>
        /// Updates resources associated with a holographic camera's swap chain.
        /// The app does not access the swap chain directly, but it does create
        /// resource views for the back buffer.
        /// </summary>
        public void CreateResourcesForBackBuffer(
            DeviceResources deviceResources,
            HolographicCameraRenderingParameters cameraParameters)
        {
            var device = deviceResources.D3DDevice;

            // Get the WinRT object representing the holographic camera's back buffer.
            IDirect3DSurface surface = cameraParameters.Direct3D11BackBuffer;

            // Get a DXGI interface for the holographic camera's back buffer.
            // Holographic cameras do not provide the DXGI swap chain, which is owned
            // by the system. The Direct3D back buffer resource is provided using WinRT
            // interop APIs.
            InteropStatics.IDirect3DDxgiInterfaceAccess surfaceDxgiInterfaceAccess = surface as InteropStatics.IDirect3DDxgiInterfaceAccess;
            IntPtr   pResource = surfaceDxgiInterfaceAccess.GetInterface(InteropStatics.ID3D11Resource);
            Resource resource  = SharpDX.CppObject.FromPointer <Resource>(pResource);

            Marshal.Release(pResource);

            // Get a Direct3D interface for the holographic camera's back buffer.
            Texture2D cameraBackBuffer = resource.QueryInterface <Texture2D>();

            // Determine if the back buffer has changed. If so, ensure that the render target view
            // is for the current back buffer.
            if ((this.d3dBackBuffer == null) || (this.d3dBackBuffer.NativePointer != cameraBackBuffer.NativePointer))
            {
                // This can change every frame as the system moves to the next buffer in the
                // swap chain. This mode of operation will occur when certain rendering modes
                // are activated.
                this.d3dBackBuffer = cameraBackBuffer;

                // Create a render target view of the back buffer, as promised above.
                // (Mirrors the standard holographic template; assumes the class declares
                // a d3dRenderTargetView field alongside its other d3d* fields.)
                // Creating this resource is inexpensive, and is better than keeping track of
                // the back buffers in order to pre-allocate render target views for each one.
                this.d3dRenderTargetView = this.ToDispose(new RenderTargetView(device, this.BackBufferTexture2D));

                // Get the DXGI format for the back buffer.
                // This information can be accessed by the app using CameraResources::GetBackBufferDXGIFormat().
                Texture2DDescription backBufferDesc = this.BackBufferTexture2D.Description;
                this.dxgiFormat = backBufferDesc.Format;

                // Check for render target size changes.
                Size currentSize = this.holographicCamera.RenderTargetSize;
                if (this.d3dRenderTargetSize != currentSize)
                {
                    // Set render target size.
                    this.d3dRenderTargetSize = currentSize;
                }
            }

            // Create the constant buffer, if needed.
            if (this.viewProjectionConstantBuffer == null)
            {
                // Create a constant buffer to store view and projection matrices for the camera.
                ViewProjectionConstantBuffer viewProjectionConstantBufferData = new ViewProjectionConstantBuffer();
                this.viewProjectionConstantBuffer = this.ToDispose(SharpDX.Direct3D11.Buffer.Create(
                                                                       device,
                                                                       BindFlags.ConstantBuffer,
                                                                       ref viewProjectionConstantBufferData));
            }
        }
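
        // For reference, a minimal sketch of the constant-buffer layout the code above
        // assumes. The field names are illustrative; the holographic app template
        // defines its own ViewProjectionConstantBuffer. Pack = 16 keeps the struct on
        // the 16-byte boundaries that HLSL constant buffers require.
        //
        // [StructLayout(LayoutKind.Sequential, Pack = 16)]
        // internal struct ViewProjectionConstantBuffer
        // {
        //     public Matrix viewProjectionLeft;   // left-eye view * projection
        //     public Matrix viewProjectionRight;  // right-eye view * projection
        // }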
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used
            // for creating the stereo view matrices when rendering the sample content.
            SpatialCoordinateSystem currentCoordinateSystem = referenceFrame.CoordinateSystem;

            SpatialPointerPose pose = SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp);            

            ProcessFrame(currentCoordinateSystem);

            if (Utils.GetCurrentUnixTimestampMillis() - lastFaceDetectedTimestamp > faceTimeThreshold)
            {
                if(pose != null)
                {
                    var headPosition = pose.Head.Position;
                    var headForward = pose.Head.ForwardDirection;
                    quadRenderer.TargetPosition = headPosition + (2.0f * headForward);
                }
                                
                textRenderer.RenderTextOffscreen("No faces detected");
            }

            timer.Tick(() =>
            {
                //
                // TODO: Update scene objects.
                //
                // Put time-based updates here. By default this code will run once per frame,
                // but if you change the StepTimer to use a fixed time step this code will
                // run as many times as needed to get to the current step.
                //

                quadRenderer.Update(pose, timer);
            });
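
            // To run the Tick callback on a fixed step instead of once per frame, the
            // StepTimer can be switched to fixed-timestep mode. A minimal sketch,
            // assuming the DirectX template's StepTimer member names (they may differ
            // in your copy):
            //
            // timer.IsFixedTimeStep = true;
            // timer.TargetElapsedSeconds = 1.0 / 60.0;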

            // We complete the frame update by using information about our content positioning
            // to set the focus point.
            foreach (var cameraPose in prediction.CameraPoses)
            {
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera.
                // You should set the focus point near the content that the user is looking at.
                // In this example, we put the focus point at the center of the sample hologram,
                // since that is the only hologram available for the user to focus on.
                // You can also set the relative velocity and facing of that content; the sample
                // hologram is at a fixed point so we only need to indicate its position.

                if(Utils.GetCurrentUnixTimestampMillis() - lastFaceDetectedTimestamp <= faceTimeThreshold)
                {
                    renderingParameters.SetFocusPoint(
                        currentCoordinateSystem,    
                        quadRenderer.Position,
                        quadRenderer.Forward,
                        quadRenderer.Velocity
                    );
                }
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return holographicFrame;
        }
        /// <summary>
        /// Renders the current frame to each holographic display, according to the
        /// current application and spatial positioning state. Returns true if the
        /// frame was rendered to at least one display.
        /// </summary>
        public bool Render(HolographicFrame holographicFrame)
        {
            // Don't try to render anything before the first Update.
            if (timer.FrameCount == 0)
            {
                return(false);
            }

            //
            // TODO: Add code for pre-pass rendering here.
            //
            // Take care of any tasks that are not specific to an individual holographic
            // camera. This includes anything that doesn't need the final view or projection
            // matrix, such as lighting maps.
            //

            // Up-to-date frame predictions enhance the effectiveness of image stabilization and
            // allow more accurate positioning of holograms.
            holographicFrame.UpdateCurrentPrediction();
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Lock the set of holographic camera resources, then draw to each camera
            // in this frame.
            return(deviceResources.UseHolographicCameraResources(
                       (Dictionary <uint, CameraResources> cameraResourceDictionary) =>
            {
                bool atLeastOneCameraRendered = false;

                foreach (var cameraPose in prediction.CameraPoses)
                {
                    // This represents the device-based resources for a HolographicCamera.
                    CameraResources cameraResources = cameraResourceDictionary[cameraPose.HolographicCamera.Id];

                    // Get the device context.
                    var context = deviceResources.D3DDeviceContext;
                    var renderTargetView = cameraResources.BackBufferRenderTargetView;
                    var depthStencilView = cameraResources.DepthStencilView;

                    // Set render targets to the current holographic camera.
                    context.OutputMerger.SetRenderTargets(depthStencilView, renderTargetView);

                    // Clear the back buffer and depth stencil view.
                    if (canGetHolographicDisplayForCamera &&
                        cameraPose.HolographicCamera.Display.IsOpaque)
                    {
                        SharpDX.Mathematics.Interop.RawColor4 cornflowerBlue = new SharpDX.Mathematics.Interop.RawColor4(0.392156899f, 0.58431375f, 0.929411829f, 1.0f);
                        context.ClearRenderTargetView(renderTargetView, cornflowerBlue);
                    }
                    else
                    {
                        SharpDX.Mathematics.Interop.RawColor4 transparent = new SharpDX.Mathematics.Interop.RawColor4(0.0f, 0.0f, 0.0f, 0.0f);
                        context.ClearRenderTargetView(renderTargetView, transparent);
                    }
                    context.ClearDepthStencilView(
                        depthStencilView,
                        SharpDX.Direct3D11.DepthStencilClearFlags.Depth | SharpDX.Direct3D11.DepthStencilClearFlags.Stencil,
                        1.0f,
                        0);

                    //
                    // TODO: Replace the sample content with your own content.
                    //
                    // Notes regarding holographic content:
                    //    * For drawing, remember that you have the potential to fill twice as many pixels
                    //      in a stereoscopic render target as compared to a non-stereoscopic render target
                    //      of the same resolution. Avoid unnecessary or repeated writes to the same pixel,
                    //      and only draw holograms that the user can see.
                    //    * To help occlude hologram geometry, you can create a depth map using geometry
                    //      data obtained via the surface mapping APIs. You can use this depth map to avoid
                    //      rendering holograms that are intended to be hidden behind tables, walls,
                    //      monitors, and so on.
                    //    * On HolographicDisplays that are transparent, black pixels will appear transparent
                    //      to the user. On such devices, you should clear the screen to Transparent as shown
                    //      above. You should still use alpha blending to draw semitransparent holograms.
                    //
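                    // As the note above suggests, semitransparent holograms need alpha
                    // blending. A minimal SharpDX sketch of a premultiplied-alpha blend
                    // state (illustrative, not part of the original sample; in practice,
                    // create the state once at load time rather than per frame):
                    //
                    // var blendDesc = SharpDX.Direct3D11.BlendStateDescription.Default();
                    // blendDesc.RenderTarget[0].IsBlendEnabled = true;
                    // blendDesc.RenderTarget[0].SourceBlend = SharpDX.Direct3D11.BlendOption.One; // premultiplied color
                    // blendDesc.RenderTarget[0].DestinationBlend = SharpDX.Direct3D11.BlendOption.InverseSourceAlpha;
                    // blendDesc.RenderTarget[0].SourceAlphaBlend = SharpDX.Direct3D11.BlendOption.One;
                    // blendDesc.RenderTarget[0].DestinationAlphaBlend = SharpDX.Direct3D11.BlendOption.InverseSourceAlpha;
                    // var blendState = new SharpDX.Direct3D11.BlendState(deviceResources.D3DDevice, blendDesc);
                    // context.OutputMerger.SetBlendState(blendState);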


                    // The view and projection matrices for each holographic camera will change
                    // every frame. This function refreshes the data in the constant buffer for
                    // the holographic camera indicated by cameraPose.
                    if (stationaryReferenceFrame != null)
                    {
                        cameraResources.UpdateViewProjectionBuffer(deviceResources, cameraPose, stationaryReferenceFrame.CoordinateSystem);
                    }

                    // Attach the view/projection constant buffer for this camera to the graphics pipeline.
                    bool cameraActive = cameraResources.AttachViewProjectionBuffer(deviceResources);

#if DRAW_SAMPLE_CONTENT
                    // Only render world-locked content when positional tracking is active.
                    if (cameraActive)
                    {
                        // Draw the sample hologram.
                        spinningCubeRenderer.Render();

                        if (canCommitDirect3D11DepthBuffer)
                        {
                            // On versions of the platform that support the CommitDirect3D11DepthBuffer API, we can
                            // provide the depth buffer to the system, and it will use depth information to stabilize
                            // the image at a per-pixel level.
                            HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);
                            SharpDX.Direct3D11.Texture2D depthBuffer = cameraResources.DepthBufferTexture2D;

                            // Direct3D interop APIs are used to provide the buffer to the WinRT API.
                            // Dispose the intermediate DXGI wrappers so they are not leaked every frame.
                            using (SharpDX.DXGI.Resource1 depthStencilResource = depthBuffer.QueryInterface<SharpDX.DXGI.Resource1>())
                            using (SharpDX.DXGI.Surface2 depthDxgiSurface = new SharpDX.DXGI.Surface2(depthStencilResource, 0))
                            {
                                IDirect3DSurface depthD3DSurface = InteropStatics.CreateDirect3DSurface(depthDxgiSurface.NativePointer);
                                if (depthD3DSurface != null)
                                {
                                    // Calling CommitDirect3D11DepthBuffer causes the system to queue Direct3D commands to
                                    // read the depth buffer. It will then use that information to stabilize the image as
                                    // the HolographicFrame is presented.
                                    renderingParameters.CommitDirect3D11DepthBuffer(depthD3DSurface);
                                }
                            }
                        }
                    }
#endif
                    atLeastOneCameraRendered = true;
                }

                return atLeastOneCameraRendered;
            }));
        }
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

#if DRAW_SAMPLE_CONTENT
            if (stationaryReferenceFrame != null)
            {
                // Check for new input state since the last frame.
                for (int i = 0; i < gamepads.Count; ++i)
                {
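                    // Rising-edge detection: only register a press on the frame where
                    // the A button transitions from released to pressed.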
                    bool buttonDownThisUpdate = (gamepads[i].gamepad.GetCurrentReading().Buttons & GamepadButtons.A) == GamepadButtons.A;
                    if (buttonDownThisUpdate && !gamepads[i].buttonAWasPressedLastFrame)
                    {
                        pointerPressed = true;
                    }
                    gamepads[i].buttonAWasPressedLastFrame = buttonDownThisUpdate;
                }

                SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
                SpatialPointerPose            pose         = null;
                if (null != pointerState)
                {
                    pose = pointerState.TryGetPointerPose(stationaryReferenceFrame.CoordinateSystem);
                }
                else if (pointerPressed)
                {
                    pose = SpatialPointerPose.TryGetAtTimestamp(stationaryReferenceFrame.CoordinateSystem, prediction.Timestamp);
                }
                pointerPressed = false;

                // When a Pressed gesture is detected, the sample hologram will be repositioned
                // two meters in front of the user.
                spinningCubeRenderer.PositionHologram(pose);
            }
#endif

            timer.Tick(() =>
            {
                //
                // TODO: Update scene objects.
                //
                // Put time-based updates here. By default this code will run once per frame,
                // but if you change the StepTimer to use a fixed time step this code will
                // run as many times as needed to get to the current step.
                //

#if DRAW_SAMPLE_CONTENT
                spinningCubeRenderer.Update(timer);
#endif
            });

            if (!canCommitDirect3D11DepthBuffer)
            {
                // On versions of the platform that do not support the CommitDirect3D11DepthBuffer API, we can control
                // image stabilization by setting a focus point with optional plane normal and velocity.
                foreach (var cameraPose in prediction.CameraPoses)
                {
#if DRAW_SAMPLE_CONTENT
                    // The HolographicCameraRenderingParameters class provides access to set
                    // the image stabilization parameters.
                    HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                    // SetFocusPoint informs the system about a specific point in your scene to
                    // prioritize for image stabilization. The focus point is set independently
                    // for each holographic camera. When setting the focus point, put it on or
                    // near content that the user is looking at.
                    // In this example, we put the focus point at the center of the sample hologram.
                    // You can also set the relative velocity and facing of the stabilization
                    // plane using overloads of this method.
                    if (stationaryReferenceFrame != null)
                    {
                        renderingParameters.SetFocusPoint(
                            stationaryReferenceFrame.CoordinateSystem,
                            spinningCubeRenderer.Position
                            );
                    }
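
                    // The same call, sketched with the overload that also supplies the
                    // stabilization plane's normal and velocity (values illustrative;
                    // the cube here is stationary):
                    //
                    // renderingParameters.SetFocusPoint(
                    //     stationaryReferenceFrame.CoordinateSystem,
                    //     spinningCubeRenderer.Position,
                    //     -System.Numerics.Vector3.UnitZ,    // plane normal, facing the viewer
                    //     System.Numerics.Vector3.Zero);     // relative velocity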
#endif
                }
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return(holographicFrame);
        }
Example #5
        /// <summary>
        /// Updates resources associated with a holographic camera's swap chain.
        /// The app does not access the swap chain directly, but it does create
        /// resource views for the back buffer.
        /// </summary>
        public void CreateResourcesForBackBuffer(
            DeviceResources deviceResources,
            HolographicCameraRenderingParameters cameraParameters
            )
        {
            var device = deviceResources.D3DDevice;

            // Get the WinRT object representing the holographic camera's back buffer.
            var surface = cameraParameters.Direct3D11BackBuffer;

            // Get a DXGI interface for the holographic camera's back buffer.
            // Holographic cameras do not provide the DXGI swap chain, which is owned
            // by the system. The Direct3D back buffer resource is provided using WinRT
            // interop APIs.
            var surfaceDxgiInterfaceAccess = surface as InteropStatics.IDirect3DDxgiInterfaceAccess;
            var pResource = surfaceDxgiInterfaceAccess.GetInterface(InteropStatics.Id3D11Resource);
            var resource  = CppObject.FromPointer <Resource>(pResource);

            Marshal.Release(pResource);

            // Get a Direct3D interface for the holographic camera's back buffer.
            var cameraBackBuffer = resource.QueryInterface <Texture2D>();

            // Determine if the back buffer has changed. If so, ensure that the render target view
            // is for the current back buffer.
            if ((null == _d3DBackBuffer) || (_d3DBackBuffer.NativePointer != cameraBackBuffer.NativePointer))
            {
                // This can change every frame as the system moves to the next buffer in the
                // swap chain. This mode of operation will occur when certain rendering modes
                // are activated.
                _d3DBackBuffer = cameraBackBuffer;

                // Create a render target view of the back buffer.
                // Creating this resource is inexpensive, and is better than keeping track of
                // the back buffers in order to pre-allocate render target views for each one.
                _d3DRenderTargetView = this.ToDispose(new RenderTargetView(device, BackBufferTexture2D));

                // Get the DXGI format for the back buffer.
                // This information can be accessed by the app using CameraResources::GetBackBufferDXGIFormat().
                var backBufferDesc = BackBufferTexture2D.Description;
                _dxgiFormat = backBufferDesc.Format;

                // Check for render target size changes.
                var currentSize = _holographicCamera.RenderTargetSize;
                if (_d3DRenderTargetSize != currentSize)
                {
                    // Set render target size.
                    _d3DRenderTargetSize = currentSize;

                    // A new depth stencil view is also needed.
                    this.RemoveAndDispose(ref _d3DDepthStencilView);
                }
            }

            // Refresh depth stencil resources, if needed.
            if (null == DepthStencilView)
            {
                // Create a depth stencil view for use with 3D rendering if needed.
                var depthStencilDesc = new Texture2DDescription {
                    Format            = SharpDX.DXGI.Format.D16_UNorm,
                    Width             = (int)RenderTargetSize.Width,
                    Height            = (int)RenderTargetSize.Height,
                    ArraySize         = IsRenderingStereoscopic ? 2 : 1, // Create two textures when rendering in stereo.
                    MipLevels         = 1,                               // Use a single mipmap level.
                    BindFlags         = BindFlags.DepthStencil,
                    SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0)
                };

                using (var depthStencil = new Texture2D(device, depthStencilDesc)) {
                    var depthStencilViewDesc = new DepthStencilViewDescription {
                        Dimension      = IsRenderingStereoscopic ? DepthStencilViewDimension.Texture2DArray : DepthStencilViewDimension.Texture2D,
                        Texture2DArray =
                        {
                            ArraySize = IsRenderingStereoscopic ? 2 : 0
                        }
                    };
                    _d3DDepthStencilView = this.ToDispose(new DepthStencilView(device, depthStencil, depthStencilViewDesc));
                }
            }

            // Create the constant buffer, if needed.
            if (null == _viewProjectionConstantBuffer)
            {
                // Create a constant buffer to store view and projection matrices for the camera.
                var viewProjectionConstantBufferData = new ViewProjectionConstantBuffer();
                _viewProjectionConstantBuffer = this.ToDispose(Buffer.Create(
                                                                   device,
                                                                   BindFlags.ConstantBuffer,
                                                                   ref viewProjectionConstantBufferData));
            }
        }
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used
            // for creating the stereo view matrices when rendering the sample content.
            SpatialCoordinateSystem currentCoordinateSystem = referenceFrame.CoordinateSystem;

#if DRAW_SAMPLE_CONTENT
            // Check for new input state since the last frame.
            SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
            if (null != pointerState)
            {
                // When a Pressed gesture is detected, the sample hologram will be repositioned
                // two meters in front of the user.
                spinningCubeRenderer.PositionHologram(
                    pointerState.TryGetPointerPose(currentCoordinateSystem)
                    );
            }
#endif

            timer.Tick(() =>
            {
                //
                // TODO: Update scene objects.
                //
                // Put time-based updates here. By default this code will run once per frame,
                // but if you change the StepTimer to use a fixed time step this code will
                // run as many times as needed to get to the current step.
                //

#if DRAW_SAMPLE_CONTENT
                spinningCubeRenderer.Update(timer);
                _spatialSurfaceRenderer.Update();
#endif
            });

            // We complete the frame update by using information about our content positioning
            // to set the focus point.
            foreach (var cameraPose in prediction.CameraPoses)
            {
#if DRAW_SAMPLE_CONTENT
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera.
                // You should set the focus point near the content that the user is looking at.
                // In this example, we put the focus point at the center of the sample hologram,
                // since that is the only hologram available for the user to focus on.
                // You can also set the relative velocity and facing of that content; the sample
                // hologram is at a fixed point so we only need to indicate its position.
                renderingParameters.SetFocusPoint(
                    currentCoordinateSystem,
                    spinningCubeRenderer.Position
                    );
#endif
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return(holographicFrame);
        }
Example #7
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            if (MediaPlayer.IsEndOfStream)
            {
                ViewManagement.SwitchTo2DViewAsync();
            }
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used
            // for creating the stereo view matrices when rendering the sample content.
            SpatialCoordinateSystem currentCoordinateSystem = referenceFrame.CoordinateSystem;


            var gestureStatus = spatialInputHandler.CheckGestureStatus();

            switch (gestureStatus)
            {
            case SpatialGestureSettings.Tap:
                videoRenderer.Pause();
                break;

            case SpatialGestureSettings.DoubleTap:
                videoRenderer.Stop();
                ViewManagement.SwitchTo2DViewAsync();
                break;

            case SpatialGestureSettings.Hold:
                videoRenderer.FastForward(spatialInputHandler.HoldTotalTime.TotalSeconds);
                break;

            default:
                // No action for None, manipulation, or navigation gestures.
                break;
            }

            timer.Tick(() =>
            {
                //
                // TODO: Update scene objects.
                //
                // Put time-based updates here. By default this code will run once per frame,
                // but if you change the StepTimer to use a fixed time step this code will
                // run as many times as needed to get to the current step.
                //
                videoRenderer.Update(timer);
            });

            // We complete the frame update by using information about our content positioning
            // to set the focus point.
            foreach (var cameraPose in prediction.CameraPoses)
            {
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera.
                // You should set the focus point near the content that the user is looking at.
                // In this example, we put the focus point at the center of the sample hologram,
                // since that is the only hologram available for the user to focus on.
                // You can also set the relative velocity and facing of that content; the sample
                // hologram is at a fixed point so we only need to indicate its position.
                renderingParameters.SetFocusPoint(
                    currentCoordinateSystem,
                    videoRenderer.Position
                    );
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return(holographicFrame);
        }
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used
            // for creating the stereo view matrices when rendering the sample content.
            SpatialCoordinateSystem currentCoordinateSystem = attachreferenceFrame.GetStationaryCoordinateSystemAtTimestamp(prediction.Timestamp);


#if DRAW_SAMPLE_CONTENT
            // Check for new input state since the last frame.
            SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
            if (null != pointerState)
            {
                // When a Pressed gesture is detected, the sample hologram will be repositioned
                // two meters in front of the user.
                spinningCubeRenderer.PositionHologram(
                    SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp)
                    );
            }

#endif

            timer.Tick(() =>
            {
                //
                // TODO: Update scene objects.
                //
                // Put time-based updates here. By default this code will run once per frame,
                // but if you change the StepTimer to use a fixed time step this code will
                // run as many times as needed to get to the current step.
                //

#if DRAW_SAMPLE_CONTENT
                spinningCubeRenderer.Update(timer);
#endif
            });

            // We complete the frame update by using information about our content positioning
            // to set the focus point.
            foreach (var cameraPose in prediction.CameraPoses)
            {
#if DRAW_SAMPLE_CONTENT
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera.
                // You should set the focus point near the content that the user is looking at.
                // In this example, we use a fixed point three meters in front of the
                // user's starting position rather than the sample hologram's position.
                // You can also set the relative velocity and facing of that content; the
                // focus point here is fixed, so we only need to indicate its position.
                Vector3 position = new Vector3(0.0f, 0.0f, -3.0f);
                renderingParameters.SetFocusPoint(
                    currentCoordinateSystem, position
                    );
#endif
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return(holographicFrame);
        }
Example #9
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update(HolographicFrame previousFrame)
        {
            // TODO: Put CPU work that does not depend on the HolographicCameraPose here.

            // Apps should wait for the optimal time to begin pose-dependent work.
            // The platform will automatically adjust the wakeup time to get
            // the lowest possible latency at high frame rates. For manual
            // control over latency, use the WaitForNextFrameReadyWithHeadStart
            // API.
            // WaitForNextFrameReady and WaitForNextFrameReadyWithHeadStart are the
            // preferred frame synchronization APIs for Windows Mixed Reality. When
            // running on older versions of the OS that do not include support for
            // these APIs, your app can use the WaitForFrameToFinish API for similar
            // (but not as optimal) behavior.
            if (canUseWaitForNextFrameReadyAPI)
            {
                try
                {
                    holographicSpace.WaitForNextFrameReady();
                }
                catch (NotImplementedException)
                {
                    // Catch a specific case where WaitForNextFrameReady() is present but not implemented
                    // and default back to WaitForFrameToFinish() in that case.
                    canUseWaitForNextFrameReadyAPI = false;
                }
            }
            else if (previousFrame != null)
            {
                previousFrame.WaitForFrameToFinish();
            }
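
            // For manual control over latency, the platform also offers
            // WaitForNextFrameReadyWithHeadStart, which wakes the app a requested
            // duration ahead of the predicted frame time (illustrative, not part of
            // the original sample):
            //
            // holographicSpace.WaitForNextFrameReadyWithHeadStart(TimeSpan.FromMilliseconds(2));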

            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

#if DRAW_SAMPLE_CONTENT
            if (stationaryReferenceFrame != null)
            {
                // Check for new input state since the last frame.
                for (int i = 0; i < gamepads.Count; ++i)
                {
                    bool buttonDownThisUpdate = (gamepads[i].gamepad.GetCurrentReading().Buttons & GamepadButtons.A) == GamepadButtons.A;
                    if (buttonDownThisUpdate && !gamepads[i].buttonAWasPressedLastFrame)
                    {
                        pointerPressed = true;
                    }
                    gamepads[i].buttonAWasPressedLastFrame = buttonDownThisUpdate;
                }

                SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
                SpatialPointerPose            pose         = null;
                if (null != pointerState)
                {
                    pose = pointerState.TryGetPointerPose(stationaryReferenceFrame.CoordinateSystem);
                }
                else if (pointerPressed)
                {
                    pose = SpatialPointerPose.TryGetAtTimestamp(stationaryReferenceFrame.CoordinateSystem, prediction.Timestamp);
                }
                pointerPressed = false;

                // When a Pressed gesture is detected, the sample hologram will be repositioned
                // two meters in front of the user.
                quadRendererR.PositionHologram(pose);
                quadRendererL.PositionHologram(pose);
            }
#endif

            timer.Tick(() =>
            {
                //
                // TODO: Update scene objects.
                //
                // Put time-based updates here. By default this code will run once per frame,
                // but if you change the StepTimer to use a fixed time step this code will
                // run as many times as needed to get to the current step.
                //

#if DRAW_SAMPLE_CONTENT
                quadRendererR.Update(timer);
                quadRendererL.Update(timer);
#endif
            });

            // On HoloLens 2, the platform can achieve better image stabilization results if it has
            // a stabilization plane and a depth buffer.
            // Note that the SetFocusPoint API includes an override which takes velocity as a
            // parameter. This is recommended for stabilizing holograms in motion.
            foreach (var cameraPose in prediction.CameraPoses)
            {
#if DRAW_SAMPLE_CONTENT
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera. When setting the focus point, put it on or
                // near content that the user is looking at.
                // In this example, we put the focus point at the center of the sample hologram.
                // You can also set the relative velocity and facing of the stabilization
                // plane using overloads of this method.
                if (stationaryReferenceFrame != null)
                {
                    renderingParameters.SetFocusPoint(
                        stationaryReferenceFrame.CoordinateSystem,
                        new System.Numerics.Vector3(0, 0, 0)
                        );
                }
#endif
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return(holographicFrame);
        }
Example #10
        /// <summary>
        /// Updates the application state, renders the current frame to each
        /// holographic display, and presents it.
        /// </summary>
        public void UpdateAndDraw()
        {
            HolographicFrame holographicFrame = this.holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            this.deviceResources.EnsureCameraResources(holographicFrame, prediction);

            this.UpdateEyeProperties();

            // Up-to-date frame predictions enhance the effectiveness of image stabilization and
            // allow more accurate positioning of holograms.
            holographicFrame.UpdateCurrentPrediction();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            prediction = holographicFrame.CurrentPrediction;

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used
            // for creating the stereo view matrices when rendering the sample content.
            SpatialCoordinateSystem currentCoordinateSystem = this.ReferenceFrame.CoordinateSystem;

            var eyeTexture = this.eyesProperties[0].Texture;

            this.deviceResources.UpdateCameraClipDistance(eyeTexture.NearPlane, eyeTexture.FarPlane);

            foreach (var cameraPose in prediction.CameraPoses)
            {
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera.
                // You should set the focus point near the content that the user is looking at.
                // In this example, we put the focus point at the center of the sample hologram,
                // since that is the only hologram available for the user to focus on.
                // You can also set the relative velocity and facing of that content; the sample
                // hologram is at a fixed point so we only need to indicate its position.
                if (this.mixedRealityService.FocusPosition.HasValue)
                {
                    var position = this.mixedRealityService.FocusPosition.Value;

                    if (!this.mixedRealityService.FocusNormal.HasValue)
                    {
                        renderingParameters.SetFocusPoint(currentCoordinateSystem, new System.Numerics.Vector3(position.X, position.Y, position.Z));
                    }
                    else
                    {
                        var normal = this.mixedRealityService.FocusNormal.Value;

                        if (!this.mixedRealityService.FocusVelocity.HasValue)
                        {
                            renderingParameters.SetFocusPoint(
                                currentCoordinateSystem,
                                new System.Numerics.Vector3(position.X, position.Y, position.Z),
                                new System.Numerics.Vector3(normal.X, normal.Y, normal.Z));
                        }
                        else
                        {
                            var velocity = this.mixedRealityService.FocusVelocity.Value;

                            renderingParameters.SetFocusPoint(
                                currentCoordinateSystem,
                                new System.Numerics.Vector3(position.X, position.Y, position.Z),
                                new System.Numerics.Vector3(normal.X, normal.Y, normal.Z),
                                new System.Numerics.Vector3(velocity.X, velocity.Y, velocity.Z));
                        }
                    }
                }

                var pointerPose = SpatialPointerPose.TryGetAtTimestamp(this.ReferenceFrame.CoordinateSystem, prediction.Timestamp);
                if (pointerPose != null)
                {
                    pointerPose.Head.Position.ToWave(out this.headRay.Position);
                    pointerPose.Head.ForwardDirection.ToWave(out this.headRay.Direction);
                }

                var viewTransform       = cameraPose.TryGetViewTransform(this.ReferenceFrame.CoordinateSystem);
                var projectionTransform = cameraPose.ProjectionTransform;

                if (viewTransform.HasValue)
                {
                    for (int i = 0; i < 2; i++)
                    {
                        Matrix viewMatrix;
                        Matrix projectionMatrix;

                        if (i == (int)VREyeType.LeftEye)
                        {
                            viewTransform.Value.Left.ToWave(out viewMatrix);
                            projectionTransform.Left.ToWave(out projectionMatrix);
                        }
                        else
                        {
                            viewTransform.Value.Right.ToWave(out viewMatrix);
                            projectionTransform.Right.ToWave(out projectionMatrix);
                        }
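
                        // The view matrix maps world space to eye space, so inverting
                        // it recovers the eye's world-space pose.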

                        Matrix view;
                        Matrix.Invert(ref viewMatrix, out view);

                        var eyeProperties = this.eyesProperties[i];
                        var eyePose       = eyeProperties.Pose;
                        eyePose.Position = view.Translation;
                        Quaternion.CreateFromRotationMatrix(ref view, out eyePose.Orientation);
                        eyeProperties.Pose       = eyePose;
                        eyeProperties.Projection = projectionMatrix;
                    }

                    var leftEyePose         = this.eyesProperties[(int)VREyeType.LeftEye].Pose;
                    var rightEyePose        = this.eyesProperties[(int)VREyeType.RightEye].Pose;
                    var centerEyeProperties = this.eyesProperties[(int)VREyeType.CenterEye];

                    var centerEyePose = centerEyeProperties.Pose;
                    centerEyePose.Position    = Vector3.Lerp(leftEyePose.Position, rightEyePose.Position, 0.5f);
                    centerEyePose.Orientation = Quaternion.Lerp(leftEyePose.Orientation, rightEyePose.Orientation, 0.5f);
                    centerEyeProperties.Pose  = centerEyePose;
                }
            }

            this.Render();

            this.deviceResources.Present(ref holographicFrame);
        }
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used
            // for creating the stereo view matrices when rendering the sample content.

            SpatialCoordinateSystem referenceFrameCoordinateSystem = referenceFrame.GetStationaryCoordinateSystemAtTimestamp(prediction.Timestamp);

            // remember where we were (changed if the CurrentNode != previousNode)
            var previousNode = CurrentNode;

            // update current node the user resides in
            CurrentNode = UpdateCurrentNode(referenceFrameCoordinateSystem, prediction.Timestamp, NodeRadius);

            // .. and current gaze
            SpatialPointerPose pose = SpatialPointerPose.TryGetAtTimestamp(referenceFrameCoordinateSystem, prediction.Timestamp);

            // TryGetAtTimestamp can return null (for example, before head tracking is
            // established), so only refresh the gaze state when a pose is available.
            if (pose != null)
            {
                NodePosition = pose.Head.Position;
                GazeForward  = pose.Head.ForwardDirection;
                GazeUp       = pose.Head.UpDirection;
            }

            var mat = referenceFrameCoordinateSystem.TryGetTransformTo(CurrentNode.Anchor.CoordinateSystem);

            if (mat.HasValue)
            {
                NodePosition = Vector3.Transform(NodePosition, mat.Value);
                GazeForward  = Vector3.TransformNormal(GazeForward, mat.Value);
                GazeUp       = Vector3.TransformNormal(GazeUp, mat.Value);
            }

            if (!string.IsNullOrEmpty(requestedSightingTerm))
            {
                var candidates = FindClosestNodesWithSightedItem(referenceFrameCoordinateSystem, pose, requestedSightingTerm);

                if (candidates != null && candidates.Count > 0)
                {
                    targetNode     = candidates[0];
                    targetSighting = candidates[0].Sightings.First(sighting => sighting.Tokens.Any(token => token.Equals(requestedSightingTerm, StringComparison.OrdinalIgnoreCase)));
                }

                requestedSightingTerm = string.Empty;
            }

            // If the user has reached the target node, consider the search complete
            // once they have dwelled there long enough.
            if (CurrentNode == targetNode)
            {
                if (dwellTimeAtCurrentNode >= 5)
                {
                    targetNode     = null;
                    targetSighting = null;
                    entities.Clear();
                    Debug.WriteLine("Well done! Assisted the user find their item");
                }
            }

            if (targetNode != null)
            {
                RebuildTrailToTarget(referenceFrameCoordinateSystem, prediction.Timestamp, CurrentNode, targetNode);
            }


            ProcessNextFrame();

            timer.Tick(() =>
            {
                // Reset the dwell timer when the user changes nodes (as in the second
                // variant below); otherwise accumulate time at the current node.
                if (CurrentNode != previousNode) { dwellTimeAtCurrentNode = 0; }
                else { dwellTimeAtCurrentNode += timer.ElapsedSeconds; }

                for (var entityIndex = 0; entityIndex < entities.Count; entityIndex++)
                {
                    var entity = entities[entityIndex];
                    entity.Update(timer, referenceFrameCoordinateSystem);
                }
            });

            // We complete the frame update by using information about our content positioning
            // to set the focus point.
            foreach (var cameraPose in prediction.CameraPoses)
            {
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);
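
                // This variant does not pick a focus point; a sketch of what the call
                // could look like, assuming a 'focusPosition' near the trail content
                // (hypothetical variable):
                //
                //   renderingParameters.SetFocusPoint(referenceFrameCoordinateSystem, focusPosition);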
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return holographicFrame;
        }
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used
            // for creating the stereo view matrices when rendering the sample content.

            SpatialCoordinateSystem referenceFrameCoordinateSystem = attachedReferenceFrame.GetStationaryCoordinateSystemAtTimestamp(prediction.Timestamp);

            var previousNode = currentNode;

            currentNode = UpdateCurrentNode(referenceFrameCoordinateSystem, prediction.Timestamp);

            if (currentNode != previousNode)
            {
                // The user has entered a new node; sample their pose at this moment.
                // (The pose is not used further in this variant.)
                SpatialPointerPose pose = SpatialPointerPose.TryGetAtTimestamp(referenceFrameCoordinateSystem, prediction.Timestamp);
            }

            if (targetNode != null)
            {
                RebuildTrailToTarget(referenceFrameCoordinateSystem, prediction.Timestamp, currentNode, targetNode);
            }

            // Check for tap input; in this demo, any interaction selects a fixed
            // node as the trail's target.
            SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();

            if (null != pointerState)
            {
                Debug.WriteLine($"Setting target {nodes[1].Name}");
                targetNode = nodes[1];
            }

            timer.Tick(() =>
            {
                if (currentNode != previousNode)
                {
                    dwellTimeAtCurrentNode = 0;
                }
                else
                {
                    dwellTimeAtCurrentNode += timer.ElapsedSeconds;
                }

                for (var entityIndex = entities.Count - 1; entityIndex >= 0; entityIndex--)
                {
                    var entity = entities[entityIndex];

                    // Rotate this entity to face its neighbor along the trail
                    // (ground-plane yaw; see the helper sketch after this method).
                    if (entityIndex != entities.Count - 1)
                    {
                        var previousEntity         = entities[entityIndex + 1];
                        var previousEntityPosition = previousEntity.Node.TryGetTransformedPosition(referenceFrameCoordinateSystem);
                        var currentEntityPosition  = entity.Node.TryGetTransformedPosition(referenceFrameCoordinateSystem);
                        if (previousEntityPosition.HasValue && currentEntityPosition.HasValue)
                        {
                            // Project both positions onto the ground plane (zero the
                            // Y components) so the result is a pure yaw about Y.
                            var tV   = previousEntityPosition.Value;
                            var sV   = currentEntityPosition.Value;
                            tV.Y     = sV.Y = 0;
                            var diff = sV - tV;

                            // Yaw of the ground-plane vector, converted to degrees.
                            var yAngle = Math.Atan2(diff.X, diff.Z);

                            entity.EulerAngles = new Vector3(0, (float)(yAngle * (180 / Math.PI)), 0);
                        }
                    }

                    entity.Update(timer, referenceFrameCoordinateSystem);
                }
            });

            // We complete the frame update by using information about our content positioning
            // to set the focus point.
            foreach (var cameraPose in prediction.CameraPoses)
            {
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera.
                // You should set the focus point near the content that the user is looking at.
                // In this example, we put the focus point at the center of the sample hologram,
                // since that is the only hologram available for the user to focus on.
                // You can also set the relative velocity and facing of that content; the sample
                // hologram is at a fixed point so we only need to indicate its position.
                //if (spinningCubeSpatialAnchor != null)
                //{
                //    //renderingParameters.SetFocusPoint(
                //    //spinningCubeSpatialAnchor.CoordinateSystem,
                //    //spinningCubeRenderer.Position
                //    //);
                //}
                //else
                //{
                //    //renderingParameters.SetFocusPoint(
                //    //currentCoordinateSystem,
                //    //spinningCubeRenderer.Position
                //    //);
                //}
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return holographicFrame;
        }
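
        // A hypothetical helper (not part of the original sample) that factors out the
        // ground-plane yaw computation used in the trail loop above; the name
        // 'GroundPlaneYawDegrees' is an assumption.
        private static float GroundPlaneYawDegrees(Vector3 from, Vector3 to)
        {
            // Ignore height so the result is a pure rotation about the Y axis.
            var diff = to - from;
            diff.Y   = 0;

            // Atan2(x, z) measures the yaw of the vector in the X/Z plane; convert
            // from radians to degrees for the entity's Euler angles.
            return (float)(Math.Atan2(diff.X, diff.Z) * (180 / Math.PI));
        }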
Example #13
        /// <summary>
        /// Updates the application state once per frame.
        /// </summary>
        public HolographicFrame Update()
        {
            // Before doing the timer update, there is some work to do per-frame
            // to maintain holographic rendering. First, we will get information
            // about the current frame.

            // The HolographicFrame has information that the app needs in order
            // to update and render the current frame. The app begins each new
            // frame by calling CreateNextFrame.
            HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            deviceResources.EnsureCameraResources(holographicFrame, prediction);

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used
            // for creating the stereo view matrices when rendering the sample content.
            SpatialCoordinateSystem currentCoordinateSystem = referenceFrame.CoordinateSystem;

            // Check for new input state since the last frame.
            //foreach (var gamepad in gamepads)
            //{
            //    pointerPressed |= ((gamepad.GetCurrentReading().Buttons & GamepadButtons.A) == GamepadButtons.A);
            //}

            //SpatialInteractionSourceState pointerState = spatialInputHandler.CheckForInput();
            //SpatialPointerPose pose = null;
            //if (null != pointerState)
            //{
            //    pose = pointerState.TryGetPointerPose(currentCoordinateSystem);
            //}
            //else if (pointerPressed)
            //{
            //    pose = SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp);
            //}
            //pointerPressed = false;

            //if (null != pose)
            //{
            //    //var angle = Angle(pose.Head.ForwardDirection, new Vector3(0.0f, 0.0f, -1.0f), new Vector3(0.0f, 1.0f, 0.0f));
            //    //var rotator = Matrix4x4.CreateRotationY(-angle);
            //    //var mover = Matrix4x4.CreateTranslation(pose.Head.Position);
            //    //var transformer = rotator * mover;
            //}

            // Drain the key state that another thread published under the mutex;
            // release it in a finally block so an exception cannot leave it held.
            var key   = Windows.System.VirtualKey.None;
            var count = 0;

            mutex.WaitOne();
            try
            {
                key   = mainView.VirtualKey;
                count = mainView.KeyCount;

                mainView.KeyCount   = 0;
                mainView.VirtualKey = Windows.System.VirtualKey.None;
            }
            finally
            {
                mutex.ReleaseMutex();
            }

            if (key != Windows.System.VirtualKey.None && count > 0)
            {
                mainView.LastKey = key;
                mainView.OnKeyPressed(key);
            }
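
            // For reference, the producer side (e.g. the UI thread's key handler)
            // would publish under the same mutex; a sketch, with a hypothetical
            // 'pressedKey' local:
            //
            //   mutex.WaitOne();
            //   try
            //   {
            //       mainView.VirtualKey = pressedKey;
            //       mainView.KeyCount++;
            //   }
            //   finally
            //   {
            //       mutex.ReleaseMutex();
            //   }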

            timer1.Tick(() =>
            {
                mainView.Update(timer1);
            });

            timer2.Tick(() =>
            {
                // Note: TryGetAtTimestamp can return null; mainView.Update is assumed
                // to tolerate a null pose.
                mainView.Update(SpatialPointerPose.TryGetAtTimestamp(currentCoordinateSystem, prediction.Timestamp));
            });

            // We complete the frame update by using information about our content positioning
            // to set the focus point.
            foreach (var cameraPose in prediction.CameraPoses)
            {
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera.
                // You should set the focus point near the content that the user is looking at.
                // In this example, we put the focus point at the center of the sample hologram,
                // since that is the only hologram available for the user to focus on.
                // You can also set the relative velocity and facing of that content; the sample
                // hologram is at a fixed point so we only need to indicate its position.

                if (mainView.Pointers[0] != null)
                {
                    renderingParameters.SetFocusPoint(currentCoordinateSystem, mainView.Pointers[0].Position);
                }
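
                // SetFocusPoint also has overloads that take a surface normal and a
                // linear velocity for moving content; a sketch with hypothetical
                // 'focusNormal' and 'focusVelocity' values:
                //
                //   renderingParameters.SetFocusPoint(
                //       currentCoordinateSystem,
                //       mainView.Pointers[0].Position,
                //       focusNormal,       // unit vector facing the user
                //       focusVelocity);    // content velocity in meters per second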
            }

            // The holographic frame will be used to get up-to-date view and projection matrices and
            // to present the swap chain.
            return holographicFrame;
        }