/// <summary>
        /// Renders the current frame to each holographic display, according to the 
        /// current application and spatial positioning state. Returns true if the 
        /// frame was rendered to at least one display.
        /// </summary>
        public bool Render(ref HolographicFrame holographicFrame)
        {
            // Don't try to render anything before the first Update.
            if (timer.FrameCount == 0)
            {
                return false;
            }

            // Up-to-date frame predictions enhance the effectiveness of image stabilization and
            // allow more accurate positioning of holograms.
            holographicFrame.UpdateCurrentPrediction();
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Lock the set of holographic camera resources, then draw to each camera
            // in this frame.
            return deviceResources.UseHolographicCameraResources(
                (Dictionary<uint, CameraResources> cameraResourceDictionary) =>
            {
                bool atLeastOneCameraRendered = false;

                foreach (var cameraPose in prediction.CameraPoses)
                {
                    // This represents the device-based resources for a HolographicCamera.
                    CameraResources cameraResources = cameraResourceDictionary[cameraPose.HolographicCamera.Id];

                    // Get the device context.
                    var context = deviceResources.D3DDeviceContext;
                    var renderTargetView = cameraResources.BackBufferRenderTargetView;
                    var depthStencilView = cameraResources.DepthStencilView;

                    // Set render targets to the current holographic camera.
                    context.OutputMerger.SetRenderTargets(depthStencilView, renderTargetView);

                    // Clear the back buffer and depth stencil view.
                    SharpDX.Mathematics.Interop.RawColor4 transparent = new SharpDX.Mathematics.Interop.RawColor4(0.0f, 0.0f, 0.0f, 0.0f);
                    context.ClearRenderTargetView(renderTargetView, transparent);
                    context.ClearDepthStencilView(
                        depthStencilView,
                        SharpDX.Direct3D11.DepthStencilClearFlags.Depth | SharpDX.Direct3D11.DepthStencilClearFlags.Stencil,
                        1.0f,
                        0);

                    // The view and projection matrices for each holographic camera will change
                    // every frame. This function refreshes the data in the constant buffer for
                    // the holographic camera indicated by cameraPose.
                    cameraResources.UpdateViewProjectionBuffer(deviceResources, cameraPose, referenceFrame.CoordinateSystem);

                    // Attach the view/projection constant buffer for this camera to the graphics pipeline.
                    bool cameraActive = cameraResources.AttachViewProjectionBuffer(deviceResources);

                    // Only render world-locked content when positional tracking is active.
                    if (cameraActive)
                    {
                        quadRenderer.RenderRGB(textRenderer.Texture);
                    }                    

                    atLeastOneCameraRendered = true;
                }

                return atLeastOneCameraRendered;
            });
        }
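
Example #1 assumes a main loop elsewhere creates the frame, updates the scene, and presents the result. A minimal sketch of such a caller, reusing the deviceResources and holographicSpace members that appear in the other examples (the Update call is an assumption):

        // Sketch: per-frame driver for the Render method above.
        HolographicFrame holographicFrame = holographicSpace.CreateNextFrame();

        // Back buffers can change between frames; revalidate camera resources.
        deviceResources.EnsureCameraResources(holographicFrame, holographicFrame.CurrentPrediction);

        Update(holographicFrame); // app-specific scene update (assumed to exist)
        if (Render(ref holographicFrame))
        {
            deviceResources.Present(ref holographicFrame);
        }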
Example #2
        /// <summary>
        /// Renders the current frame to each holographic display, according to the
        /// current application and spatial positioning state. Returns true if the
        /// frame was rendered to at least one display.
        /// </summary>
        public bool Render(HolographicFrame holographicFrame)
        {
            // Don't try to render anything before the first Update.
            if (timer.FrameCount == 0)
            {
                return false;
            }

            //
            // TODO: Add code for pre-pass rendering here.
            //
            // Take care of any tasks that are not specific to an individual holographic
            // camera. This includes anything that doesn't need the final view or projection
            // matrix, such as lighting maps.
            //

            // Up-to-date frame predictions enhance the effectiveness of image stabilization and
            // allow more accurate positioning of holograms.
            holographicFrame.UpdateCurrentPrediction();
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Lock the set of holographic camera resources, then draw to each camera
            // in this frame.
            return deviceResources.UseHolographicCameraResources(
                (Dictionary<uint, CameraResources> cameraResourceDictionary) =>
            {
                bool atLeastOneCameraRendered = false;

                foreach (var cameraPose in prediction.CameraPoses)
                {
                    // This represents the device-based resources for a HolographicCamera.
                    CameraResources cameraResources = cameraResourceDictionary[cameraPose.HolographicCamera.Id];

                    // Get the device context.
                    var context = deviceResources.D3DDeviceContext;
                    var renderTargetView = cameraResources.BackBufferRenderTargetView;
                    var depthStencilView = cameraResources.DepthStencilView;

                    // Set render targets to the current holographic camera.
                    context.OutputMerger.SetRenderTargets(depthStencilView, renderTargetView);

                    // Clear the back buffer and depth stencil view.
                    if (canGetHolographicDisplayForCamera &&
                        cameraPose.HolographicCamera.Display.IsOpaque)
                    {
                        SharpDX.Mathematics.Interop.RawColor4 cornflowerBlue = new SharpDX.Mathematics.Interop.RawColor4(0.392156899f, 0.58431375f, 0.929411829f, 1.0f);
                        context.ClearRenderTargetView(renderTargetView, cornflowerBlue);
                    }
                    else
                    {
                        SharpDX.Mathematics.Interop.RawColor4 transparent = new SharpDX.Mathematics.Interop.RawColor4(0.0f, 0.0f, 0.0f, 0.0f);
                        context.ClearRenderTargetView(renderTargetView, transparent);
                    }
                    context.ClearDepthStencilView(
                        depthStencilView,
                        SharpDX.Direct3D11.DepthStencilClearFlags.Depth | SharpDX.Direct3D11.DepthStencilClearFlags.Stencil,
                        1.0f,
                        0);

                    //
                    // TODO: Replace the sample content with your own content.
                    //
                    // Notes regarding holographic content:
                    //    * For drawing, remember that you have the potential to fill twice as many pixels
                    //      in a stereoscopic render target as compared to a non-stereoscopic render target
                    //      of the same resolution. Avoid unnecessary or repeated writes to the same pixel,
                    //      and only draw holograms that the user can see.
                    //    * To help occlude hologram geometry, you can create a depth map using geometry
                    //      data obtained via the surface mapping APIs. You can use this depth map to avoid
                    //      rendering holograms that are intended to be hidden behind tables, walls,
                    //      monitors, and so on.
                    //    * On HolographicDisplays that are transparent, black pixels will appear transparent
                    //      to the user. On such devices, you should clear the screen to Transparent as shown
                    //      above. You should still use alpha blending to draw semitransparent holograms.
                    //


                    // The view and projection matrices for each holographic camera will change
                    // every frame. This function refreshes the data in the constant buffer for
                    // the holographic camera indicated by cameraPose.
                    if (stationaryReferenceFrame != null)
                    {
                        cameraResources.UpdateViewProjectionBuffer(deviceResources, cameraPose, stationaryReferenceFrame.CoordinateSystem);
                    }

                    // Attach the view/projection constant buffer for this camera to the graphics pipeline.
                    bool cameraActive = cameraResources.AttachViewProjectionBuffer(deviceResources);

#if DRAW_SAMPLE_CONTENT
                    // Only render world-locked content when positional tracking is active.
                    if (cameraActive)
                    {
                        // Draw the sample hologram.
                        spinningCubeRenderer.Render();

                        if (canCommitDirect3D11DepthBuffer)
                        {
                            // On versions of the platform that support the CommitDirect3D11DepthBuffer API, we can
                            // provide the depth buffer to the system, and it will use depth information to stabilize
                            // the image at a per-pixel level.
                            HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);
                            SharpDX.Direct3D11.Texture2D depthBuffer = cameraResources.DepthBufferTexture2D;

                            // Direct3D interop APIs are used to provide the buffer to the WinRT API.
                            SharpDX.DXGI.Resource1 depthStencilResource = depthBuffer.QueryInterface<SharpDX.DXGI.Resource1>();
                            SharpDX.DXGI.Surface2 depthDxgiSurface = new SharpDX.DXGI.Surface2(depthStencilResource, 0);
                            IDirect3DSurface depthD3DSurface = InteropStatics.CreateDirect3DSurface(depthDxgiSurface.NativePointer);
                            if (depthD3DSurface != null)
                            {
                                // Calling CommitDirect3D11DepthBuffer causes the system to queue Direct3D commands to
                                // read the depth buffer. It will then use that information to stabilize the image as
                                // the HolographicFrame is presented.
                                renderingParameters.CommitDirect3D11DepthBuffer(depthD3DSurface);
                            }
                        }
                    }
#endif
                    atLeastOneCameraRendered = true;
                }

                return atLeastOneCameraRendered;
            });
        }
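
Example #2 gates newer platform features behind the canGetHolographicDisplayForCamera and canCommitDirect3D11DepthBuffer flags. A sketch of how such flags are typically initialized once at startup with ApiInformation; the field names come from the example, but the exact checks are an assumption:

        // One-time capability checks (e.g. in the constructor), not per frame.
        canGetHolographicDisplayForCamera = Windows.Foundation.Metadata.ApiInformation.IsPropertyPresent(
            typeof(HolographicCameraPose).FullName, "Display");
        canCommitDirect3D11DepthBuffer = Windows.Foundation.Metadata.ApiInformation.IsMethodPresent(
            typeof(HolographicCameraRenderingParameters).FullName, "CommitDirect3D11DepthBuffer");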
Example #3
        /// <summary>
        /// Renders the current frame to each holographic display, according to the
        /// current application and spatial positioning state. Returns true if the
        /// frame was rendered to at least one display.
        /// </summary>
        public bool Render(ref HolographicFrame holographicFrame)
        {
            // Don't try to render anything before the first Update.
            if (timer.FrameCount == 0)
            {
                return false;
            }

            //
            // TODO: Add code for pre-pass rendering here.
            //
            // Take care of any tasks that are not specific to an individual holographic
            // camera. This includes anything that doesn't need the final view or projection
            // matrix, such as lighting maps.
            //

            // Up-to-date frame predictions enhance the effectiveness of image stabilization and
            // allow more accurate positioning of holograms.
            holographicFrame.UpdateCurrentPrediction();
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Lock the set of holographic camera resources, then draw to each camera
            // in this frame.
            return deviceResources.UseHolographicCameraResources(
                (Dictionary<uint, CameraResources> cameraResourceDictionary) =>
            {
                bool atLeastOneCameraRendered = false;

                foreach (var cameraPose in prediction.CameraPoses)
                {
                    // This represents the device-based resources for a HolographicCamera.
                    CameraResources cameraResources = cameraResourceDictionary[cameraPose.HolographicCamera.Id];

                    // Get the device context.
                    var context = deviceResources.D3DDeviceContext;
                    var renderTargetView = cameraResources.BackBufferRenderTargetView;
                    var depthStencilView = cameraResources.DepthStencilView;

                    // Set render targets to the current holographic camera.
                    context.OutputMerger.SetRenderTargets(depthStencilView, renderTargetView);

                    // Clear the back buffer and depth stencil view.
                    SharpDX.Mathematics.Interop.RawColor4 transparent = new SharpDX.Mathematics.Interop.RawColor4(0.0f, 0.0f, 0.0f, 0.0f);
                    context.ClearRenderTargetView(renderTargetView, transparent);
                    context.ClearDepthStencilView(
                        depthStencilView,
                        SharpDX.Direct3D11.DepthStencilClearFlags.Depth | SharpDX.Direct3D11.DepthStencilClearFlags.Stencil,
                        1.0f,
                        0);

                    //
                    // TODO: Replace the sample content with your own content.
                    //
                    // Notes regarding holographic content:
                    //    * For drawing, remember that you have the potential to fill twice as many pixels
                    //      in a stereoscopic render target as compared to a non-stereoscopic render target
                    //      of the same resolution. Avoid unnecessary or repeated writes to the same pixel,
                    //      and only draw holograms that the user can see.
                    //    * To help occlude hologram geometry, you can create a depth map using geometry
                    //      data obtained via the surface mapping APIs. You can use this depth map to avoid
                    //      rendering holograms that are intended to be hidden behind tables, walls,
                    //      monitors, and so on.
                    //    * Black pixels will appear transparent to the user wearing the device, but you
                    //      should still use alpha blending to draw semitransparent holograms. You should
                    //      also clear the screen to Transparent as shown above.
                    //


                    // The view and projection matrices for each holographic camera will change
                    // every frame. This function refreshes the data in the constant buffer for
                    // the holographic camera indicated by cameraPose.
                    cameraResources.UpdateViewProjectionBuffer(deviceResources, cameraPose, referenceFrame.CoordinateSystem);

                    // Attach the view/projection constant buffer for this camera to the graphics pipeline.
                    bool cameraActive = cameraResources.AttachViewProjectionBuffer(deviceResources);

                    atLeastOneCameraRendered = true;
                }

                return atLeastOneCameraRendered;
            });
        }
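
Examples #1 and #3 world-lock their holograms against referenceFrame.CoordinateSystem. A sketch of creating that stationary frame, using the same API that Example #4 calls at the top of Run (the field name is taken from the examples):

        // World-locked frame of reference, anchored where the device is at creation time.
        SpatialStationaryFrameOfReference referenceFrame =
            SpatialLocator.GetDefault().CreateStationaryFrameOfReferenceAtCurrentLocation();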
Example #4
        public unsafe void Run()
        {
            ReferenceFrame = SpatialLocator.GetDefault().CreateStationaryFrameOfReferenceAtCurrentLocation();
            CoreWindow.GetForCurrentThread().CustomProperties.Add("HolographicSpace", HolographicSpace);
            InitializeSpace();
            InteractionManager = SpatialInteractionManager.GetForCurrentView();
            InteractionManager.InteractionDetected += (s, e) => GesturesManager?.HandleInteraction(e.Interaction);

            while (!windowClosed)
            {
                if (!appInited)
                {
                    SpatialMappingManager = new SpatialMappingManager();
                    VoiceManager          = new VoiceManager();
                    appInited             = true;
                    Game = (HoloApplication)Activator.CreateInstance(holoAppType, assetsDirectory);
                    Game.Run();
                    Game.Engine.PostUpdate += e => currentFrame?.UpdateCurrentPrediction();
                    GesturesManager         = new GesturesManager(Game, ReferenceFrame);
                }

                if (windowVisible && (null != HolographicSpace))
                {
                    if (Game != null)
                    {
                        currentFrame = HolographicSpace.CreateNextFrame();

                        var prediction = currentFrame.CurrentPrediction;
                        if (prediction.CameraPoses.Count < 1)
                        {
                            continue;
                        }
                        var cameraPose = prediction.CameraPoses[0];

                        var viewBox = cameraPose.TryGetViewTransform(ReferenceFrame.CoordinateSystem);
                        if (viewBox != null)
                        {
                            Matrix4x4 leftViewMatrixDx  = viewBox.Value.Left;
                            Matrix4x4 rightViewMatrixDx = viewBox.Value.Right;
                            Matrix4x4 leftProjMatrixDx  = cameraPose.ProjectionTransform.Left;
                            Matrix4x4 rightProjMatrixDx = cameraPose.ProjectionTransform.Right;

                            Matrix4 leftViewMatrixUrho  = *(Matrix4 *)(void *)&leftViewMatrixDx;
                            Matrix4 rightViewMatrixUrho = *(Matrix4 *)(void *)&rightViewMatrixDx;
                            Matrix4 leftProjMatrixUrho  = *(Matrix4 *)(void *)&leftProjMatrixDx;
                            Matrix4 rightProjMatrixUrho = *(Matrix4 *)(void *)&rightProjMatrixDx;
                            Game.UpdateStereoView(leftViewMatrixUrho, rightViewMatrixUrho, leftProjMatrixUrho, rightProjMatrixUrho);
                        }

                        var parameters = currentFrame.GetRenderingParameters(cameraPose);
                        if (Game.FocusWorldPoint != Vector3.Zero)
                        {
                            parameters.SetFocusPoint(ReferenceFrame.CoordinateSystem,
                                                     new System.Numerics.Vector3(
                                                         Game.FocusWorldPoint.X,
                                                         Game.FocusWorldPoint.Y,
                                                         -Game.FocusWorldPoint.Z)); // LH -> RH
                        }
                        Game.Engine.RunFrame();
                        currentFrame.PresentUsingCurrentPrediction(HolographicFramePresentWaitBehavior.WaitForFrameToFinish);
                    }
                    CoreWindow.GetForCurrentThread().Dispatcher.ProcessEvents(CoreProcessEventsOption.ProcessAllIfPresent);
                }
                else
                {
                    CoreWindow.GetForCurrentThread().Dispatcher.ProcessEvents(CoreProcessEventsOption.ProcessOneAndAllPending);
                }
            }
        }
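
Two details in Example #4 deserve a note. The unsafe casts reinterpret System.Numerics.Matrix4x4 values as Urho Matrix4, which only works because both types are 16 contiguous floats; and the focus point negates Z because Urho uses a left-handed convention while the Windows Perception APIs are right-handed. A hypothetical helper that makes the handedness flip explicit:

        // Hypothetical helper: convert an Urho (left-handed) point into the
        // right-handed System.Numerics vector expected by SetFocusPoint.
        static System.Numerics.Vector3 LeftToRightHanded(Urho.Vector3 p)
        {
            return new System.Numerics.Vector3(p.X, p.Y, -p.Z);
        }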
Example #5
        /// <summary>
        /// Creates the next holographic frame, updates the camera poses and image
        /// stabilization parameters for each holographic display, renders the scene,
        /// and presents the frame.
        /// </summary>
        public void UpdateAndDraw()
        {
            HolographicFrame holographicFrame = this.holographicSpace.CreateNextFrame();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Back buffers can change from frame to frame. Validate each buffer, and recreate
            // resource views and depth buffers as needed.
            this.deviceResources.EnsureCameraResources(holographicFrame, prediction);

            this.UpdateEyeProperties();

            // Up-to-date frame predictions enhance the effectiveness of image stabilization and
            // allow more accurate positioning of holograms.
            holographicFrame.UpdateCurrentPrediction();

            // Get a prediction of where holographic cameras will be when this frame
            // is presented.
            prediction = holographicFrame.CurrentPrediction;

            // Next, we get a coordinate system from the attached frame of reference that is
            // associated with the current frame. Later, this coordinate system is used for
            // creating the stereo view matrices when rendering the sample content.
            SpatialCoordinateSystem currentCoordinateSystem = this.ReferenceFrame.CoordinateSystem;

            var eyeTexture = this.eyesProperties[0].Texture;

            this.deviceResources.UpdateCameraClipDistance(eyeTexture.NearPlane, eyeTexture.FarPlane);

            foreach (var cameraPose in prediction.CameraPoses)
            {
                // The HolographicCameraRenderingParameters class provides access to set
                // the image stabilization parameters.
                HolographicCameraRenderingParameters renderingParameters = holographicFrame.GetRenderingParameters(cameraPose);

                // SetFocusPoint informs the system about a specific point in your scene to
                // prioritize for image stabilization. The focus point is set independently
                // for each holographic camera.
                // You should set the focus point near the content that the user is looking at.
                // In this example, we put the focus point at the center of the sample hologram,
                // since that is the only hologram available for the user to focus on.
                // You can also set the relative velocity and facing of that content; the sample
                // hologram is at a fixed point so we only need to indicate its position.
                if (this.mixedRealityService.FocusPosition.HasValue)
                {
                    var position = this.mixedRealityService.FocusPosition.Value;

                    if (!this.mixedRealityService.FocusNormal.HasValue)
                    {
                        renderingParameters.SetFocusPoint(currentCoordinateSystem, new System.Numerics.Vector3(position.X, position.Y, position.Z));
                    }
                    else
                    {
                        var normal = this.mixedRealityService.FocusNormal.Value;

                        if (!this.mixedRealityService.FocusVelocity.HasValue)
                        {
                            renderingParameters.SetFocusPoint(
                                currentCoordinateSystem,
                                new System.Numerics.Vector3(position.X, position.Y, position.Z),
                                new System.Numerics.Vector3(normal.X, normal.Y, normal.Z));
                        }
                        else
                        {
                            var velocity = this.mixedRealityService.FocusVelocity.Value;

                            renderingParameters.SetFocusPoint(
                                currentCoordinateSystem,
                                new System.Numerics.Vector3(position.X, position.Y, position.Z),
                                new System.Numerics.Vector3(normal.X, normal.Y, normal.Z),
                                new System.Numerics.Vector3(velocity.X, velocity.Y, velocity.Z));
                        }
                    }
                }

                var pointerPose = SpatialPointerPose.TryGetAtTimestamp(this.ReferenceFrame.CoordinateSystem, prediction.Timestamp);
                if (pointerPose != null)
                {
                    pointerPose.Head.Position.ToWave(out this.headRay.Position);
                    pointerPose.Head.ForwardDirection.ToWave(out this.headRay.Direction);
                }

                var viewTransform       = cameraPose.TryGetViewTransform(this.ReferenceFrame.CoordinateSystem);
                var projectionTransform = cameraPose.ProjectionTransform;

                if (viewTransform.HasValue)
                {
                    for (int i = 0; i < 2; i++)
                    {
                        Matrix viewMatrix;
                        Matrix projectionMatrix;

                        if (i == (int)VREyeType.LeftEye)
                        {
                            viewTransform.Value.Left.ToWave(out viewMatrix);
                            projectionTransform.Left.ToWave(out projectionMatrix);
                        }
                        else
                        {
                            viewTransform.Value.Right.ToWave(out viewMatrix);
                            projectionTransform.Right.ToWave(out projectionMatrix);
                        }

                        Matrix view;
                        Matrix.Invert(ref viewMatrix, out view);

                        var eyeProperties = this.eyesProperties[i];
                        var eyePose       = eyeProperties.Pose;
                        eyePose.Position = view.Translation;
                        Quaternion.CreateFromRotationMatrix(ref view, out eyePose.Orientation);
                        eyeProperties.Pose       = eyePose;
                        eyeProperties.Projection = projectionMatrix;
                    }

                    var leftEyePose         = this.eyesProperties[(int)VREyeType.LeftEye].Pose;
                    var rightEyePose        = this.eyesProperties[(int)VREyeType.RightEye].Pose;
                    var centerEyeProperties = this.eyesProperties[(int)VREyeType.CenterEye];

                    var centerEyePose = centerEyeProperties.Pose;
                    centerEyePose.Position    = Vector3.Lerp(leftEyePose.Position, rightEyePose.Position, 0.5f);
                    centerEyePose.Orientation = Quaternion.Lerp(leftEyePose.Orientation, rightEyePose.Orientation, 0.5f);
                    centerEyeProperties.Pose  = centerEyePose;
                }
            }

            this.Render();

            this.deviceResources.Present(ref holographicFrame);
        }
        /// <summary>
        /// Renders the current frame to each holographic display, according to the
        /// current application and spatial positioning state. Returns true if the
        /// frame was rendered to at least one display.
        /// </summary>
        public bool Render(ref HolographicFrame holographicFrame)
        {
            // Don't try to render anything before the first Update.
            if (timer.FrameCount == 0)
            {
                return false;
            }

            //
            // TODO: Add code for pre-pass rendering here.
            //
            // Take care of any tasks that are not specific to an individual holographic
            // camera. This includes anything that doesn't need the final view or projection
            // matrix, such as lighting maps.
            //

            // Up-to-date frame predictions enhance the effectiveness of image stabilization and
            // allow more accurate positioning of holograms.
            holographicFrame.UpdateCurrentPrediction();
            HolographicFramePrediction prediction = holographicFrame.CurrentPrediction;

            // Lock the set of holographic camera resources, then draw to each camera
            // in this frame.
            return deviceResources.UseHolographicCameraResources(
                (Dictionary<uint, CameraResources> cameraResourceDictionary) =>
            {
                bool atLeastOneCameraRendered = false;

                foreach (var cameraPose in prediction.CameraPoses)
                {
                    // This represents the device-based resources for a HolographicCamera.
                    CameraResources cameraResources = cameraResourceDictionary[cameraPose.HolographicCamera.Id];

                    // Get the device context.
                    var context = deviceResources.D3DDeviceContext;
                    var renderTargetView = cameraResources.BackBufferRenderTargetView;
                    var depthStencilView = cameraResources.DepthStencilView;

                    // Set render targets to the current holographic camera.
                    context.OutputMerger.SetRenderTargets(depthStencilView, renderTargetView);

                    // Clear the back buffer and depth stencil view.
                    SharpDX.Mathematics.Interop.RawColor4 transparent = new SharpDX.Mathematics.Interop.RawColor4(0.0f, 0.0f, 0.0f, 0.0f);
                    context.ClearRenderTargetView(renderTargetView, transparent);
                    context.ClearDepthStencilView(
                        depthStencilView,
                        SharpDX.Direct3D11.DepthStencilClearFlags.Depth | SharpDX.Direct3D11.DepthStencilClearFlags.Stencil,
                        1.0f,
                        0);

                    // The view and projection matrices for each holographic camera will change
                    // every frame. This function refreshes the data in the constant buffer for
                    // the holographic camera indicated by cameraPose.

                    SpatialCoordinateSystem referenceFrameCoordinateSystem = referenceFrame.GetStationaryCoordinateSystemAtTimestamp(prediction.Timestamp);

                    if (referenceFrameCoordinateSystem == null)
                    {
                        continue;
                    }

                    cameraResources.UpdateViewProjectionBuffer(deviceResources, cameraPose, referenceFrameCoordinateSystem);

                    // Attach the view/projection constant buffer for this camera to the graphics pipeline.
                    bool cameraActive = cameraResources.AttachViewProjectionBuffer(deviceResources);

                    // Only render world-locked content when positional tracking is active.
                    if (cameraActive)
                    {
                        foreach (var entity in entities)
                        {
                            entity.Render();
                        }
                    }

                    atLeastOneCameraRendered = true;
                }

                return atLeastOneCameraRendered;
            });
        }
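
Unlike the earlier examples, the second method in Example #5 resolves its coordinate system per timestamp through GetStationaryCoordinateSystemAtTimestamp, which lives on SpatialLocatorAttachedFrameOfReference (a body-locked frame that follows the device). A sketch of creating such a frame; the field name referenceFrame is taken from the example:

        // Body-locked frame of reference; its coordinate system must be resolved
        // against a specific timestamp each frame, as done in the Render method above.
        SpatialLocatorAttachedFrameOfReference referenceFrame =
            SpatialLocator.GetDefault().CreateAttachedFrameOfReferenceAtCurrentHeading();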