Example #1
0
        /// <summary>
        /// Disposes the created resources and removes the rendering steps that this provider added to the parent DXScene.
        /// </summary>
        /// <param name="isDisposing">true when called from Dispose; false when called from a finalizer (managed resources must not be touched then)</param>
        protected override void Dispose(bool isDisposing)
        {
            if (isDisposing)
            {
                DiposeMsaaBuffers();

                if (_mirrorTexture != null)
                {
                    _mirrorTexture.Dispose();
                    _mirrorTexture = null;

                    // Clear the stored description so a new mirror texture will be created on next use
                    _mirrorTextureDesc = new MirrorTextureDesc();
                }

                if (_eyeTextureSwapChains != null)
                {
                    // Dispose the swap chain for each eye (left and right)
                    foreach (var eyeTextureSwapChain in _eyeTextureSwapChains)
                        eyeTextureSwapChain.Dispose();

                    _eyeTextureSwapChains = null;
                }

                if (_resetViewportRenderingStep != null)
                {
                    // Remove the step from the scene (when the scene is still alive) before disposing it
                    parentDXScene?.RenderingSteps.Remove(_resetViewportRenderingStep);

                    _resetViewportRenderingStep.Dispose();
                    _resetViewportRenderingStep = null;
                }

                if (renderingStepsLoop != null)
                {
                    parentDXScene?.RenderingSteps.Remove(renderingStepsLoop);

                    renderingStepsLoop.Dispose();
                    renderingStepsLoop = null;
                }

                if (parentDXScene != null)
                {
                    // Clear the destination buffer that was set for VR rendering (restores the step's default behavior)
                    parentDXScene.DefaultResolveMultisampledBackBufferRenderingStep.DestinationBuffer = null;
                }
            }

            base.Dispose(isDisposing);
        }
Example #2
0
        /// <summary>
        /// InitializeRenderingSteps is called when the VirtualRealityProvider is initialized and should add custom rendering steps to the DXScene.RenderingSteps list.
        /// See remarks for more info.
        /// </summary>
        /// <remarks>
        /// <para>
        /// <b>InitializeRenderingSteps</b> is called when the VirtualRealityProvider is initialized and should add custom rendering steps to the DXScene.RenderingSteps list.
        /// </para>
        /// <para>
        /// Usually the virtual reality rendering provider adds 3 rendering steps to existing rendering steps:<br/>
        /// 1) <see cref="BeginVirtualRealityRenderingStep"/> is added before DXScene.DefaultPrepareRenderTargetsRenderingStep (prepares the rendering context for the currently rendered eye)<br/>
        /// 2) <see cref="RenderingStepsLoop"/> is added after DXScene.DefaultResolveMultisampledBackBufferRenderingStep (this renders the scene again for the other eye - jumps to BeginVirtualRealityRenderingStep)<br/>
        /// 3) <see cref="SimpleResolveStereoscopicImagesRenderingStep"/> or similar step is added after RenderingStepsLoop (to render post-process effects after the VR resolve) or before DXScene.DefaultCompleteRenderingStep (to render post-process effects before the VR resolve).
        /// </para>
        /// <para>
        /// This method usually also creates the pixel shaders and constant buffers.
        /// Other resources (back buffers and views) are usually created in <see cref="VirtualRealityProviderBase.UpdateRenderingContext"/> where the size of the current back buffer is compared with the size of back buffers for virtual reality.
        /// </para>
        /// <para>
        /// It is recommended that the created rendering steps are protected or public with private setter.
        /// This way a derived class can override the InitializeRenderingSteps method and add the created rendering steps in some other way to the DXScene.RenderingSteps.
        /// </para>
        /// </remarks>
        /// <param name="dxScene">parent DXScene</param>
        protected override void InitializeRenderingSteps(DXScene dxScene)
        {
            // Call base class to:
            // Create and add beginVirtualRealityRenderingStep
            // Create and add renderingStepsLoop
            base.InitializeRenderingSteps(dxScene);

            // If this method was called before, remove and dispose the previously created step so it is not registered twice
            if (_resetViewportRenderingStep != null)
            {
                dxScene.RenderingSteps.Remove(_resetViewportRenderingStep);
                _resetViewportRenderingStep.Dispose();
            }


            // After both eyes were rendered, we need to reset the Viewport back to full screen
            // This can be done with adding the ChangeBackBufferRenderingStep after the renderingStepsLoop (after both eyes are rendered)
            // ChangeBackBufferRenderingStep is usually used to change current back buffer and its views, but it can be also used to change only Viewport.
            // Here we only create an instance of ChangeBackBufferRenderingStep and add it to RenderingSteps.
            // In the UpdateRenderingContext (below) we will set the NewViewport property to the size of the FinalBackBuffer
            _resetViewportRenderingStep = new ChangeBackBufferRenderingStep("ResetViewportRenderingStep", "Resets the Viewport from split screen viewport to the final full screen viewport");

            dxScene.RenderingSteps.AddAfter(dxScene.DefaultResolveMultisampledBackBufferRenderingStep, _resetViewportRenderingStep);


            // Remove the loop step that was created by the base class - it is replaced below with a loop that also submits OVR frames
            if (renderingStepsLoop != null)
            {
                dxScene.RenderingSteps.Remove(renderingStepsLoop);
            }

            // We need to call _textureSwapChain.Commit() after image for each eye is rendered

            // We create a loop in rendering steps with adding a RenderingStepsLoop (this is the last step in the loop)
            // The loop begins with beginVirtualRealityRenderingStep (when the loop is repeated, the execution goes back to beginVirtualRealityRenderingStep step)
            // The RenderingStepsLoop also requires a loopPredicate that determines if the loop should repeat (returns true) or exit (returns false).
            renderingStepsLoop = new RenderingStepsLoop("RepeatVirtualRealityLoop",
                                                        beginLoopRenderingStep: beginVirtualRealityRenderingStep,
                                                        loopPredicate: (RenderingContext r) =>
            {
                // This predicate is executed with the RenderingStepsLoop execution.
                // It returns true in case the rendering loop should repeat itself, or false when it should exit.
                // As seen from the return statement below, we repeat the rendering loop when the stereoscopic rendering is enabled and when we have rendered the left eye
                var currentEye = r.VirtualRealityContext.CurrentEye;

                if (_eyeTextureSwapChains != null)
                {
                    // Update the _sessionStatus before rendering the frame
                    if (currentEye == Eye.Left)
                    {
                        UpdateSessionStatus();

                        if (_sessionStatus.ShouldRecenter)
                        {
                            _ovr.RecenterTrackingOrigin(_sessionPtr);
                        }
                    }

                    if (_sessionStatus.IsVisible)     // We should submit OVR frames only when VR has focus
                    {
                        int eyeIndex = currentEye == Eye.Left ? 0 : 1;

                        _eyeTextureSwapChains[eyeIndex].Commit();

                        if (currentEye == Eye.Right)
                        {
                            // Both eyes are rendered - fill the shared layer description and submit the frame to the OVR runtime
                            _layerShared.Header.Type  = LayerType.EyeFov;
                            _layerShared.Header.Flags = LayerFlags.None;

                            _layerShared.ColorTextureLeft = _eyeTextureSwapChains[0].TextureSwapChainPtr;
                            _layerShared.ViewportLeft     = new Recti(new Vector2i(0, 0), new Sizei(_eyeTextureSwapChains[0].ViewportSize.Width, _eyeTextureSwapChains[0].ViewportSize.Height));
                            _layerShared.FovLeft          = _hmdDesc.DefaultEyeFov[0];
                            _layerShared.RenderPoseLeft   = _eyePoses[0];

                            _layerShared.ColorTextureRight = _eyeTextureSwapChains[1].TextureSwapChainPtr;
                            _layerShared.ViewportRight     = new Recti(new Vector2i(0, 0), new Sizei(_eyeTextureSwapChains[1].ViewportSize.Width, _eyeTextureSwapChains[1].ViewportSize.Height));
                            _layerShared.FovRight          = _hmdDesc.DefaultEyeFov[1];
                            _layerShared.RenderPoseRight   = _eyePoses[1];

                            _layerShared.SensorSampleTime = _sensorSampleTime;

                            var result = _ovr.SubmitFrame(_sessionPtr, _frameIndex, IntPtr.Zero, ref _layerShared);

                            if (result < Ab3d.OculusWrap.Result.Success)
                            {
                                // Include the detailed error description from the OVR runtime in the exception message
                                // (the original code read GetLastErrorInfo but never used it)
                                var lastError = _ovr.GetLastErrorInfo();
                                throw new OvrException("Failed to submit frame: " + result + "; " + lastError.ErrorString);
                            }

                            _frameIndex++;
                        }
                    }

                    // Copy the OVR mirror texture to the final back buffer, but only when their sizes match
                    if (_mirrorTextureDesc.Width == r.FinalBackBufferDescription.Width && _mirrorTextureDesc.Height == r.FinalBackBufferDescription.Height)
                    {
                        r.DeviceContext.CopyResource(_mirrorTextureDX, r.FinalBackBuffer);
                    }
                }

                // Repeat the rendering loop when the stereoscopic rendering is enabled and when we have rendered the left eye
                return this.IsEnabled &&
                       r.VirtualRealityContext != null &&
                       currentEye == Eye.Left;
            });

            dxScene.RenderingSteps.AddAfter(dxScene.DefaultResolveMultisampledBackBufferRenderingStep, renderingStepsLoop);
        }