Example #1
0
        /// <summary>
        /// CreateResources is called when the VirtualRealityProvider is initialized and should create the DirectX resources.
        /// </summary>
        /// <remarks>
        /// <para>
        /// <b>CreateResources</b> is called when the VirtualRealityProvider is initialized and should create the DirectX resources.
        /// </para>
        /// <para>
        /// This method is called after this virtual reality provider is registered with calling the <see cref="DXScene.InitializeVirtualRealityRendering"/> method.
        /// This method then calls the <see cref="DXSceneResource.InitializeResources"/> and the <see cref="VirtualRealityProviderBase.OnInitializeResources"/>.
        /// OnInitializeResources calls this CreateResources method and then the <see cref="VirtualRealityProviderBase.InitializeRenderingSteps"/> method.
        /// </para>
        /// <para>
        /// This method usually creates pixel shaders and constant buffers.
        /// Other resources (back buffers and views) are usually created in <see cref="VirtualRealityProviderBase.UpdateRenderingContext"/> where the size of the current back buffer is compared with the size of back buffers for virtual reality.
        /// </para>
        /// </remarks>
        /// <param name="dxScene">parent DXScene</param>
        protected override void CreateResources(DXScene dxScene)
        {
            // Dispose any previously created swap chains - this method can be called again
            // from UpdateRenderingContext when the required eye texture size changes.
            if (_eyeTextureSwapChains != null)
            {
                if (_eyeTextureSwapChains[0] != null)
                {
                    _eyeTextureSwapChains[0].Dispose();
                }

                if (_eyeTextureSwapChains[1] != null)
                {
                    _eyeTextureSwapChains[1].Dispose();
                }
            }
            else
            {
                _eyeTextureSwapChains = new OculusTextureSwapChain[2];
            }

            // One texture swap chain per eye; index 0 = left, index 1 = right.
            _eyeTextureSwapChains[0] = new OculusTextureSwapChain(_ovr,
                                                                  _sessionPtr,
                                                                  dxScene.Device,
                                                                  EyeType.Left,
                                                                  Format.B8G8R8A8_UNorm_SRgb,
                                                                  _ovr.GetFovTextureSize(_sessionPtr, EyeType.Left, _hmdDesc.DefaultEyeFov[0], 1.0f),
                                                                  createDepthStencilView: true,
                                                                  isDebugDevice: dxScene.DXDevice.IsDebugDevice);

            // FIXED: the right-eye swap chain was created with EyeType.Left
            // (the texture size on the next line was already queried for the right eye).
            _eyeTextureSwapChains[1] = new OculusTextureSwapChain(_ovr,
                                                                  _sessionPtr,
                                                                  dxScene.Device,
                                                                  EyeType.Right,
                                                                  Format.B8G8R8A8_UNorm_SRgb,
                                                                  _ovr.GetFovTextureSize(_sessionPtr, EyeType.Right, _hmdDesc.DefaultEyeFov[1], 1.0f),
                                                                  createDepthStencilView: true,
                                                                  isDebugDevice: dxScene.DXDevice.IsDebugDevice);


            _layerShared        = new LayerEyeFov();
            _layerShared.Header = new LayerHeader()
            {
                Type  = LayerType.EyeFov,
                Flags = LayerFlags.HighQuality
            };

            // Specify the texture to show on the HMD.
            _layerShared.ColorTextureLeft  = _eyeTextureSwapChains[0].TextureSwapChainPtr;
            _layerShared.ColorTextureRight = _eyeTextureSwapChains[1].TextureSwapChainPtr;

            _layerShared.ViewportLeft.Position = new Vector2i(0, 0);
            _layerShared.ViewportLeft.Size     = _eyeTextureSwapChains[0].Size;

            _layerShared.ViewportRight.Position = new Vector2i(0, 0);
            _layerShared.ViewportRight.Size     = _eyeTextureSwapChains[1].Size;

            _layerShared.FovLeft  = _hmdDesc.DefaultEyeFov[0];
            _layerShared.FovRight = _hmdDesc.DefaultEyeFov[1];


            // FIXED: the left-eye offset was written to _hmdToEyeOffset[1] and read from the
            // not-yet-initialized _eyeRenderDesc[1], so _hmdToEyeOffset[0] was never set.
            _eyeRenderDesc[0]  = _ovr.GetRenderDesc(_sessionPtr, EyeType.Left, _hmdDesc.DefaultEyeFov[0]);
            _hmdToEyeOffset[0] = _eyeRenderDesc[0].HmdToEyePose.Position;

            _eyeRenderDesc[1]  = _ovr.GetRenderDesc(_sessionPtr, EyeType.Right, _hmdDesc.DefaultEyeFov[1]);
            _hmdToEyeOffset[1] = _eyeRenderDesc[1].HmdToEyePose.Position;


            // Create MSAA back buffer if needed
            UpdateMsaaBackBuffer(_eyeTextureSwapChains[0].Size.Width, _eyeTextureSwapChains[0].Size.Height, _multisamplingCount);


            // The mirror texture mirrors what is shown on the HMD into the application's back buffer.
            _mirrorTextureDesc = new MirrorTextureDesc()
            {
                Format    = SharpDXHelpers.GetTextureFormat(dxScene.BackBufferDescription.Format),
                Height    = dxScene.BackBufferDescription.Height,
                MiscFlags = dxScene.BackBufferDescription.MipLevels != 1 ? TextureMiscFlags.AllowGenerateMips : TextureMiscFlags.None,
                Width     = dxScene.BackBufferDescription.Width
            };

            // EyeLevel gives tracking poses relative to the HMD's initial position
            // (use TrackingOrigin.FloorLevel instead to get poses where the floor height is 0).
            _ovr.SetTrackingOriginType(_sessionPtr, TrackingOrigin.EyeLevel);

            IntPtr mirrorTexturePtr;
            var    result = _ovr.CreateMirrorTextureDX(_sessionPtr, dxScene.Device.NativePointer, ref _mirrorTextureDesc, out mirrorTexturePtr);

            if (result < Ab3d.OculusWrap.Result.Success)
            {
                var lastError = _ovr.GetLastErrorInfo();
                throw new OvrException("Failed to create Oculus mirror texture: " + lastError.ErrorString, lastError.Result);
            }

            _mirrorTexture = new OculusMirrorTexture(_ovr, _sessionPtr, mirrorTexturePtr);

            // Retrieve the Direct3D texture contained in the Oculus MirrorTexture.
            IntPtr mirrorTextureComPtr;

            result = _mirrorTexture.GetBufferDX(typeof(Texture2D).GUID, out mirrorTextureComPtr);

            if (result < Ab3d.OculusWrap.Result.Success)
            {
                var lastError = _ovr.GetLastErrorInfo();
                throw new OvrException("Failed to retrieve the texture from the created mirror texture buffer: " + lastError.ErrorString, lastError.Result);
            }

            // Create a managed Texture2D, based on the unmanaged texture pointer.
            _mirrorTextureDX = new Texture2D(mirrorTextureComPtr);

            if (dxScene.DXDevice.IsDebugDevice)
            {
                _mirrorTextureDX.DebugName = "OculusMirrorTexture";
            }


            // To prevent DirectX from rendering more than one frame in the background,
            // we need to set the MaximumFrameLatency to 1.
            // This prevents occasional dropped frames in Oculus Rift.
            var dxgiDevice = dxScene.Device.QueryInterface<SharpDX.DXGI.Device1>();

            if (dxgiDevice != null)
            {
                dxgiDevice.MaximumFrameLatency = 1;
                dxgiDevice.Dispose();
            }

            _frameIndex = 0;

            _matrixCamera = new MatrixCamera();
        }
Example #2
0
        /// <summary>
        /// UpdateRenderingContext is called from the BeginVirtualRealityRenderingStep and should update the properties in the RenderingContext according to the current eye.
        /// See remarks for more info about the usual tasks that are performed in this method.
        /// </summary>
        /// <remarks>
        /// <para>
        /// <b>UpdateRenderingContext</b> is called from the BeginVirtualRealityRenderingStep and should update the properties in the RenderingContext according to the current eye.
        /// </para>
        /// <para>
        /// This method is usually called from the <see cref="BeginVirtualRealityRenderingStep"/> (when the virtual reality provider is enabled).
        /// </para>
        /// <para>
        /// Usually this method does the following:<br/>
        /// - Creates the back buffers and views that are needed for rendering
        /// (the back buffers are also recreated if the size of <see cref="RenderingContext.CurrentBackBufferDescription"/> is different from the size of previously created back buffers).<br/>
        /// - Updates the <see cref="VirtualRealityContext.CurrentEye"/> property.<br/>
        /// - Sets the <see cref="RenderingContext.UsedCamera"/> property to a <see cref="StereoscopicCamera"/><br/>
        /// - Calls <see cref="RenderingContext.SetBackBuffer"/> method and sets the new back buffers.<br/>
        /// - Updates <see cref="ResolveMultisampledBackBufferRenderingStep.DestinationBuffer"/> on the <see cref="DXScene.DefaultResolveMultisampledBackBufferRenderingStep"/> and sets it to the eye texture.
        /// </para>
        /// </remarks>
        /// <param name="renderingContext">RenderingContext</param>
        /// <param name="isNewFrame">true if no eye was yet rendered for the current frame; false if the first eye was already rendered for the current frame and we need to render the second eye</param>
        public override void UpdateRenderingContext(RenderingContext renderingContext, bool isNewFrame)
        {
            // This code is executed inside BeginVirtualRealityRenderingStep before all the objects are rendered.

            // Base method does:
            // - sets the virtualRealityContext.CurrentEye based on the isNewFrame parameter: isNewFrame == true => LeftEye else RightEye
            // - ensures that stereoscopicCamera is created and sets its properties for the current eye and based on the current EyeSeparation, Parallax and InvertLeftRightView
            // - sets renderingContext.UsedCamera = stereoscopicCamera
            base.UpdateRenderingContext(renderingContext, isNewFrame);


            var virtualRealityContext = renderingContext.VirtualRealityContext;

            // Map the engine's eye enum to the Oculus SDK's eye type and to the
            // array index (0 = left, 1 = right) used throughout this provider.
            Eye     currentEye = virtualRealityContext.CurrentEye;
            EyeType ovrEye     = currentEye == Eye.Left ? EyeType.Left : EyeType.Right;

            int eyeIndex = currentEye == Eye.Left ? 0 : 1;


            // Ask the SDK what texture size it wants for this eye's FOV at the current pixel density.
            FovPort defaultEyeFov = _hmdDesc.DefaultEyeFov[eyeIndex];
            var     idealSize     = _ovr.GetFovTextureSize(_sessionPtr, ovrEye, defaultEyeFov, _pixelsPerDisplayPixel);

            // When we render this frame for the first time
            // we also check that all the required resources are created
            // Check if we need to create or recreate the RenderTargetViews and DepthStencilViews
            if (isNewFrame &&
                (_eyeTextureSwapChains[eyeIndex] == null ||
                 _eyeTextureSwapChains[eyeIndex].Size.Width != idealSize.Width ||
                 _eyeTextureSwapChains[eyeIndex].Size.Height != idealSize.Height))
            {
                CreateResources(renderingContext.DXScene);
            }

            if (isNewFrame)
            {
                // Query both eye poses once per frame; _eyePoses and _sensorSampleTime are then
                // reused when rendering the second eye.
                // NOTE(review): the frame index is hard-coded to 0L here although the class
                // maintains a _frameIndex field - confirm this is intended (0 means "no specific frame" in LibOVR).
                _ovr.GetEyePoses(_sessionPtr, 0L, true, _hmdToEyeOffset, ref _eyePoses, out _sensorSampleTime);
            }

            var camera = renderingContext.DXScene.Camera;


            // From OculusRoomTiny main.cpp #221

            //Get the pose information
            var eyeQuat = SharpDXHelpers.ToQuaternion(_eyePoses[eyeIndex].Orientation);
            var eyePos  = SharpDXHelpers.ToVector3(_eyePoses[eyeIndex].Position);

            // Get view and projection matrices for the Rift camera
            Vector3 cameraPosition       = camera.GetCameraPosition();
            Matrix  cameraRotationMatrix = camera.View;

            cameraRotationMatrix.M41 = 0; // Remove translation
            cameraRotationMatrix.M42 = 0;
            cameraRotationMatrix.M43 = 0;

            cameraRotationMatrix.Invert(); // Invert to get rotation matrix

            // Rotate the HMD-space eye offset into world space and add it to the scene camera's position.
            Vector4 rotatedEyePos4 = Vector3.Transform(eyePos, cameraRotationMatrix);
            var     rotatedEyePos  = new Vector3(rotatedEyePos4.X, rotatedEyePos4.Y, rotatedEyePos4.Z);

            var finalCameraPosition = cameraPosition + rotatedEyePos;

            // Combine the HMD's head orientation with the scene camera's rotation.
            var eyeQuaternionMatrix = Matrix.RotationQuaternion(eyeQuat);
            var finalRotationMatrix = eyeQuaternionMatrix * cameraRotationMatrix;

            // Derive look and up directions by rotating the canonical RH basis vectors
            // (-Z forward, +Y up) with the combined rotation.
            Vector4 lookDirection4 = Vector3.Transform(new Vector3(0, 0, -1), finalRotationMatrix);
            var     lookDirection  = new Vector3(lookDirection4.X, lookDirection4.Y, lookDirection4.Z);

            Vector4 upDirection4 = Vector3.Transform(Vector3.UnitY, finalRotationMatrix);
            var     upDirection  = new Vector3(upDirection4.X, upDirection4.Y, upDirection4.Z);

            var viewMatrix = Matrix.LookAtRH(finalCameraPosition, finalCameraPosition + lookDirection, upDirection);



            // Clamp the near plane so the SDK projection stays numerically stable.
            float zNear = camera.NearPlaneDistance;
            float zFar  = camera.FarPlaneDistance;

            if (zNear < 0.05f)
            {
                zNear = 0.05f;
            }

            zFar *= 1.2f; // increase the zFar - the FarPlaneDistance is not exactly correct because the camera can be higher because the eye's Position can be over the Camera's position

            //zNear = 0.1f;
            //zFar = 100;

            // Let the Oculus SDK build the (asymmetric-FOV) projection matrix for this eye.
            var eyeRenderDesc = _ovr.GetRenderDesc(_sessionPtr, ovrEye, _hmdDesc.DefaultEyeFov[eyeIndex]);

            var projectionMatrix = _ovr.Matrix4f_Projection(eyeRenderDesc.Fov, zNear, zFar, ProjectionModifier.None).ToMatrix();

            // LibOVR matrices are row/column transposed relative to SharpDX.
            projectionMatrix.Transpose();

            // Override the camera set by the base class with the HMD-driven view/projection.
            _matrixCamera.Projection = projectionMatrix;
            _matrixCamera.View       = viewMatrix;
            _matrixCamera.SetCameraPosition(finalCameraPosition);

            renderingContext.UsedCamera = _matrixCamera;


            // Change the current viewport
            renderingContext.CurrentViewport = _eyeTextureSwapChains[eyeIndex].Viewport;
            renderingContext.DeviceContext.Rasterizer.SetViewport(renderingContext.CurrentViewport);

            if (_msaaBackBuffer == null)
            {
                // Render directly into the current texture of this eye's swap chain.
                renderingContext.SetBackBuffer(backBuffer: _eyeTextureSwapChains[eyeIndex].CurrentTexture,
                                               backBufferDescription: _eyeTextureSwapChains[eyeIndex].CurrentTextureDescription,
                                               renderTargetView: _eyeTextureSwapChains[eyeIndex].CurrentRTView,
                                               depthStencilView: _eyeTextureSwapChains[eyeIndex].CurrentDepthStencilView,
                                               bindNewRenderTargetsToDeviceContext: false); // Do not bind new buffers because this is done in the next rendering step - PrepareRenderTargetsRenderingStep
            }
            else
            {
                // MSAA: render into the multisampled buffer and resolve into the eye texture later.
                renderingContext.SetBackBuffer(backBuffer: _msaaBackBuffer,
                                               backBufferDescription: _msaaBackBufferDescription,
                                               renderTargetView: _msaaBackBufferRenderTargetView,
                                               depthStencilView: _msaaDepthStencilView,
                                               bindNewRenderTargetsToDeviceContext: false); // Do not bind new buffers because this is done in the next rendering step - PrepareRenderTargetsRenderingStep

                renderingContext.DXScene.DefaultResolveMultisampledBackBufferRenderingStep.DestinationBuffer = _eyeTextureSwapChains[eyeIndex].CurrentTexture;
            }


            // When we render this frame for the first time set the NewViewport on the ChangeBackBufferRenderingStep to resets the Viewport from split screen viewport to the final full screen viewport
            if (isNewFrame && _resetViewportRenderingStep != null)
            {
                int backBufferWidth  = renderingContext.FinalBackBufferDescription.Width;
                int backBufferHeight = renderingContext.FinalBackBufferDescription.Height;

                _resetViewportRenderingStep.NewViewport = new ViewportF(0, 0, backBufferWidth, backBufferHeight);
            }
        }
        /// <summary>
        /// Creates an OculusTextureSwapChain that wraps a LibOVR texture swap chain for one eye
        /// and creates the DirectX render target, shader resource and (optionally) depth-stencil views
        /// for each texture in the chain.
        /// </summary>
        /// <param name="ovr">OvrWrap instance used to call into LibOVR</param>
        /// <param name="sessionPtr">pointer to the Oculus session</param>
        /// <param name="device">Direct3D 11 device used to create the views</param>
        /// <param name="eye">eye this swap chain is used for (only used for debug names)</param>
        /// <param name="format">texture format (converted to the matching sRGB format for the chain)</param>
        /// <param name="size">size of the eye textures</param>
        /// <param name="createDepthStencilView">when true a depth buffer and DepthStencilView are created for each texture</param>
        /// <param name="isDebugDevice">when true DirectX debug names are assigned to the created resources</param>
        /// <exception cref="OvrException">thrown when a LibOVR call fails</exception>
        public OculusTextureSwapChain(OvrWrap ovr,
                                      IntPtr sessionPtr,
                                      SharpDX.Direct3D11.Device device,
                                      EyeType eye,
                                      Format format,
                                      Sizei size,
                                      bool createDepthStencilView = false,
                                      bool isDebugDevice          = false)
        {
            _ovr          = ovr;
            _sessionPtr   = sessionPtr;
            _size         = size;
            _viewportSize = size;
            _viewport     = new ViewportF(0.0f, 0.0f, (float)size.Width, (float)size.Height);

            // The swap chain itself is created with the sRGB variant of the requested format;
            // the views below then pick the sRGB or non-sRGB format as needed.
            Format srgbFormat = GetSRgbFormat(format);

            TextureFormat        textureFormat = SharpDXHelpers.GetTextureFormat(srgbFormat);
            TextureSwapChainDesc swapChainDesc = new TextureSwapChainDesc()
            {
                ArraySize   = 1,
                BindFlags   = TextureBindFlags.DX_RenderTarget,
                Format      = textureFormat,
                Height      = _size.Height,
                MipLevels   = 1,
                MiscFlags   = TextureMiscFlags.DX_Typeless,
                SampleCount = 1,
                Width       = _size.Width
            };

            // Description for the optional depth buffer (typeless so the D24S8 view below can be created).
            Texture2DDescription depthBufferDescription = new Texture2DDescription()
            {
                ArraySize         = 1,
                BindFlags         = BindFlags.DepthStencil,
                CpuAccessFlags    = CpuAccessFlags.None,
                Format            = Format.R24G8_Typeless,
                Height            = _size.Height,
                MipLevels         = 1,
                OptionFlags       = ResourceOptionFlags.None,
                SampleDescription = new SampleDescription(1, 0),
                Usage             = ResourceUsage.Default,
                Width             = _size.Width
            };

            ShaderResourceViewDescription shaderResourceViewDescription = new ShaderResourceViewDescription()
            {
                Format    = srgbFormat,
                Dimension = ShaderResourceViewDimension.Texture2D
            };

            shaderResourceViewDescription.Texture2D.MipLevels = 1;

            // Create a texture swap chain, which will contain the textures to render to, for the current eye.
            var result = _ovr.CreateTextureSwapChainDX(_sessionPtr, device.NativePointer, ref swapChainDesc, out _textureSwapChainPtr);

            if (result < Ab3d.OculusWrap.Result.Success)
            {
                var lastError = _ovr.GetLastErrorInfo();
                throw new OvrException("Error creating Oculus TextureSwapChain: " + lastError.ErrorString, lastError.Result);
            }


            int length;

            result = _ovr.GetTextureSwapChainLength(_sessionPtr, _textureSwapChainPtr, out length);

            if (result < Ab3d.OculusWrap.Result.Success)
            {
                var lastError = _ovr.GetLastErrorInfo();
                throw new OvrException("Failed to retrieve the number of buffers of the created swap chain: " + lastError.ErrorString, lastError.Result);
            }


            // Wrap each native texture of the swap chain into a managed Texture2D and create its views.
            _textures = new TextureItem[length];

            for (int index = 0; index < length; ++index)
            {
                IntPtr bufferPtr;
                result = _ovr.GetTextureSwapChainBufferDX(_sessionPtr, _textureSwapChainPtr, index, typeof(Texture2D).GUID, out bufferPtr);

                if (result < Ab3d.OculusWrap.Result.Success)
                {
                    var lastError = _ovr.GetLastErrorInfo();
                    throw new OvrException("Failed to retrieve a texture from the created swap chain: " + lastError.ErrorString, lastError.Result);
                }

                Texture2D        colorTexture     = new Texture2D(bufferPtr);
                Texture2D        depthBuffer      = null;
                DepthStencilView depthStencilView = null;

                if (createDepthStencilView)
                {
                    depthBuffer      = new Texture2D(device, depthBufferDescription);
                    depthStencilView = new DepthStencilView(device, depthBuffer, new DepthStencilViewDescription()
                    {
                        Flags     = DepthStencilViewFlags.None,
                        Dimension = DepthStencilViewDimension.Texture2D,
                        Format    = Format.D24_UNorm_S8_UInt
                    });
                }

                _textures[index] = new TextureItem()
                {
                    Texture            = colorTexture,
                    TextureDescription = colorTexture.Description,
                    DepthBuffer        = depthBuffer,
                    DepthStencilView   = depthStencilView,
                    RTView             = new RenderTargetView(device, colorTexture, new RenderTargetViewDescription()
                    {
                        Format    = format,
                        Dimension = RenderTargetViewDimension.Texture2D
                    }),
                    SRView = new ShaderResourceView(device, colorTexture, shaderResourceViewDescription)
                };


                if (isDebugDevice)
                {
                    var eyeTextAndIndex = eye.ToString() + index.ToString();

                    _textures[index].Texture.DebugName = "OculusBackBuffer" + eyeTextAndIndex;
                    _textures[index].RTView.DebugName  = "OculusRT" + eyeTextAndIndex;
                    _textures[index].SRView.DebugName  = "OculusSR" + eyeTextAndIndex;

                    // FIXED: DepthBuffer and DepthStencilView are null when createDepthStencilView is false;
                    // setting their DebugName unconditionally threw a NullReferenceException on debug devices.
                    if (depthBuffer != null)
                        depthBuffer.DebugName = "OculusDepthBuffer" + eyeTextAndIndex;

                    if (depthStencilView != null)
                        depthStencilView.DebugName = "OculusDepthStencilView" + eyeTextAndIndex;
                }
            }
        }