Code example #1
        /// <summary>
        /// Creates the back-buffer wrapper and per-eye render targets, and configures
        /// Oculus stereo rendering, distortion and head tracking.
        /// </summary>
        public override void CreateDisplayResources()
        {
            base.CreateDisplayResources();

            // Wrap the swap chain's back buffer; the SDK renders the distorted output into it (see d3d11cfg below).
            backbufferColor = new RenderTarget2D(device, swapChain.GetBackBuffer<D3D.Texture2D>(0));

            // Configure Stereo settings.
            OVR.Sizei recommendedTex0Size = hmd.GetFovTextureSize(OVR.EyeType.Left, hmd.DefaultEyeFov[0], 1.0f);
            OVR.Sizei recommendedTex1Size = hmd.GetFovTextureSize(OVR.EyeType.Right, hmd.DefaultEyeFov[1], 1.0f);

            int ms = backbufferColor.SampleCount;
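            // The per-eye color/depth targets below reuse the back buffer's MSAA sample count.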

            backbufferColor1 = new RenderTarget2D(device, ColorFormat.Rgba8, recommendedTex0Size.Width, recommendedTex0Size.Height, ms);
            backbufferDepth1 = new DepthStencil2D(device, DepthFormat.D24S8, recommendedTex0Size.Width, recommendedTex0Size.Height, ms);
            backbufferColor2 = new RenderTarget2D(device, ColorFormat.Rgba8, recommendedTex1Size.Width, recommendedTex1Size.Height, ms);
            backbufferDepth2 = new DepthStencil2D(device, DepthFormat.D24S8, recommendedTex1Size.Width, recommendedTex1Size.Height, ms);

            // When MSAA is enabled, the multisampled eye buffers are resolved into these
            // single-sample targets before being handed to the SDK.
            if (ms > 1)
            {
                backbufferColor1Resolved = new RenderTarget2D(device, ColorFormat.Rgba8, recommendedTex0Size.Width, recommendedTex0Size.Height);
                backbufferColor2Resolved = new RenderTarget2D(device, ColorFormat.Rgba8, recommendedTex1Size.Width, recommendedTex1Size.Height);
            }

            OVR.FovPort[] eyeFov = new OVR.FovPort[]
            {
                hmd.DefaultEyeFov[0],
                hmd.DefaultEyeFov[1]
            };

            OVR.Sizei size1 = new OVR.Sizei(recommendedTex0Size.Width, recommendedTex0Size.Height);
            OVR.Sizei size2 = new OVR.Sizei(recommendedTex1Size.Width, recommendedTex1Size.Height);

            OVR.Recti[] eyeRenderViewport = new OVR.Recti[2];
            eyeRenderViewport[0].Position = new OVR.Vector2i(0, 0);
            eyeRenderViewport[0].Size     = size1;
            eyeRenderViewport[1].Position = new OVR.Vector2i(0, 0);
            eyeRenderViewport[1].Size     = size2;
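            // Both viewports start at (0,0) and cover the whole texture, since each eye renders to its own target.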

            // Query D3D texture data.
            eyeTexture = new OVR.D3D11.D3D11TextureData[2];
            eyeTexture[0].Header.API            = OVR.RenderAPIType.D3D11;
            eyeTexture[0].Header.TextureSize    = size1;
            eyeTexture[0].Header.RenderViewport = eyeRenderViewport[0];

            // The right eye copies the left eye's header, then overrides the viewport;
            // its texture pointers are assigned separately below.
            eyeTexture[1] = eyeTexture[0];
            eyeTexture[1].Header.RenderViewport = eyeRenderViewport[1];

            // Hand the SDK the native texture/SRV pointers; with MSAA, point it at the resolved copies.
            if (ms > 1)
            {
                eyeTexture[0].Texture            = backbufferColor1Resolved.Surface.Resource.NativePointer;
                eyeTexture[0].ShaderResourceView = backbufferColor1Resolved.SRV.NativePointer;

                eyeTexture[1].Texture            = backbufferColor2Resolved.Surface.Resource.NativePointer;
                eyeTexture[1].ShaderResourceView = backbufferColor2Resolved.SRV.NativePointer;
            }
            else
            {
                eyeTexture[0].Texture            = backbufferColor1.Surface.Resource.NativePointer;
                eyeTexture[0].ShaderResourceView = backbufferColor1.SRV.NativePointer;

                eyeTexture[1].Texture            = backbufferColor2.Surface.Resource.NativePointer;
                eyeTexture[1].ShaderResourceView = backbufferColor2.SRV.NativePointer;
            }

            // Configure d3d11.
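            // The SDK needs the native device, immediate context, swap chain and back-buffer RTV
            // in order to render the distorted output to the back buffer.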
            OVR.D3D11.D3D11ConfigData d3d11cfg = new OVR.D3D11.D3D11ConfigData();
            d3d11cfg.Header.API            = OVR.RenderAPIType.D3D11;
            d3d11cfg.Header.BackBufferSize = new OVR.Sizei(hmd.Resolution.Width, hmd.Resolution.Height);
            d3d11cfg.Header.Multisample    = 1;
            d3d11cfg.Device        = d3dDevice.NativePointer;
            d3d11cfg.DeviceContext = d3dDevice.ImmediateContext.NativePointer;
            d3d11cfg.BackBufferRenderTargetView = backbufferColor.Surface.RTV.NativePointer;
            d3d11cfg.SwapChain = swapChain.NativePointer;

            // ConfigureRendering enables SDK distortion rendering with the selected caps and
            // returns the per-eye rendering descriptions (FOV, viewport and eye offsets).
            eyeRenderDesc = hmd.ConfigureRendering(d3d11cfg, OVR.DistortionCaps.ovrDistortionCap_Chromatic | OVR.DistortionCaps.ovrDistortionCap_Vignette | OVR.DistortionCaps.ovrDistortionCap_TimeWarp | OVR.DistortionCaps.ovrDistortionCap_Overdrive, eyeFov);
            if (eyeRenderDesc == null)
            {
                throw new InvalidOperationException("hmd.ConfigureRendering failed: eyeRenderDesc is null.");
            }

            // Specify which head tracking capabilities to enable.
            hmd.SetEnabledCaps(OVR.HmdCaps.LowPersistence /*| OVR.HmdCaps.DynamicPrediction*/);

            // Start the sensor which informs of the Rift's pose and motion
            hmd.ConfigureTracking(OVR.TrackingCaps.ovrTrackingCap_Orientation | OVR.TrackingCaps.ovrTrackingCap_MagYawCorrection | OVR.TrackingCaps.ovrTrackingCap_Position, OVR.TrackingCaps.None);
        }
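
For context, the textures and eyeRenderDesc prepared above are consumed once per frame. The outline below is only a hypothetical sketch of that loop: it assumes the wrapper exposes BeginFrame, GetEyePoses and EndFrame methods corresponding to ovrHmd_BeginFrame, ovrHmd_GetEyePoses and ovrHmd_EndFrame in the 0.4-era SDK; the actual method names and signatures in this code base may differ.

        // Hypothetical per-frame loop; method names and signatures are assumptions, not this wrapper's verified API.
        void RenderOneFrame(uint frameIndex)
        {
            hmd.BeginFrame(frameIndex);                                        // cf. ovrHmd_BeginFrame

            // Per-eye offsets come from the eyeRenderDesc returned by ConfigureRendering above.
            OVR.Vector3f[] eyeOffsets = { eyeRenderDesc[0].HmdToEyeViewOffset,
                                          eyeRenderDesc[1].HmdToEyeViewOffset };
            OVR.Posef[] eyePoses = hmd.GetEyePoses(frameIndex, eyeOffsets);    // cf. ovrHmd_GetEyePoses

            for (int eye = 0; eye < 2; eye++)
            {
                // Render the scene for this eye into backbufferColor1/2 using eyePoses[eye] and
                // eyeRenderDesc[eye].Fov; resolve into the *Resolved targets when MSAA is enabled.
            }

            // The SDK applies distortion (and timewarp) to the registered eye textures and presents the frame.
            hmd.EndFrame(eyePoses, eyeTexture);                                // cf. ovrHmd_EndFrame
        }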