Example #1
        static void Main(string[] args)
        {
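            // Initialize LibOVR, create the first HMD device and read its description.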
            Initialize();
            IntPtr     hmd  = Create(0);
            ovrHmdDesc desc = new ovrHmdDesc();

            GetDesc(hmd, ref desc);

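            // Ask the SDK for the recommended texture size of each eye at full pixel density,
            // then size one shared render target that holds both eye views side by side.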
            ovrSizei texture_size_left  = GetFovTextureSize(hmd, ovrEyeType.ovrEye_Left, desc.DefaultEyeFov()[(int)ovrEyeType.ovrEye_Left], 1.0f);
            ovrSizei texture_size_right = GetFovTextureSize(hmd, ovrEyeType.ovrEye_Right, desc.DefaultEyeFov()[(int)ovrEyeType.ovrEye_Right], 1.0f);

            ovrSizei rt_size = new ovrSizei(texture_size_left.w + texture_size_right.w, (texture_size_left.h > texture_size_right.h) ? texture_size_left.h : texture_size_right.h);

            // Initialize eye rendering information for ovrHmd_Configure.
            // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
            ovrEyeDesc[] eyes = new ovrEyeDesc[2];
            eyes[0].Eye                 = ovrEyeType.ovrEye_Left;
            eyes[1].Eye                 = ovrEyeType.ovrEye_Right;
            eyes[0].Fov                 = desc.DefaultEyeFov()[(int)ovrEyeType.ovrEye_Left];
            eyes[1].Fov                 = desc.DefaultEyeFov()[(int)ovrEyeType.ovrEye_Right];
            eyes[0].TextureSize         = rt_size;
            eyes[1].TextureSize         = rt_size;
            eyes[0].RenderViewport.Pos  = new ovrVector2i(0, 0);
            eyes[0].RenderViewport.Size = new ovrSizei(rt_size.w / 2, rt_size.h);
            eyes[1].RenderViewport.Pos  = new ovrVector2i((rt_size.w + 1) / 2, 0);
            eyes[1].RenderViewport.Size = eyes[0].RenderViewport.Size;

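            // Per-eye render descriptions; filled in by ConfigureRendering below.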
            ovrEyeRenderDesc[] renderDesc = new ovrEyeRenderDesc[2];

            ovrD3D9ConfigData renderConfigData = new ovrD3D9ConfigData();

            // Set this to the real IDirect3DDevice9* of your rendering device (IntPtr.Zero is only a placeholder).
            renderConfigData.Device = IntPtr.Zero;
            renderConfigData.Header = new ovrRenderAPIConfigHeader
            {
                API         = ovrRenderAPIType.ovrRenderAPI_D3D9,
                Multisample = 1,
                RTSize      = new ovrSizei(desc.Resolution.w, desc.Resolution.h)
            };

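            // Note: in a complete application each ovrD3D9Texture must be filled with the eye's
            // render-target texture and viewport before it is passed to EndEyeRender below.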
            ovrD3D9Texture[] textures = new ovrD3D9Texture[2];

            if (ConfigureRendering(hmd, ref renderConfigData, 0, ovrDistortionCaps.ovrDistortion_Chromatic | ovrDistortionCaps.ovrDistortion_TimeWarp, eyes, renderDesc))
            {
                StartSensor(hmd, ovrHmdCapBits.ovrHmdCap_Orientation | ovrHmdCapBits.ovrHmdCap_YawCorrection | ovrHmdCapBits.ovrHmdCap_LatencyTest, 0);
                // Frame loop (a single iteration is shown; in a real application this repeats until shutdown):
                BeginFrame(hmd, 0);
                for (int eyeIndex = 0; eyeIndex < (int)ovrEyeType.ovrEye_Count; eyeIndex++)
                {
                    ovrEyeType eye           = desc.EyeRenderOrder()[eyeIndex];
                    ovrPosef   eyeRenderPose = BeginEyeRender(hmd, eye);

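                    // The predicted head pose for this frame could be read from sensor_state here
                    // (it is queried but not used in this example).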
                    ovrSensorState sensor_state = GetSensorState(hmd, 0.0);

                    // Index the texture by eye, since EyeRenderOrder may differ from the loop index.
                    EndEyeRender(hmd, eye, eyeRenderPose, ref textures[(int)eye]);
                }
                EndFrame(hmd);
                // end of frame loop
                StopSensor(hmd);
            }
            Shutdown();
        }
Example #2
        // Wrapper around ovrHmd_ConfigureRendering(): returns the per-eye render descriptions on success, or null on failure.
        public ovrEyeRenderDesc[] ConfigureRendering(ovrEyeDesc[] eyeDescIn, uint distortionCaps)
        {
            ovrEyeRenderDesc[] eyeRenderDesc = new ovrEyeRenderDesc[] { new ovrEyeRenderDesc(), new ovrEyeRenderDesc() };
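            // Note: the render-API configuration is passed in with default values here; a real caller
            // would normally fill in renderAPIConfig.Header (API type, RTSize, Multisample) first.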
            ovrRenderAPIConfig renderAPIConfig = new ovrRenderAPIConfig();

            if (ovrHmd_ConfigureRendering(HmdPtr, renderAPIConfig, distortionCaps, eyeDescIn, eyeRenderDesc))
                return eyeRenderDesc;
            return null;
        }
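A possible call site for the wrapper above (a sketch based on Example #1, not taken from the original source; "wrapper" stands for an instance of the class that owns HmdPtr, and "eyes" is assumed to be the ovrEyeDesc array prepared as in Example #1):

        // Hypothetical usage of the ConfigureRendering wrapper above; "eyes" is the ovrEyeDesc[2]
        // array built as in Example #1, and "wrapper" is the object holding HmdPtr.
        ovrEyeRenderDesc[] renderDesc = wrapper.ConfigureRendering(
            eyes,
            (uint)(ovrDistortionCaps.ovrDistortion_Chromatic | ovrDistortionCaps.ovrDistortion_TimeWarp));

        if (renderDesc == null)
        {
            // ovrHmd_ConfigureRendering failed; distortion rendering is not available.
        }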