Code Example #1
 public static extern void EndEyeRender(IntPtr hmd, ovrEyeType eye, ovrPosef renderPose, ref ovrD3D9Texture eyeTexture);
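The bare static extern declarations shown in these examples are P/Invoke entry points whose [DllImport] attribute line is not reproduced on this page. Below is a minimal sketch of how such a declaration is usually completed; the native library name "libovr", the wrapper class name OvrNative, and the Cdecl calling convention are placeholders and assumptions, not taken from the project:

 using System;
 using System.Runtime.InteropServices;

 public static class OvrNative
 {
     // Placeholder library name; substitute the native LibOVR wrapper DLL the binding actually ships.
     [DllImport("libovr", CallingConvention = CallingConvention.Cdecl)]
     public static extern void EndEyeRender(IntPtr hmd, ovrEyeType eye, ovrPosef renderPose, ref ovrD3D9Texture eyeTexture);
 }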
Code Example #2
        static void Main(string[] args)
        {
            Initialize();
            IntPtr     hmd  = Create(0);
            ovrHmdDesc desc = new ovrHmdDesc();

            GetDesc(hmd, ref desc);

            ovrSizei texture_size_left  = GetFovTextureSize(hmd, ovrEyeType.ovrEye_Left, desc.DefaultEyeFov()[(int)ovrEyeType.ovrEye_Left], 1.0f);
            ovrSizei texture_size_right = GetFovTextureSize(hmd, ovrEyeType.ovrEye_Right, desc.DefaultEyeFov()[(int)ovrEyeType.ovrEye_Right], 1.0f);

            ovrSizei rt_size = new ovrSizei(texture_size_left.w + texture_size_right.w, (texture_size_left.h > texture_size_right.h) ? texture_size_left.h : texture_size_right.h);

            // Initialize eye rendering information for ovrHmd_Configure.
            // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
            ovrEyeDesc[] eyes = new ovrEyeDesc[2];
            eyes[0].Eye                 = ovrEyeType.ovrEye_Left;
            eyes[1].Eye                 = ovrEyeType.ovrEye_Right;
            eyes[0].Fov                 = desc.DefaultEyeFov()[(int)ovrEyeType.ovrEye_Left];
            eyes[1].Fov                 = desc.DefaultEyeFov()[(int)ovrEyeType.ovrEye_Right];
            eyes[0].TextureSize         = rt_size;
            eyes[1].TextureSize         = rt_size;
            eyes[0].RenderViewport.Pos  = new ovrVector2i(0, 0);
            eyes[0].RenderViewport.Size = new ovrSizei(rt_size.w / 2, rt_size.h);
            eyes[1].RenderViewport.Pos  = new ovrVector2i((rt_size.w + 1) / 2, 0);
            eyes[1].RenderViewport.Size = eyes[0].RenderViewport.Size;

            ovrEyeRenderDesc[] renderDesc = new ovrEyeRenderDesc[2];

            ovrD3D9ConfigData renderConfigData = new ovrD3D9ConfigData();

            // Set to the real IDirect3DDevice9* pointer of the application's rendering device.
            renderConfigData.Device = (IntPtr)0;
            renderConfigData.Header = new ovrRenderAPIConfigHeader
            {
                API         = ovrRenderAPIType.ovrRenderAPI_D3D9,
                Multisample = 1,
                RTSize      = new ovrSizei(desc.Resolution.w, desc.Resolution.h)
            };

            ovrD3D9Texture[] textures = new ovrD3D9Texture[2];

            if (ConfigureRendering(hmd, ref renderConfigData, 0, ovrDistortionCaps.ovrDistortion_Chromatic | ovrDistortionCaps.ovrDistortion_TimeWarp, eyes, renderDesc))
            {
                StartSensor(hmd, ovrHmdCapBits.ovrHmdCap_Orientation | ovrHmdCapBits.ovrHmdCap_YawCorrection | ovrHmdCapBits.ovrHmdCap_LatencyTest, 0);
                //while true
                BeginFrame(hmd, 0);
                for (int eyeIndex = 0; eyeIndex < (int)ovrEyeType.ovrEye_Count; eyeIndex++)
                {
                    ovrEyeType eye           = desc.EyeRenderOrder()[eyeIndex];
                    ovrPosef   eyeRenderPose = BeginEyeRender(hmd, eye);

                    ovrSensorState sensor_state = GetSensorState(hmd, 0.0);

                    // Index the texture by eye, since EyeRenderOrder() may differ from the enum order.
                    EndEyeRender(hmd, eye, eyeRenderPose, ref textures[(int)eye]);
                }
                EndFrame(hmd);
                //end while
                StopSensor(hmd);
            }
            Shutdown();
        }
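The //while true and //end while markers above delimit the per-frame block. Below is a minimal sketch of the same calls placed in an explicit render loop; the running flag is hypothetical and the scene-drawing step is left as a comment:

 bool running = true;
 while (running)
 {
     BeginFrame(hmd, 0);
     for (int eyeIndex = 0; eyeIndex < (int)ovrEyeType.ovrEye_Count; eyeIndex++)
     {
         ovrEyeType eye           = desc.EyeRenderOrder()[eyeIndex];
         ovrPosef   eyeRenderPose = BeginEyeRender(hmd, eye);

         // ... draw the scene for 'eye' into the render target region
         //     described by eyes[(int)eye].RenderViewport ...

         EndEyeRender(hmd, eye, eyeRenderPose, ref textures[(int)eye]);
     }
     EndFrame(hmd);
 }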
Code Example #3
 public static extern ovrSizei GetFovTextureSize(IntPtr hmd, ovrEyeType eye, ovrFovPort fov, float pixelsPerDisplayPixel);
Code Example #4
 public static extern ovrPosef BeginEyeRender(IntPtr hmd, ovrEyeType eye);
Code Example #5
File: OVRHmd.cs Project: mimlowe/Aerosolt
 private static extern ovrEyeRenderDesc ovrHmd_GetRenderDesc(IntPtr hmd, ovrEyeType eye, ovrFovPort fov);
Code Example #6
File: OVRHmd.cs Project: mimlowe/Aerosolt
        // Generate distortion mesh per eye.
        // Distortion capabilities will depend on 'distortionCaps' flags; user should rely on
        // appropriate shaders based on their settings.
        // Distortion mesh data will be allocated and stored into the ovrDistortionMesh data structure,
        // which should be explicitly freed with ovrHmd_DestroyDistortionMesh.
        // Users should call ovrHmd_GetRenderScaleAndOffset to get uvScale and Offset values for rendering.
        // The function shouldn't fail unless there is a configuration or memory error, in which case
        // the ovrDistortionMesh values will be set to null.
        // This is the only function in the SDK reliant on eye relief, currently imported from profiles
        // or overridden here.
        public ovrDistortionMesh? CreateDistortionMesh(ovrEyeType eye,
            ovrFovPort fov,
            uint distortionCaps)
        {
            ovrDistortionMesh_Raw rawMesh = new ovrDistortionMesh_Raw();

            if (!ovrHmd_CreateDistortionMesh(HmdPtr, eye, fov, distortionCaps, out rawMesh))
            {
                return null;
            }

            ovrDistortionMesh mesh = new ovrDistortionMesh(rawMesh);
            ovrHmd_DestroyDistortionMesh(ref rawMesh);
            return mesh;
        }
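A minimal usage sketch for the wrapper above. The hmd variable (an instance of the class hosting CreateDistortionMesh) and the fov value are assumptions for illustration; the ovrDistortionCaps flags mirror the ones used in Code Example #2:

 // Build the left-eye distortion mesh; null signals a configuration or memory error.
 ovrDistortionMesh? mesh = hmd.CreateDistortionMesh(
     ovrEyeType.ovrEye_Left,
     fov,
     (uint)(ovrDistortionCaps.ovrDistortion_Chromatic | ovrDistortionCaps.ovrDistortion_TimeWarp));

 if (mesh.HasValue)
 {
     // mesh.Value holds the managed copy; the raw SDK buffer was already released
     // inside CreateDistortionMesh via ovrHmd_DestroyDistortionMesh.
 }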
Code Example #7
File: OVRHmd.cs Project: mimlowe/Aerosolt
 private static extern void ovrHmd_GetEyeTimewarpMatrices(IntPtr hmd, ovrEyeType eye, ovrPosef renderPose,
     [MarshalAs(UnmanagedType.LPArray, SizeConst = 2)]
     [Out] out ovrMatrix4f_Raw[] twnOut);
Code Example #8
File: OVRHmd.cs Project: mimlowe/Aerosolt
 private static extern ovrSizei ovrHmd_GetFovTextureSize(IntPtr hmd, ovrEyeType eye, ovrFovPort fov, float pixelsPerDisplayPixel);
Code Example #9
File: OVRHmd.cs Project: mimlowe/Aerosolt
 private static extern bool ovrHmd_CreateDistortionMesh(IntPtr hmd,
     ovrEyeType eye,
     ovrFovPort fov,
     uint distortionCaps,
     [Out] out ovrDistortionMesh_Raw meshData);
Code Example #10
File: OVRHmd.cs Project: mimlowe/Aerosolt
 private static extern ovrPosef ovrHmd_GetEyePose(IntPtr hmd, ovrEyeType eye);
Code Example #11
File: OVRHmd.cs Project: mimlowe/Aerosolt
        // Computes the timewarp matrices used by the distortion mesh shader; these adjust
        // for orientation changes since the last call to ovrHmd_GetEyePose for this eye.
        // The ovrDistortionVertex::TimeWarpFactor is used to blend between the matrices,
        // usually representing two different sides of the screen.
        // Must be called on the same thread as ovrHmd_BeginFrameTiming.
        public ovrMatrix4f[] ovrHmd_GetEyeTimewarpMatrices(ovrEyeType eye, ovrPosef renderPose)
        {
            ovrMatrix4f_Raw[] rawMats = {new ovrMatrix4f_Raw(), new ovrMatrix4f_Raw()};
            ovrHmd_GetEyeTimewarpMatrices(HmdPtr, eye, renderPose, out rawMats);

            ovrMatrix4f[] mats = {new ovrMatrix4f(rawMats[0]), new ovrMatrix4f(rawMats[1])};
            return mats;
        }
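A minimal per-eye sketch combining GetEyePose (Code Example #14) with the wrapper above. The hmd instance is an assumption, and the surrounding frame-timing calls (ovrHmd_BeginFrameTiming / ovrHmd_EndFrameTiming mentioned in the comment) are assumed to be handled elsewhere:

 for (int i = 0; i < (int)ovrEyeType.ovrEye_Count; i++)
 {
     ovrEyeType eye  = (ovrEyeType)i;
     ovrPosef   pose = hmd.GetEyePose(eye);

     // ... render the scene for this eye using 'pose' ...

     // Two matrices come back, blended in the shader via ovrDistortionVertex::TimeWarpFactor.
     ovrMatrix4f[] timewarp = hmd.ovrHmd_GetEyeTimewarpMatrices(eye, pose);
     // Upload timewarp[0] and timewarp[1] as shader constants for the distortion pass.
 }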
Code Example #12
File: OVRHmd.cs Project: mimlowe/Aerosolt
 public ovrEyeRenderDesc GetRenderDesc(ovrEyeType eyeType, ovrFovPort fov)
 {
     return ovrHmd_GetRenderDesc(HmdPtr, eyeType, fov);
 }
Code Example #13
File: OVRHmd.cs Project: mimlowe/Aerosolt
 //-------------------------------------------------------------------------------------
 // ***** Graphics Setup
 // Calculates the texture size recommended for rendering one eye within the HMD, given the FOV cone.
 // A higher FOV will generally require larger textures to maintain quality.
 //  - pixelsPerDisplayPixel specifies the number of render target pixels per display
 //    pixel at the center of distortion; 1.0 is the default value. Lower values
 //    can improve performance.
 public ovrSizei GetFovTextureSize(ovrEyeType eye, ovrFovPort fov, float pixelsPerDisplayPixel = 1.0f)
 {
     return ovrHmd_GetFovTextureSize(HmdPtr, eye, fov, pixelsPerDisplayPixel);
 }
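A minimal sketch of sizing one shared render target from the per-eye recommendations, mirroring the arithmetic in Code Example #2. The hmd instance and the defaultFov array (an ovrFovPort per eye, as returned by ovrHmdDesc.DefaultEyeFov() in that example) are assumptions:

 ovrSizei leftSize  = hmd.GetFovTextureSize(ovrEyeType.ovrEye_Left,  defaultFov[(int)ovrEyeType.ovrEye_Left]);
 ovrSizei rightSize = hmd.GetFovTextureSize(ovrEyeType.ovrEye_Right, defaultFov[(int)ovrEyeType.ovrEye_Right]);

 // Both eyes share one target: widths add, height is the larger of the two.
 ovrSizei renderTargetSize = new ovrSizei(
     leftSize.w + rightSize.w,
     (leftSize.h > rightSize.h) ? leftSize.h : rightSize.h);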
Code Example #14
File: OVRHmd.cs Project: mimlowe/Aerosolt
 // Predicts and returns the pose that should be used when rendering the specified eye.
 // Must be called between ovrHmd_BeginFrameTiming and ovrHmd_EndFrameTiming.
 public ovrPosef GetEyePose(ovrEyeType eye)
 {
     return ovrHmd_GetEyePose(HmdPtr, eye);
 }
Code Example #15
File: LibOVR.cs Project: oggy83/OculusWalkerDemo
 public extern static ovrEyeRenderDesc ovrHmd_GetRenderDesc(IntPtr hmd, ovrEyeType eyeType, ovrFovPort fov);