/// <summary>
/// Sets the camera orientation and position for this frame.
/// Reads the latest render pose from the Rift, composes it with the
/// externally-driven Y rotation and orientation offsets, then writes the
/// result onto this camera's transform and any registered follower objects.
/// </summary>
void SetCameraOrientation()
{
    // Cache the Camera component once; GetComponent is a comparatively
    // expensive lookup in Unity and the original called it ~10 times here.
    Camera camera = GetComponent<Camera>();

    // Main camera has a depth of 0, so it will be rendered first.
    // Only the main camera samples the tracker; secondary cameras reuse
    // the values it stored in CameraPosition / CameraOrientation.
    if (camera == CameraController.CameraMain)
    {
        if (CameraController.TrackerRotatesY)
        {
            // Feed the tracker yaw back into the parent so the rig follows
            // the head's Y rotation (pitch/roll are zeroed out).
            Vector3 a = camera.transform.rotation.eulerAngles;
            a.x = 0;
            a.z = 0;
            transform.parent.transform.eulerAngles = a;
        }

        ovrPosef renderPose = OVR_GetRenderPose();

        if (CameraController.EnablePosition)
        {
            CameraPosition = renderPose.Position.ToVector3();
        }

        // Skip orientation updates while time warp is frozen: time warp will
        // re-project the frozen view, so sampling here would fight it.
        bool useOrt = (CameraController.EnableOrientation &&
                       !(CameraController.TimeWarp && CameraController.FreezeTimeWarp));
        if (useOrt)
        {
            CameraOrientation = renderPose.Orientation.ToQuaternion();
        }
    }

    // Calculate the rotation Y offset that is getting updated externally
    // (i.e. like a controller rotation)
    float yRotation = 0.0f;
    CameraController.GetYRotation(ref yRotation);
    Quaternion qp = Quaternion.Euler(0.0f, yRotation, 0.0f);
    Vector3 dir = qp * Vector3.forward;
    qp.SetLookRotation(dir, Vector3.up);

    // Multiply the camera controllers offset orientation (allow follow of orientation offset)
    Quaternion orientationOffset = Quaternion.identity;
    CameraController.GetOrientationOffset(ref orientationOffset);
    qp = orientationOffset * qp * CameraOrientationOffset;

    // Multiply in the current HeadQuat (q is now the latest best rotation)
    Quaternion q = qp * CameraOrientation;

    // * * *
    // Update camera rotation
    camera.transform.rotation = q;

    // * * *
    // Update camera position (first add Offset to parent transform)
    camera.transform.localPosition = NeckPosition;

    // Adjust neck by taking eye position and transforming through q.
    // Get final camera position as well as the clipping difference
    // (to allow for absolute location of center of camera grid space)
    Vector3 newCamPos = Vector3.zero;
    CameraPositionOffsetAndClip(ref CameraPosition, ref newCamPos);

    // Update list of game objects with new CameraOrientation / newCamPos here.
    // For example, this location is used to update the GridCube.
    foreach (OVRCameraGameObject obj in CameraLocalSetList)
    {
        // Only update followers registered against this camera's depth.
        if (obj.CameraController.GetCameraDepth() == camera.depth)
        {
            // Initial difference
            Vector3 newPos = -(qp * CameraPositionOffset);
            // Final position
            newPos += camera.transform.position;

            // Set the game object info
            obj.CameraGameObject.transform.position = newPos;
            obj.CameraGameObject.transform.rotation = qp;
        }
    }

    // Adjust camera position with offset/clipped cam location
    camera.transform.localPosition +=
        Quaternion.Inverse(camera.transform.parent.rotation) * qp * newCamPos;

    // Apply only the horizontal eye separation (IPD half-offset) locally;
    // the y/z eye components were already folded in above.
    Vector3 newEyePos = Vector3.zero;
    newEyePos.x = EyePosition.x;
    camera.transform.localPosition += camera.transform.localRotation * newEyePos;
}
/// <summary>
/// P/Invoke binding for the native end-of-eye-render call (presumably
/// ovrHmd_EndEyeRender from the Oculus C API — the DllImport attribute is
/// not visible in this chunk; verify against the declaration site).
/// </summary>
/// <param name="hmd">Opaque native HMD handle.</param>
/// <param name="eye">Which eye's rendering is being completed.</param>
/// <param name="renderPose">The pose returned by the matching BeginEyeRender call.</param>
/// <param name="eyeTexture">The D3D9 texture the eye was rendered into; passed by ref to the native layer.</param>
public static extern void EndEyeRender(IntPtr hmd, ovrEyeType eye, ovrPosef renderPose, ref ovrD3D9Texture eyeTexture);
/// <summary>
/// Sample entry point: initializes the Oculus runtime, configures D3D9
/// distortion rendering for both eyes, runs one frame of the
/// begin/end eye-render loop (the real render loop is elided — see the
/// "//while true" markers), then shuts everything down.
/// </summary>
/// <param name="args">Command-line arguments (unused).</param>
static void Main(string[] args)
{
    Initialize();
    IntPtr hmd = Create(0);

    ovrHmdDesc desc = new ovrHmdDesc();
    GetDesc(hmd, ref desc);

    // Ask the runtime how big each eye's render target should be at 1.0x
    // pixel density, then pack both eyes side by side into one target.
    ovrSizei texture_size_left = GetFovTextureSize(hmd, ovrEyeType.ovrEye_Left,
        desc.DefaultEyeFov()[(int)ovrEyeType.ovrEye_Left], 1.0f);
    ovrSizei texture_size_right = GetFovTextureSize(hmd, ovrEyeType.ovrEye_Right,
        desc.DefaultEyeFov()[(int)ovrEyeType.ovrEye_Right], 1.0f);

    ovrSizei rt_size = new ovrSizei(
        texture_size_left.w + texture_size_right.w,
        (texture_size_left.h > texture_size_right.h) ? texture_size_left.h : texture_size_right.h);

    // Initialize eye rendering information for ovrHmd_Configure.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrEyeDesc[] eyes = new ovrEyeDesc[2];
    eyes[0].Eye = ovrEyeType.ovrEye_Left;
    eyes[1].Eye = ovrEyeType.ovrEye_Right;
    eyes[0].Fov = desc.DefaultEyeFov()[(int)ovrEyeType.ovrEye_Left];
    eyes[1].Fov = desc.DefaultEyeFov()[(int)ovrEyeType.ovrEye_Right];
    eyes[0].TextureSize = rt_size;
    eyes[1].TextureSize = rt_size;
    eyes[0].RenderViewport.Pos = new ovrVector2i(0, 0);
    eyes[0].RenderViewport.Size = new ovrSizei(rt_size.w / 2, rt_size.h);
    eyes[1].RenderViewport.Pos = new ovrVector2i((rt_size.w + 1) / 2, 0);
    eyes[1].RenderViewport.Size = eyes[0].RenderViewport.Size;

    ovrEyeRenderDesc[] renderDesc = new ovrEyeRenderDesc[2];

    ovrD3D9ConfigData renderConfigData = new ovrD3D9ConfigData();
    // Real pointer (IDirect3DDevice9*) to the device would go here;
    // the sample leaves it null.
    renderConfigData.Device = IntPtr.Zero;
    renderConfigData.Header = new ovrRenderAPIConfigHeader
    {
        API = ovrRenderAPIType.ovrRenderAPI_D3D9,
        Multisample = 1,
        RTSize = new ovrSizei(desc.Resolution.w, desc.Resolution.h)
    };

    ovrD3D9Texture[] textures = new ovrD3D9Texture[2];

    if (ConfigureRendering(hmd, ref renderConfigData, 0,
            ovrDistortionCaps.ovrDistortion_Chromatic | ovrDistortionCaps.ovrDistortion_TimeWarp,
            eyes, renderDesc))
    {
        StartSensor(hmd,
            ovrHmdCapBits.ovrHmdCap_Orientation |
            ovrHmdCapBits.ovrHmdCap_YawCorrection |
            ovrHmdCapBits.ovrHmdCap_LatencyTest, 0);

        //while true
        BeginFrame(hmd, 0);

        for (int eyeIndex = 0; eyeIndex < (int)ovrEyeType.ovrEye_Count; eyeIndex++)
        {
            // EyeRenderOrder may visit eyes in either order, so index the
            // texture array by the *eye*, not by the loop counter.
            // (The original used textures[eyeIndex], which pairs the wrong
            // texture with the eye whenever the render order is swapped.)
            ovrEyeType eye = desc.EyeRenderOrder()[eyeIndex];
            ovrPosef eyeRenderPose = BeginEyeRender(hmd, eye);
            ovrSensorState sensor_state = GetSensorState(hmd, 0.0);
            EndEyeRender(hmd, eye, eyeRenderPose, ref textures[(int)eye]);
        }

        EndFrame(hmd);
        //end while

        StopSensor(hmd);
    }

    Shutdown();
}
/// <summary>
/// P/Invoke binding for the native ovr_CalcEyePoses utility (the DllImport
/// attribute is not visible in this chunk; verify against the declaration site).
/// Computes per-eye poses from a head pose and per-eye view offsets.
/// </summary>
/// <param name="headPose">Current head pose to derive the eye poses from.</param>
/// <param name="hmdToEyeViewOffset">Per-eye translation offsets from the HMD center (expects one entry per eye).</param>
/// <param name="outEyePoses">Receives the computed eye poses; marshaled out by the native call.</param>
public extern static void ovr_CalcEyePoses(ovrPosef headPose, ovrVector3f[] hmdToEyeViewOffset, [Out] ovrPosef[] outEyePoses);