// InitCameraControllerVariables
// Made public so that it can be called by classes that require information about the
// camera to be present when initing variables in 'Start'.
// Pulls all per-device optical values (IPD, lens offsets, FOV, aspect, distortion
// coefficients) from the device layer into this controller's fields, and captures
// the initial world orientation to use as the orientation offset.
public void InitCameraControllerVariables()
{
    // Get the IPD value (distance between eyes in meters)
    OVRDeviceImposter.GetIPD(ref IPD);

    // Get the values for both IPD and lens distortion correction shift. We don't normally
    // need to set the PhysicalLensOffset once it's been set here.
    OVRDeviceImposter.CalculatePhysicalLensOffsets(ref LensOffsetLeft, ref LensOffsetRight);

    // Using the calculated FOV, based on distortion parameters, yields the best results.
    // However, public functions will allow to override the FOV if desired
    VerticalFOV = OVRDeviceImposter.VerticalFOV();

    // Store aspect ratio as well
    AspectRatio = OVRDeviceImposter.CalculateAspectRatio();

    // Distortion polynomial coefficients k0..k3, consumed by the lens-correction shader setup
    OVRDeviceImposter.GetDistortionCorrectionCoefficients(ref DistK0, ref DistK1, ref DistK2, ref DistK3);

    // Get our initial world orientation of the cameras from the scene (we can grab it from
    // the set FollowOrientation object or this OVRCameraController gameObject)
    if (FollowOrientation != null)
    {
        OrientationOffset = FollowOrientation.rotation;
    }
    else
    {
        OrientationOffset = transform.rotation;
    }
}
// Start
// Caches the OVRCameraController from the parent object and (optionally) creates an
// oversized render target so lens-distortion post processing has extra pixels to warp.
new void Start()
{
    base.Start();

    // Get the OVRCameraController from our parent. Guard against a missing parent so a
    // misconfigured hierarchy produces the warning below instead of a NullReferenceException.
    Transform parent = gameObject.transform.parent;
    if (parent != null)
    {
        CameraController = parent.GetComponent<OVRCameraController>();
    }

    if (CameraController == null)
    {
        Debug.LogWarning("WARNING: OVRCameraController not found!");
    }

    // NOTE: MSAA TEXTURES NOT AVAILABLE YET
    // Set CameraTextureScale (increases the size of the texture we are rendering into
    // for a better pixel match when post processing the image through lens distortion)
#if MSAA_ENABLED
    CameraTextureScale = OVRDeviceImposter.DistortionScale();
#endif

    // If CameraTextureScale is not 1.0f, create a new texture and assign to target texture
    // Otherwise, fall back to normal camera rendering
    if ((CameraTexture == null) && (CameraTextureScale > 1.0f))
    {
        // Each eye renders into half the screen width
        int w = (int)(Screen.width / 2.0f * CameraTextureScale);
        int h = (int)(Screen.height * CameraTextureScale);
        CameraTexture = new RenderTexture(w, h, 24); // 24 = depth buffer bits

        // NOTE: MSAA TEXTURES NOT AVAILABLE YET
        // This value should be the default for MSAA textures
        // CameraTexture.antiAliasing = 4;
        // Set it within the project
#if MSAA_ENABLED
        CameraTexture.antiAliasing = QualitySettings.antiAliasing;
#endif
    }
}
// StereoBox - Values based on pixels in DK1 resolution of W: (1280 / 2) H: 800
// Draws a GUI box with text either once (3D mode, rendered into the stereo pass) or
// twice (legacy 2D mode, one copy per eye with lens-offset compensation).
// TODO: Create overloaded function to take normalized float values from 0 - 1 on screen
public void StereoBox(int X, int Y, int wX, int wY, ref string text, Color color)
{
    // Remember the current font so we can restore it on exit
    Font prevFont = GUI.skin.font;

    if (Draw3D == true)
    {
        GUI.contentColor = color;
        if (GUI.skin.font != FontReplace)
        {
            GUI.skin.font = FontReplace;
        }

        // Scale factors from the DK1 reference resolution to the actual screen
        float sSX = (float)Screen.width / PixelWidth;
        float sSY = (float)Screen.height / PixelHeight;

        // 1.75f appears to be an empirically tuned widening factor for the 3D pass
        int x = (int)((float)X * sSX * 1.75f);
        // NOTE(review): width is scaled by the *vertical* factor sSY here — looks like it
        // should be sSX; possibly intentional tuning. TODO confirm against the 3D GUI layout.
        int wx = (int)((float)wX * sSY * 1.0f);
        GUI.Box(new Rect(x, Y, wx, wY), text);
    }
    else
    {
        // Deprecate this part of code; we will want to do everything in 3D space
        // on the RIFT (especially when HD versions of the Rift are available)
        float ploLeft = 0, ploRight = 0;
        float sSX = (float)Screen.width / PixelWidth;
        float sSY = (float)Screen.height / PixelHeight;
        // Physical lens offsets; only ploLeft is used below
        OVRDeviceImposter.GetPhysicalLensOffsets(ref ploLeft, ref ploRight);

        // Left-eye X, plus the stereo spread to position the right-eye copy
        int xL = (int)((float)X * sSX);
        int sSpreadX = (int)(StereoSpreadX * sSX);
        int xR = (Screen.width / 2) + xL + sSpreadX -
                 // required to adjust for physical lens shift
                 (int)(ploLeft * (float)Screen.width / 2);
        int y = (int)((float)Y * sSY);
        GUI.contentColor = color;
        int sWX = (int)((float)wX * sSX);
        int sWY = (int)((float)wY * sSY);

        if (FontReplace != null)
        {
            GUI.skin.font = FontReplace;
        }

        // One box per eye at the same vertical position
        GUI.Box(new Rect(xL, y, sWX, sWY), text);
        GUI.Box(new Rect(xR, y, sWX, sWY), text);
    }

    // Restore whatever font was active before we were called
    GUI.skin.font = prevFont;
}
// Start
// One-time setup for the crosshair: reset its visibility/collision/fade state,
// cache the main camera, centre the cursor in the left-eye viewport, and read
// the physical lens offsets from the device layer.
void Start()
{
    // Crosshair begins hidden, not colliding, and fully faded out
    DisplayCrosshair      = false;
    CollisionWithGeometry = false;
    FadeVal               = 0.0f;

    // Cache the scene's main camera for later use
    MainCam = Camera.main;

    // Cursor starts at the centre of the left half of the screen
    XL = Screen.width  * 0.25f;
    YL = Screen.height * 0.5f;

    // Get the values for both IPD and lens distortion correction shift
    OVRDeviceImposter.GetPhysicalLensOffsets(ref LensOffsetLeft, ref LensOffsetRight);
}
// UpdateGeometry
// Aligns the debug compass geometry with the current (possibly predicted) HMD yaw,
// and colors the reference marker red while mag yaw correction is in progress.
// No-ops unless the geometry is visible and all required references are wired up.
public void UpdateGeometry()
{
    if (MagShowGeometry == false)
    {
        return;
    }
    if (CameraController == null)
    {
        return;
    }
    if ((GeometryReference == null) || (GeometryCompass == null))
    {
        return;
    }

    // All set, we can update the geometry with camera and position values.
    // CameraController is guaranteed non-null by the guard above, so only the
    // prediction flag decides which orientation source we read.
    Quaternion q = Quaternion.identity;
    if (CameraController.PredictionOn == true)
    {
        OVRDeviceImposter.GetPredictedOrientation(0, ref q);
    }
    else
    {
        OVRDeviceImposter.GetOrientation(0, ref q);
    }

    // Counter-rotate the compass around Y so it stays fixed relative to the reference yaw
    Vector3 v = GeometryCompass.transform.localEulerAngles;
    v.y = -q.eulerAngles.y + CurEulerRef.y;
    GeometryCompass.transform.localEulerAngles = v;

    // Set the color of the marker to red if we are calibrating
    if (GeometryReferenceMarkMat != null)
    {
        Color c = (OVRDeviceImposter.IsMagYawCorrectionInProgress(0) == true) ? Color.red : Color.green;
        GeometryReferenceMarkMat.SetColor("_Color", c);
    }
}
// LatencyTest
// Prints any latency-test result string from the device layer, then (when the device
// requests it) draws a solid color quad into 'dest' using immediate-mode GL so the
// latency tester hardware can detect the screen color change.
void LatencyTest(RenderTexture dest)
{
    byte r = 0, g = 0, b = 0;

    // See if we get a string back to send to the debug out
    string s = Marshal.PtrToStringAnsi(OVRDeviceImposter.GetLatencyResultsString());
    if (s != null)
    {
        string result = "\n\n---------------------\nLATENCY TEST RESULTS:\n---------------------\n";
        result += s;
        result += "\n\n\n";
        print(result);
    }

    // Device decides whether a test color should be displayed this frame; r/g/b are
    // filled in by the call when it returns true
    if (OVRDeviceImposter.DisplayLatencyScreenColor(ref r, ref g, ref b) == false)
    {
        return;
    }

    // NOTE(review): looks like leftover per-frame debug spam — consider removing
    Debug.Log(r + " " + g + " " + b);

    // Render the quad directly into the destination texture
    RenderTexture.active = dest;
    Material material = ColorOnlyMaterial;
    // Convert byte color channels (0-255) to normalized floats for the material
    QuadColor.r = (float)r / 255.0f;
    QuadColor.g = (float)g / 255.0f;
    QuadColor.b = (float)b / 255.0f;
    material.SetColor("_Color", QuadColor);

    // Immediate-mode draw: ortho projection, one quad covering (0.3,0.3)-(0.7,0.7)
    // in normalized screen space; Push/Pop preserve the surrounding GL state
    GL.PushMatrix();
    material.SetPass(0);
    GL.LoadOrtho();
    GL.Begin(GL.QUADS);
    GL.Vertex3(0.3f, 0.3f, 0);
    GL.Vertex3(0.3f, 0.7f, 0);
    GL.Vertex3(0.7f, 0.7f, 0);
    GL.Vertex3(0.7f, 0.3f, 0);
    GL.End();
    GL.PopMatrix();
}
// SetCameraLensCorrection
// Feeds the distortion scale, aspect ratio, and the four distortion polynomial
// coefficients (k0..k3) into the camera's OVRLensCorrection shader component.
void ConfigureCameraLensCorrection(ref Camera camera)
{
    // Get the distortion scale and aspect ratio to use when calculating distortion shader
    float distortionScale = 1.0f / OVRDeviceImposter.DistortionScale();
    float aspectRatio = OVRDeviceImposter.CalculateAspectRatio();

    // These values are different in the SDK World Demo; Unity renders each camera to a buffer
    // that is normalized, so we will respect this rule when calculating the distortion inputs
    float NormalizedWidth = 1.0f;
    float NormalizedHeight = 1.0f;

    OVRLensCorrection lc = camera.GetComponent<OVRLensCorrection>();

    // Output scale (buffer space -> warped space), with aspect folded into Y
    lc._Scale.x = (NormalizedWidth / 2.0f) * distortionScale;
    lc._Scale.y = (NormalizedHeight / 2.0f) * distortionScale * aspectRatio;
    // Input scale (texture coords -> lens-centered coords)
    lc._ScaleIn.x = (2.0f / NormalizedWidth);
    lc._ScaleIn.y = (2.0f / NormalizedHeight) / aspectRatio;

    // Distortion polynomial coefficients. FIX: the w component (DistK3) was never
    // assigned even though it is fetched alongside k0..k2 in InitCameraControllerVariables;
    // the warp shader expects all four coefficients.
    lc._HmdWarpParam.x = DistK0;
    lc._HmdWarpParam.y = DistK1;
    lc._HmdWarpParam.z = DistK2;
    lc._HmdWarpParam.w = DistK3;
}
// FormatCalibratingString
// Builds the on-screen status line shown while the magnetometer is calibrating.
// Auto mode reports the number of sample points captured so far; manual mode
// tells the user which direction to look for the next sample.
void FormatCalibratingString(ref string str)
{
    if (MagAutoCalibrate == true)
    {
        // Auto mode: just report progress
        str = System.String.Format("Mag Calibrating (AUTO)... Point {0} set",
                                   OVRDeviceImposter.MagNumberOfSamples(0));
        return;
    }

    // Manual Calibration: Make sure to get proper direction
    str = "Mag Calibrating (MANUAL)... LOOK ";

    // Samples 0-3 map to the four look directions; anything else appends nothing
    int sampleIndex = OVRDeviceImposter.MagNumberOfSamples(0);
    if (sampleIndex == 0)
    {
        str += "FORWARD";
    }
    else if (sampleIndex == 1)
    {
        str += "UP";
    }
    else if (sampleIndex == 2)
    {
        str += "LEFT";
    }
    else if (sampleIndex == 3)
    {
        str += "RIGHT";
    }
}
// GUIMagYawDriftCorrection
// Renders the mag-calibration status line via the stereo GUI helper. Some states
// flash their text by only setting it while MagCalTimerFlash is above a threshold;
// the timer is decremented here each frame and wraps back to 0.5s.
// The manual/calibrating states widen and left-shift the box to fit longer text.
public void GUIMagYawDriftCorrection(int xLoc, int yLoc, int xWidth, int yWidth, ref OVRGUI guiHelper)
{
    string strMagCal = "";
    Color c = Color.red;
    int xloc = xLoc;
    int xwidth = xWidth;

    switch (MagCalState)
    {
        case (MagCalibrationState.MagDisabled):
            strMagCal = "Mag Calibration OFF";
            break;

        case (MagCalibrationState.MagManualGetReady):
            strMagCal = "Manual Calibration: Look Forward, Press 'Z'..";
            c = Color.white;
            // Longer message: widen the box and shift it left
            xloc -= 40;
            xwidth += 150;
            break;

        case (MagCalibrationState.MagCalibrating):
            // Flash: text is only populated during the "on" phase of the timer
            if (MagCalTimerFlash > 0.2f)
            {
                FormatCalibratingString(ref strMagCal);
            }
            MagCalTimerFlash -= Time.deltaTime;
            if (MagCalTimerFlash < 0.0f)
            {
                MagCalTimerFlash += 0.5f;
            }
            c = Color.white;
            xloc -= 40;
            xwidth += 150;
            break;

        case (MagCalibrationState.MagCalibrated):
            strMagCal = "Mag Calibrated";
            MagCalTimerFlash -= Time.deltaTime;
            if (MagCalTimerFlash < 0.0f)
            {
                MagCalTimerFlash += 0.5f;
            }
            c = Color.yellow;
            break;

        case (MagCalibrationState.MagReady):
            if (OVRDeviceImposter.IsMagYawCorrectionInProgress(0) == true)
            {
                if (MagCalTimerFlash > 0.2f)
                {
                    // FIX: removed a dead assignment ("Mag CORRECTING...") that was
                    // immediately overwritten by this formatted message
                    strMagCal = System.String.Format("Mag CORRECTING (deg)... {0:F3}",
                                                     OVRDeviceImposter.GetYawErrorAngle(0));
                }
                MagCalTimerFlash -= Time.deltaTime;
                if (MagCalTimerFlash < 0.0f)
                {
                    MagCalTimerFlash += 0.5f;
                }
                xloc -= 40;
                xwidth += 150;
                c = Color.red;
            }
            else
            {
                strMagCal = "Mag Correction ON";
                c = Color.green;
            }
            break;
    }

    guiHelper.StereoBox(xloc, yLoc, xwidth, yWidth, ref strMagCal, c);
}
// UpdateMagYawDriftCorrection
// Per-frame driver for the magnetometer calibration state machine:
//   'Z' starts / advances / resets calibration (auto or manual, per MagAutoCalibrate);
//   'X' (once calibrated) sets the mag reference and enables yaw correction;
//   'F6' (when ready) toggles the debug geometry.
// State order matters here: the calibration-progress block below runs in the same
// frame a 'Z' press moved us into MagCalibrating.
public void UpdateMagYawDriftCorrection()
{
    if (Input.GetKeyDown(KeyCode.Z) == true)
    {
        if (MagCalState == MagCalibrationState.MagDisabled)
        {
            // Start calibration process
            if (MagAutoCalibrate == true)
            {
                OVRDeviceImposter.BeginMagAutoCalibration(0);
                MagCalState = MagCalibrationState.MagCalibrating;
            }
            else
            {
                // Go to pre-manual calibration state (to allow for
                // setting reference point)
                MagCalState = MagCalibrationState.MagManualGetReady;
                return;
            }
        }
        else if (MagCalState == MagCalibrationState.MagManualGetReady)
        {
            // Second 'Z' press: capture the reference orientation the user is
            // currently looking at, then begin manual calibration
            OVRDeviceImposter.SetMagReference(0);
            OVRDeviceImposter.EnableMagYawCorrection(0, true);
            Quaternion q = Quaternion.identity;
            // Use predicted orientation when prediction is enabled on the controller
            if ((CameraController != null) && (CameraController.PredictionOn == true))
            {
                OVRDeviceImposter.GetPredictedOrientation(0, ref q);
            }
            else
            {
                OVRDeviceImposter.GetOrientation(0, ref q);
            }
            CurEulerRef = q.eulerAngles;

            // Begin manual calibration
            OVRDeviceImposter.BeginMagManualCalibration(0);
            MagCalState = MagCalibrationState.MagCalibrating;
        }
        else
        {
            // Reset calibration process: 'Z' in any other state cancels everything
            if (MagAutoCalibrate == true)
            {
                OVRDeviceImposter.StopMagAutoCalibration(0);
            }
            else
            {
                OVRDeviceImposter.StopMagManualCalibration(0);
            }
            OVRDeviceImposter.EnableMagYawCorrection(0, false);
            MagCalState = MagCalibrationState.MagDisabled;

            // Do not show geometry
            MagShowGeometry = false;
            ShowGeometry(MagShowGeometry);
            return;
        }
    }

    // Check to see if calibration is completed
    if (MagCalState == MagCalibrationState.MagCalibrating)
    {
        if (MagAutoCalibrate == true)
        {
            OVRDeviceImposter.UpdateMagAutoCalibration(0);
        }
        else
        {
            OVRDeviceImposter.UpdateMagManualCalibration(0);
        }

        if (OVRDeviceImposter.IsMagCalibrated(0) == true)
        {
            if (MagAutoCalibrate == true)
            {
                MagCalState = MagCalibrationState.MagCalibrated;
            }
            else
            {
                // Manual calibration already captured the reference orientation in
                // the MagManualGetReady step, so it can skip straight to MagReady
                MagCalState = MagCalibrationState.MagReady;
            }
        }
    }

    // If we are calibrated, we will set mag reference and
    // enable yaw correction on a button press
    if ((MagCalState == MagCalibrationState.MagCalibrated) ||
        (MagCalState == MagCalibrationState.MagReady))
    {
        if (Input.GetKeyDown(KeyCode.X) == true)
        {
            OVRDeviceImposter.SetMagReference(0);
            OVRDeviceImposter.EnableMagYawCorrection(0, true);
            MagCalState = MagCalibrationState.MagReady;
            Quaternion q = Quaternion.identity;
            if ((CameraController != null) && (CameraController.PredictionOn == true))
            {
                OVRDeviceImposter.GetPredictedOrientation(0, ref q);
            }
            else
            {
                OVRDeviceImposter.GetOrientation(0, ref q);
            }
            // New yaw reference for the compass geometry
            CurEulerRef = q.eulerAngles;
        }

        if ((MagCalState == MagCalibrationState.MagReady) && (Input.GetKeyDown(KeyCode.F6)))
        {
            // Toggle showing geometry either on or off
            if (MagShowGeometry == false)
            {
                MagShowGeometry = true;
                ShowGeometry(MagShowGeometry);
            }
            else
            {
                MagShowGeometry = false;
                ShowGeometry(MagShowGeometry);
            }
        }

        // Keep the compass geometry tracking the headset while calibrated/ready
        UpdateGeometry();
    }
}
// SetCameraOrientation
// Composes the final camera pose each frame: external Y rotation (e.g. controller),
// the camera controller's orientation offset, and the HMD orientation read from the
// sensor, then positions the camera at parent + neck + (rotated) eye offset.
// NOTE(review): CameraController is dereferenced unguarded here (TrackerRotatesY,
// GetYRotation, GetOrientationOffset) even though other spots null-check it — the
// guards are inconsistent; confirm whether a null controller is actually possible.
void SetCameraOrientation()
{
    Quaternion q = Quaternion.identity;
    Vector3 dir = Vector3.forward;

    // Main camera has a depth of 0, so it will be rendered first;
    // only that camera reads the sensor, so both eyes share one sample per frame
    if (gameObject.camera.depth == 0.0f)
    {
        // If desired, update parent transform y rotation here
        // This is useful if we want to track the current location of
        // of the head.
        // TODO: Future support for x and z, and possibly change to a quaternion
        if (CameraController.TrackerRotatesY == true)
        {
            // Copy only the camera's yaw onto the parent (zero pitch and roll)
            Vector3 a = gameObject.camera.transform.rotation.eulerAngles;
            a.x = 0;
            a.z = 0;
            gameObject.transform.parent.transform.eulerAngles = a;
        }

        // Read shared data from CameraController
        if (CameraController != null)
        {
            // Read sensor here (prediction on or off)
            if (CameraController.PredictionOn == false)
            {
                OVRDeviceImposter.GetOrientation(0, ref CameraOrientation);
            }
            else
            {
                OVRDeviceImposter.GetPredictedOrientation(0, ref CameraOrientation);
            }
        }

        // This needs to go as close to reading Rift orientation inputs
        OVRDeviceImposter.ProcessLatencyInputs();
    }

    // Calculate the rotation Y offset that is getting updated externally
    // (i.e. like a controller rotation)
    float yRotation = 0.0f;
    CameraController.GetYRotation(ref yRotation);
    q = Quaternion.Euler(0.0f, yRotation, 0.0f);
    dir = q * Vector3.forward;
    q.SetLookRotation(dir, Vector3.up);

    // Multiply the camera controllers offset orientation (allow follow of orientation offset)
    Quaternion orientationOffset = Quaternion.identity;
    CameraController.GetOrientationOffset(ref orientationOffset);
    q = orientationOffset * q;

    // Multiply in the current HeadQuat (q is now the latest best rotation)
    // NOTE(review): this null check is redundant with the unguarded uses above —
    // if CameraController were null we would have thrown before reaching here
    if (CameraController != null)
    {
        q = q * CameraOrientation;
    }

    // * * *
    // Update camera rotation
    gameObject.camera.transform.rotation = q;

    // * * *
    // Update camera position (first add Offset to parent transform)
    gameObject.camera.transform.position =
        gameObject.camera.transform.parent.transform.position + NeckPosition;

    // Adjust neck by taking eye position and transforming through q
    gameObject.camera.transform.position += q * EyePosition;
}