// UpdateGeometry
/// <summary>
/// Updates the debug compass geometry to mirror the current headset yaw and
/// tints the reference marker red while mag yaw correction is in progress
/// (green otherwise). No-op unless the geometry is being shown and the
/// camera controller and geometry references are all set.
/// </summary>
public void UpdateGeometry()
{
    if (MagShowGeometry == false)
    {
        return;
    }
    if (CameraController == null)
    {
        return;
    }
    if ((GeometryReference == null) || (GeometryCompass == null))
    {
        return;
    }

    // All set, we can update the geometry with camera and position values.
    // NOTE: CameraController is guaranteed non-null here by the early return
    // above, so no further null check is needed before reading PredictionOn.
    Quaternion q = Quaternion.identity;
    if (CameraController.PredictionOn == true)
    {
        OVRDeviceImposter.GetPredictedOrientation(0, ref q);
    }
    else
    {
        OVRDeviceImposter.GetOrientation(0, ref q);
    }

    // Counter-rotate the compass by the head yaw, relative to the yaw captured
    // when the mag reference was set, so the compass stays world-aligned.
    Vector3 v = GeometryCompass.transform.localEulerAngles;
    v.y = -q.eulerAngles.y + CurEulerRef.y;
    GeometryCompass.transform.localEulerAngles = v;

    // Set the color of the marker to red if we are calibrating
    if (GeometryReferenceMarkMat != null)
    {
        Color c = (OVRDeviceImposter.IsMagYawCorrectionInProgress(0) == true) ?
            Color.red : Color.green;
        GeometryReferenceMarkMat.SetColor("_Color", c);
    }
}
// UpdateMagYawDriftCorrection
/// <summary>
/// Drives the magnetometer yaw-drift-correction state machine. Call once per frame.
/// Z starts/stops calibration (auto or manual, per MagAutoCalibrate);
/// X sets the mag reference point once calibrated; F6 toggles the debug geometry.
/// </summary>
public void UpdateMagYawDriftCorrection()
{
    if (Input.GetKeyDown(KeyCode.Z) == true)
    {
        if (MagCalState == MagCalibrationState.MagDisabled)
        {
            // Start calibration process
            if (MagAutoCalibrate == true)
            {
                OVRDeviceImposter.BeginMagAutoCalibration(0);
                // No return here: fall through so the first auto-calibration
                // update runs this same frame.
                MagCalState = MagCalibrationState.MagCalibrating;
            }
            else
            {
                // Go to pre-manual calibration state (to allow for
                // setting reference point)
                MagCalState = MagCalibrationState.MagManualGetReady;
                return;
            }
        }
        else if (MagCalState == MagCalibrationState.MagManualGetReady)
        {
            // Capture the reference orientation before starting manual calibration.
            OVRDeviceImposter.SetMagReference(0);
            OVRDeviceImposter.EnableMagYawCorrection(0, true);
            CurEulerRef = ReadHeadOrientation().eulerAngles;

            // Begin manual calibration
            OVRDeviceImposter.BeginMagManualCalibration(0);
            MagCalState = MagCalibrationState.MagCalibrating;
        }
        else
        {
            // Reset calibration process
            if (MagAutoCalibrate == true)
            {
                OVRDeviceImposter.StopMagAutoCalibration(0);
            }
            else
            {
                OVRDeviceImposter.StopMagManualCalibration(0);
            }

            OVRDeviceImposter.EnableMagYawCorrection(0, false);
            MagCalState = MagCalibrationState.MagDisabled;

            // Do not show geometry
            MagShowGeometry = false;
            ShowGeometry(MagShowGeometry);
            return;
        }
    }

    // Check to see if calibration is completed
    if (MagCalState == MagCalibrationState.MagCalibrating)
    {
        if (MagAutoCalibrate == true)
        {
            OVRDeviceImposter.UpdateMagAutoCalibration(0);
        }
        else
        {
            OVRDeviceImposter.UpdateMagManualCalibration(0);
        }

        if (OVRDeviceImposter.IsMagCalibrated(0) == true)
        {
            // Manual calibration has already taken account of the reference
            // orientation (set above), so it can go straight to MagReady;
            // auto calibration still needs the X press to set a reference.
            MagCalState = (MagAutoCalibrate == true) ?
                MagCalibrationState.MagCalibrated :
                MagCalibrationState.MagReady;
        }
    }

    // If we are calibrated, we will set mag reference and
    // enable yaw correction on a button press
    if ((MagCalState == MagCalibrationState.MagCalibrated) ||
        (MagCalState == MagCalibrationState.MagReady))
    {
        if (Input.GetKeyDown(KeyCode.X) == true)
        {
            OVRDeviceImposter.SetMagReference(0);
            OVRDeviceImposter.EnableMagYawCorrection(0, true);
            MagCalState = MagCalibrationState.MagReady;
            CurEulerRef = ReadHeadOrientation().eulerAngles;
        }

        if ((MagCalState == MagCalibrationState.MagReady) &&
            (Input.GetKeyDown(KeyCode.F6)))
        {
            // Toggle showing geometry either on or off
            MagShowGeometry = !MagShowGeometry;
            ShowGeometry(MagShowGeometry);
        }

        UpdateGeometry();
    }
}

// ReadHeadOrientation
/// <summary>
/// Reads the current headset orientation from the sensor, using the predicted
/// orientation when the camera controller has prediction enabled.
/// </summary>
private Quaternion ReadHeadOrientation()
{
    Quaternion q = Quaternion.identity;
    if ((CameraController != null) && (CameraController.PredictionOn == true))
    {
        OVRDeviceImposter.GetPredictedOrientation(0, ref q);
    }
    else
    {
        OVRDeviceImposter.GetOrientation(0, ref q);
    }
    return q;
}
// SetCameraOrientation
/// <summary>
/// Computes the final camera rotation — external Y rotation, then the
/// controller's orientation offset, then the tracked head orientation — and
/// positions the camera at its parent plus the neck and eye offsets.
/// NOTE(review): CameraController is dereferenced unconditionally below
/// (TrackerRotatesY, GetYRotation), so it must be non-null by the time this
/// runs; the original's scattered inner null checks were unreachable with a
/// null controller and have been removed.
/// </summary>
void SetCameraOrientation()
{
    Quaternion q = Quaternion.identity;
    Vector3 dir = Vector3.forward;

    // Main camera has a depth of 0, so it will be rendered first
    if (gameObject.camera.depth == 0.0f)
    {
        // If desired, update parent transform y rotation here
        // This is useful if we want to track the current location of the head.
        // TODO: Future support for x and z, and possibly change to a quaternion
        if (CameraController.TrackerRotatesY == true)
        {
            Vector3 a = gameObject.camera.transform.rotation.eulerAngles;
            a.x = 0;
            a.z = 0;
            gameObject.transform.parent.transform.eulerAngles = a;
        }

        // Read sensor here (prediction on or off)
        if (CameraController.PredictionOn == false)
        {
            OVRDeviceImposter.GetOrientation(0, ref CameraOrientation);
        }
        else
        {
            OVRDeviceImposter.GetPredictedOrientation(0, ref CameraOrientation);
        }

        // This needs to go as close to reading Rift orientation inputs
        OVRDeviceImposter.ProcessLatencyInputs();
    }

    // Calculate the rotation Y offset that is getting updated externally
    // (i.e. like a controller rotation)
    float yRotation = 0.0f;
    CameraController.GetYRotation(ref yRotation);
    q = Quaternion.Euler(0.0f, yRotation, 0.0f);
    // NOTE(review): this round-trip through SetLookRotation reconstructs the
    // same yaw-only rotation; kept to match the original behavior exactly.
    dir = q * Vector3.forward;
    q.SetLookRotation(dir, Vector3.up);

    // Multiply the camera controllers offset orientation (allow follow of orientation offset)
    Quaternion orientationOffset = Quaternion.identity;
    CameraController.GetOrientationOffset(ref orientationOffset);
    q = orientationOffset * q;

    // Multiply in the current HeadQuat (q is now the latest best rotation)
    q = q * CameraOrientation;

    // * * *
    // Update camera rotation
    gameObject.camera.transform.rotation = q;

    // * * *
    // Update camera position (first add Offset to parent transform)
    gameObject.camera.transform.position =
        gameObject.camera.transform.parent.transform.position + NeckPosition;

    // Adjust neck by taking eye position and transforming through q
    gameObject.camera.transform.position += q * EyePosition;
}