/// <summary>
/// Enables the yaw correction.
/// </summary>
void EnableYawCorrection()
{
    OVRDevice.EnableMagYawCorrection(true);

    Quaternion q = Quaternion.identity;

    if ((CameraController != null) && (CameraController.PredictionOn == true))
        OVRDevice.GetPredictedOrientation(ref q);
    else
        OVRDevice.GetOrientation(ref q);

    CurEulerRef = q.eulerAngles;
}
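The "predicted if enabled, raw otherwise" read above recurs in nearly every example in this section. A minimal sketch of that pattern factored into one helper, assuming the same OVRDevice API and CameraController field used above (the helper name ReadHmdOrientation is hypothetical):

// A minimal sketch, not part of the original source: the recurring
// "predicted if prediction is on, raw otherwise" read as one helper.
// ReadHmdOrientation is a hypothetical name; the OVRDevice calls are
// the ones used in the snippets above.
Quaternion ReadHmdOrientation()
{
    Quaternion q = Quaternion.identity;

    if ((CameraController != null) && (CameraController.PredictionOn == true))
        OVRDevice.GetPredictedOrientation(ref q);
    else
        OVRDevice.GetOrientation(ref q);

    return q;
}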
// UpdateGeometry
public void UpdateGeometry()
{
    if (MagShowGeometry == false)
        return;
    if (CameraController == null)
        return;
    if ((GeometryReference == null) || (GeometryCompass == null))
        return;

    // All set; we can update the geometry with camera and position values
    Quaternion q = Quaternion.identity;

    if ((CameraController != null) && (CameraController.PredictionOn == true))
        OVRDevice.GetPredictedOrientation(0, ref q);
    else
        OVRDevice.GetOrientation(0, ref q);

    Vector3 v = GeometryCompass.transform.localEulerAngles;
    v.y = -q.eulerAngles.y + CurEulerRef.y;
    GeometryCompass.transform.localEulerAngles = v;

    // Set the color of the marker to red if we are calibrating
    if (GeometryReferenceMarkMat != null)
    {
        Color c = Color.green;

        if (OVRDevice.IsMagYawCorrectionInProgress(0) == true)
            c = Color.red;

        GeometryReferenceMarkMat.SetColor("_Color", c);
    }
}
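The compass yaw computed above is the head's current yaw expressed relative to the reference captured when yaw correction was enabled. A minimal sketch of just that computation (RelativeCompassYaw is a hypothetical name):

// A sketch of the compass yaw used in UpdateGeometry above: negate the
// current yaw and add back the reference yaw captured at enable time.
// RelativeCompassYaw is a hypothetical name.
float RelativeCompassYaw(Quaternion current, Vector3 eulerReference)
{
    return -current.eulerAngles.y + eulerReference.y;
}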
// UpdateMagYawDriftCorrection
public void UpdateMagYawDriftCorrection()
{
    if (Input.GetKeyDown(KeyCode.Z) == true)
    {
        if (MagCalState == MagCalibrationState.MagDisabled)
        {
            // Start calibration process
            if (MagAutoCalibrate == true)
            {
                OVRDevice.BeginMagAutoCalibration(0);
                MagCalState = MagCalibrationState.MagCalibrating;
            }
            else
            {
                // Go to pre-manual calibration state (to allow for
                // setting the reference point)
                MagCalState = MagCalibrationState.MagManualGetReady;
                return;
            }
        }
        else if (MagCalState == MagCalibrationState.MagManualGetReady)
        {
            OVRDevice.SetMagReference(0);
            OVRDevice.EnableMagYawCorrection(0, true);

            Quaternion q = Quaternion.identity;

            if ((CameraController != null) && (CameraController.PredictionOn == true))
                OVRDevice.GetPredictedOrientation(0, ref q);
            else
                OVRDevice.GetOrientation(0, ref q);

            CurEulerRef = q.eulerAngles;

            // Begin manual calibration
            OVRDevice.BeginMagManualCalibration(0);
            MagCalState = MagCalibrationState.MagCalibrating;
        }
        else
        {
            // Reset calibration process
            if (MagAutoCalibrate == true)
                OVRDevice.StopMagAutoCalibration(0);
            else
                OVRDevice.StopMagManualCalibration(0);

            OVRDevice.EnableMagYawCorrection(0, false);
            MagCalState = MagCalibrationState.MagDisabled;

            // Do not show geometry
            MagShowGeometry = false;
            ShowGeometry(MagShowGeometry);
            return;
        }
    }

    // Check to see if calibration is complete
    if (MagCalState == MagCalibrationState.MagCalibrating)
    {
        if (MagAutoCalibrate == true)
            OVRDevice.UpdateMagAutoCalibration(0);
        else
            OVRDevice.UpdateMagManualCalibration(0);

        if (OVRDevice.IsMagCalibrated(0) == true)
        {
            if (MagAutoCalibrate == true)
            {
                MagCalState = MagCalibrationState.MagCalibrated;
            }
            else
            {
                // Manual calibration takes into account having set the
                // reference orientation.
                MagCalState = MagCalibrationState.MagReady;
            }
        }
    }

    // If we are calibrated, we will set the mag reference and
    // enable yaw correction on a button press
    if ((MagCalState == MagCalibrationState.MagCalibrated) ||
        (MagCalState == MagCalibrationState.MagReady))
    {
        if (Input.GetKeyDown(KeyCode.X) == true)
        {
            OVRDevice.SetMagReference(0);
            OVRDevice.EnableMagYawCorrection(0, true);
            MagCalState = MagCalibrationState.MagReady;

            Quaternion q = Quaternion.identity;

            if ((CameraController != null) && (CameraController.PredictionOn == true))
                OVRDevice.GetPredictedOrientation(0, ref q);
            else
                OVRDevice.GetOrientation(0, ref q);

            CurEulerRef = q.eulerAngles;
        }

        if ((MagCalState == MagCalibrationState.MagReady) &&
            (Input.GetKeyDown(KeyCode.F6)))
        {
            // Toggle showing geometry on or off
            MagShowGeometry = !MagShowGeometry;
            ShowGeometry(MagShowGeometry);
        }

        UpdateGeometry();
    }
}
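The function above drives a small state machine. A hedged sketch of the states it references, reconstructed only from the names used in the snippet (the actual OVR declaration may list more members or a different order):

// A sketch of the calibration states referenced above, reconstructed from
// the names used in the snippet; the real OVR declaration may differ.
public enum MagCalibrationState
{
    MagDisabled,        // yaw correction off; Z starts calibration
    MagManualGetReady,  // waiting for the user to set a reference point (manual path)
    MagCalibrating,     // auto or manual calibration in progress
    MagCalibrated,      // calibrated (auto path); X sets the reference and enables correction
    MagReady            // reference set, yaw correction active; F6 toggles debug geometry
};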
// SetCameraOrientation
void SetCameraOrientation()
{
    Quaternion q = Quaternion.identity;
    Vector3 dir = Vector3.forward;

    // Main camera has a depth of 0, so it will be rendered first
    if (gameObject.camera.depth == 0.0f)
    {
        // If desired, update parent transform y rotation here.
        // This is useful if we want to track the current location of
        // the head.
        // TODO: Future support for x and z, and possibly change to a quaternion
        // NOTE: This calculation is one frame behind
        if (CameraController.TrackerRotatesY == true)
        {
            Vector3 a = gameObject.camera.transform.rotation.eulerAngles;
            a.x = 0;
            a.z = 0;
            gameObject.transform.parent.transform.eulerAngles = a;
        }
        /*
        else
        {
            // We will still rotate the CameraController in the y axis
            // based on the fact that we have a Y rotation being passed
            // in from above that still needs to take place (this functionality
            // may be better suited to be calculated one level up)
            Vector3 a = Vector3.zero;
            float y = 0.0f;
            CameraController.GetYRotation(ref y);
            a.y = y;
            gameObject.transform.parent.transform.eulerAngles = a;
        }
        */

        // Read shared data from CameraController
        if (CameraController != null)
        {
            // Read the sensor here (prediction on or off)
            if (CameraController.PredictionOn == false)
                OVRDevice.GetOrientation(0, ref CameraOrientation);
            else
                OVRDevice.GetPredictedOrientation(0, ref CameraOrientation);
        }

        // This needs to go as close as possible to reading the Rift orientation inputs
        OVRDevice.ProcessLatencyInputs();
    }

    // Calculate the rotation Y offset that is getting updated externally
    // (i.e. like a controller rotation)
    float yRotation = 0.0f;
    CameraController.GetYRotation(ref yRotation);
    q = Quaternion.Euler(0.0f, yRotation, 0.0f);
    dir = q * Vector3.forward;
    q.SetLookRotation(dir, Vector3.up);

    // Multiply in the camera controller's offset orientation (allows following an orientation offset)
    Quaternion orientationOffset = Quaternion.identity;
    CameraController.GetOrientationOffset(ref orientationOffset);
    q = orientationOffset * q;

    // Multiply in the current HeadQuat (q is now the latest best rotation)
    if (CameraController != null)
        q = q * CameraOrientation;

    // * * *
    // Update camera rotation
    gameObject.camera.transform.rotation = q;

    // * * *
    // Update camera position (first add Offset to parent transform)
    gameObject.camera.transform.position =
        gameObject.camera.transform.parent.transform.position + NeckPosition;

    // Adjust neck by taking the eye position and transforming it through q
    gameObject.camera.transform.position += q * EyePosition;
}
private void doYawFiltering(float deltaT)
{
    switch (driftingSensor)
    {
        case DriftingRotation.OculusRift:
            if (OVRDevice.IsSensorPresent(oculusID))
            {
                OVRDevice.GetOrientation(oculusID, ref driftingRot);
                if (oculusCamController)
                {
                    // In a future OVR SDK, will oculusCamController take oculusID?
                    oculusCamController.SetYRotation(-finalYawDifference.eulerAngles.y);
                }
            }
            break;
        case DriftingRotation.RazerHydra:
            // TODO
            //driftingRot = hydraRotation;
            break;
        case DriftingRotation.InputTransform:
            if (driftingTransform)
            {
                driftingRot = driftingTransform.rotation;
            }
            break;
    }

    if (driftingDirectionVisualizer != null)
    {
        driftingDirectionVisualizer.transform.rotation = driftingRot;
    }

    driftingEuler = driftingRot.eulerAngles;

    switch (compass)
    {
        case CompassSource.Kinect:
            if (!skeletonManager || !skeletonManager.skeletons[kinectPlayerID].isTracking)
            {
                break;
            }
            else
            {
                compassData = skeletonManager.GetJointData(compassJoint, kinectPlayerID);

                // First check for a high confidence value
                if (compassData != null && compassData.rotationConfidence >= 1.0f)
                {
                    updateDifferenceKalman(compassData.rotation.eulerAngles,
                                           driftingEuler, deltaT);
                }
            }
            break;
        case CompassSource.PSMove:
            if (inputManager)
            {
                compassMove = inputManager.GetMoveWand(PSMoveID);
                if (compassMove)
                {
                    updateDifferenceKalman(compassMove.localRotation.eulerAngles,
                                           driftingEuler, deltaT);
                }
            }
            break;
        case CompassSource.InputTransform:
            if (compassTransform != null)
            {
                updateDifferenceKalman(compassTransform.rotation.eulerAngles,
                                       driftingEuler, deltaT);
            }
            break;
    }

    float normalizedT = Mathf.Clamp01(deltaT * driftCorrectionRate);
    if (normalizedT != 0)
    {
        finalYawDifference = Quaternion.Lerp(finalYawDifference,
                                             filteredYawDifference, normalizedT);
    }

    if (correctedDirectionVisualizer != null)
    {
        correctedDirectionVisualizer.transform.rotation = Quaternion.Euler(
            new Vector3(driftingEuler.x,
                        (360 + driftingEuler.y - finalYawDifference.eulerAngles.y) % 360,
                        driftingEuler.z));
    }
    //driftingRotation * Quaternion.Inverse(finalDifference);

    if (correctedDirectionVisualizer != null && driftVisualizerPosition != null)
    {
        correctedDirectionVisualizer.transform.position = driftVisualizerPosition.position;
    }
}
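The correction above blends an externally measured yaw difference into finalYawDifference over time. A minimal sketch of just that blending step in isolation, assuming a simple time-scaled pull toward the filtered target (updateDifferenceKalman, which produces filteredYawDifference, is not reproduced here; BlendYawDifference is a hypothetical name):

// A minimal, standalone sketch of the yaw-difference blending used above:
// each frame, pull the applied correction toward the filtered measurement
// at a rate scaled by deltaT. This omits the Kalman filtering that
// produces filteredYawDifference in the original.
Quaternion BlendYawDifference(Quaternion current, Quaternion filteredTarget,
                              float deltaT, float correctionRate)
{
    // Clamp so a long frame never overshoots the target
    float t = Mathf.Clamp01(deltaT * correctionRate);
    return (t != 0) ? Quaternion.Lerp(current, filteredTarget, t) : current;
}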
// Update is called once per frame
void Update()
{
    // Vector3 currW = new Vector3();
    // OVRDevice.GetAngularVelocity(0, ref currW.x, ref currW.y, ref currW.z);

    Quaternion currO = new Quaternion();
    OVRDevice.GetOrientation(0, ref currO);
    // print("x: " + currO.x + " y: " + currO.y + " z: " + currO.z + " w: " + currO.w);
    // print("angular velocity: " + "x: " + currW.x + " y: " + currW.y + " z: " + currW.z);

    rightFlick = false;
    leftFlick = false;

    //print("t: " + (Time.time - lastFlickAt) + " currW: " + currO.z);
    // print("dt: " + (Time.time - lastFlickAt) + " min: " + minSecsBetweenFlicks + " cO: " + currO.z + " pO: " + prevO.z);
    if ((Time.time - lastFlickAt) > minSecsBetweenFlicks)
    {
        rightFlick = (currO.z < -flickThreshold); // && (prevO.z >= -flickThreshold);
        leftFlick = (currO.z > flickThreshold);   // && (prevO.z <= flickThreshold);
    }

    if (leftFlick || rightFlick)
    {
        // print("x: " + currW.x + " left: " + leftFlick + " right: " + rightFlick);
        print("HIT l: " + leftFlick + " r: " + rightFlick);
        lastFlickAt = Time.time;
    }

    prevO = currO;
    // prevW = currW;

    // print(gameObject.transform.position.z);
    if (gameObject.transform.position.z >= exit.transform.position.z)
    {
        Application.LoadLevel("mollyLevel");
    }

    if (Input.GetKeyDown(KeyCode.Escape))
    {
        Application.LoadLevel(Application.loadedLevel);
    }

    Vector3 p = lead.transform.position;
    bool clickedThisTurn = false;

    if (Input.GetKeyDown(KeyCode.RightArrow) || rightFlick)
    {
        p.x += gridDistance;
        clickedThisTurn = true;
        soundManager.GetComponents<AudioSource>()[1].Play();
    }

    if (Input.GetKeyDown(KeyCode.LeftArrow) || leftFlick)
    {
        p.x -= gridDistance;
        clickedThisTurn = true;
        soundManager.GetComponents<AudioSource>()[1].Play();
    }

    if (clickedThisTurn)
    {
        print("clickedThisTurn: " + p.x);
        lead.transform.position = new Vector3(p.x, p.y, p.z);
    }

    if (Input.GetKeyDown(KeyCode.Space))
    {
        print("here");
        lead.rigidbody.velocity = new Vector3(0, 0, 0);
    }
}
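The head-flick gesture above keys off the raw quaternion's z component with a threshold plus a debounce interval. A minimal sketch of that detection factored into a helper (DetectFlick is a hypothetical name; like the original, it thresholds a raw quaternion component rather than a true roll angle):

// A minimal sketch of the flick detection above, pulled out into a helper.
// DetectFlick is a hypothetical name. It thresholds the raw quaternion z
// component (a cheap proxy for head roll) and debounces with a minimum
// interval between flicks. Returns +1 for a right flick, -1 for left, 0 for none.
int DetectFlick(Quaternion orientation, ref float lastFlickAt,
                float flickThreshold, float minSecsBetweenFlicks)
{
    if ((Time.time - lastFlickAt) <= minSecsBetweenFlicks)
        return 0; // still inside the debounce window

    if (orientation.z < -flickThreshold) { lastFlickAt = Time.time; return +1; } // right flick
    if (orientation.z > flickThreshold)  { lastFlickAt = Time.time; return -1; } // left flick

    return 0;
}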
// SetCameraOrientation
void SetCameraOrientation()
{
    Quaternion q = Quaternion.identity;
    Vector3 dir = Vector3.forward;

    // Main camera has a depth of 0, so it will be rendered first
    if (gameObject.camera.depth == 0.0f)
    {
        // If desired, update parent transform y rotation here.
        // This is useful if we want to track the current location of
        // the head.
        // TODO: Future support for x and z, and possibly change to a quaternion
        if (CameraController.TrackerRotatesY == true)
        {
            Vector3 a = gameObject.camera.transform.rotation.eulerAngles;
            a.x = 0;
            a.z = 0;
            gameObject.transform.parent.transform.eulerAngles = a;
        }

        // Read shared data from CameraController
        if (CameraController != null)
        {
            Quaternion DirQ = Quaternion.identity;

            // Read the sensor here (prediction on or off)
            if (CameraController.PredictionOn == false)
                OVRDevice.GetOrientation(ref DirQ);
            else
                OVRDevice.GetPredictedOrientation(ref DirQ);

            CameraController.SetSharedOrientation(DirQ);
        }

        // This needs to go as close as possible to reading the Rift orientation inputs
        OVRDevice.ProcessLatencyInputs();
    }

    // Calculate the rotation Y offset that is getting updated externally
    // (i.e. like a controller rotation)
    float yRotation = 0.0f;
    CameraController.GetYRotation(ref yRotation);
    q = Quaternion.Euler(0.0f, yRotation, 0.0f);
    dir = q * Vector3.forward;
    q.SetLookRotation(dir, Vector3.up);

    // Multiply in the camera controller's offset orientation (allows following an orientation offset)
    Quaternion orientationOffset = Quaternion.identity;
    CameraController.GetOrientationOffset(ref orientationOffset);
    q = orientationOffset * q;

    // Multiply in the current HeadQuat (q is now the latest best rotation)
    if (CameraController != null)
    {
        Quaternion DirQ = Quaternion.identity;
        CameraController.GetSharedOrientation(ref DirQ);
        q = q * DirQ;
    }

    // * * *
    // Update camera rotation
    gameObject.camera.transform.rotation = q;

    // * * *
    // Update camera position (first add Offset to parent transform)
    gameObject.camera.transform.position =
        gameObject.camera.transform.parent.transform.position + NeckPosition;

    // Adjust neck by taking the eye position and transforming it through q
    gameObject.camera.transform.position += q * EyePosition;
}
// Update is called once per frame
void Update()
{
    OVRDevice.GetOrientation(0, ref riftOrientation);
    Debug.Log(riftOrientation);
    groundController.rotation = riftOrientation;
}
// Use this for initialization
void Start()
{
    OVRDevice.GetOrientation(0, ref riftOrientation);
}
// SetCameraOrientation
void SetCameraOrientation()
{
    Quaternion q = Quaternion.identity;
    Vector3 dir = Vector3.forward;

    // Main camera has a depth of 0, so it will be rendered first
    if (gameObject.camera.depth == 0.0f)
    {
        // If desired, update parent transform y rotation here.
        // This is useful if we want to track the current location of
        // the head.
        // TODO: Future support for x and z, and possibly change to a quaternion
        if (SetParentYRotation == true)
        {
            Vector3 a = gameObject.camera.transform.rotation.eulerAngles;
            a.x = 0;
            a.z = 0;
            gameObject.transform.parent.transform.eulerAngles = a;
        }

        // Read the sensor here (prediction on or off)
        if (PredictionOn == false)
            OVRDevice.GetOrientation(ref DirQ);
        else
            OVRDevice.GetPredictedOrientation(ref DirQ);

        // This needs to go as close as possible to reading the Rift orientation inputs
        OVRDevice.ProcessLatencyInputs();
    }

    // Calculate the rotation Y offset that is getting updated externally
    // (i.e. like a controller rotation)
    q = Quaternion.Euler(0.0f, YRotation, 0.0f);
    dir = q * Vector3.forward;
    q.SetLookRotation(dir, Vector3.up);

    // Multiply in the offset orientation first
    q = OrientationOffset * q;

    // Multiply in the current HeadQuat (q is now the latest best rotation)
    q = q * DirQ;

    // * * *
    // Update camera rotation
    gameObject.camera.transform.rotation = q;

    // * * *
    // Update camera position (first add Offset to parent transform)
    gameObject.camera.transform.position =
        gameObject.camera.transform.parent.transform.position + NeckPosition;

    // Adjust neck by taking the eye position and transforming it through q
    gameObject.camera.transform.position += q * EyePosition;

    // PGG alternate calculation for the above...
    //Vector3 EyePositionNoX = EyePosition; EyePositionNoX.x = 0.0f;
    //gameObject.camera.transform.position += q * EyePositionNoX;
    //gameObject.camera.ResetWorldToCameraMatrix();
    //Matrix4x4 m = camera.worldToCameraMatrix;
    //Matrix4x4 tm = Matrix4x4.identity;
    //tm.SetColumn(3, new Vector4(-EyePosition.x, 0.0f, 0.0f, 1));
    //gameObject.camera.worldToCameraMatrix = tm * m;
}
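All three SetCameraOrientation variants above compose the final rotation in the same order (external yaw, then orientation offset, then HMD orientation) and place the camera at a fixed neck offset plus a rotated eye offset. A condensed sketch of just that composition, with hypothetical parameter names standing in for the member fields used above:

// A condensed sketch of the rotation/position composition shared by the
// SetCameraOrientation variants above. Parameter names are hypothetical
// stand-ins for the member fields in the originals.
void ComposeCamera(Transform cam, Transform parent, float yRotation,
                   Quaternion orientationOffset, Quaternion hmdOrientation,
                   Vector3 neckPosition, Vector3 eyePosition)
{
    // External yaw (e.g. controller turning) ...
    Quaternion q = Quaternion.Euler(0.0f, yRotation, 0.0f);
    // ... pre-multiplied by the controller's orientation offset ...
    q = orientationOffset * q;
    // ... then post-multiplied by the HMD orientation.
    q = q * hmdOrientation;

    cam.rotation = q;
    // The neck sits at a fixed offset from the parent; the eye offset rotates with q.
    cam.position = parent.position + neckPosition + q * eyePosition;
}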