public virtual Matrix4x4 ComputeTrackReferenceMatrix()
{
    if (centerEyeAnchor == null)
    {
        Debug.LogError("centerEyeAnchor is required");
        return Matrix4x4.identity;
    }

    // Ideally we would use UnityEngine.VR.VRNode.TrackingReference and avoid
    // depending on the OVRCameraRig, but it is not available in Unity 5.4.3.
    OVRPose headPose = OVRPose.identity;

    Vector3 pos;
    Quaternion rot;
    if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.Position, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out pos))
    {
        headPose.position = pos;
    }
    if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.Head, NodeStatePropertyType.Orientation, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out rot))
    {
        headPose.orientation = rot;
    }

    OVRPose invHeadPose = headPose.Inverse();
    Matrix4x4 invHeadMatrix = Matrix4x4.TRS(invHeadPose.position, invHeadPose.orientation, Vector3.one);
    Matrix4x4 ret = centerEyeAnchor.localToWorldMatrix * invHeadMatrix;
    return ret;
}
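// A minimal usage sketch, assuming the method above is exposed on an
// OVRCameraRig-style component. This consumer class and its `rig` field are
// illustrative, not part of the original code: the returned matrix maps
// tracking-space points into world space.
public class TrackingSpaceProbe : MonoBehaviour
{
    // Assumed to be the component that defines ComputeTrackReferenceMatrix().
    public OVRCameraRig rig;

    // Lifts a point reported in tracking space into Unity world space.
    public Vector3 TrackingToWorld(Vector3 trackingSpacePoint)
    {
        Matrix4x4 trackingToWorld = rig.ComputeTrackReferenceMatrix();
        return trackingToWorld.MultiplyPoint3x4(trackingSpacePoint);
    }
}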
private void CalculateCurrentPose()
{
    // Platform- and device-agnostic calls to get the center eye pose, used to pass the head pose to the SDK.
    OVRNodeStateProperties.GetNodeStatePropertyVector3(UnityEngine.XR.XRNode.CenterEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out centerEyePosition);
    OVRNodeStateProperties.GetNodeStatePropertyQuaternion(UnityEngine.XR.XRNode.CenterEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out centerEyeRotation);

    if (!is_hand_movment_locked)
    {
        CurrentPose = new PoseFrame
        {
            voiceAmplitude = voiceAmplitude,
            headPosition = centerEyePosition,
            headRotation = centerEyeRotation,
            handLeftPosition = OVRInput.GetLocalControllerPosition(OVRInput.Controller.LTouch),
            handLeftRotation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.LTouch),
            handRightPosition = OVRInput.GetLocalControllerPosition(OVRInput.Controller.RTouch),
            handRightRotation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.RTouch),
            controllerLeftPose = GetControllerPose(OVRInput.Controller.LTouch),
            controllerRightPose = GetControllerPose(OVRInput.Controller.RTouch),
        };
    }
}
private void UpdateLeftAndRightEyeAnchorIfNeedIt(bool hmdPresent, bool monoscopic)
{
    if (!hmdPresent || monoscopic)
    {
        leftEyeAnchor.localPosition = centerEyeAnchor.localPosition;
        rightEyeAnchor.localPosition = centerEyeAnchor.localPosition;
        leftEyeAnchor.localRotation = centerEyeAnchor.localRotation;
        rightEyeAnchor.localRotation = centerEyeAnchor.localRotation;
    }
    else
    {
        Vector3 leftEyePosition = Vector3.zero;
        Vector3 rightEyePosition = Vector3.zero;
        Quaternion leftEyeRotation = Quaternion.identity;
        Quaternion rightEyeRotation = Quaternion.identity;

        if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.LeftEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeLeft, OVRPlugin.Step.Render, out leftEyePosition))
        {
            leftEyeAnchor.localPosition = leftEyePosition;
        }
        if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.RightEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeRight, OVRPlugin.Step.Render, out rightEyePosition))
        {
            rightEyeAnchor.localPosition = rightEyePosition;
        }
        if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.LeftEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeLeft, OVRPlugin.Step.Render, out leftEyeRotation))
        {
            leftEyeAnchor.localRotation = leftEyeRotation;
        }
        if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.RightEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeRight, OVRPlugin.Step.Render, out rightEyeRotation))
        {
            rightEyeAnchor.localRotation = rightEyeRotation;
        }
    }
}
private Vector3 PollVInput(Node node, OVRPlugin.Node ovrNode, NodeStatePropertyType type)
{
    Vector3 value;
    // Note: the validity flag returned by the query is discarded here, so a
    // default (zero) vector is returned when no fresh value is available.
    OVRNodeStateProperties.GetNodeStatePropertyVector3(node, type, ovrNode, OVRPlugin.Step.Render, out value);
    return value;
}
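// Because PollVInput discards the bool, callers cannot distinguish a valid pose
// from the default zero vector. A hedged variant (not in the original) that
// surfaces the validity flag, matching the "only update when fresh" pattern used
// throughout this file:
private bool TryPollVInput(Node node, OVRPlugin.Node ovrNode, NodeStatePropertyType type, out Vector3 value)
{
    // Returns false when no fresh value is available this frame.
    return OVRNodeStateProperties.GetNodeStatePropertyVector3(node, type, ovrNode, OVRPlugin.Step.Render, out value);
}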
private void UpdateHandAnchorsIfNeedIt(bool updateHandAnchors, OVRPose tracker)
{
    if (updateHandAnchors && HandView.IsMine)
    {
        // Controller offset handling: on OpenVR we set the local poses reported by
        // Unity; otherwise the OVRInput local position is already the correct anchor.
        if (OVRManager.loadedXRDevice == OVRManager.XRDevice.OpenVR)
        {
            Vector3 leftPos = Vector3.zero;
            Vector3 rightPos = Vector3.zero;
            Quaternion leftQuat = Quaternion.identity;
            Quaternion rightQuat = Quaternion.identity;

            if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.LeftHand, NodeStatePropertyType.Position, OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render, out leftPos))
            {
                leftHandAnchor.localPosition = leftPos;
            }
            if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.RightHand, NodeStatePropertyType.Position, OVRPlugin.Node.HandRight, OVRPlugin.Step.Render, out rightPos))
            {
                rightHandAnchor.localPosition = rightPos;
            }
            if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.LeftHand, NodeStatePropertyType.Orientation, OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render, out leftQuat))
            {
                leftHandAnchor.localRotation = leftQuat;
            }
            if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.RightHand, NodeStatePropertyType.Orientation, OVRPlugin.Node.HandRight, OVRPlugin.Step.Render, out rightQuat))
            {
                rightHandAnchor.localRotation = rightQuat;
            }
        }
        else
        {
            leftHandAnchor.localPosition = OVRInput.GetLocalControllerPosition(OVRInput.Controller.LTouch);
            rightHandAnchor.localPosition = OVRInput.GetLocalControllerPosition(OVRInput.Controller.RTouch);
            leftHandAnchor.localRotation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.LTouch);
            rightHandAnchor.localRotation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.RTouch);
        }

        trackerAnchor.localPosition = tracker.position;

        OVRPose leftOffsetPose = OVRPose.identity;
        OVRPose rightOffsetPose = OVRPose.identity;
        if (OVRManager.loadedXRDevice == OVRManager.XRDevice.OpenVR)
        {
            GetOpenVRControllerOffset(Node.LeftHand, HandView, ref leftOffsetPose);
            GetOpenVRControllerOffset(Node.RightHand, HandView, ref rightOffsetPose);

            // Sets the poses of the left and right nodes, local to the tracking space.
            OVRManager.SetOpenVRLocalPose(
                trackingSpace.InverseTransformPoint(leftControllerAnchor.position),
                trackingSpace.InverseTransformPoint(rightControllerAnchor.position),
                Quaternion.Inverse(trackingSpace.rotation) * leftControllerAnchor.rotation,
                Quaternion.Inverse(trackingSpace.rotation) * rightControllerAnchor.rotation);
        }
        rightControllerAnchor.localPosition = rightOffsetPose.position;
        rightControllerAnchor.localRotation = rightOffsetPose.orientation;
        leftControllerAnchor.localPosition = leftOffsetPose.position;
        leftControllerAnchor.localRotation = leftOffsetPose.orientation;
    }
}
/// <summary>
/// Converts the given world-space transform to an OVRPose in tracking space.
/// </summary>
public static OVRPose ToTrackingSpacePose(this Transform transform, Camera camera)
{
    // headPose starts as identity, but on all Oculus headsets the calls below
    // overwrite it with the runtime's pose, so identity is effectively never returned.
    OVRPose headPose = OVRPose.identity;

    Vector3 pos;
    Quaternion rot;
    if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.Position, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out pos))
    {
        headPose.position = pos;
    }
    if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.Head, NodeStatePropertyType.Orientation, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out rot))
    {
        headPose.orientation = rot;
    }

    var ret = headPose * transform.ToHeadSpacePose(camera);
    return ret;
}
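// Illustrative call site for the extension method above (the scene object name
// "Marker" is hypothetical): converting a world-space transform into tracking
// space yields a pose suitable for OVR APIs that consume tracking-space poses.
private static OVRPose GetMarkerTrackingSpacePose()
{
    Transform marker = GameObject.Find("Marker").transform; // hypothetical scene object
    return marker.ToTrackingSpacePose(Camera.main);
}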
private void UpdateCenterEyeAnchorIfNeedIt(bool hmdPresent, Quaternion emulatedRotation)
{
    if (hmdPresent)
    {
        Vector3 centerEyePosition = Vector3.zero;
        Quaternion centerEyeRotation = Quaternion.identity;

        if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.CenterEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out centerEyePosition))
        {
            centerEyeAnchor.localPosition = centerEyePosition;
        }
        if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.CenterEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out centerEyeRotation))
        {
            centerEyeAnchor.localRotation = centerEyeRotation;
        }
    }
    else
    {
        centerEyeAnchor.localRotation = emulatedRotation;
        centerEyeAnchor.localPosition = OVRManager.instance.headPoseRelativeOffsetTranslation;
    }
}
/// <summary>
/// Converts the given pose from tracking space to world space.
/// </summary>
public static OVRPose ToWorldSpacePose(OVRPose trackingSpacePose)
{
    OVRPose headPose = OVRPose.identity;

    Vector3 pos;
    Quaternion rot;
    if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.Position, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out pos))
    {
        headPose.position = pos;
    }
    if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.Head, NodeStatePropertyType.Orientation, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out rot))
    {
        headPose.orientation = rot;
    }

    // Transform from tracking space to head space
    OVRPose poseInHeadSpace = headPose.Inverse() * trackingSpacePose;

    // Transform from head space to world space
    OVRPose ret = Camera.main.transform.ToOVRPose() * poseInHeadSpace;
    return ret;
}
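// A sketch of a typical caller, assuming ToWorldSpacePose above is in scope:
// OVRInput reports controller poses in tracking space, so composing one through
// ToWorldSpacePose places the right Touch controller in the scene.
private static OVRPose GetRightTouchWorldPose()
{
    OVRPose rightTouchTracking = new OVRPose
    {
        position = OVRInput.GetLocalControllerPosition(OVRInput.Controller.RTouch),
        orientation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.RTouch),
    };
    return ToWorldSpacePose(rightTouchTracking);
}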
protected virtual void UpdateAnchors()
{
    EnsureGameObjectIntegrity();

    if (!Application.isPlaying)
    {
        return;
    }

    if (_skipUpdate)
    {
        centerEyeAnchor.FromOVRPose(OVRPose.identity, true);
        leftEyeAnchor.FromOVRPose(OVRPose.identity, true);
        rightEyeAnchor.FromOVRPose(OVRPose.identity, true);
        return;
    }

    bool monoscopic = OVRManager.instance.monoscopic;
    bool hmdPresent = OVRNodeStateProperties.IsHmdPresent();

    OVRPose tracker = OVRManager.tracker.GetPose();
    trackerAnchor.localRotation = tracker.orientation;

    Quaternion emulatedRotation = Quaternion.Euler(
        -OVRManager.instance.headPoseRelativeOffsetRotation.x,
        -OVRManager.instance.headPoseRelativeOffsetRotation.y,
        OVRManager.instance.headPoseRelativeOffsetRotation.z);

    // Note: in the code below, when using UnityEngine's API, we only update anchor
    // transforms when we have a new, fresh value this frame. If we don't, tracking
    // may have been lost, so the pose should not change in the virtual world. This
    // is similar to calling InputTracking's GetLocalPosition and GetLocalRotation,
    // but only applying the result when the pose is valid: if any of these calls
    // returns false, the new pose is not valid and the anchor is left untouched.
    if (hmdPresent)
    {
        Vector3 centerEyePosition = Vector3.zero;
        Quaternion centerEyeRotation = Quaternion.identity;

        if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.CenterEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out centerEyePosition))
        {
            centerEyeAnchor.localPosition = centerEyePosition;
        }
        if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.CenterEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out centerEyeRotation))
        {
            centerEyeAnchor.localRotation = centerEyeRotation;
        }
    }
    else
    {
        centerEyeAnchor.localRotation = emulatedRotation;
        centerEyeAnchor.localPosition = OVRManager.instance.headPoseRelativeOffsetTranslation;
    }

    if (!hmdPresent || monoscopic)
    {
        leftEyeAnchor.localPosition = centerEyeAnchor.localPosition;
        rightEyeAnchor.localPosition = centerEyeAnchor.localPosition;
        leftEyeAnchor.localRotation = centerEyeAnchor.localRotation;
        rightEyeAnchor.localRotation = centerEyeAnchor.localRotation;
    }
    else
    {
        Vector3 leftEyePosition = Vector3.zero;
        Vector3 rightEyePosition = Vector3.zero;
        Quaternion leftEyeRotation = Quaternion.identity;
        Quaternion rightEyeRotation = Quaternion.identity;

        if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.LeftEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeLeft, OVRPlugin.Step.Render, out leftEyePosition))
        {
            leftEyeAnchor.localPosition = leftEyePosition;
        }
        if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.RightEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeRight, OVRPlugin.Step.Render, out rightEyePosition))
        {
            rightEyeAnchor.localPosition = rightEyePosition;
        }
        if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.LeftEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeLeft, OVRPlugin.Step.Render, out leftEyeRotation))
        {
            leftEyeAnchor.localRotation = leftEyeRotation;
        }
        if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.RightEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeRight, OVRPlugin.Step.Render, out rightEyeRotation))
        {
            rightEyeAnchor.localRotation = rightEyeRotation;
        }
    }

    // Fixed local offset applied to the right hand anchor below.
    Vector3 offset = new Vector3(0f, -2f, 0.5f);

    // Controller offset handling: on OpenVR we set the local poses reported by
    // Unity; otherwise the OVRInput local position is already the correct anchor.
    if (OVRManager.loadedXRDevice == OVRManager.XRDevice.OpenVR)
    {
        Vector3 leftPos = Vector3.zero;
        Vector3 rightPos = Vector3.zero;
        Quaternion leftQuat = Quaternion.identity;
        Quaternion rightQuat = Quaternion.identity;

        if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.LeftHand, NodeStatePropertyType.Position, OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render, out leftPos))
        {
            leftHandAnchor.localPosition = leftPos;
        }
        if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.RightHand, NodeStatePropertyType.Position, OVRPlugin.Node.HandRight, OVRPlugin.Step.Render, out rightPos))
        {
            rightHandAnchor.localPosition = rightPos + offset;
        }
        if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.LeftHand, NodeStatePropertyType.Orientation, OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render, out leftQuat))
        {
            leftHandAnchor.localRotation = leftQuat;
        }
        if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.RightHand, NodeStatePropertyType.Orientation, OVRPlugin.Node.HandRight, OVRPlugin.Step.Render, out rightQuat))
        {
            rightHandAnchor.localRotation = rightQuat;
        }
    }
    else
    {
        leftHandAnchor.localPosition = OVRInput.GetLocalControllerPosition(OVRInput.Controller.LTouch);
        rightHandAnchor.localPosition = OVRInput.GetLocalControllerPosition(OVRInput.Controller.RTouch) + offset;
        leftHandAnchor.localRotation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.LTouch);
        rightHandAnchor.localRotation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.RTouch);
    }

    trackerAnchor.localPosition = tracker.position;

    OVRPose leftOffsetPose = OVRPose.identity;
    OVRPose rightOffsetPose = OVRPose.identity;
    if (OVRManager.loadedXRDevice == OVRManager.XRDevice.OpenVR)
    {
        leftOffsetPose = OVRManager.GetOpenVRControllerOffset(Node.LeftHand);
        rightOffsetPose = OVRManager.GetOpenVRControllerOffset(Node.RightHand);

        // Sets the poses of the left and right nodes, local to the tracking space.
        OVRManager.SetOpenVRLocalPose(
            trackingSpace.InverseTransformPoint(leftControllerAnchor.position),
            trackingSpace.InverseTransformPoint(rightControllerAnchor.position),
            Quaternion.Inverse(trackingSpace.rotation) * leftControllerAnchor.rotation,
            Quaternion.Inverse(trackingSpace.rotation) * rightControllerAnchor.rotation);
    }
    rightControllerAnchor.localPosition = rightOffsetPose.position;
    rightControllerAnchor.localRotation = rightOffsetPose.orientation;
    leftControllerAnchor.localPosition = leftOffsetPose.position;
    leftControllerAnchor.localRotation = leftOffsetPose.orientation;

    RaiseUpdatedAnchorsEvent();
}
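// The world-to-tracking-space conversion passed to SetOpenVRLocalPose above recurs
// in several methods in this file; a small helper capturing the pattern, as a
// sketch (the helper name is invented, not part of the original code):
private static OVRPose WorldToTrackingSpacePose(Transform trackingSpace, Transform anchor)
{
    OVRPose pose = OVRPose.identity;
    // Express the anchor's world pose relative to the tracking-space transform.
    pose.position = trackingSpace.InverseTransformPoint(anchor.position);
    pose.orientation = Quaternion.Inverse(trackingSpace.rotation) * anchor.rotation;
    return pose;
}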
protected virtual void UpdateAnchors(bool updateEyeAnchors, bool updateHandAnchors)
{
    if (!OVRManager.OVRManagerinitialized) { return; }

    ovrInitialized = true;

    EnsureGameObjectIntegrity();

    if (!Application.isPlaying) { return; }

    if (_skipUpdate)
    {
        centerEyeAnchor.FromOVRPose(OVRPose.identity, true);
        leftEyeAnchor.FromOVRPose(OVRPose.identity, true);
        rightEyeAnchor.FromOVRPose(OVRPose.identity, true);
        return;
    }

    monoscopic = OVRManager.instance.monoscopic;
    hmdPresent = OVRNodeStateProperties.IsHmdPresent();

    // Mode selection via controller buttons or keyboard.
    if (OVRInput.GetDown(OVRInput.RawButton.A) || Input.GetKeyDown(KeyCode.I)) { Mode = mode0g.Inertial; }
    if (OVRInput.GetDown(OVRInput.RawButton.B) || Input.GetKeyDown(KeyCode.N)) { Mode = mode0g.OculusNative; }
    if (OVRInput.GetDown(OVRInput.RawButton.X) || Input.GetKeyDown(KeyCode.F)) { Mode = mode0g.HeadFixed; }
    if (OVRInput.GetDown(OVRInput.RawButton.Y) || Input.GetKeyDown(KeyCode.Alpha1)) { Mode = mode0g.OculusOneToOne; }

    // Either index trigger (or Space) resets the spin and the inertial pose.
    if (OVRInput.GetDown(OVRInput.RawButton.LIndexTrigger) || OVRInput.GetDown(OVRInput.RawButton.RIndexTrigger) || Input.GetKeyDown(KeyCode.Space))
    {
        spin = 0.0f;
        inertial = OVRPose.identity;
    }

    OVRPose tracker = OVRManager.tracker.GetPose();
    trackerAnchor.localRotation = tracker.orientation;

    Quaternion emulatedRotation = Quaternion.Euler(
        -OVRManager.instance.headPoseRelativeOffsetRotation.x,
        -OVRManager.instance.headPoseRelativeOffsetRotation.y,
        OVRManager.instance.headPoseRelativeOffsetRotation.z);

    // Integrate the angular velocity.
    angularVelocity = ovrDisplay.angularVelocity;
    float current_time = Time.time;
    deltaT = current_time - previous_instant;

    // Pure quaternion holding the negated angular velocity.
    Quaternion qVelocity;
    qVelocity.x = -angularVelocity.x;
    qVelocity.y = -angularVelocity.y;
    qVelocity.z = -angularVelocity.z;
    qVelocity.w = 0.0f;

    Quaternion ovrCenterEyeRotation = Quaternion.identity;
    OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.CenterEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out ovrCenterEyeRotation);
    // Conjugate by the center-eye rotation to express the rate in the eye-local frame.
    Quaternion rate = Quaternion.Inverse(ovrCenterEyeRotation) * qVelocity * ovrCenterEyeRotation;

    // Some stuff for debugging
    //mVelocity.x += qVelocity.x;
    //mVelocity.y += qVelocity.y;
    //mVelocity.z += qVelocity.z;
    //mRate.x += rate.x;
    //mRate.y += rate.y;
    //mRate.z += rate.z;
    //sampleCount++;
    //if (sampleCount >= 40.0f)
    //{
    //    mRate = mRate / sampleCount;
    //    mVelocity = mVelocity / sampleCount;
    //    Debug.Log(mVelocity.ToString() + " " + mRate.ToString() + " " + centerEyeRotation.ToString());
    //    sampleCount = 0.0f;
    //    mRate = Vector3.zero;
    //    mVelocity = Vector3.zero;
    //}

    // First-order (small-angle) quaternion increment; see the note after this method.
    Quaternion dQ;
    dQ.x = rate.x * deltaT / 2.0f;
    dQ.y = rate.y * deltaT / 2.0f;
    dQ.z = rate.z * deltaT / 2.0f;
    dQ.w = Mathf.Sqrt(1.0f - dQ.x * dQ.x - dQ.y * dQ.y - dQ.z * dQ.z);
    Quaternion newOrientation = inertial.orientation * dQ;
    inertial.orientation = newOrientation;
    previous_instant = current_time;

    if (Mode != mode0g.OculusNative)
    {
        if (Mode == mode0g.OculusAmplified || Mode == mode0g.OculusOneToOne)
        {
            Vector3 centerEyePosition = Vector3.zero;
            Quaternion centerEyeRotation = Quaternion.identity;
            if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.CenterEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out centerEyePosition))
            {
                tracker.position = centerEyePosition;
            }
            OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.CenterEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out centerEyeRotation);
            // Amplified mode doubles the head rotation by composing it with itself.
            tracker.orientation = (Mode == mode0g.OculusAmplified)
                ? centerEyeRotation * centerEyeRotation
                : centerEyeRotation;
        }
        else if (Mode == mode0g.HeadBob)
        {
            tracker.orientation = Quaternion.Euler(15.0f * Mathf.Sin(0.20f * spin), 30.0f * Mathf.Sin(0.1f * spin), 0.0f);
            spin += 0.1f;
        }
        else if (Mode == mode0g.Inertial)
        {
            tracker.orientation = inertial.orientation;
        }
        else
        {
            tracker.orientation = Quaternion.Euler(0.0f, 0.0f, 0.0f);
        }

        if (hmdPresent)
        {
            trackerAnchor.localRotation = tracker.orientation;
            trackerAnchor.localPosition = tracker.position;
        }
        else
        {
            trackerAnchor.localRotation = emulatedRotation;
            trackerAnchor.localPosition = tracker.position;
        }

        // Note: in the code below, when using UnityEngine's API, we only update anchor
        // transforms when we have a new, fresh value this frame. If we don't, tracking
        // may have been lost, so the pose should not change in the virtual world. This
        // is similar to calling InputTracking's GetLocalPosition and GetLocalRotation,
        // but only applying the result when the pose is valid: if any of these calls
        // returns false, the new pose is not valid and the anchor is left untouched.
        if (updateEyeAnchors)
        {
            // PsyPhy - Need to compute the left and right eye poses.
            // For now it is monoscopic.
            centerEyeAnchor.localPosition = trackerAnchor.localPosition;
            centerEyeAnchor.localRotation = trackerAnchor.localRotation;
            leftEyeAnchor.localPosition = trackerAnchor.localPosition;
            leftEyeAnchor.localRotation = trackerAnchor.localRotation;
            rightEyeAnchor.localPosition = trackerAnchor.localPosition;
            rightEyeAnchor.localRotation = trackerAnchor.localRotation;
        }
    }
    else
    {
        // See the fresh-value note above: anchors are only updated when the runtime
        // reports a valid pose this frame.
        if (updateEyeAnchors)
        {
            if (hmdPresent)
            {
                Vector3 centerEyePosition = Vector3.zero;
                Quaternion centerEyeRotation = Quaternion.identity;

                if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.CenterEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out centerEyePosition))
                {
                    centerEyeAnchor.localPosition = centerEyePosition;
                }
                if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.CenterEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out centerEyeRotation))
                {
                    centerEyeAnchor.localRotation = centerEyeRotation;
                }
            }
            else
            {
                centerEyeAnchor.localRotation = emulatedRotation;
                centerEyeAnchor.localPosition = OVRManager.instance.headPoseRelativeOffsetTranslation;
            }

            if (!hmdPresent || monoscopic)
            {
                leftEyeAnchor.localPosition = centerEyeAnchor.localPosition;
                rightEyeAnchor.localPosition = centerEyeAnchor.localPosition;
                leftEyeAnchor.localRotation = centerEyeAnchor.localRotation;
                rightEyeAnchor.localRotation = centerEyeAnchor.localRotation;
            }
            else
            {
                Vector3 leftEyePosition = Vector3.zero;
                Vector3 rightEyePosition = Vector3.zero;
                Quaternion leftEyeRotation = Quaternion.identity;
                Quaternion rightEyeRotation = Quaternion.identity;

                if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.LeftEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeLeft, OVRPlugin.Step.Render, out leftEyePosition))
                {
                    leftEyeAnchor.localPosition = leftEyePosition;
                }
                if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.RightEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeRight, OVRPlugin.Step.Render, out rightEyePosition))
                {
                    rightEyeAnchor.localPosition = rightEyePosition;
                }
                if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.LeftEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeLeft, OVRPlugin.Step.Render, out leftEyeRotation))
                {
                    leftEyeAnchor.localRotation = leftEyeRotation;
                }
                if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.RightEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeRight, OVRPlugin.Step.Render, out rightEyeRotation))
                {
                    rightEyeAnchor.localRotation = rightEyeRotation;
                }
            }
        }
    }

    if (updateHandAnchors)
    {
        // Controller offset handling: on OpenVR we set the local poses reported by
        // Unity; otherwise the OVRInput local position is already the correct anchor.
        if (OVRManager.loadedXRDevice == OVRManager.XRDevice.OpenVR)
        {
            Vector3 leftPos = Vector3.zero;
            Vector3 rightPos = Vector3.zero;
            Quaternion leftQuat = Quaternion.identity;
            Quaternion rightQuat = Quaternion.identity;

            if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.LeftHand, NodeStatePropertyType.Position, OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render, out leftPos))
            {
                leftHandAnchor.localPosition = leftPos;
            }
            if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.RightHand, NodeStatePropertyType.Position, OVRPlugin.Node.HandRight, OVRPlugin.Step.Render, out rightPos))
            {
                rightHandAnchor.localPosition = rightPos;
            }
            if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.LeftHand, NodeStatePropertyType.Orientation, OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render, out leftQuat))
            {
                leftHandAnchor.localRotation = leftQuat;
            }
            if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.RightHand, NodeStatePropertyType.Orientation, OVRPlugin.Node.HandRight, OVRPlugin.Step.Render, out rightQuat))
            {
                rightHandAnchor.localRotation = rightQuat;
            }
        }
        else
        {
            leftHandAnchor.localPosition = OVRInput.GetLocalControllerPosition(OVRInput.Controller.LTouch);
            rightHandAnchor.localPosition = OVRInput.GetLocalControllerPosition(OVRInput.Controller.RTouch);
            leftHandAnchor.localRotation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.LTouch);
            rightHandAnchor.localRotation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.RTouch);
        }

        trackerAnchor.localPosition = tracker.position;

        OVRPose leftOffsetPose = OVRPose.identity;
        OVRPose rightOffsetPose = OVRPose.identity;
        if (OVRManager.loadedXRDevice == OVRManager.XRDevice.OpenVR)
        {
            leftOffsetPose = OVRManager.GetOpenVRControllerOffset(Node.LeftHand);
            rightOffsetPose = OVRManager.GetOpenVRControllerOffset(Node.RightHand);

            // Sets the poses of the left and right nodes, local to the tracking space.
            OVRManager.SetOpenVRLocalPose(
                trackingSpace.InverseTransformPoint(leftControllerAnchor.position),
                trackingSpace.InverseTransformPoint(rightControllerAnchor.position),
                Quaternion.Inverse(trackingSpace.rotation) * leftControllerAnchor.rotation,
                Quaternion.Inverse(trackingSpace.rotation) * rightControllerAnchor.rotation);
        }
        rightControllerAnchor.localPosition = rightOffsetPose.position;
        rightControllerAnchor.localRotation = rightOffsetPose.orientation;
        leftControllerAnchor.localPosition = leftOffsetPose.position;
        leftControllerAnchor.localRotation = leftOffsetPose.orientation;
    }

    RaiseUpdatedAnchorsEvent();
}
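// The inertial branch above integrates the (negated, eye-frame) angular velocity
// with a first-order quaternion step. Writing the sampled rate as w, the code builds
//
//     dQ = ( w_x*dt/2, w_y*dt/2, w_z*dt/2, sqrt(1 - |w*dt/2|^2) ),   q' = q * dQ,
//
// the small-angle approximation of q * exp(w*dt/2). This holds only while |w|*dt
// stays small: if |w*dt/2| exceeds 1, the sqrt argument goes negative and dQ.w
// becomes NaN. A clamped variant (an assumption, not in the original) as a sketch:
private static Quaternion SafeSmallAngleStep(Vector3 rate, float deltaT)
{
    Quaternion dQ;
    dQ.x = rate.x * deltaT / 2.0f;
    dQ.y = rate.y * deltaT / 2.0f;
    dQ.z = rate.z * deltaT / 2.0f;
    // Clamp so the half-angle vector never exceeds unit length.
    dQ.w = Mathf.Sqrt(Mathf.Max(0.0f, 1.0f - dQ.x * dQ.x - dQ.y * dQ.y - dQ.z * dQ.z));
    return dQ;
}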
void Update()
{
    if (!calibratedCameraPose.HasValue)
    {
        if (!OVRPlugin.Media.GetInitialized())
        {
            return;
        }

        OVRPlugin.CameraIntrinsics cameraIntrinsics;
        OVRPlugin.CameraExtrinsics cameraExtrinsics;
        if (OVRPlugin.GetMixedRealityCameraInfo(0, out cameraExtrinsics, out cameraIntrinsics))
        {
            calibratedCameraPose = cameraExtrinsics.RelativePose.ToOVRPose();
        }
        else
        {
            return;
        }
    }

    OVRPose cameraStagePoseInUnits = calibratedCameraPose.Value;

    // Converting position from meters to decimeters (the unit used by Open Brush)
    cameraStagePoseInUnits.position *= App.METERS_TO_UNITS;

    // Workaround to fix the OVRExtensions.ToWorldSpacePose() and
    // OVRComposition.ComputeCameraWorldSpacePose() calls when computing
    // the Mixed Reality foreground and background camera positions.
    OVRPose headPose = OVRPose.identity;

    Vector3 pos;
    Quaternion rot;
    if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.Position, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out pos))
    {
        headPose.position = pos;
    }
    if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.Head, NodeStatePropertyType.Orientation, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out rot))
    {
        headPose.orientation = rot;
    }

    OVRPose headPoseInUnits = OVRPose.identity;
    headPoseInUnits.position = headPose.position * App.METERS_TO_UNITS;
    headPoseInUnits.orientation = headPose.orientation;

    OVRPose stageToLocalPose = OVRPlugin.GetTrackingTransformRelativePose(OVRPlugin.TrackingOrigin.Stage).ToOVRPose();

    OVRPose stageToLocalPoseInUnits = OVRPose.identity;
    stageToLocalPoseInUnits.position = stageToLocalPose.position * App.METERS_TO_UNITS;
    stageToLocalPoseInUnits.orientation = stageToLocalPose.orientation;

    OVRPose cameraWorldPoseInUnits = headPoseInUnits.Inverse() * stageToLocalPoseInUnits * cameraStagePoseInUnits;
    OVRPose cameraStagePoseFix = stageToLocalPose.Inverse() * headPose * cameraWorldPoseInUnits;

    // Override the MRC camera's stage pose.
    OVRPlugin.OverrideExternalCameraStaticPose(0, true, cameraStagePoseFix.ToPosef());
}