Esempio n. 1
0
    /// <summary>
    /// Pushes the latest tracking data (via the legacy InputTracking API) into the
    /// eye, hand and tracker anchor transforms, then raises the updated-anchors event.
    /// </summary>
    protected virtual void UpdateAnchors()
    {
        EnsureGameObjectIntegrity();

        if (!Application.isPlaying)
        {
            return;
        }

        if (_skipUpdate)
        {
            // Updates are suppressed this frame: park the eye anchors at the identity pose.
            centerEyeAnchor.FromOVRPose(OVRPose.identity, true);
            leftEyeAnchor.FromOVRPose(OVRPose.identity, true);
            rightEyeAnchor.FromOVRPose(OVRPose.identity, true);

            return;
        }

        bool renderMonoscopic = OVRManager.instance.monoscopic;
        bool headsetDetected  = OVRNodeStateProperties.IsHmdPresent();

        OVRPose trackerPose = OVRManager.tracker.GetPose();
        trackerAnchor.localRotation = trackerPose.orientation;

        // Head rotation to emulate when no headset is connected
        // (note the sign flips on the x and y components).
        Vector3 relativeOffsetEuler = OVRManager.instance.headPoseRelativeOffsetRotation;
        Quaternion emulatedHeadRotation = Quaternion.Euler(-relativeOffsetEuler.x, -relativeOffsetEuler.y, relativeOffsetEuler.z);

        // Eye rotations: with no headset, or in monoscopic mode, both eyes mirror the center eye.
        if (headsetDetected)
        {
            centerEyeAnchor.localRotation = InputTracking.GetLocalRotation(Node.CenterEye);
        }
        else
        {
            centerEyeAnchor.localRotation = emulatedHeadRotation;
        }

        if (!headsetDetected || renderMonoscopic)
        {
            leftEyeAnchor.localRotation  = centerEyeAnchor.localRotation;
            rightEyeAnchor.localRotation = centerEyeAnchor.localRotation;
        }
        else
        {
            leftEyeAnchor.localRotation  = InputTracking.GetLocalRotation(Node.LeftEye);
            rightEyeAnchor.localRotation = InputTracking.GetLocalRotation(Node.RightEye);
        }

        leftHandAnchor.localRotation  = InputTracking.GetLocalRotation(Node.LeftHand);
        rightHandAnchor.localRotation = InputTracking.GetLocalRotation(Node.RightHand);

        trackerAnchor.localPosition = trackerPose.position;

        // Eye positions follow the same fallback rules as the rotations above.
        if (headsetDetected)
        {
            centerEyeAnchor.localPosition = InputTracking.GetLocalPosition(Node.CenterEye);
        }
        else
        {
            centerEyeAnchor.localPosition = OVRManager.instance.headPoseRelativeOffsetTranslation;
        }

        if (!headsetDetected || renderMonoscopic)
        {
            leftEyeAnchor.localPosition  = centerEyeAnchor.localPosition;
            rightEyeAnchor.localPosition = centerEyeAnchor.localPosition;
        }
        else
        {
            leftEyeAnchor.localPosition  = InputTracking.GetLocalPosition(Node.LeftEye);
            rightEyeAnchor.localPosition = InputTracking.GetLocalPosition(Node.RightEye);
        }

        leftHandAnchor.localPosition  = InputTracking.GetLocalPosition(Node.LeftHand);
        rightHandAnchor.localPosition = InputTracking.GetLocalPosition(Node.RightHand);

        RaiseUpdatedAnchorsEvent();
    }
    /// <summary>
    /// Refreshes the rig anchors for the current frame, optionally skipping the eye
    /// and/or hand anchors, and raises the updated-anchors event on completion.
    /// </summary>
    /// <param name="updateEyeAnchors">Whether the eye anchors should be refreshed.</param>
    /// <param name="updateHandAnchors">Whether the hand anchors should be refreshed.</param>
    protected override void UpdateAnchors(bool updateEyeAnchors, bool updateHandAnchors)
    {
        // Nothing to do until the OVR runtime has finished initializing.
        if (!OVRManager.OVRManagerinitialized)
        {
            return;
        }

        EnsureGameObjectIntegrity();

        // Anchor poses are only meaningful while the application is actually running.
        if (!Application.isPlaying)
        {
            return;
        }

        if (_skipUpdate)
        {
            // Updates are suppressed: pin every eye anchor to the identity pose instead.
            centerEyeAnchor.FromOVRPose(OVRPose.identity, true);
            leftEyeAnchor.FromOVRPose(OVRPose.identity, true);
            rightEyeAnchor.FromOVRPose(OVRPose.identity, true);

            return;
        }

        bool renderMonoscopic = OVRManager.instance.monoscopic;
        bool headsetDetected  = OVRNodeStateProperties.IsHmdPresent();

        OVRPose trackerPose = OVRManager.tracker.GetPose();
        trackerAnchor.localRotation = trackerPose.orientation;

        // Head rotation to emulate when no headset is connected
        // (note the sign flips on the x and y components).
        Vector3 relativeOffsetEuler = OVRManager.instance.headPoseRelativeOffsetRotation;
        Quaternion emulatedHeadRotation = Quaternion.Euler(-relativeOffsetEuler.x, -relativeOffsetEuler.y, relativeOffsetEuler.z);

        // The helpers below only write anchor transforms when a fresh, valid pose is
        // available this frame; stale poses (e.g. lost tracking) leave the anchors as-is.
        UpdateEyeAnchorsIfNeedIt(updateEyeAnchors, headsetDetected, emulatedHeadRotation, renderMonoscopic);
        UpdateHandAnchorsIfNeedIt(updateHandAnchors, trackerPose);

        RaiseUpdatedAnchorsEvent();
    }
    /// <summary>
    /// Synchronizes every rig anchor (eyes, hands, tracker, controllers) with the
    /// tracking data sampled for the current frame, then raises the updated-anchors event.
    /// </summary>
    protected virtual void UpdateAnchors()
    {
        EnsureGameObjectIntegrity();

        if (!Application.isPlaying)
        {
            return;
        }

        if (_skipUpdate)
        {
            // Updates are suppressed this frame: pin the eye anchors to the identity pose.
            centerEyeAnchor.FromOVRPose(OVRPose.identity, true);
            leftEyeAnchor.FromOVRPose(OVRPose.identity, true);
            rightEyeAnchor.FromOVRPose(OVRPose.identity, true);

            return;
        }

        bool monoscopic = OVRManager.instance.monoscopic;
        bool hmdPresent = OVRNodeStateProperties.IsHmdPresent();

        OVRPose tracker = OVRManager.tracker.GetPose();

        trackerAnchor.localRotation = tracker.orientation;

        // Head rotation to emulate when no HMD is present
        // (note the sign flips on the x and y components).
        Quaternion emulatedRotation = Quaternion.Euler(-OVRManager.instance.headPoseRelativeOffsetRotation.x, -OVRManager.instance.headPoseRelativeOffsetRotation.y, OVRManager.instance.headPoseRelativeOffsetRotation.z);

        //Note: in the below code, when using UnityEngine's API, we only update anchor transforms if we have a new, fresh value this frame.
        //If we don't, it could mean that tracking is lost, etc. so the pose should not change in the virtual world.
        //This can be thought of as similar to calling InputTracking GetLocalPosition and Rotation, but only for doing so when the pose is valid.
        //If false is returned for any of these calls, then a new pose is not valid and thus should not be updated.

        if (hmdPresent)
        {
            Vector3    centerEyePosition = Vector3.zero;
            Quaternion centerEyeRotation = Quaternion.identity;

            if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.CenterEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out centerEyePosition))
            {
                centerEyeAnchor.localPosition = centerEyePosition;
            }
            if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.CenterEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out centerEyeRotation))
            {
                centerEyeAnchor.localRotation = centerEyeRotation;
            }
        }
        else
        {
            // No HMD: fall back to the configured emulated head pose.
            centerEyeAnchor.localRotation = emulatedRotation;
            centerEyeAnchor.localPosition = OVRManager.instance.headPoseRelativeOffsetTranslation;
        }

        if (!hmdPresent || monoscopic)
        {
            // Without a headset, or in monoscopic mode, both eyes mirror the center eye.
            leftEyeAnchor.localPosition  = centerEyeAnchor.localPosition;
            rightEyeAnchor.localPosition = centerEyeAnchor.localPosition;
            leftEyeAnchor.localRotation  = centerEyeAnchor.localRotation;
            rightEyeAnchor.localRotation = centerEyeAnchor.localRotation;
        }
        else
        {
            Vector3    leftEyePosition  = Vector3.zero;
            Vector3    rightEyePosition = Vector3.zero;
            Quaternion leftEyeRotation  = Quaternion.identity;
            Quaternion rightEyeRotation = Quaternion.identity;

            if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.LeftEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeLeft, OVRPlugin.Step.Render, out leftEyePosition))
            {
                leftEyeAnchor.localPosition = leftEyePosition;
            }
            if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.RightEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeRight, OVRPlugin.Step.Render, out rightEyePosition))
            {
                rightEyeAnchor.localPosition = rightEyePosition;
            }
            if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.LeftEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeLeft, OVRPlugin.Step.Render, out leftEyeRotation))
            {
                leftEyeAnchor.localRotation = leftEyeRotation;
            }
            if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.RightEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeRight, OVRPlugin.Step.Render, out rightEyeRotation))
            {
                rightEyeAnchor.localRotation = rightEyeRotation;
            }
        }

        // Fix: removed a hard-coded Vector3(0f, -2f, 0.5f) offset that was being added only
        // to the right hand anchor position (in both the OpenVR and OVRInput branches below).
        // It displaced the right controller from its tracked pose while leaving the left
        // controller untouched; both hands must use their raw tracked positions.

        //Need this for controller offset because if we're on OpenVR, we want to set the local poses as specified by Unity, but if we're not, OVRInput local position is the right anchor
        if (OVRManager.loadedXRDevice == OVRManager.XRDevice.OpenVR)
        {
            Vector3    leftPos   = Vector3.zero;
            Vector3    rightPos  = Vector3.zero;
            Quaternion leftQuat  = Quaternion.identity;
            Quaternion rightQuat = Quaternion.identity;


            if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.LeftHand, NodeStatePropertyType.Position, OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render, out leftPos))
            {
                leftHandAnchor.localPosition = leftPos;
            }
            if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.RightHand, NodeStatePropertyType.Position, OVRPlugin.Node.HandRight, OVRPlugin.Step.Render, out rightPos))
            {
                rightHandAnchor.localPosition = rightPos;
            }
            if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.LeftHand, NodeStatePropertyType.Orientation, OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render, out leftQuat))
            {
                leftHandAnchor.localRotation = leftQuat;
            }
            if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.RightHand, NodeStatePropertyType.Orientation, OVRPlugin.Node.HandRight, OVRPlugin.Step.Render, out rightQuat))
            {
                rightHandAnchor.localRotation = rightQuat;
            }
        }
        else
        {
            leftHandAnchor.localPosition  = OVRInput.GetLocalControllerPosition(OVRInput.Controller.LTouch);
            rightHandAnchor.localPosition = OVRInput.GetLocalControllerPosition(OVRInput.Controller.RTouch);
            leftHandAnchor.localRotation  = OVRInput.GetLocalControllerRotation(OVRInput.Controller.LTouch);
            rightHandAnchor.localRotation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.RTouch);
        }

        trackerAnchor.localPosition = tracker.position;

        OVRPose leftOffsetPose  = OVRPose.identity;
        OVRPose rightOffsetPose = OVRPose.identity;

        if (OVRManager.loadedXRDevice == OVRManager.XRDevice.OpenVR)
        {
            leftOffsetPose  = OVRManager.GetOpenVRControllerOffset(Node.LeftHand);
            rightOffsetPose = OVRManager.GetOpenVRControllerOffset(Node.RightHand);

            //Sets poses of left and right nodes, local to the tracking space.
            OVRManager.SetOpenVRLocalPose(trackingSpace.InverseTransformPoint(leftControllerAnchor.position),
                                          trackingSpace.InverseTransformPoint(rightControllerAnchor.position),
                                          Quaternion.Inverse(trackingSpace.rotation) * leftControllerAnchor.rotation,
                                          Quaternion.Inverse(trackingSpace.rotation) * rightControllerAnchor.rotation);
        }
        // On non-OpenVR devices the offsets remain identity, so the controller anchors
        // simply coincide with their hand anchors.
        rightControllerAnchor.localPosition = rightOffsetPose.position;
        rightControllerAnchor.localRotation = rightOffsetPose.orientation;
        leftControllerAnchor.localPosition  = leftOffsetPose.position;
        leftControllerAnchor.localRotation  = leftOffsetPose.orientation;

        RaiseUpdatedAnchorsEvent();
    }
Esempio n. 4
0
    /// <summary>
    /// Experimental ("0g") anchor update: depending on the active <c>Mode</c>, the head
    /// pose fed to the eye anchors is either the native Oculus pose, an inertially
    /// integrated orientation, a synthetic head-bob, amplified rotation, or a fixed pose.
    /// Mode is switched at runtime via controller buttons or keyboard keys.
    /// </summary>
    /// <param name="updateEyeAnchors">Whether the eye anchors should be refreshed.</param>
    /// <param name="updateHandAnchors">Whether the hand/controller anchors should be refreshed.</param>
    protected virtual void UpdateAnchors(bool updateEyeAnchors, bool updateHandAnchors)
    {
        if (!OVRManager.OVRManagerinitialized)
        {
            return;
        }
        // Record that the runtime is up; presumably read elsewhere — not visible here.
        ovrInitialized = true;

        EnsureGameObjectIntegrity();

        if (!Application.isPlaying)
        {
            return;
        }

        if (_skipUpdate)
        {
            // Updates suppressed: pin the eye anchors to the identity pose.
            centerEyeAnchor.FromOVRPose(OVRPose.identity, true);
            leftEyeAnchor.FromOVRPose(OVRPose.identity, true);
            rightEyeAnchor.FromOVRPose(OVRPose.identity, true);

            return;
        }

        // NOTE: these write fields (not locals), unlike the stock OVRCameraRig version.
        monoscopic = OVRManager.instance.monoscopic;
        hmdPresent = OVRNodeStateProperties.IsHmdPresent();

        // Runtime mode switching: A/I = inertial, B/N = native, X/F = head-fixed,
        // Y/1 = one-to-one; either index trigger or Space resets the inertial state.
        if (OVRInput.GetDown(OVRInput.RawButton.A) || Input.GetKeyDown(KeyCode.I))
        {
            Mode = mode0g.Inertial;
        }
        if (OVRInput.GetDown(OVRInput.RawButton.B) || Input.GetKeyDown(KeyCode.N))
        {
            Mode = mode0g.OculusNative;
        }
        if (OVRInput.GetDown(OVRInput.RawButton.X) || Input.GetKeyDown(KeyCode.F))
        {
            Mode = mode0g.HeadFixed;
        }
        if (OVRInput.GetDown(OVRInput.RawButton.Y) || Input.GetKeyDown(KeyCode.Alpha1))
        {
            Mode = mode0g.OculusOneToOne;
        }
        if (OVRInput.GetDown(OVRInput.RawButton.LIndexTrigger) || OVRInput.GetDown(OVRInput.RawButton.RIndexTrigger) || Input.GetKeyDown(KeyCode.Space))
        {
            spin     = 0.0f;
            inertial = OVRPose.identity;
        }

        OVRPose tracker = OVRManager.tracker.GetPose();

        trackerAnchor.localRotation = tracker.orientation;
        // Head rotation to emulate when no HMD is present (x/y signs flipped).
        Quaternion emulatedRotation = Quaternion.Euler(-OVRManager.instance.headPoseRelativeOffsetRotation.x, -OVRManager.instance.headPoseRelativeOffsetRotation.y, OVRManager.instance.headPoseRelativeOffsetRotation.z);

        // Integrate the velocity
        // NOTE(review): this re-fetches the tracker pose and re-assigns the rotation,
        // duplicating the two statements just above — looks redundant; confirm intent.
        tracker = OVRManager.tracker.GetPose();
        trackerAnchor.localRotation = tracker.orientation;

        angularVelocity = ovrDisplay.angularVelocity;
        float current_time = Time.time;

        // Elapsed time since the previous integration step.
        deltaT = current_time - previous_instant;

        // Angular velocity packed as a pure quaternion (w = 0) with negated components.
        Quaternion qVelocity;

        qVelocity.x = -angularVelocity.x;
        qVelocity.y = -angularVelocity.y;
        qVelocity.z = -angularVelocity.z;
        qVelocity.w = 0.0f;

        Quaternion ovrCenterEyeRotation = Quaternion.identity;

        // NOTE(review): return value ignored here — on failure ovrCenterEyeRotation stays
        // identity and the conjugation below degenerates; confirm that is acceptable.
        OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.CenterEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out ovrCenterEyeRotation);
        // Conjugate the velocity quaternion by the current head orientation — presumably
        // to express the rotation rate in the head-local frame; TODO confirm.
        Quaternion rate = Quaternion.Inverse(ovrCenterEyeRotation) * qVelocity * ovrCenterEyeRotation;

        // Some stuff for debugging
        //mVelocity.x += qVelocity.x;
        //mVelocity.y += qVelocity.y;
        //mVelocity.z += qVelocity.z;
        //mRate.x += rate.x;
        //mRate.y += rate.y;
        //mRate.z += rate.z;

        //sampleCount++;
        //if ( sampleCount >= 40.0f )
        //{
        //    mRate = mRate / sampleCount;
        //    mVelocity = mVelocity / sampleCount;
        //    Debug.Log( mVelocity.ToString() + " " + mRate.ToString() + " " + centerEyeRotation.ToString() );
        //    sampleCount = 0.0f;
        //    mRate = Vector3.zero;
        //    mVelocity = Vector3.zero;

        //}

        // Incremental rotation over deltaT (first-order quaternion integration:
        // vector part = rate * dt / 2, scalar part renormalized via sqrt).
        Quaternion dQ;

        dQ.x = rate.x * deltaT / 2.0f;
        dQ.y = rate.y * deltaT / 2.0f;
        dQ.z = rate.z * deltaT / 2.0f;
        // NOTE(review): if the vector part's squared magnitude exceeds 1 (large rate or
        // a long frame), this sqrt argument goes negative and dQ.w becomes NaN — confirm
        // deltaT is always small enough in practice.
        dQ.w = Mathf.Sqrt(1.0f - dQ.x * dQ.x - dQ.y * dQ.y - dQ.z * dQ.z);

        // Accumulate the increment into the persistent inertial orientation.
        Quaternion newOrientation = inertial.orientation * dQ;

        inertial.orientation = newOrientation;
        previous_instant     = current_time;

        if (Mode != mode0g.OculusNative)
        {
            // Non-native modes: synthesize the tracker pose before applying it to the eyes.
            if (Mode == mode0g.OculusAmplified || Mode == mode0g.OculusOneToOne)
            {
                Vector3    centerEyePosition = Vector3.zero;
                Quaternion centerEyeRotation = Quaternion.identity;
                if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.CenterEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out centerEyePosition))
                {
                    tracker.position = centerEyePosition;
                }
                if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.CenterEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out centerEyeRotation))
                {
                    tracker.orientation = centerEyeRotation;
                }

                // NOTE(review): this unconditional assignment repeats the guarded one above,
                // defeating the validity check (on failure it overwrites with identity);
                // the branch below then assigns orientation yet again — confirm intent.
                tracker.orientation = centerEyeRotation;
                if (Mode == mode0g.OculusAmplified)
                {
                    // Amplified: apply the head rotation twice (squared quaternion).
                    tracker.orientation = centerEyeRotation * centerEyeRotation;
                }
                else
                {
                    tracker.orientation = centerEyeRotation;
                }
            }
            else if (Mode == mode0g.HeadBob)
            {
                // Synthetic oscillating head motion driven by the accumulating 'spin' phase.
                tracker.orientation = Quaternion.Euler(15.0f * Mathf.Sin(0.20f * spin), 30.0f * Mathf.Sin(0.1f * spin), 0.0f);
                spin += 0.1f;
            }
            else if (Mode == mode0g.Inertial)
            {
                // Use the velocity-integrated orientation accumulated above.
                tracker.orientation = inertial.orientation;
            }
            else
            {
                // HeadFixed (and any other mode): lock the orientation upright.
                tracker.orientation = Quaternion.Euler(0.0f, 0.0f, 0.0f);
            }

            if (hmdPresent)
            {
                trackerAnchor.localRotation = tracker.orientation;
                trackerAnchor.localPosition = tracker.position;
            }
            else
            {
                trackerAnchor.localRotation = emulatedRotation;
                trackerAnchor.localPosition = tracker.position;
            }

            //Note: in the below code, when using UnityEngine's API, we only update anchor transforms if we have a new, fresh value this frame.
            //If we don't, it could mean that tracking is lost, etc. so the pose should not change in the virtual world.
            //This can be thought of as similar to calling InputTracking GetLocalPosition and Rotation, but only for doing so when the pose is valid.
            //If false is returned for any of these calls, then a new pose is not valid and thus should not be updated.
            if (updateEyeAnchors)
            {
                // PsyPhy - Need to compute the left and right eye poses.
                // For now it is monoscopic.
                centerEyeAnchor.localPosition = trackerAnchor.localPosition;
                centerEyeAnchor.localRotation = trackerAnchor.localRotation;
                leftEyeAnchor.localPosition   = trackerAnchor.localPosition;
                leftEyeAnchor.localRotation   = trackerAnchor.localRotation;
                rightEyeAnchor.localPosition  = trackerAnchor.localPosition;
                rightEyeAnchor.localRotation  = trackerAnchor.localRotation;
            }
        }
        else
        {
            // OculusNative mode: standard OVRCameraRig eye-anchor update.
            //Note: in the below code, when using UnityEngine's API, we only update anchor transforms if we have a new, fresh value this frame.
            //If we don't, it could mean that tracking is lost, etc. so the pose should not change in the virtual world.
            //This can be thought of as similar to calling InputTracking GetLocalPosition and Rotation, but only for doing so when the pose is valid.
            //If false is returned for any of these calls, then a new pose is not valid and thus should not be updated.
            if (updateEyeAnchors)
            {
                if (hmdPresent)
                {
                    Vector3    centerEyePosition = Vector3.zero;
                    Quaternion centerEyeRotation = Quaternion.identity;

                    if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.CenterEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out centerEyePosition))
                    {
                        centerEyeAnchor.localPosition = centerEyePosition;
                    }
                    if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.CenterEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out centerEyeRotation))
                    {
                        centerEyeAnchor.localRotation = centerEyeRotation;
                    }
                }
                else
                {
                    // No HMD: fall back to the configured emulated head pose.
                    centerEyeAnchor.localRotation = emulatedRotation;
                    centerEyeAnchor.localPosition = OVRManager.instance.headPoseRelativeOffsetTranslation;
                }

                if (!hmdPresent || monoscopic)
                {
                    // Without a headset, or in monoscopic mode, both eyes mirror the center eye.
                    leftEyeAnchor.localPosition  = centerEyeAnchor.localPosition;
                    rightEyeAnchor.localPosition = centerEyeAnchor.localPosition;
                    leftEyeAnchor.localRotation  = centerEyeAnchor.localRotation;
                    rightEyeAnchor.localRotation = centerEyeAnchor.localRotation;
                }
                else
                {
                    Vector3    leftEyePosition  = Vector3.zero;
                    Vector3    rightEyePosition = Vector3.zero;
                    Quaternion leftEyeRotation  = Quaternion.identity;
                    Quaternion rightEyeRotation = Quaternion.identity;

                    if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.LeftEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeLeft, OVRPlugin.Step.Render, out leftEyePosition))
                    {
                        leftEyeAnchor.localPosition = leftEyePosition;
                    }
                    if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.RightEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeRight, OVRPlugin.Step.Render, out rightEyePosition))
                    {
                        rightEyeAnchor.localPosition = rightEyePosition;
                    }
                    if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.LeftEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeLeft, OVRPlugin.Step.Render, out leftEyeRotation))
                    {
                        leftEyeAnchor.localRotation = leftEyeRotation;
                    }
                    if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.RightEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeRight, OVRPlugin.Step.Render, out rightEyeRotation))
                    {
                        rightEyeAnchor.localRotation = rightEyeRotation;
                    }
                }
            }
        }

        if (updateHandAnchors)
        {
            //Need this for controller offset because if we're on OpenVR, we want to set the local poses as specified by Unity, but if we're not, OVRInput local position is the right anchor
            if (OVRManager.loadedXRDevice == OVRManager.XRDevice.OpenVR)
            {
                Vector3    leftPos   = Vector3.zero;
                Vector3    rightPos  = Vector3.zero;
                Quaternion leftQuat  = Quaternion.identity;
                Quaternion rightQuat = Quaternion.identity;

                if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.LeftHand, NodeStatePropertyType.Position, OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render, out leftPos))
                {
                    leftHandAnchor.localPosition = leftPos;
                }
                if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.RightHand, NodeStatePropertyType.Position, OVRPlugin.Node.HandRight, OVRPlugin.Step.Render, out rightPos))
                {
                    rightHandAnchor.localPosition = rightPos;
                }
                if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.LeftHand, NodeStatePropertyType.Orientation, OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render, out leftQuat))
                {
                    leftHandAnchor.localRotation = leftQuat;
                }
                if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.RightHand, NodeStatePropertyType.Orientation, OVRPlugin.Node.HandRight, OVRPlugin.Step.Render, out rightQuat))
                {
                    rightHandAnchor.localRotation = rightQuat;
                }
            }
            else
            {
                leftHandAnchor.localPosition  = OVRInput.GetLocalControllerPosition(OVRInput.Controller.LTouch);
                rightHandAnchor.localPosition = OVRInput.GetLocalControllerPosition(OVRInput.Controller.RTouch);
                leftHandAnchor.localRotation  = OVRInput.GetLocalControllerRotation(OVRInput.Controller.LTouch);
                rightHandAnchor.localRotation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.RTouch);
            }

            trackerAnchor.localPosition = tracker.position;

            OVRPose leftOffsetPose  = OVRPose.identity;
            OVRPose rightOffsetPose = OVRPose.identity;
            if (OVRManager.loadedXRDevice == OVRManager.XRDevice.OpenVR)
            {
                leftOffsetPose  = OVRManager.GetOpenVRControllerOffset(Node.LeftHand);
                rightOffsetPose = OVRManager.GetOpenVRControllerOffset(Node.RightHand);

                //Sets poses of left and right nodes, local to the tracking space.
                OVRManager.SetOpenVRLocalPose(trackingSpace.InverseTransformPoint(leftControllerAnchor.position),
                                              trackingSpace.InverseTransformPoint(rightControllerAnchor.position),
                                              Quaternion.Inverse(trackingSpace.rotation) * leftControllerAnchor.rotation,
                                              Quaternion.Inverse(trackingSpace.rotation) * rightControllerAnchor.rotation);
            }
            // On non-OpenVR devices the offsets remain identity, so the controller anchors
            // coincide with their hand anchors.
            rightControllerAnchor.localPosition = rightOffsetPose.position;
            rightControllerAnchor.localRotation = rightOffsetPose.orientation;
            leftControllerAnchor.localPosition  = leftOffsetPose.position;
            leftControllerAnchor.localRotation  = leftOffsetPose.orientation;
        }

        RaiseUpdatedAnchorsEvent();
    }
Esempio n. 5
0
    private void Update()
    {
#if UNITY_EDITOR
        if (_scriptsReloaded)
        {
            _scriptsReloaded = false;
            instance         = this;
            Initialize();
        }
#endif

        if (OVRPlugin.shouldQuit)
        {
            Application.Quit();
        }

        if (AllowRecenter && OVRPlugin.shouldRecenter)
        {
            OVRManager.display.RecenterPose();
        }


        if (trackingOriginType != _trackingOriginType)
        {
            trackingOriginType = _trackingOriginType;
        }

        tracker.isEnabled = usePositionTracking;

        OVRPlugin.rotation = useRotationTracking;

        OVRPlugin.useIPDInPositionTracking = useIPDInPositionTracking;

        // Dispatch HMD events.

        isHmdPresent = OVRNodeStateProperties.IsHmdPresent();

        if (useRecommendedMSAALevel && QualitySettings.antiAliasing != display.recommendedMSAALevel)
        {
            Debug.Log("The current MSAA level is " + QualitySettings.antiAliasing +
                      ", but the recommended MSAA level is " + display.recommendedMSAALevel +
                      ". Switching to the recommended level.");

            QualitySettings.antiAliasing = display.recommendedMSAALevel;
        }

        if (monoscopic != _monoscopic)
        {
            monoscopic = _monoscopic;
        }

        if (headPoseRelativeOffsetRotation != _headPoseRelativeOffsetRotation)
        {
            headPoseRelativeOffsetRotation = _headPoseRelativeOffsetRotation;
        }

        if (headPoseRelativeOffsetTranslation != _headPoseRelativeOffsetTranslation)
        {
            headPoseRelativeOffsetTranslation = _headPoseRelativeOffsetTranslation;
        }

        if (_wasHmdPresent && !isHmdPresent)
        {
            try
            {
                if (HMDLost != null)
                {
                    HMDLost();
                }
            }
            catch (Exception e)
            {
                Debug.LogError("Caught Exception: " + e);
            }
        }

        if (!_wasHmdPresent && isHmdPresent)
        {
            try
            {
                if (HMDAcquired != null)
                {
                    HMDAcquired();
                }
            }
            catch (Exception e)
            {
                Debug.LogError("Caught Exception: " + e);
            }
        }

        _wasHmdPresent = isHmdPresent;

        // Dispatch HMD mounted events.

        isUserPresent = OVRPlugin.userPresent;

        if (_wasUserPresent && !isUserPresent)
        {
            try
            {
                if (HMDUnmounted != null)
                {
                    HMDUnmounted();
                }
            }
            catch (Exception e)
            {
                Debug.LogError("Caught Exception: " + e);
            }
        }

        if (!_wasUserPresent && isUserPresent)
        {
            try
            {
                if (HMDMounted != null)
                {
                    HMDMounted();
                }
            }
            catch (Exception e)
            {
                Debug.LogError("Caught Exception: " + e);
            }
        }

        _wasUserPresent = isUserPresent;

        // Dispatch VR Focus events.

        hasVrFocus = OVRPlugin.hasVrFocus;

        if (_hadVrFocus && !hasVrFocus)
        {
            try
            {
                if (VrFocusLost != null)
                {
                    VrFocusLost();
                }
            }
            catch (Exception e)
            {
                Debug.LogError("Caught Exception: " + e);
            }
        }

        if (!_hadVrFocus && hasVrFocus)
        {
            try
            {
                if (VrFocusAcquired != null)
                {
                    VrFocusAcquired();
                }
            }
            catch (Exception e)
            {
                Debug.LogError("Caught Exception: " + e);
            }
        }

        _hadVrFocus = hasVrFocus;

        // Dispatch VR Input events.

        bool hasInputFocus = OVRPlugin.hasInputFocus;

        if (_hadInputFocus && !hasInputFocus)
        {
            try
            {
                if (InputFocusLost != null)
                {
                    InputFocusLost();
                }
            }
            catch (Exception e)
            {
                Debug.LogError("Caught Exception: " + e);
            }
        }

        if (!_hadInputFocus && hasInputFocus)
        {
            try
            {
                if (InputFocusAcquired != null)
                {
                    InputFocusAcquired();
                }
            }
            catch (Exception e)
            {
                Debug.LogError("Caught Exception: " + e);
            }
        }

        _hadInputFocus = hasInputFocus;

        // Changing effective rendering resolution dynamically according performance
#if (UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN)
        if (enableAdaptiveResolution)
        {
#if UNITY_2017_2_OR_NEWER
            if (UnityEngine.XR.XRSettings.eyeTextureResolutionScale < maxRenderScale)
            {
                // Allocate renderScale to max to avoid re-allocation
                UnityEngine.XR.XRSettings.eyeTextureResolutionScale = maxRenderScale;
            }
            else
            {
                // Adjusting maxRenderScale in case app started with a larger renderScale value
                maxRenderScale = Mathf.Max(maxRenderScale, UnityEngine.XR.XRSettings.eyeTextureResolutionScale);
            }
            minRenderScale = Mathf.Min(minRenderScale, maxRenderScale);
            float minViewportScale         = minRenderScale / UnityEngine.XR.XRSettings.eyeTextureResolutionScale;
            float recommendedViewportScale = OVRPlugin.GetEyeRecommendedResolutionScale() / UnityEngine.XR.XRSettings.eyeTextureResolutionScale;
            recommendedViewportScale = Mathf.Clamp(recommendedViewportScale, minViewportScale, 1.0f);
            UnityEngine.XR.XRSettings.renderViewportScale = recommendedViewportScale;
#else
            if (UnityEngine.VR.VRSettings.renderScale < maxRenderScale)
            {
                // Allocate renderScale to max to avoid re-allocation
                UnityEngine.VR.VRSettings.renderScale = maxRenderScale;
            }
            else
            {
                // Adjusting maxRenderScale in case app started with a larger renderScale value
                maxRenderScale = Mathf.Max(maxRenderScale, UnityEngine.VR.VRSettings.renderScale);
            }
            minRenderScale = Mathf.Min(minRenderScale, maxRenderScale);
            float minViewportScale         = minRenderScale / UnityEngine.VR.VRSettings.renderScale;
            float recommendedViewportScale = OVRPlugin.GetEyeRecommendedResolutionScale() / UnityEngine.VR.VRSettings.renderScale;
            recommendedViewportScale = Mathf.Clamp(recommendedViewportScale, minViewportScale, 1.0f);
            UnityEngine.VR.VRSettings.renderViewportScale = recommendedViewportScale;
#endif
        }
#endif

        // Dispatch Audio Device events.

        string audioOutId = OVRPlugin.audioOutId;
        if (!prevAudioOutIdIsCached)
        {
            prevAudioOutId         = audioOutId;
            prevAudioOutIdIsCached = true;
        }
        else if (audioOutId != prevAudioOutId)
        {
            try
            {
                if (AudioOutChanged != null)
                {
                    AudioOutChanged();
                }
            }
            catch (Exception e)
            {
                Debug.LogError("Caught Exception: " + e);
            }

            prevAudioOutId = audioOutId;
        }

        string audioInId = OVRPlugin.audioInId;
        if (!prevAudioInIdIsCached)
        {
            prevAudioInId         = audioInId;
            prevAudioInIdIsCached = true;
        }
        else if (audioInId != prevAudioInId)
        {
            try
            {
                if (AudioInChanged != null)
                {
                    AudioInChanged();
                }
            }
            catch (Exception e)
            {
                Debug.LogError("Caught Exception: " + e);
            }

            prevAudioInId = audioInId;
        }

        // Dispatch tracking events.

        if (wasPositionTracked && !tracker.isPositionTracked)
        {
            try
            {
                if (TrackingLost != null)
                {
                    TrackingLost();
                }
            }
            catch (Exception e)
            {
                Debug.LogError("Caught Exception: " + e);
            }
        }

        if (!wasPositionTracked && tracker.isPositionTracked)
        {
            try
            {
                if (TrackingAcquired != null)
                {
                    TrackingAcquired();
                }
            }
            catch (Exception e)
            {
                Debug.LogError("Caught Exception: " + e);
            }
        }

        wasPositionTracked = tracker.isPositionTracked;

        display.Update();
        OVRInput.Update();

#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
        if (enableMixedReality || prevEnableMixedReality)
        {
            Camera mainCamera = FindMainCamera();
            if (Camera.main != null)
            {
                suppressDisableMixedRealityBecauseOfNoMainCameraWarning = false;

                if (enableMixedReality)
                {
                    OVRMixedReality.Update(this.gameObject, mainCamera, compositionMethod, useDynamicLighting, capturingCameraDevice, depthQuality);
                }

                if (prevEnableMixedReality && !enableMixedReality)
                {
                    OVRMixedReality.Cleanup();
                }

                prevEnableMixedReality = enableMixedReality;
            }
            else
            {
                if (!suppressDisableMixedRealityBecauseOfNoMainCameraWarning)
                {
                    Debug.LogWarning("Main Camera is not set, Mixed Reality disabled");
                    suppressDisableMixedRealityBecauseOfNoMainCameraWarning = true;
                }
            }
        }
#endif
    }
// Example n. 6
 protected override void UpdateData()
 {
     _hmdDataAsset.Config = Config;
     bool    hmdPresent    = OVRNodeStateProperties.IsHmdPresent();
     ref var centerEyePose = ref _hmdDataAsset.Root;
// Example n. 7
    /// <summary>
    /// Refreshes the local pose of every rig anchor (eyes, hands, tracker, controllers)
    /// for the current frame, then raises the updated-anchors event. When the HMD is
    /// absent the head pose is emulated from OVRManager's relative-offset settings;
    /// on OpenVR the hand poses come from Unity's tracking nodes instead of OVRInput.
    /// </summary>
    protected virtual void UpdateAnchors()
    {
        EnsureGameObjectIntegrity();

        if (!Application.isPlaying)
        {
            return;
        }

        if (_skipUpdate)
        {
            // Updates are suspended: park all eye anchors at the identity pose.
            centerEyeAnchor.FromOVRPose(OVRPose.identity, true);
            leftEyeAnchor.FromOVRPose(OVRPose.identity, true);
            rightEyeAnchor.FromOVRPose(OVRPose.identity, true);

            return;
        }

        bool monoRendering   = OVRManager.instance.monoscopic;
        bool headsetPresent  = OVRNodeStateProperties.IsHmdPresent();
        // Left/right eyes mirror the center eye when there is no HMD or rendering is monoscopic.
        bool mirrorCenterEye = !headsetPresent || monoRendering;

        OVRPose trackerPose = OVRManager.tracker.GetPose();
        trackerAnchor.localRotation = trackerPose.orientation;

        // Fallback head rotation used when no HMD is connected (note the sign flips on x/y).
        Vector3 offsetRot = OVRManager.instance.headPoseRelativeOffsetRotation;
        Quaternion emulatedHeadRotation = Quaternion.Euler(-offsetRot.x, -offsetRot.y, offsetRot.z);

        if (headsetPresent)
        {
            centerEyeAnchor.localRotation = InputTracking.GetLocalRotation(Node.CenterEye);
        }
        else
        {
            centerEyeAnchor.localRotation = emulatedHeadRotation;
        }

        leftEyeAnchor.localRotation  = mirrorCenterEye ? centerEyeAnchor.localRotation : InputTracking.GetLocalRotation(Node.LeftEye);
        rightEyeAnchor.localRotation = mirrorCenterEye ? centerEyeAnchor.localRotation : InputTracking.GetLocalRotation(Node.RightEye);

        // On OpenVR the hand anchors must take the local poses Unity reports;
        // on every other device OVRInput is the authoritative controller pose source.
        bool usingOpenVR = OVRManager.loadedXRDevice == OVRManager.XRDevice.OpenVR;
        if (usingOpenVR)
        {
            leftHandAnchor.localPosition  = InputTracking.GetLocalPosition(Node.LeftHand);
            rightHandAnchor.localPosition = InputTracking.GetLocalPosition(Node.RightHand);
            leftHandAnchor.localRotation  = InputTracking.GetLocalRotation(Node.LeftHand);
            rightHandAnchor.localRotation = InputTracking.GetLocalRotation(Node.RightHand);
        }
        else
        {
            leftHandAnchor.localPosition  = OVRInput.GetLocalControllerPosition(OVRInput.Controller.LTouch);
            rightHandAnchor.localPosition = OVRInput.GetLocalControllerPosition(OVRInput.Controller.RTouch);
            leftHandAnchor.localRotation  = OVRInput.GetLocalControllerRotation(OVRInput.Controller.LTouch);
            rightHandAnchor.localRotation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.RTouch);
        }

        trackerAnchor.localPosition = trackerPose.position;

        if (headsetPresent)
        {
            centerEyeAnchor.localPosition = InputTracking.GetLocalPosition(Node.CenterEye);
        }
        else
        {
            centerEyeAnchor.localPosition = OVRManager.instance.headPoseRelativeOffsetTranslation;
        }

        leftEyeAnchor.localPosition  = mirrorCenterEye ? centerEyeAnchor.localPosition : InputTracking.GetLocalPosition(Node.LeftEye);
        rightEyeAnchor.localPosition = mirrorCenterEye ? centerEyeAnchor.localPosition : InputTracking.GetLocalPosition(Node.RightEye);

        // Controller anchors remain at identity unless OpenVR supplies per-controller offsets.
        OVRPose leftControllerOffset  = OVRPose.identity;
        OVRPose rightControllerOffset = OVRPose.identity;

        if (usingOpenVR)
        {
            leftControllerOffset  = OVRManager.GetOpenVRControllerOffset(Node.LeftHand);
            rightControllerOffset = OVRManager.GetOpenVRControllerOffset(Node.RightHand);

            // Publish the hand-anchor poses back to OpenVR, expressed local to the tracking space.
            Quaternion invTrackingRotation = Quaternion.Inverse(trackingSpace.rotation);
            OVRManager.SetOpenVRLocalPose(trackingSpace.InverseTransformPoint(leftControllerAnchor.position),
                                          trackingSpace.InverseTransformPoint(rightControllerAnchor.position),
                                          invTrackingRotation * leftControllerAnchor.rotation,
                                          invTrackingRotation * rightControllerAnchor.rotation);
        }

        rightControllerAnchor.localPosition = rightControllerOffset.position;
        rightControllerAnchor.localRotation = rightControllerOffset.orientation;
        leftControllerAnchor.localPosition  = leftControllerOffset.position;
        leftControllerAnchor.localRotation  = leftControllerOffset.orientation;

        RaiseUpdatedAnchorsEvent();
    }