An affine transformation built from a Unity position and orientation.
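For orientation, here is a minimal sketch (not taken from any of the projects below; the helper name is hypothetical) of how such a pose is typically built from a Unity Transform, using only the position and orientation fields and the OVRPose.identity value that appear throughout the examples that follow:

    // Hypothetical helper for illustration only.
    public static OVRPose PoseFromTransform(Transform t, bool local = true)
    {
        OVRPose pose = OVRPose.identity;
        pose.position    = local ? t.localPosition : t.position;   // Unity position
        pose.orientation = local ? t.localRotation : t.rotation;   // Unity orientation
        return pose;
    }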
Example #1
File: OVRCommon.cs Project: RSKbeck/VRGame
 /// <summary>
 /// Multiplies two poses.
 /// </summary>
 public static OVRPose operator *(OVRPose lhs, OVRPose rhs)
 {
     var ret = new OVRPose();
     ret.position = lhs.position + lhs.orientation * rhs.position;
     ret.orientation = lhs.orientation * rhs.orientation;
     return ret;
 }
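A short usage sketch of the operator above (illustrative values only): multiplying two poses rotates and translates the right-hand pose into the left-hand pose's frame, so parent * child expresses child in the parent's coordinate space.

    OVRPose parent = new OVRPose { position = new Vector3(0f, 1f, 0f), orientation = Quaternion.Euler(0f, 90f, 0f) };
    OVRPose child  = new OVRPose { position = new Vector3(0f, 0f, 1f), orientation = Quaternion.identity };
    OVRPose childInParentFrame = parent * child;  // position = parent.position + parent.orientation * child.position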
Example #2
    protected virtual void UpdateAnchors()
    {
        this.EnsureGameObjectIntegrity();
        if (!Application.isPlaying)
        {
            return;
        }
        if (this._skipUpdate)
        {
            this.centerEyeAnchor.FromOVRPose(OVRPose.identity, true);
            this.leftEyeAnchor.FromOVRPose(OVRPose.identity, true);
            this.rightEyeAnchor.FromOVRPose(OVRPose.identity, true);
            return;
        }
        bool    monoscopic = OVRManager.instance.monoscopic;
        OVRPose pose       = OVRManager.tracker.GetPose(0);

        this.trackerAnchor.localRotation   = pose.orientation;
        this.centerEyeAnchor.localRotation = InputTracking.GetLocalRotation(VRNode.CenterEye);
        this.leftEyeAnchor.localRotation   = ((!monoscopic) ? InputTracking.GetLocalRotation(VRNode.LeftEye) : this.centerEyeAnchor.localRotation);
        this.rightEyeAnchor.localRotation  = ((!monoscopic) ? InputTracking.GetLocalRotation(VRNode.RightEye) : this.centerEyeAnchor.localRotation);
        this.leftHandAnchor.localRotation  = OVRInput.GetLocalControllerRotation(OVRInput.Controller.LTouch);
        this.rightHandAnchor.localRotation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.RTouch);
        this.trackerAnchor.localPosition   = pose.position;
        this.centerEyeAnchor.localPosition = InputTracking.GetLocalPosition(VRNode.CenterEye);
        this.leftEyeAnchor.localPosition   = ((!monoscopic) ? InputTracking.GetLocalPosition(VRNode.LeftEye) : this.centerEyeAnchor.localPosition);
        this.rightEyeAnchor.localPosition  = ((!monoscopic) ? InputTracking.GetLocalPosition(VRNode.RightEye) : this.centerEyeAnchor.localPosition);
        this.leftHandAnchor.localPosition  = OVRInput.GetLocalControllerPosition(OVRInput.Controller.LTouch);
        this.rightHandAnchor.localPosition = OVRInput.GetLocalControllerPosition(OVRInput.Controller.RTouch);
        this.RaiseUpdatedAnchorsEvent();
    }
Example #3
    protected OVRPose lastAttachedNodePose   = new OVRPose();              // Sometimes the attached node pose is not readable (lost tracking, low battery, etc.); use the last pose when that happens

    public OVRPose ComputeCameraWorldSpacePose(OVRPlugin.CameraExtrinsics extrinsics, OVRPlugin.Posef calibrationRawPose)
    {
        OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics, calibrationRawPose);
        OVRPose worldSpacePose    = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

        return(worldSpacePose);
    }
Example #4
    private void UpdateAnchors()
    {
        bool monoscopic = OVRManager.instance.monoscopic;

        OVRPose tracker = OVRManager.tracker.GetPose(0d);

        trackerAnchor.localRotation   = tracker.orientation;
        centerEyeAnchor.localRotation = VR.InputTracking.GetLocalRotation(VR.VRNode.CenterEye);
        leftEyeAnchor.localRotation   = monoscopic ? centerEyeAnchor.localRotation : VR.InputTracking.GetLocalRotation(VR.VRNode.LeftEye);
        rightEyeAnchor.localRotation  = monoscopic ? centerEyeAnchor.localRotation : VR.InputTracking.GetLocalRotation(VR.VRNode.RightEye);
        leftHandAnchor.localRotation  = OVRInput.GetLocalHandRotation(OVRInput.Hand.Left);
        rightHandAnchor.localRotation = OVRInput.GetLocalHandRotation(OVRInput.Hand.Right);

        trackerAnchor.localPosition   = tracker.position;
        centerEyeAnchor.localPosition = VR.InputTracking.GetLocalPosition(VR.VRNode.CenterEye);
        leftEyeAnchor.localPosition   = monoscopic ? centerEyeAnchor.localPosition : VR.InputTracking.GetLocalPosition(VR.VRNode.LeftEye);
        rightEyeAnchor.localPosition  = monoscopic ? centerEyeAnchor.localPosition : VR.InputTracking.GetLocalPosition(VR.VRNode.RightEye);
        leftHandAnchor.localPosition  = OVRInput.GetLocalHandPosition(OVRInput.Hand.Left);
        rightHandAnchor.localPosition = OVRInput.GetLocalHandPosition(OVRInput.Hand.Right);

        if (UpdatedAnchors != null)
        {
            UpdatedAnchors(this);
        }
    }
Example #5
    public virtual Matrix4x4 ComputeTrackReferenceMatrix()
    {
        if (centerEyeAnchor == null)
        {
            Debug.LogError("centerEyeAnchor is required");
            return(Matrix4x4.identity);
        }

        // The ideal approach would be to use UnityEngine.VR.VRNode.TrackingReference; then we would not have to depend on the OVRCameraRig. Unfortunately, it is not available in Unity 5.4.3.

        OVRPose headPose;

#if UNITY_2017_2_OR_NEWER
        headPose.position    = UnityEngine.XR.InputTracking.GetLocalPosition(UnityEngine.XR.XRNode.Head);
        headPose.orientation = UnityEngine.XR.InputTracking.GetLocalRotation(UnityEngine.XR.XRNode.Head);
#else
        headPose.position    = UnityEngine.VR.InputTracking.GetLocalPosition(UnityEngine.VR.VRNode.Head);
        headPose.orientation = UnityEngine.VR.InputTracking.GetLocalRotation(UnityEngine.VR.VRNode.Head);
#endif

        OVRPose   invHeadPose   = headPose.Inverse();
        Matrix4x4 invHeadMatrix = Matrix4x4.TRS(invHeadPose.position, invHeadPose.orientation, Vector3.one);

        Matrix4x4 ret = centerEyeAnchor.localToWorldMatrix * invHeadMatrix;

        return(ret);
    }
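A hedged usage note for the method above: because the result composes centerEyeAnchor.localToWorldMatrix with the inverse head pose, it maps tracking-space coordinates into world space (assuming centerEyeAnchor follows the head pose, as in the OVRCameraRig examples on this page). For example:

    // Illustrative only; trackingSpacePoint is a hypothetical Vector3 expressed in tracking space.
    Matrix4x4 trackingToWorld = ComputeTrackReferenceMatrix();
    Vector3 worldPoint = trackingToWorld.MultiplyPoint3x4(trackingSpacePoint);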
Example #6
    public OVRPose ComputeCameraTrackingSpacePose(OVRPlugin.CameraExtrinsics extrinsics)
    {
        OVRPose trackingSpacePose = new OVRPose();

        OVRPose cameraTrackingSpacePose = extrinsics.RelativePose.ToOVRPose();

        trackingSpacePose = cameraTrackingSpacePose;

        if (extrinsics.AttachedToNode != OVRPlugin.Node.None && OVRPlugin.GetNodePresent(extrinsics.AttachedToNode))
        {
            if (usingLastAttachedNodePose)
            {
                Debug.Log("The camera attached node get tracked");
                usingLastAttachedNodePose = false;
            }
            OVRPose attachedNodePose = OVRPlugin.GetNodePose(extrinsics.AttachedToNode, OVRPlugin.Step.Render).ToOVRPose();
            lastAttachedNodePose = attachedNodePose;
            trackingSpacePose    = attachedNodePose * trackingSpacePose;
        }
        else
        {
            if (extrinsics.AttachedToNode != OVRPlugin.Node.None)
            {
                if (!usingLastAttachedNodePose)
                {
                    Debug.LogWarning("The camera attached node could not be tracked, using the last pose");
                    usingLastAttachedNodePose = true;
                }
                trackingSpacePose = lastAttachedNodePose * trackingSpacePose;
            }
        }

        return(trackingSpacePose);
    }
Example #7
    protected void GrabEnd()
    {
        /**/// When the ball is released
        ballcatch = false;
        /**/

        if (m_grabbedObj != null)
        {
            OVRPose localPose = new OVRPose {
                position = OVRInput.GetLocalControllerPosition(m_controller), orientation = OVRInput.GetLocalControllerRotation(m_controller)
            };
            OVRPose offsetPose = new OVRPose {
                position = m_anchorOffsetPosition, orientation = m_anchorOffsetRotation
            };
            localPose = localPose * offsetPose;

            OVRPose trackingSpace   = transform.ToOVRPose() * localPose.Inverse();
            Vector3 linearVelocity  = trackingSpace.orientation * OVRInput.GetLocalControllerVelocity(m_controller);
            Vector3 angularVelocity = trackingSpace.orientation * OVRInput.GetLocalControllerAngularVelocity(m_controller);

            /**/
            GrabbableRelease(linearVelocity * speed, angularVelocity * curve);// Scales up the linear velocity the ball travels with after it is released.
            /**/
        }

        // Re-enable grab volumes to allow overlap events
        GrabVolumeEnable(true);
    }
Example #8
    protected static OVRPose MakePose(Quaternion rot, Vector3 pos)
    {
        OVRPose result = OVRPose.identity;

        result.orientation = rot;
        result.position    = pos;
        return(result);
    }
    /// <summary>
    /// Multiplies two poses.
    /// </summary>
    public static OVRPose operator*(OVRPose lhs, OVRPose rhs)
    {
        var ret = new OVRPose();

        ret.position    = lhs.position + lhs.orientation * rhs.position;
        ret.orientation = lhs.orientation * rhs.orientation;
        return(ret);
    }
    void UpdateDefaultExternalCamera()
    {
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
        // always build a 1080p external camera
        const int   cameraPixelWidth  = 1920;
        const int   cameraPixelHeight = 1080;
        const float cameraAspect      = (float)cameraPixelWidth / cameraPixelHeight;


        string cameraName = "UnityExternalCamera";
        OVRPlugin.CameraIntrinsics cameraIntrinsics = new OVRPlugin.CameraIntrinsics();
        OVRPlugin.CameraExtrinsics cameraExtrinsics = new OVRPlugin.CameraExtrinsics();

        // intrinsics

        cameraIntrinsics.IsValid = OVRPlugin.Bool.True;
        cameraIntrinsics.LastChangedTimeSeconds = Time.time;

        float          vFov = defaultExternalCamera.fieldOfView * Mathf.Deg2Rad;
        float          hFov = Mathf.Atan(Mathf.Tan(vFov * 0.5f) * cameraAspect) * 2.0f;
        OVRPlugin.Fovf fov  = new OVRPlugin.Fovf();
        fov.UpTan   = fov.DownTan = Mathf.Tan(vFov * 0.5f);
        fov.LeftTan = fov.RightTan = Mathf.Tan(hFov * 0.5f);

        cameraIntrinsics.FOVPort = fov;
        cameraIntrinsics.VirtualNearPlaneDistanceMeters = defaultExternalCamera.nearClipPlane;
        cameraIntrinsics.VirtualFarPlaneDistanceMeters  = defaultExternalCamera.farClipPlane;
        cameraIntrinsics.ImageSensorPixelResolution.w   = cameraPixelWidth;
        cameraIntrinsics.ImageSensorPixelResolution.h   = cameraPixelHeight;

        // extrinsics

        cameraExtrinsics.IsValid = OVRPlugin.Bool.True;
        cameraExtrinsics.LastChangedTimeSeconds = Time.time;
        cameraExtrinsics.CameraStatusData       = OVRPlugin.CameraStatus.CameraStatus_Calibrated;
        cameraExtrinsics.AttachedToNode         = OVRPlugin.Node.None;

        Camera       mainCamera = Camera.main;
        OVRCameraRig cameraRig  = mainCamera.GetComponentInParent <OVRCameraRig>();
        if (cameraRig)
        {
            Transform trackingSpace     = cameraRig.trackingSpace;
            OVRPose   trackingSpacePose = trackingSpace.ToOVRPose(false);
            OVRPose   cameraPose        = defaultExternalCamera.transform.ToOVRPose(false);
            OVRPose   relativePose      = trackingSpacePose.Inverse() * cameraPose;
            cameraExtrinsics.RelativePose = relativePose.ToPosef();
        }
        else
        {
            cameraExtrinsics.RelativePose = OVRPlugin.Posef.identity;
        }

        if (!OVRPlugin.SetDefaultExternalCamera(cameraName, ref cameraIntrinsics, ref cameraExtrinsics))
        {
            Debug.LogError("SetDefaultExternalCamera() failed");
        }
#endif
    }
Example #11
    void Update()
    {
        OVRPose eye = GetEyePose(OVREye.Left);

        //OVRPose rightEye = OVRManager.display.GetEyePose(OVREye.Right);

        mainCamera.transform.localRotation = eye.orientation;
        //mainCamera.transform.localPosition = 0.5f * (leftEye.position + rightEye.position);
    }
Example #12
    protected void GrabEnd()
    {
        if (m_grabbedObj != null)
        {
            //Drop the food away from mouth ------------------------
            GameObject grabbed = m_grabbedObj.gameObject;
            if (grabbed.CompareTag("Shrimp"))
            {
                InputManager.GetComponent <InputManager>().ShrimpGrabbed = false;
                InputManager.GetComponent <InputManager>().ResetShrimp();
                GrabbableRelease(Vector3.zero, Vector3.zero);
                GrabVolumeEnable(true);
                return;
            }
            else if (grabbed.CompareTag("Watermelon"))
            {
                InputManager.GetComponent <InputManager>().WatermelonGrabbed = false;
                InputManager.GetComponent <InputManager>().ResetWatermelon();
                GrabbableRelease(Vector3.zero, Vector3.zero);
                GrabVolumeEnable(true);
                return;
            }
            else if (grabbed.CompareTag("Large"))
            {
                AudioManager.GetComponent <AudioManagement>().Play("Throw");
            }

            //-----------------------------------------------------


            OVRPose localPose = new OVRPose {
                position = OVRInput.GetLocalControllerPosition(m_controller), orientation = OVRInput.GetLocalControllerRotation(m_controller)
            };
            OVRPose offsetPose = new OVRPose {
                position = m_anchorOffsetPosition, orientation = m_anchorOffsetRotation
            };
            localPose = localPose * offsetPose;

            OVRPose trackingSpace   = transform.ToOVRPose() * localPose.Inverse();
            Vector3 linearVelocity  = trackingSpace.orientation * OVRInput.GetLocalControllerVelocity(m_controller);
            Vector3 angularVelocity = trackingSpace.orientation * OVRInput.GetLocalControllerAngularVelocity(m_controller);

            float scale = Player.transform.localScale.y;

            if (GameplayManager.GetComponent <GameplayManager>().SceneNumber == 2)
            {
                GrabbableRelease(Vector3.zero, Vector3.zero);
            }
            else
            {
                GrabbableRelease(linearVelocity * scale, angularVelocity * scale);
            }
        }

        // Re-enable grab volumes to allow overlap events
        GrabVolumeEnable(true);
    }
Example #13
    protected virtual void UpdateAnchors()
    {
        EnsureGameObjectIntegrity();

        if (!Application.isPlaying)
        {
            return;
        }

        if (_skipUpdate)
        {
            centerEyeAnchor.FromOVRPose(OVRPose.identity, true);
            leftEyeAnchor.FromOVRPose(OVRPose.identity, true);
            rightEyeAnchor.FromOVRPose(OVRPose.identity, true);

            return;
        }

        bool monoscopic = OVRManager.instance.monoscopic;

        OVRPose tracker = OVRManager.tracker.GetPose();

        trackerAnchor.localRotation = tracker.orientation;
#if UNITY_2017_2_OR_NEWER
        centerEyeAnchor.localRotation = UnityEngine.XR.InputTracking.GetLocalRotation(UnityEngine.XR.XRNode.CenterEye);
        leftEyeAnchor.localRotation   = monoscopic ? centerEyeAnchor.localRotation : UnityEngine.XR.InputTracking.GetLocalRotation(UnityEngine.XR.XRNode.LeftEye);
        rightEyeAnchor.localRotation  = monoscopic ? centerEyeAnchor.localRotation : UnityEngine.XR.InputTracking.GetLocalRotation(UnityEngine.XR.XRNode.RightEye);
#else
        centerEyeAnchor.localRotation = UnityEngine.VR.InputTracking.GetLocalRotation(UnityEngine.VR.VRNode.CenterEye);
        leftEyeAnchor.localRotation   = monoscopic ? centerEyeAnchor.localRotation : UnityEngine.VR.InputTracking.GetLocalRotation(UnityEngine.VR.VRNode.LeftEye);
        rightEyeAnchor.localRotation  = monoscopic ? centerEyeAnchor.localRotation : UnityEngine.VR.InputTracking.GetLocalRotation(UnityEngine.VR.VRNode.RightEye);
#endif
        leftHandAnchor.localRotation  = OVRInput.GetLocalControllerRotation(OVRInput.Controller.LTouch);
        rightHandAnchor.localRotation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.RTouch);

        trackerAnchor.localPosition = tracker.position;
#if UNITY_2017_2_OR_NEWER
        centerEyeAnchor.localPosition = UnityEngine.XR.InputTracking.GetLocalPosition(UnityEngine.XR.XRNode.CenterEye);
        leftEyeAnchor.localPosition   = monoscopic ? centerEyeAnchor.localPosition : UnityEngine.XR.InputTracking.GetLocalPosition(UnityEngine.XR.XRNode.LeftEye);
        rightEyeAnchor.localPosition  = monoscopic ? centerEyeAnchor.localPosition : UnityEngine.XR.InputTracking.GetLocalPosition(UnityEngine.XR.XRNode.RightEye);
#else
        centerEyeAnchor.localPosition = UnityEngine.VR.InputTracking.GetLocalPosition(UnityEngine.VR.VRNode.CenterEye);
        leftEyeAnchor.localPosition   = monoscopic ? centerEyeAnchor.localPosition : UnityEngine.VR.InputTracking.GetLocalPosition(UnityEngine.VR.VRNode.LeftEye);
        rightEyeAnchor.localPosition  = monoscopic ? centerEyeAnchor.localPosition : UnityEngine.VR.InputTracking.GetLocalPosition(UnityEngine.VR.VRNode.RightEye);
#endif


        leftHandAnchor.localPosition  = OVRInput.GetLocalControllerPosition(OVRInput.Controller.LTouch);
        rightHandAnchor.localPosition = OVRInput.GetLocalControllerPosition(OVRInput.Controller.RTouch);

        leftHandAnchor.localRotation  = leftHandAnchor.localRotation * Quaternion.Euler(vLHandRote.x, vLHandRote.y, vLHandRote.z);
        rightHandAnchor.localRotation = rightHandAnchor.localRotation * Quaternion.Euler(vRHandRote.x, vRHandRote.y, vRHandRote.z);
        tHeadLook.localRotation       = Quaternion.Euler(vHeadRote.x, vHeadRote.y, vHeadRote.z);
        tHeadLook.localPosition       = vLookPos;

        RaiseUpdatedAnchorsEvent();
    }
Example #14
    public static void Transform(OVRPose transformPose, OVRPose inPose, out OVRPose outPose)
    {
        transformPose = MakePose(transformPose.orientation, transformPose.position);
        inPose        = MakePose(inPose.orientation, inPose.position);

        outPose             = OVRPose.identity;
        outPose.position    = transformPose.position + (transformPose.orientation * inPose.position);
        outPose.orientation = transformPose.orientation * inPose.orientation;
    }
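Note that the method above is equivalent to the pose multiplication operator from Example #1; if both are in scope, the following one-liner (sketch only) produces the same result:

    OVRPose composed = transformPose * inPose;  // same as Transform(transformPose, inPose, out composed)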
Example #15
	public static Vector3 GetWorldPosition(Vector3 trackingSpacePosition)
	{
		OVRPose tsPose;
		tsPose.position = trackingSpacePosition;
		tsPose.orientation = Quaternion.identity;
		OVRPose wsPose = OVRExtensions.ToWorldSpacePose(tsPose);
		Vector3 pos = wsPose.position;
		return pos;
	}
Example #16
    private bool SubmitLayer(bool overlay, bool headLocked, OVRPose pose, Vector3 scale, int frameIndex)
    {
        int  rightEyeIndex    = (texturesPerStage >= 2) ? 1 : 0;
        bool isOverlayVisible = OVRPlugin.EnqueueSubmitLayer(overlay, headLocked, layerTextures[0].appTexturePtr, layerTextures[rightEyeIndex].appTexturePtr, layerId, frameIndex, pose.flipZ().ToPosef(), scale.ToVector3f(), layerIndex, (OVRPlugin.OverlayShape)currentOverlayShape);

        prevOverlayShape = currentOverlayShape;

        return(isOverlayVisible);
    }
Example #17
File: OVRPose.cs Project: K07H/The-Forest
    internal OVRPose flipZ()
    {
        OVRPose result = this;

        result.position.z    = -result.position.z;
        result.orientation.z = -result.orientation.z;
        result.orientation.w = -result.orientation.w;
        return(result);
    }
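A brief note on flipZ(): negating position.z together with orientation.z and orientation.w mirrors the pose across the XY plane, the usual right-handed/left-handed conversion between Unity and the native runtime. As a sanity-check sketch (the method is internal, so this only compiles inside the same assembly):

    OVRPose p = new OVRPose { position = new Vector3(1f, 2f, 3f), orientation = Quaternion.Euler(10f, 20f, 30f) };
    OVRPose roundTrip = p.flipZ().flipZ();  // flipping twice restores the original pose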
    private void UpdateHandAnchorsIfNeedIt(bool updateHandAnchors, OVRPose tracker)
    {
        if (updateHandAnchors && HandView.IsMine)
        {
            //Need this for controller offset because if we're on OpenVR, we want to set the local poses as specified by Unity, but if we're not, OVRInput local position is the right anchor
            if (OVRManager.loadedXRDevice == OVRManager.XRDevice.OpenVR)
            {
                Vector3    leftPos   = Vector3.zero;
                Vector3    rightPos  = Vector3.zero;
                Quaternion leftQuat  = Quaternion.identity;
                Quaternion rightQuat = Quaternion.identity;

                if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.LeftHand, NodeStatePropertyType.Position, OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render, out leftPos))
                {
                    leftHandAnchor.localPosition = leftPos;
                }
                if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.RightHand, NodeStatePropertyType.Position, OVRPlugin.Node.HandRight, OVRPlugin.Step.Render, out rightPos))
                {
                    rightHandAnchor.localPosition = rightPos;
                }
                if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.LeftHand, NodeStatePropertyType.Orientation, OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render, out leftQuat))
                {
                    leftHandAnchor.localRotation = leftQuat;
                }
                if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.RightHand, NodeStatePropertyType.Orientation, OVRPlugin.Node.HandRight, OVRPlugin.Step.Render, out rightQuat))
                {
                    rightHandAnchor.localRotation = rightQuat;
                }
            }
            else
            {
                leftHandAnchor.localPosition  = OVRInput.GetLocalControllerPosition(OVRInput.Controller.LTouch);
                rightHandAnchor.localPosition = OVRInput.GetLocalControllerPosition(OVRInput.Controller.RTouch);
                leftHandAnchor.localRotation  = OVRInput.GetLocalControllerRotation(OVRInput.Controller.LTouch);
                rightHandAnchor.localRotation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.RTouch);
            }
            trackerAnchor.localPosition = tracker.position;

            OVRPose leftOffsetPose  = OVRPose.identity;
            OVRPose rightOffsetPose = OVRPose.identity;
            if (OVRManager.loadedXRDevice == OVRManager.XRDevice.OpenVR)
            {
                GetOpenVRControllerOffset(Node.LeftHand, HandView, ref leftOffsetPose);
                GetOpenVRControllerOffset(Node.RightHand, HandView, ref rightOffsetPose);

                //Sets poses of left and right nodes, local to the tracking space.
                OVRManager.SetOpenVRLocalPose(trackingSpace.InverseTransformPoint(leftControllerAnchor.position),
                                              trackingSpace.InverseTransformPoint(rightControllerAnchor.position),
                                              Quaternion.Inverse(trackingSpace.rotation) * leftControllerAnchor.rotation,
                                              Quaternion.Inverse(trackingSpace.rotation) * rightControllerAnchor.rotation);
            }
            rightControllerAnchor.localPosition = rightOffsetPose.position;
            rightControllerAnchor.localRotation = rightOffsetPose.orientation;
            leftControllerAnchor.localPosition  = leftOffsetPose.position;
            leftControllerAnchor.localRotation  = leftOffsetPose.orientation;
        }
    }
 protected void RefreshCameraPoses(float fovY, float aspect, OVRPose pose)
 {
     Camera[] cameras = { fgCamera, bgCamera, compositionCamera };
     foreach (Camera c in cameras)
     {
         c.fieldOfView = fovY;
         c.aspect      = aspect;
         c.transform.FromOVRPose(pose);
     }
 }
Example #20
    public static OVRPose ToWorldSpacePose(OVRPose trackingSpacePose)
    {
        OVRPose ovrpose;

        ovrpose.position    = InputTracking.GetLocalPosition(VRNode.Head);
        ovrpose.orientation = InputTracking.GetLocalRotation(VRNode.Head);
        OVRPose rhs = ovrpose.Inverse() * trackingSpacePose;

        return(Camera.main.transform.ToOVRPose(false) * rhs);
    }
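A short usage sketch for the method above (targetTransform and trackingSpacePose are placeholders): convert a pose expressed in tracking space to world space, then apply it with the FromOVRPose extension shown near the end of this page.

    // Sketch only; trackingSpacePose is a hypothetical pose expressed in tracking space.
    OVRPose worldPose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
    targetTransform.FromOVRPose(worldPose);  // FromOVRPose applies it in world space by default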
Example #21
 /// <summary>
 /// Creates an instance of OVRDisplay. Called by OVRManager.
 /// </summary>
 public OVRDisplay()
 {
     UpdateTextures();
     if (OVRPlugin.GetSystemHeadsetType() == OVRPlugin.SystemHeadset.Oculus_Quest)
     {
         previousTrackingOrigin = OVRManager.instance.trackingOriginType;
         OVRManager.TrackingOrigin relativeOrigin = (previousTrackingOrigin != OVRManager.TrackingOrigin.Stage) ? OVRManager.TrackingOrigin.Stage : OVRManager.TrackingOrigin.EyeLevel;
         previousRelativeTrackingSpacePose = OVRPlugin.GetTrackingTransformRelativePose((OVRPlugin.TrackingOrigin)relativeOrigin).ToOVRPose();
     }
 }
    bool ComputeSubmit(ref OVRPose pose, ref Vector3 scale, ref bool overlay, ref bool headLocked)
    {
        Camera headCamera = Camera.main;

        overlay    = (currentOverlayType == OverlayType.Overlay);
        headLocked = false;
        for (var t = transform; t != null && !headLocked; t = t.parent)
        {
            headLocked |= (t == headCamera.transform);
        }

        pose  = (headLocked) ? transform.ToHeadSpacePose(headCamera) : transform.ToTrackingSpacePose(headCamera);
        scale = transform.lossyScale;
        for (int i = 0; i < 3; ++i)
        {
            scale[i] /= headCamera.transform.lossyScale[i];
        }

        if (currentOverlayShape == OverlayShape.Cubemap)
        {
#if UNITY_ANDROID && !UNITY_EDITOR
            if (OVRPlugin.nativeXrApi != OVRPlugin.XrApi.OpenXR)
            {
                //HACK: VRAPI cubemaps are assumed to be yawed 180 degrees relative to LibOVR.
                pose.orientation = pose.orientation * Quaternion.AngleAxis(180, Vector3.up);
            }
#endif
            pose.position = headCamera.transform.position;
        }

        // Pack the offsetCenter directly into pose.position for offcenterCubemap
        if (currentOverlayShape == OverlayShape.OffcenterCubemap)
        {
            pose.position = transform.position;
            if (pose.position.magnitude > 1.0f)
            {
                Debug.LogWarning("Your cube map center offset's magnitude is greater than 1, which will cause some cube map pixel always invisible .");
                return(false);
            }
        }

        // Cylinder overlay sanity checking when not using OpenXR
        if (OVRPlugin.nativeXrApi != OVRPlugin.XrApi.OpenXR && currentOverlayShape == OverlayShape.Cylinder)
        {
            float arcAngle = scale.x / scale.z / (float)Math.PI * 180.0f;
            if (arcAngle > 180.0f)
            {
                Debug.LogWarning("Cylinder overlay's arc angle has to be below 180 degree, current arc angle is " + arcAngle + " degree.");
                return(false);
            }
        }

        return(true);
    }
Example #23
    public static Vector3 GetWorldPosition(Vector3 trackingSpacePosition)
    {
        OVRPose tsPose;

        tsPose.position    = trackingSpacePosition;
        tsPose.orientation = Quaternion.identity;
        OVRPose wsPose = OVRExtensions.ToWorldSpacePose(tsPose);
        Vector3 pos    = wsPose.position;

        return(pos);
    }
Example #24
    private bool SubmitLayer(bool overlay, bool headLocked, OVRPose pose, Vector3 scale)
    {
        int  num    = (this.texturesPerStage < 2) ? 0 : 1;
        bool result = OVRPlugin.EnqueueSubmitLayer(overlay, headLocked, this.layerTextures[0].appTexturePtr, this.layerTextures[num].appTexturePtr, this.layerId, this.frameIndex, pose.flipZ().ToPosef(), scale.ToVector3f(), this.layerIndex, (OVRPlugin.OverlayShape) this.currentOverlayShape);

        if (this.isDynamic)
        {
            this.frameIndex++;
        }
        this.prevOverlayShape = this.currentOverlayShape;
        return(result);
    }
Example #25
 public override void Update(Camera mainCamera)
 {
     if (!this.hasCameraDeviceOpened)
     {
         return;
     }
     this.frameRealtime = Time.realtimeSinceStartup;
     this.historyRecordCursorIndex++;
     if (this.historyRecordCursorIndex >= this.historyRecordCount)
     {
         this.historyRecordCursorIndex = 0;
     }
     if (!OVRPlugin.SetHandNodePoseStateLatency((double)OVRManager.instance.handPoseStateLatency))
     {
         Debug.LogWarning("HandPoseStateLatency is invalid. Expect a value between 0.0 to 0.5, get " + OVRManager.instance.handPoseStateLatency);
     }
     this.RefreshRenderTextures(mainCamera);
     this.bgCamera.clearFlags      = mainCamera.clearFlags;
     this.bgCamera.backgroundColor = mainCamera.backgroundColor;
     this.bgCamera.cullingMask     = (mainCamera.cullingMask & ~OVRManager.instance.extraHiddenLayers);
     this.fgCamera.cullingMask     = (mainCamera.cullingMask & ~OVRManager.instance.extraHiddenLayers);
     OVRPlugin.CameraExtrinsics extrinsics;
     OVRPlugin.CameraIntrinsics cameraIntrinsics;
     if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
     {
         OVRPose pose = default(OVRPose);
         pose = OVRExtensions.ToWorldSpacePose(new OVRPose
         {
             position    = OVRMixedReality.fakeCameraPositon,
             orientation = OVRMixedReality.fakeCameraRotation
         });
         this.RefreshCameraPoses(OVRMixedReality.fakeCameraFov, OVRMixedReality.fakeCameraAspect, pose);
     }
     else if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out cameraIntrinsics))
     {
         OVRPose pose2  = base.ComputeCameraWorldSpacePose(extrinsics);
         float   fovY   = Mathf.Atan(cameraIntrinsics.FOVPort.UpTan) * 57.29578f * 2f;
         float   aspect = cameraIntrinsics.FOVPort.LeftTan / cameraIntrinsics.FOVPort.UpTan;
         this.RefreshCameraPoses(fovY, aspect, pose2);
     }
     else
     {
         Debug.LogWarning("Failed to get external camera information");
     }
     this.compositionCamera.GetComponent <OVRCameraComposition.OVRCameraFrameCompositionManager>().boundaryMeshMaskTexture = this.historyRecordArray[this.historyRecordCursorIndex].boundaryMeshMaskTexture;
     OVRSandwichComposition.HistoryRecord historyRecordForComposition = this.GetHistoryRecordForComposition();
     base.UpdateCameraFramePlaneObject(mainCamera, this.compositionCamera, historyRecordForComposition.boundaryMeshMaskTexture);
     OVRSandwichComposition.OVRSandwichCompositionManager component = this.compositionCamera.gameObject.GetComponent <OVRSandwichComposition.OVRSandwichCompositionManager>();
     component.fgTexture = historyRecordForComposition.fgRenderTexture;
     component.bgTexture = historyRecordForComposition.bgRenderTexture;
     this.cameraProxyPlane.transform.position = this.fgCamera.transform.position + this.fgCamera.transform.forward * this.cameraFramePlaneDistance;
     this.cameraProxyPlane.transform.LookAt(this.cameraProxyPlane.transform.position + this.fgCamera.transform.forward);
 }
Example #26
    void OnUpdatedAnchors(OVRCameraRig rig)
    {
        if (!enabled)
        {
            return;
        }

        OVRPose pose = rig.trackerAnchor.ToOVRPose(true).Inverse();

        pose = trackerPose * pose;
        rig.trackingSpace.FromOVRPose(pose, true);
    }
Example #27
    private void UpdateAnchors()
    {
        EnsureGameObjectIntegrity();

        if (!Application.isPlaying)
        {
            return;
        }

        if (_skipUpdate)
        {
            centerEyeAnchor.FromOVRPose(OVRPose.identity, true);
            leftEyeAnchor.FromOVRPose(OVRPose.identity, true);
            rightEyeAnchor.FromOVRPose(OVRPose.identity, true);

            return;
        }

        bool monoscopic = OVRManager.instance.monoscopic;

        OVRPose tracker = OVRManager.tracker.GetPose();

        trackerAnchor.localRotation = tracker.orientation;
#if UNITY_2017_2_OR_NEWER
        centerEyeAnchor.localRotation = UnityEngine.XR.InputTracking.GetLocalRotation(UnityEngine.XR.XRNode.CenterEye);
        leftEyeAnchor.localRotation   = monoscopic ? centerEyeAnchor.localRotation : UnityEngine.XR.InputTracking.GetLocalRotation(UnityEngine.XR.XRNode.LeftEye);
        rightEyeAnchor.localRotation  = monoscopic ? centerEyeAnchor.localRotation : UnityEngine.XR.InputTracking.GetLocalRotation(UnityEngine.XR.XRNode.RightEye);
#else
        centerEyeAnchor.localRotation = UnityEngine.VR.InputTracking.GetLocalRotation(UnityEngine.VR.VRNode.CenterEye);
        leftEyeAnchor.localRotation   = monoscopic ? centerEyeAnchor.localRotation : UnityEngine.VR.InputTracking.GetLocalRotation(UnityEngine.VR.VRNode.LeftEye);
        rightEyeAnchor.localRotation  = monoscopic ? centerEyeAnchor.localRotation : UnityEngine.VR.InputTracking.GetLocalRotation(UnityEngine.VR.VRNode.RightEye);
#endif
        leftHandAnchor.localRotation  = OVRInput.GetLocalControllerRotation(OVRInput.Controller.LTouch);
        rightHandAnchor.localRotation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.RTouch);

        trackerAnchor.localPosition = tracker.position;
#if UNITY_2017_2_OR_NEWER
        centerEyeAnchor.localPosition = UnityEngine.XR.InputTracking.GetLocalPosition(UnityEngine.XR.XRNode.CenterEye);
        leftEyeAnchor.localPosition   = monoscopic ? centerEyeAnchor.localPosition : UnityEngine.XR.InputTracking.GetLocalPosition(UnityEngine.XR.XRNode.LeftEye);
        rightEyeAnchor.localPosition  = monoscopic ? centerEyeAnchor.localPosition : UnityEngine.XR.InputTracking.GetLocalPosition(UnityEngine.XR.XRNode.RightEye);
#else
        centerEyeAnchor.localPosition = UnityEngine.VR.InputTracking.GetLocalPosition(UnityEngine.VR.VRNode.CenterEye);
        leftEyeAnchor.localPosition   = monoscopic ? centerEyeAnchor.localPosition : UnityEngine.VR.InputTracking.GetLocalPosition(UnityEngine.VR.VRNode.LeftEye);
        rightEyeAnchor.localPosition  = monoscopic ? centerEyeAnchor.localPosition : UnityEngine.VR.InputTracking.GetLocalPosition(UnityEngine.VR.VRNode.RightEye);
#endif
        leftHandAnchor.localPosition  = OVRInput.GetLocalControllerPosition(OVRInput.Controller.LTouch);
        rightHandAnchor.localPosition = OVRInput.GetLocalControllerPosition(OVRInput.Controller.RTouch);

        if (UpdatedAnchors != null)
        {
            UpdatedAnchors(this);
        }
    }
Example #28
 public override void Update(Camera mainCamera)
 {
     if (!this.hasCameraDeviceOpened)
     {
         return;
     }
     if (!OVRPlugin.SetHandNodePoseStateLatency((double)OVRManager.instance.handPoseStateLatency))
     {
         Debug.LogWarning("HandPoseStateLatency is invalid. Expect a value between 0.0 to 0.5, get " + OVRManager.instance.handPoseStateLatency);
     }
     this.directCompositionCamera.clearFlags      = mainCamera.clearFlags;
     this.directCompositionCamera.backgroundColor = mainCamera.backgroundColor;
     this.directCompositionCamera.cullingMask     = (mainCamera.cullingMask & ~OVRManager.instance.extraHiddenLayers);
     this.directCompositionCamera.nearClipPlane   = mainCamera.nearClipPlane;
     this.directCompositionCamera.farClipPlane    = mainCamera.farClipPlane;
     OVRPlugin.CameraExtrinsics extrinsics;
     OVRPlugin.CameraIntrinsics cameraIntrinsics;
     if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
     {
         OVRPose pose = default(OVRPose);
         pose = OVRExtensions.ToWorldSpacePose(new OVRPose
         {
             position    = OVRMixedReality.fakeCameraPositon,
             orientation = OVRMixedReality.fakeCameraRotation
         });
         this.directCompositionCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
         this.directCompositionCamera.aspect      = OVRMixedReality.fakeCameraAspect;
         this.directCompositionCamera.transform.FromOVRPose(pose, false);
     }
     else if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out cameraIntrinsics))
     {
         OVRPose pose2       = base.ComputeCameraWorldSpacePose(extrinsics);
         float   fieldOfView = Mathf.Atan(cameraIntrinsics.FOVPort.UpTan) * 57.29578f * 2f;
         float   aspect      = cameraIntrinsics.FOVPort.LeftTan / cameraIntrinsics.FOVPort.UpTan;
         this.directCompositionCamera.fieldOfView = fieldOfView;
         this.directCompositionCamera.aspect      = aspect;
         this.directCompositionCamera.transform.FromOVRPose(pose2, false);
     }
     else
     {
         Debug.LogWarning("Failed to get external camera information");
     }
     if (this.hasCameraDeviceOpened)
     {
         if (this.boundaryMeshMaskTexture == null || this.boundaryMeshMaskTexture.width != Screen.width || this.boundaryMeshMaskTexture.height != Screen.height)
         {
             this.boundaryMeshMaskTexture = new RenderTexture(Screen.width, Screen.height, 0, RenderTextureFormat.R8);
             this.boundaryMeshMaskTexture.Create();
         }
         base.UpdateCameraFramePlaneObject(mainCamera, this.directCompositionCamera, this.boundaryMeshMaskTexture);
         this.directCompositionCamera.GetComponent <OVRCameraComposition.OVRCameraFrameCompositionManager>().boundaryMeshMaskTexture = this.boundaryMeshMaskTexture;
     }
 }
Example #29
    bool GetIRCamera(ref Vector3 position,
                     ref Quaternion rotation,
                     ref float cameraHFov,
                     ref float cameraVFov,
                     ref float cameraNearZ,
                     ref float cameraFarZ)
    {
        //if (!OVRManager.isSupportedPlatform || Hmd==null) return false;

        /*
         * ovrTrackingState ss = OVRDevice.HMD.GetTrackingState();
         *
         * rotation = new Quaternion(	ss.CameraPose.Orientation.x,
         *                        ss.CameraPose.Orientation.y,
         *                        ss.CameraPose.Orientation.z,
         *                        ss.CameraPose.Orientation.w);
         *
         * position = new Vector3(	ss.CameraPose.Position.x,
         *                     ss.CameraPose.Position.y,
         *                     ss.CameraPose.Position.z);
         */
        OVRPose ss = OVRManager.tracker.GetPose();

        rotation = new Quaternion(ss.orientation.x,
                                  ss.orientation.y,
                                  ss.orientation.z,
                                  ss.orientation.w);

        position = new Vector3(ss.position.x,
                               ss.position.y,
                               ss.position.z);

        OVRTracker.Frustum ff = OVRManager.tracker.GetFrustum();


        cameraHFov  = ff.fov.x * (float)Mathf.PI / 180.0f;
        cameraVFov  = ff.fov.y * (float)Mathf.PI / 180.0f;
        cameraNearZ = ff.nearZ;
        cameraFarZ  = ff.farZ;

        /*
         *      HmdDesc desc = Hmd.HmdDesc();
         *
         *      cameraHFov = desc.CameraFrustumHFovInRadians;
         *      cameraVFov = desc.CameraFrustumVFovInRadians;
         *      cameraNearZ = desc.CameraFrustumNearZInMeters;
         *      cameraFarZ = desc.CameraFrustumFarZInMeters;
         */
        //OVRDevice.OrientSensor (ref rotation);

        return(true);
    }
 internal static void FromOVRPose(this Transform t, OVRPose pose, bool isLocal = false)
 {
     if (isLocal)
     {
         t.localRotation = pose.orientation;
         t.localPosition = pose.position;
     }
     else
     {
         t.rotation = pose.orientation;
         t.position = pose.position;
     }
 }
Example #32
    private void UpdateAnchors()
    {
        OVRPose leftEye  = OVRManager.display.GetEyePose(OVREye.Left);
        OVRPose rightEye = OVRManager.display.GetEyePose(OVREye.Right);

        leftEyeAnchor.localRotation   = leftEye.orientation;
        centerEyeAnchor.localRotation = leftEye.orientation;         // using left eye for now
        rightEyeAnchor.localRotation  = rightEye.orientation;

        leftEyeAnchor.localPosition   = leftEye.position;
        centerEyeAnchor.localPosition = 0.5f * (leftEye.position + rightEye.position);
        rightEyeAnchor.localPosition  = rightEye.position;
    }