The (symmetric) visible area in front of the tracker.
Example #1
0
    /// <summary>
    /// Reports the positional tracker's current pose and frustum parameters.
    /// FOV angles are in degrees; distances are in meters.
    /// </summary>
    public static void GetPositionalTrackingCameraProperties(
        ref Vector3 position,
        ref Quaternion rotation,
        ref float cameraHFov,
        ref float cameraVFov,
        ref float cameraNearZ,
        ref float cameraFarZ)
    {
        // Sample the tracker pose with no prediction (0.0f seconds ahead).
        OVRPose trackerPose = OVRManager.tracker.GetPose(0.0f);

        rotation = new Quaternion(trackerPose.orientation.x,
                                  trackerPose.orientation.y,
                                  trackerPose.orientation.z,
                                  trackerPose.orientation.w);

        position = new Vector3(trackerPose.position.x, // meters
                               trackerPose.position.y,
                               trackerPose.position.z);

        OVRTracker.Frustum trackerFrustum = OVRManager.tracker.frustum;

        cameraHFov  = trackerFrustum.fov.x; // degrees
        cameraVFov  = trackerFrustum.fov.y; // degrees
        cameraNearZ = trackerFrustum.nearZ; // meters
        cameraFarZ  = trackerFrustum.farZ;  // meters
    }
Example #2
0
    /// <summary>
    /// Retrieves the IR tracking camera's pose and frustum parameters.
    /// Unlike GetPositionalTrackingCameraProperties, FOV angles are
    /// converted to radians here; distances remain in meters.
    /// </summary>
    /// <returns>Always true. NOTE(review): the original platform/HMD guard
    /// was commented out, so no failure path remains — confirm callers do
    /// not rely on a false return.</returns>
    bool GetIRCamera(ref Vector3 position,
                     ref Quaternion rotation,
                     ref float cameraHFov,
                     ref float cameraVFov,
                     ref float cameraNearZ,
                     ref float cameraFarZ)
    {
        OVRPose ss = OVRManager.tracker.GetPose();

        rotation = new Quaternion(ss.orientation.x,
                                  ss.orientation.y,
                                  ss.orientation.z,
                                  ss.orientation.w);

        position = new Vector3(ss.position.x,
                               ss.position.y,
                               ss.position.z);

        OVRTracker.Frustum ff = OVRManager.tracker.GetFrustum();

        // The frustum reports FOV in degrees; convert to radians using the
        // same Mathf.Deg2Rad constant the rest of this file uses.
        cameraHFov  = ff.fov.x * Mathf.Deg2Rad;
        cameraVFov  = ff.fov.y * Mathf.Deg2Rad;
        cameraNearZ = ff.nearZ;
        cameraFarZ  = ff.farZ;

        return true;
    }
    /// <summary>
    /// Computes the six frustum planes (right, left, top, bottom, far, near)
    /// from the sensor's frustum parameters and stores them in plane[0..5].
    /// </summary>
    void ComputePlanes()
    {
        OVRTracker.Frustum frustum = OVRManager.tracker.GetFrustum();
        float near = frustum.nearZ;
        float far  = frustum.farZ;

        // Sine of the half-angles gives the lateral extent per unit depth.
        float halfWidth  = Mathf.Sin(Mathf.Deg2Rad * frustum.fov.x * 0.5f);
        float halfHeight = Mathf.Sin(Mathf.Deg2Rad * frustum.fov.y * 0.5f);

        // Frustum corner directions at unit depth (tracker-local space, +Z forward).
        Vector3 topRight    = new Vector3(halfWidth,   halfHeight, 1f);
        Vector3 bottomRight = new Vector3(halfWidth,  -halfHeight, 1f);
        Vector3 topLeft     = new Vector3(-halfWidth,  halfHeight, 1f);
        Vector3 bottomLeft  = new Vector3(-halfWidth, -halfHeight, 1f);

        // Side planes pass through the apex; winding order fixes each normal.
        plane[0] = new Plane(Vector3.zero, far * topRight,    far * bottomRight);              // right
        plane[1] = new Plane(Vector3.zero, far * bottomLeft,  far * topLeft);                  // left
        plane[2] = new Plane(Vector3.zero, far * topLeft,     far * topRight);                 // top
        plane[3] = new Plane(Vector3.zero, far * bottomRight, far * bottomLeft);               // bottom
        plane[4] = new Plane(far * topRight,    far * topLeft,  far * bottomLeft);             // far
        plane[5] = new Plane(near * bottomLeft, near * topLeft, near * topRight);              // near
    }
    // Called after the camera finishes rendering the scene: draws the
    // tracker's frustum (and, optionally, a small camera marker) using GL.
    void OnRenderObject()
    {
        // Nothing to draw when no tracker is connected.
        if (!OVRManager.tracker.isPresent)
        {
            return;
        }

        OVRTracker.Frustum frustum = OVRManager.tracker.GetFrustum();
        float near = frustum.nearZ;
        float far  = frustum.farZ;

        // Sine of the half-angles gives the lateral extent per unit depth.
        float halfWidth  = Mathf.Sin(Mathf.Deg2Rad * frustum.fov.x * 0.5f);
        float halfHeight = Mathf.Sin(Mathf.Deg2Rad * frustum.fov.y * 0.5f);

        // Transform subsequent GL drawing into the tracker's coordinate frame.
        OVRPose trackerPose = OVRManager.tracker.GetPose(0);
        GL.PushMatrix();
        GL.MultMatrix(Matrix4x4.TRS(trackerPose.position, trackerPose.orientation, Vector3.one));

        // Use the error color while positional tracking is lost.
        Color drawColor = OVRManager.tracker.isPositionTracked ? color : errorColor;

        if (drawCamera)
        {
            // Keep the camera marker no deeper than the near plane.
            cameraDisplaySize = Mathf.Clamp(cameraDisplaySize, 0.0f, near);
            DrawFrustumWire(halfWidth, halfHeight, 0.0f, cameraDisplaySize, lineMaterial, drawColor);
        }

        if (wireframe)
        {
            DrawFrustumWire(halfWidth, halfHeight, near, far, lineMaterial, drawColor);
        }
        else
        {
            DrawFrustumSolid(halfWidth, halfHeight, near, far, solidMaterial, drawColor);
        }

        GL.PopMatrix();
    }