// ----- Example #1 -----
    ///////////////////////////////////////////////////////////
    // VISION FUNCTIONS
    ///////////////////////////////////////////////////////////

    /// <summary>
    /// Mainly to be used to reset camera position orientation.
    /// camOffset will move the center eye position to an optimal location;
    /// clampX/Y/Z will zero out the offset that is used (restricts offset in a given axis).
    /// </summary>
    /// <param name="camOffset">Offset subtracted from the vision camera position (eye-from-neck adjustment).</param>
    /// <param name="clampX">If set to <c>true</c>, zero the X component of the computed offset.</param>
    /// <param name="clampY">If set to <c>true</c>, zero the Y component of the computed offset.</param>
    /// <param name="clampZ">If set to <c>true</c>, zero the Z component of the computed offset.</param>
    public static void ResetCameraPositionOrientation(ref Vector3 camOffset,
                                                      bool clampX, bool clampY, bool clampZ)
    {
        Vector3    camPos = Vector3.zero;
        Quaternion camO   = Quaternion.identity;

        // Query the current vision-tracked camera pose (orientation is read but unused here).
        OVRDevice.GetCameraPositionOrientation(ref camPos, ref camO);

        // Start the offset from the raw vision camera position.
        CameraPositionOffset = camPos;

        // Restrict the offset in the requested axes.
        if (clampX)
        {
            CameraPositionOffset.x = 0.0f;
        }
        if (clampY)
        {
            CameraPositionOffset.y = 0.0f;
        }
        if (clampZ)
        {
            CameraPositionOffset.z = 0.0f;
        }

        // Adjust for optimal offset from zero (for eye position from neck etc.)
        CameraPositionOffset -= camOffset;
    }
// ----- Example #2 -----
    /// <summary>
    /// Mainly to be used to reset camera position orientation.
    /// Scales and offsets both the positional and orientational components of the
    /// vision-tracked camera pose, storing the results in
    /// CameraPositionOffset and CameraOrientationOffset.
    /// </summary>
    /// <param name="posScale">Per-axis scale applied to the tracked camera position.</param>
    /// <param name="posOffset">Positional offset subtracted after scaling.</param>
    /// <param name="ortScale">Per-axis scale applied to the inverse-orientation Euler angles.</param>
    /// <param name="ortOffset">Euler-angle offset subtracted after scaling.</param>
    public static void ResetCameraPositionOrientation(Vector3 posScale, Vector3 posOffset, Vector3 ortScale, Vector3 ortOffset)
    {
        Vector3    camPos = Vector3.zero;
        Quaternion camO   = Quaternion.identity;

        // Predicted pose so the reset matches what will actually be rendered.
        OVRDevice.GetCameraPositionOrientation(ref camPos, ref camO, OVRDevice.PredictionTime);

        CameraPositionOffset = Vector3.Scale(camPos, posScale) - posOffset;

        // Invert the tracked orientation so the offset cancels the current pose.
        Vector3 euler = Quaternion.Inverse(camO).eulerAngles;

        CameraOrientationOffset = Quaternion.Euler(Vector3.Scale(euler, ortScale) - ortOffset);
    }
    /// <summary>
    /// Turns on magnetometer-based yaw correction and captures the current
    /// vision-tracked camera orientation as the Euler-angle reference
    /// (CurEulerRef) used by UpdateGeometry.
    /// </summary>
    void EnableYawCorrection()
    {
        OVRDevice.EnableMagYawCorrection(true);

        Quaternion camOrientation = Quaternion.identity;
        Vector3    camPosition    = Vector3.zero; // required by the API signature; value is discarded

        if (CameraController != null)
        {
            OVRDevice.GetCameraPositionOrientation(ref camPosition, ref camOrientation);
        }

        CurEulerRef = camOrientation.eulerAngles;
    }
    /// <summary>
    /// Updates the debug geometry: rotates the compass object so it reflects
    /// the camera's yaw relative to the reference captured in EnableYawCorrection,
    /// and tints the reference mark. Early-outs when geometry display is disabled
    /// or any required object is missing.
    /// </summary>
    public void UpdateGeometry()
    {
        if (MagShowGeometry == false)
        {
            return;
        }
        if (CameraController == null)
        {
            return;
        }
        if ((GeometryReference == null) || (GeometryCompass == null))
        {
            return;
        }

        // All set, we can update the geometry with camera and position values
        Quaternion q = Quaternion.identity;
        Vector3    o = Vector3.zero;         // required by the API signature; value is discarded

        if (CameraController != null)
        {
            OVRDevice.GetCameraPositionOrientation(ref o, ref q);
        }

        Vector3 v = GeometryCompass.transform.localEulerAngles;

        // Counter-rotate the compass yaw relative to the reference yaw captured
        // when yaw correction was enabled (only Y is driven; X/Z keep local values).
        v.y = -q.eulerAngles.y + CurEulerRef.y;
        GeometryCompass.transform.localEulerAngles = v;

        // NOTE(review): the original comment said "red if we are calibrating",
        // but this code sets the mark red unconditionally — confirm whether a
        // calibration-state check was dropped.
        if (GeometryReferenceMarkMat != null)
        {
            Color c = Color.red;

            GeometryReferenceMarkMat.SetColor("_Color", c);
        }
    }
// ----- Example #5 -----
    /// <summary>
    /// Sets the camera orientation and position for this eye camera.
    /// On the depth-0 (main) camera it also samples the vision-tracked pose and
    /// processes latency inputs; then it composes external X/Y rotation input,
    /// the controller's orientation offset, and the tracked head orientation
    /// into the camera transform, and updates any registered camera-local
    /// game objects (e.g. the grid cube).
    /// NOTE(review): uses the deprecated Component.camera shorthand; assumes
    /// this runs once per eye camera — confirm against OVRCameraController.
    /// </summary>
    void SetCameraOrientation()
    {
#if (!UNITY_ANDROID || UNITY_EDITOR)
        // Main camera has a depth of 0, so it will be rendered first
        if (camera.depth == 0.0f)
        {
            // If desired, update parent transform y rotation here
            // This is useful if we want to track the current location of
            // of the head.
            // TODO: Future support for x and z, and possibly change to a quaternion
            // NOTE: This calculation is one frame behind
            if (CameraController.TrackerRotatesY == true)
            {
                Vector3 a = camera.transform.rotation.eulerAngles;
                a.x = 0;
                a.z = 0;
                transform.parent.transform.eulerAngles = a;
            }

            /*
             * else
             * {
             *      // We will still rotate the CameraController in the y axis
             *      // based on the fact that we have a Y rotation being passed
             *      // in from above that still needs to take place (this functionality
             *      // may be better suited to be calculated one level up)
             *      Vector3 a = Vector3.zero;
             *      float y = 0.0f;
             *      CameraController.GetYRotation(ref y);
             *      a.y = y;
             *      gameObject.transform.parent.transform.eulerAngles = a;
             * }
             */

            // Get camera orientation and position from vision
            Quaternion camOrt = Quaternion.identity;
            Vector3    camPos = Vector3.zero;
            OVRDevice.GetCameraPositionOrientation(ref camPos, ref camOrt);

            if (CameraController.EnablePosition)
            {
                CameraPosition = camPos;
            }

            // Skip orientation updates while time warp is frozen (time warp
            // supplies the rotation in that case).
            bool useOrt = (CameraController.EnableOrientation && !(CameraController.TimeWarp && CameraController.FreezeTimeWarp));
            if (useOrt)
            {
                SampleStartRotation();
                CameraOrientation = camOrt;
            }

            // This needs to go as close to reading Rift orientation inputs
            OVRDevice.ProcessLatencyInputs();
        }

        // Calculate the rotation Y offset that is getting updated externally
        // (i.e. like a controller rotation)
        float yRotation = 0.0f;
        float xRotation = 0.0f;
        CameraController.GetYRotation(ref yRotation);
        CameraController.GetXRotation(ref xRotation);
        // Re-derive the rotation via look-direction so up stays world up.
        Quaternion qp  = Quaternion.Euler(xRotation, yRotation, 0.0f);
        Vector3    dir = qp * Vector3.forward;
        qp.SetLookRotation(dir, Vector3.up);

        // Multiply the camera controllers offset orientation (allow follow of orientation offset)
        Quaternion orientationOffset = Quaternion.identity;
        CameraController.GetOrientationOffset(ref orientationOffset);
        qp = orientationOffset * qp * CameraOrientationOffset;

        // Multiply in the current HeadQuat (q is now the latest best rotation)
        Quaternion q = qp * CameraOrientation;

        // * * *
        // Update camera rotation
        camera.transform.rotation = q;

        // * * *
        // Update camera position (first add Offset to parent transform)
        camera.transform.localPosition = NeckPosition;

        // Adjust neck by taking eye position and transforming through q
        // Get final camera position as well as the clipping difference
        // (to allow for absolute location of center of camera grid space)
        Vector3 newCamPos = Vector3.zero;
        CameraPositionOffsetAndClip(ref CameraPosition, ref newCamPos);

        // Update list of game objects with new CameraOrientation / newCamPos here
        // For example, this location is used to update the GridCube
        foreach (OVRCameraGameObject obj in CameraLocalSetList)
        {
            // Only drive objects registered against this camera's depth.
            if (obj.CameraController.GetCameraDepth() == camera.depth)
            {
                // Initial difference
                Vector3 newPos = -(qp * CameraPositionOffset);
                // Final position
                newPos += camera.transform.position;

                // Set the game object info
                obj.CameraGameObject.transform.position = newPos;
                obj.CameraGameObject.transform.rotation = qp;
            }
        }

        // Adjust camera position with offset/clipped cam location, expressed in
        // the parent's local space (hence the inverse parent rotation).
        camera.transform.localPosition += Quaternion.Inverse(camera.transform.parent.rotation) * qp * newCamPos;

        // PGG: Call delegate function with new CameraOrientation / newCamPos here
        // This location will be used to update the arrow pointer
        // NOTE: Code below might not be needed. Please Look at OVRVisionGuide for potential
        // location for arrow pointer

/*
 *              // This will set the orientation of the arrow
 *              if (camera.depth == 2.0f)
 *              {
 *                      // Set the location of the top node to follow the camera
 *                      OVRMainMenu.PointerSetPosition(camera.transform.position);
 *                      OVRMainMenu.PointerSetOrientation(camera.transform.rotation);
 *                      Quaternion foo = Quaternion.LookRotation(-CameraPosition);
 *                      OVRMainMenu.PointerRotatePointerGeometry(qp * foo);
 *              }
 */
        // move eyes out by x (IPD)
        Vector3 newEyePos = Vector3.zero;
        newEyePos.x = EyePosition.x;
        camera.transform.localPosition += camera.transform.localRotation * newEyePos;
#else
        // NOTE: On Android, camera orientation is set from OVRCameraController Update()
#endif
    }