Example #1
    /// <summary>
    /// Enables the yaw correction.
    /// </summary>
    void EnableYawCorrection()
    {
        OVRDevice.EnableMagYawCorrection(true);

        Quaternion q = Quaternion.identity;

        if ((CameraController != null) && (CameraController.PredictionOn == true))
        {
            OVRDevice.GetPredictedOrientation(ref q);
        }
        else
        {
            OVRDevice.GetOrientation(ref q);
        }

        CurEulerRef = q.eulerAngles;
    }
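
All of the examples on this page share the same polling idiom: when prediction is enabled on the camera controller, the head pose is read with OVRDevice.GetPredictedOrientation, otherwise the code falls back to OVRDevice.GetOrientation. Below is a minimal sketch of that idiom factored into a helper; the helper name, the OVRCameraController parameter type, and the sensor index 0 are assumptions for illustration, not part of any particular source file shown here.

    // Hypothetical helper illustrating the prediction toggle shared by these examples.
    // Returns the current (or latency-predicted) head orientation from sensor 0.
    static Quaternion ReadHeadOrientation(OVRCameraController cameraController)
    {
        Quaternion q = Quaternion.identity;

        if ((cameraController != null) && cameraController.PredictionOn)
        {
            // Predicted orientation compensates for sensor-to-display latency
            OVRDevice.GetPredictedOrientation(0, ref q);
        }
        else
        {
            // Raw, unpredicted sensor orientation
            OVRDevice.GetOrientation(0, ref q);
        }

        return q;
    }
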
Example #2
    // UpdateGeometry
    public void UpdateGeometry()
    {
        if (MagShowGeometry == false)
        {
            return;
        }
        if (CameraController == null)
        {
            return;
        }
        if ((GeometryReference == null) || (GeometryCompass == null))
        {
            return;
        }

        // All set, we can update the geometry with camera and position values
        Quaternion q = Quaternion.identity;

        if ((CameraController != null) && (CameraController.PredictionOn == true))
        {
            OVRDevice.GetPredictedOrientation(0, ref q);
        }
        else
        {
            OVRDevice.GetOrientation(0, ref q);
        }

        Vector3 v = GeometryCompass.transform.localEulerAngles;

        v.y = -q.eulerAngles.y + CurEulerRef.y;
        GeometryCompass.transform.localEulerAngles = v;

        // Set the color of the marker to red if we are calibrating
        if (GeometryReferenceMarkMat != null)
        {
            Color c = Color.green;

            if (OVRDevice.IsMagYawCorrectionInProgress(0) == true)
            {
                c = Color.red;
            }

            GeometryReferenceMarkMat.SetColor("_Color", c);
        }
    }
Example #3
    public void Shoot(string mes, ApplyShootFunc applyFunc)
    {
        OVRDevice.GetPredictedOrientation(0, ref OVR_angle);
        var shootWorldQuat = PlayerObj.transform.rotation;

        GameObject createBlock = (GameObject)Instantiate(this.BlockPrefab, PlayerObj.transform.position, shootWorldQuat);


        createBlock.SendMessage("ChangeText", mes);
        if (Random.Range(0, 3) == 0)
        {
            createBlock.SendMessage("ChangeColor", new Color(1.0f, 0.0f, 0.0f));
        }
        createBlock.rigidbody.velocity = shootWorldQuat * new Vector3(0, 5, ShootPow);

        if (null != applyFunc)
        {
            applyFunc(createBlock);
        }

//		this.gameObject.rigidbody.velocity=new Vector3(ShootPow,0,0);
    }
Example #4
    // UpdateMagYawDriftCorrection
    public void UpdateMagYawDriftCorrection()
    {
        if (Input.GetKeyDown(KeyCode.Z) == true)
        {
            if (MagCalState == MagCalibrationState.MagDisabled)
            {
                // Start calibration process
                if (MagAutoCalibrate == true)
                {
                    OVRDevice.BeginMagAutoCalibration(0);
                    MagCalState = MagCalibrationState.MagCalibrating;
                }
                else
                {
                    // Go to pre-manual calibration state (to allow for
                    // setting reference point)
                    MagCalState = MagCalibrationState.MagManualGetReady;
                    return;
                }
            }
            else if (MagCalState == MagCalibrationState.MagManualGetReady)
            {
                OVRDevice.SetMagReference(0);
                OVRDevice.EnableMagYawCorrection(0, true);

                Quaternion q = Quaternion.identity;
                if ((CameraController != null) && (CameraController.PredictionOn == true))
                {
                    OVRDevice.GetPredictedOrientation(0, ref q);
                }
                else
                {
                    OVRDevice.GetOrientation(0, ref q);
                }

                CurEulerRef = q.eulerAngles;

                // Begin manual calibration
                OVRDevice.BeginMagManualCalibration(0);
                MagCalState = MagCalibrationState.MagCalibrating;
            }
            else
            {
                // Reset calibration process
                if (MagAutoCalibrate == true)
                {
                    OVRDevice.StopMagAutoCalibration(0);
                }
                else
                {
                    OVRDevice.StopMagManualCalibration(0);
                }

                OVRDevice.EnableMagYawCorrection(0, false);

                MagCalState = MagCalibrationState.MagDisabled;

                // Do not show geometry
                MagShowGeometry = false;
                ShowGeometry(MagShowGeometry);

                return;
            }
        }


        // Check to see if calibration is completed
        if (MagCalState == MagCalibrationState.MagCalibrating)
        {
            if (MagAutoCalibrate == true)
            {
                OVRDevice.UpdateMagAutoCalibration(0);
            }
            else
            {
                OVRDevice.UpdateMagManualCalibration(0);
            }

            if (OVRDevice.IsMagCalibrated(0) == true)
            {
                if (MagAutoCalibrate == true)
                {
                    MagCalState = MagCalibrationState.MagCalibrated;
                }
                else
                {
                    // Manual calibration takes into account having set the
                    // reference orientation.
                    MagCalState = MagCalibrationState.MagReady;
                }
            }
        }

        // If we are calibrated, we will set mag reference and
        // enable yaw correction on a button press
        if ((MagCalState == MagCalibrationState.MagCalibrated) ||
            (MagCalState == MagCalibrationState.MagReady))
        {
            if (Input.GetKeyDown(KeyCode.X) == true)
            {
                OVRDevice.SetMagReference(0);
                OVRDevice.EnableMagYawCorrection(0, true);
                MagCalState = MagCalibrationState.MagReady;

                Quaternion q = Quaternion.identity;
                if ((CameraController != null) && (CameraController.PredictionOn == true))
                {
                    OVRDevice.GetPredictedOrientation(0, ref q);
                }
                else
                {
                    OVRDevice.GetOrientation(0, ref q);
                }

                CurEulerRef = q.eulerAngles;
            }

            if ((MagCalState == MagCalibrationState.MagReady) &&
                (Input.GetKeyDown(KeyCode.F6)))
            {
                // Toggle showing geometry either on or off
                if (MagShowGeometry == false)
                {
                    MagShowGeometry = true;
                    ShowGeometry(MagShowGeometry);
                }
                else
                {
                    MagShowGeometry = false;
                    ShowGeometry(MagShowGeometry);
                }
            }

            UpdateGeometry();
        }
    }
Example #5
    // SetCameraOrientation
    void SetCameraOrientation()
    {
        Quaternion q   = Quaternion.identity;
        Vector3    dir = Vector3.forward;

        // Main camera has a depth of 0, so it will be rendered first
        if (gameObject.camera.depth == 0.0f)
        {
            // If desired, update parent transform y rotation here
            // This is useful if we want to track the current location
            // of the head.
            // TODO: Future support for x and z, and possibly change to a quaternion
            // NOTE: This calculation is one frame behind
            if (CameraController.TrackerRotatesY == true)
            {
                Vector3 a = gameObject.camera.transform.rotation.eulerAngles;
                a.x = 0;
                a.z = 0;
                gameObject.transform.parent.transform.eulerAngles = a;
            }

            /*
             * else
             * {
             *      // We will still rotate the CameraController in the y axis
             *      // based on the fact that we have a Y rotation being passed
             *      // in from above that still needs to take place (this functionality
             *      // may be better suited to be calculated one level up)
             *      Vector3 a = Vector3.zero;
             *      float y = 0.0f;
             *      CameraController.GetYRotation(ref y);
             *      a.y = y;
             *      gameObject.transform.parent.transform.eulerAngles = a;
             * }
             */
            // Read shared data from CameraController
            if (CameraController != null)
            {
                // Read sensor here (prediction on or off)
                if (CameraController.PredictionOn == false)
                {
                    OVRDevice.GetOrientation(0, ref CameraOrientation);
                }
                else
                {
                    OVRDevice.GetPredictedOrientation(0, ref CameraOrientation);
                }
            }

            // This needs to go as close as possible to reading the Rift orientation inputs
            OVRDevice.ProcessLatencyInputs();
        }

        // Calculate the rotation Y offset that is getting updated externally
        // (i.e. like a controller rotation)
        float yRotation = 0.0f;

        CameraController.GetYRotation(ref yRotation);
        q   = Quaternion.Euler(0.0f, yRotation, 0.0f);
        dir = q * Vector3.forward;
        q.SetLookRotation(dir, Vector3.up);

        // Multiply the camera controller's offset orientation (allow follow of orientation offset)
        Quaternion orientationOffset = Quaternion.identity;

        CameraController.GetOrientationOffset(ref orientationOffset);
        q = orientationOffset * q;

        // Multiply in the current HeadQuat (q is now the latest best rotation)
        if (CameraController != null)
        {
            q = q * CameraOrientation;
        }

        // * * *
        // Update camera rotation
        gameObject.camera.transform.rotation = q;

        // * * *
        // Update camera position (first add Offset to parent transform)
        gameObject.camera.transform.position =
            gameObject.camera.transform.parent.transform.position + NeckPosition;

        // Adjust neck by taking eye position and transforming through q
        gameObject.camera.transform.position += q * EyePosition;
    }
Example #6
    // Update
    new public virtual void Update()
    {
        base.Update();

        // Test: get Y from sensor 2
        if (OVRDevice.SensorCount == 2)
        {
            Quaternion q = Quaternion.identity;
            OVRDevice.GetPredictedOrientation(1, ref q);
            YfromSensor2 = q.eulerAngles.y;
        }

        UpdateMovement();

        Vector3 moveDirection = Vector3.zero;

        float motorDamp = (1.0f + (Damping * DeltaTime));

        MoveThrottle.x /= motorDamp;
        MoveThrottle.y  = (MoveThrottle.y > 0.0f) ? (MoveThrottle.y / motorDamp) : MoveThrottle.y;
        MoveThrottle.z /= motorDamp;

        moveDirection += MoveThrottle * DeltaTime;

        // Gravity
        if (Controller.isGrounded && FallSpeed <= 0)
        {
            FallSpeed = ((Physics.gravity.y * (GravityModifier * 0.002f)));
        }
        else
        {
            FallSpeed += ((Physics.gravity.y * (GravityModifier * 0.002f)) * DeltaTime);
        }

        moveDirection.y += FallSpeed * DeltaTime;

        // Offset correction for uneven ground
        float bumpUpOffset = 0.0f;

        if (Controller.isGrounded && MoveThrottle.y <= 0.001f)
        {
            bumpUpOffset = Mathf.Max(Controller.stepOffset,
                                     new Vector3(moveDirection.x, 0, moveDirection.z).magnitude);
            moveDirection -= bumpUpOffset * Vector3.up;
        }

        Vector3 predictedXZ = Vector3.Scale((Controller.transform.localPosition + moveDirection),
                                            new Vector3(1, 0, 1));

        // Move controller
        Controller.Move(moveDirection);

        Vector3 actualXZ = Vector3.Scale(Controller.transform.localPosition, new Vector3(1, 0, 1));

        if (predictedXZ != actualXZ)
        {
            MoveThrottle += (actualXZ - predictedXZ) / DeltaTime;
        }

        // Update rotation using CameraController transform, possibly providing some rules for
        // sliding the rotation for a more natural movement and body visual
        //UpdatePlayerForwardDirTransform();
    }
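
A note on the damping step in the Update example above: dividing MoveThrottle by (1 + Damping * DeltaTime) each frame is a frame-rate-aware approximation of exponential decay, so the throttle falls off roughly like exp(-Damping * t). Below is a standalone sketch with illustrative constants; the values are assumptions, not taken from the source.

    // Illustrative only: repeated division by (1 + damping * dt) decays a value
    // toward zero, approximately like exp(-damping * t) when damping * dt is small.
    static float SimulateDamping(float value, float damping, float dt, int frames)
    {
        for (int i = 0; i < frames; ++i)
        {
            value /= (1.0f + (damping * dt));
        }
        return value;
    }

    // Example: SimulateDamping(5.0f, 0.3f, 1.0f / 60.0f, 60) returns about 3.7f,
    // close to 5.0f * Mathf.Exp(-0.3f).
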
Example #7
    // SetCameraOrientation
    void SetCameraOrientation()
    {
        Quaternion q   = Quaternion.identity;
        Vector3    dir = Vector3.forward;

        // Main camera has a depth of 0, so it will be rendered first
        if (gameObject.camera.depth == 0.0f)
        {
            // If desired, update parent transform y rotation here
            // This is useful if we want to track the current location
            // of the head.
            // TODO: Future support for x and z, and possibly change to a quaternion
            if (CameraController.TrackerRotatesY == true)
            {
                Vector3 a = gameObject.camera.transform.rotation.eulerAngles;
                a.x = 0;
                a.z = 0;
                gameObject.transform.parent.transform.eulerAngles = a;
            }

            // Read shared data from CameraController
            if (CameraController != null)
            {
                Quaternion DirQ = Quaternion.identity;

                // Read sensor here (prediction on or off)
                if (CameraController.PredictionOn == false)
                {
                    OVRDevice.GetOrientation(ref DirQ);
                }
                else
                {
                    OVRDevice.GetPredictedOrientation(ref DirQ);
                }

                CameraController.SetSharedOrientation(DirQ);
            }

            // This needs to go as close as possible to reading the Rift orientation inputs
            OVRDevice.ProcessLatencyInputs();
        }

        // Calculate the rotation Y offset that is getting updated externally
        // (i.e. like a controller rotation)
        float yRotation = 0.0f;

        CameraController.GetYRotation(ref yRotation);
        q   = Quaternion.Euler(0.0f, yRotation, 0.0f);
        dir = q * Vector3.forward;
        q.SetLookRotation(dir, Vector3.up);

        // Multiply the camera controller's offset orientation (allow follow of orientation offset)
        Quaternion orientationOffset = Quaternion.identity;

        CameraController.GetOrientationOffset(ref orientationOffset);
        q = orientationOffset * q;

        // Multiply in the current HeadQuat (q is now the latest best rotation)
        if (CameraController != null)
        {
            Quaternion DirQ = Quaternion.identity;
            CameraController.GetSharedOrientation(ref DirQ);
            q = q * DirQ;
        }

        // * * *
        // Update camera rotation
        gameObject.camera.transform.rotation = q;

        // * * *
        // Update camera position (first add Offset to parent transform)
        gameObject.camera.transform.position =
            gameObject.camera.transform.parent.transform.position + NeckPosition;

        // Adjust neck by taking eye position and transforming through q
        gameObject.camera.transform.position += q * EyePosition;
    }
Example #8
    // SetCameraOrientation
    void SetCameraOrientation()
    {
        Quaternion q   = Quaternion.identity;
        Vector3    dir = Vector3.forward;

        // Main camera has a depth of 0, so it will be rendered first
        if (gameObject.camera.depth == 0.0f)
        {
            // If desired, update parent transform y rotation here
            // This is useful if we want to track the current location
            // of the head.
            // TODO: Future support for x and z, and possibly change to a quaternion
            if (SetParentYRotation == true)
            {
                Vector3 a = gameObject.camera.transform.rotation.eulerAngles;
                a.x = 0;
                a.z = 0;
                gameObject.transform.parent.transform.eulerAngles = a;
            }

            // Read sensor here (prediction on or off)
            if (PredictionOn == false)
            {
                OVRDevice.GetOrientation(ref DirQ);
            }
            else
            {
                OVRDevice.GetPredictedOrientation(ref DirQ);
            }

            // This needs to go as close as possible to reading the Rift orientation inputs
            OVRDevice.ProcessLatencyInputs();
        }

        // Calculate the rotation Y offset that is getting updated externally
        // (i.e. like a controller rotation)
        q   = Quaternion.Euler(0.0f, YRotation, 0.0f);
        dir = q * Vector3.forward;
        q.SetLookRotation(dir, Vector3.up);

        // Multiply the offset orientation first
        q = OrientationOffset * q;

        // Multiply in the current HeadQuat (q is now the latest best rotation)
        q = q * DirQ;

        // * * *
        // Update camera rotation
        gameObject.camera.transform.rotation = q;

        // * * *
        // Update camera position (first add Offset to parent transform)
        gameObject.camera.transform.position =
            gameObject.camera.transform.parent.transform.position + NeckPosition;

        // Adjust neck by taking eye position and transforming through q
        gameObject.camera.transform.position += q * EyePosition;

        // PGG Alternate calculation for above...
        //Vector3 EyePositionNoX = EyePosition; EyePositionNoX.x = 0.0f;
        //gameObject.camera.transform.position += q * EyePositionNoX;
        //gameObject.camera.ResetWorldToCameraMatrix();
        //Matrix4x4 m = camera.worldToCameraMatrix;
        //Matrix4x4 tm = Matrix4x4.identity;
        //tm.SetColumn (3, new Vector4 (-EyePosition.x, 0.0f, 0.0f, 1));
        //gameObject.camera.worldToCameraMatrix  = tm * m;
    }