Inheritance: MonoBehaviour
Example #1
    /// <summary>
    /// Configures the camera.
    /// </summary>
    /// <param name="camera">Camera.</param>
    /// <param name="eyePositionOffset">Eye position offset.</param>
    void ConfigureCamera(Camera camera, float eyePositionOffset)
    {
        OVRCamera cam = camera.GetComponent <OVRCamera>();

        // Always set camera fov and aspect ratio
        camera.fieldOfView = VerticalFOV;
        camera.aspect      = AspectRatio;

        // Background color
        camera.backgroundColor = BackgroundColor;

        // Clip Planes
        camera.nearClipPlane = NearClipPlane;
        camera.farClipPlane  = FarClipPlane;

#if OVR_USE_PROJ_MATRIX
        // Projection Matrix
        Matrix4x4 camMat = Matrix4x4.identity;
        OVRDevice.GetCameraProjection(cam.EyeId, NearClipPlane, FarClipPlane, ref camMat);
        camera.projectionMatrix = camMat;
        OVR_ForceSymmetricProj(false);
#else
        OVR_ForceSymmetricProj(true);
#endif

        // Set camera variables that pertain to the neck and eye position
        // NOTE: We will want to add a scale value here in the event that the player
        // grows or shrinks in the world. This keeps head modelling behaviour
        // accurate
        cam.NeckPosition = NeckPosition;
        cam.EyePosition  = new Vector3(eyePositionOffset, 0f, 0f);
    }
    /// <summary>
    /// Update this instance.
    /// </summary>
    void Update()
    {
        Vector3 absVisionCam = Vector3.zero;

        OVRCamera.GetAbsoluteCameraFromVisionPosition(ref absVisionCam);
        Vector3 relVisionCam = Vector3.zero;

        OVRCamera.GetRelativeCameraFromVisionPosition(ref relVisionCam);

/*
 *              Debug.LogWarning(System.String.Format("ABS: {0:F2} {1:F2} {2:F2}",
 *                                                    absVisionCam.x, absVisionCam.y, absVisionCam.z));
 *              Debug.LogWarning(System.String.Format("REL: {0:F2} {1:F2} {2:F2}",
 *                                                    relVisionCam.x, relVisionCam.y, relVisionCam.z));
 */
        // Reset the orientation based on player input ('R' key)
        UpdateResetOrientation();
        // Fade screen out based on location of relative Vision Camera
        UpdateFadeValueFromRelCamPosition(ref relVisionCam);

        if (Input.GetKeyDown(KeyCode.T))
        {
            CameraController.TimeWarp = !CameraController.TimeWarp;
        }

        if (Input.GetKeyDown(KeyCode.F))
        {
            CameraController.FreezeTimeWarp = !CameraController.FreezeTimeWarp;
        }
    }
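
ConfigureCamera above takes a single eye camera and a horizontal eye offset. For reference, the minimal sketch below shows how it might be called for both eyes; the CameraLeft, CameraRight and InterpupillaryDistance names are assumptions and do not appear in the example above.

    // Hypothetical sketch (not part of the example above): drive ConfigureCamera
    // for both eye cameras with mirrored horizontal offsets. CameraLeft,
    // CameraRight and InterpupillaryDistance are assumed fields.
    void ConfigureBothEyes()
    {
        float halfIPD = InterpupillaryDistance * 0.5f;

        // Left eye sits towards -X, right eye towards +X of the eye center
        ConfigureCamera(CameraLeft,  -halfIPD);
        ConfigureCamera(CameraRight,  halfIPD);
    }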
Example #3
 // Use this for initialization
 void Start()
 {
     cameraRight = GameObject.Find ("CameraRight").GetComponent<OVRCamera> ();
     rabbit = GameObject.Find ("rabbit_doll_holder").GetComponent<CollectableItem> ();
     //		Debug.Log ("HeadPointer, cameraRight = " + cameraRight);
     //		Debug.Log ("Camera.main = " + Camera.main);
     Debug.Log ("rabbit = " + rabbit);
 }
Example #4
	/// <summary>
	/// Updates the reset orientation.
	/// </summary>
	void UpdateResetOrientation()
	{
		// Reset the view on 'R'
		if (Input.GetKeyDown(KeyCode.R) == true)
		{
			// Reset tracker position.
			OVRCamera.ResetCameraPositionOrientation(Vector3.one, Vector3.zero, Vector3.up, Vector3.zero);
		}
	}
Example #5
    /// <summary>
    /// Updates the cube grid.
    /// </summary>
    void UpdateCubeGrid()
    {
        // Toggle the grid cube display on 'G'
        if (Input.GetKeyDown(KeyCode.G))
        {
            if (CubeGridOn == false)
            {
                CubeGridOn = true;
                Debug.LogWarning("CubeGrid ON");
                if (CubeGrid != null)
                {
                    CubeGrid.SetActive(true);
                }
                else
                {
                    CreateCubeGrid();
                }

                // Add the CameraCubeGrid to the camera list for update
                OVRCamera.AddToLocalCameraSetList(ref CameraCubeGrid);
            }
            else
            {
                CubeGridOn = false;
                Debug.LogWarning("CubeGrid OFF");

                if (CubeGrid != null)
                {
                    CubeGrid.SetActive(false);
                }

                // Remove the CameraCubeGrid from the camera list
                OVRCamera.RemoveFromLocalCameraSetList(ref CameraCubeGrid);
            }
        }

        if (CubeGrid != null)
        {
            // Set cube colors to let user know if camera is tracking
            CubeSwitchColor = !OVRDevice.IsCameraTracking();

            if (CubeSwitchColor != CubeSwitchColorOld)
            {
                CubeGridSwitchColor(CubeSwitchColor);
            }
            CubeSwitchColorOld = CubeSwitchColor;
        }
    }
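
UpdateCubeGrid above switches the cube colors when camera tracking is lost or regained, but the CubeGridSwitchColor helper it calls is not shown. A minimal sketch of what such a helper could look like, assuming the grid cubes are child renderers of the CubeGrid object and that the colors are arbitrary:

    // Hypothetical sketch: tint every renderer under CubeGrid so the user can
    // see whether positional tracking is currently available.
    void CubeGridSwitchColor(bool trackingLost)
    {
        Color c = trackingLost ? Color.red : Color.blue;

        foreach (Renderer r in CubeGrid.GetComponentsInChildren<Renderer>())
        {
            r.material.color = c;
        }
    }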
Example #6
    // Start
    new void Start()
    {
        base.Start();

        // Get the OVRCameraController
        CameraController = gameObject.transform.parent.GetComponent <OVRCameraController>();

        // Get the cameras
        OVRCamera[] cameras = gameObject.transform.parent.GetComponentsInChildren <OVRCamera>();
        for (int i = 0; i < cameras.Length; i++)
        {
            if (cameras[i].name == "CameraRight")
            {
                CameraRight = cameras[i];
            }
        }

        if (CameraController == null)
        {
            Debug.LogWarning("WARNING: OVRCameraController not found!");
        }

        // Set CameraTextureScale (increases the size of the texture we are rendering into
        // for a better pixel match when post processing the image through lens distortion)

        // CameraTextureScale = OVRDevice.DistortionScale();

        // If CameraTextureScale is not 1.0f, create a new texture and assign to target texture
        // Otherwise, fall back to normal camera rendering
        if ((CameraTexture == null) && (CameraTextureScale != 1.0f))
        {
            int w = (int)(Screen.width / 2.0f * CameraTextureScale);
            int h = (int)(Screen.height * CameraTextureScale);

            if (camera.hdr)
            {
                CameraTexture = new RenderTexture(w, h, 24, RenderTextureFormat.ARGBFloat);
            }
            else
            {
                CameraTexture = new RenderTexture(w, h, 24);
            }

            // Use MSAA settings in QualitySettings for new RenderTexture
            CameraTexture.antiAliasing = (QualitySettings.antiAliasing == 0) ? 1 : QualitySettings.antiAliasing;
        }
    }
    /// <summary>
    /// Updates the reset orientation.
    /// </summary>
    void UpdateResetOrientation()
    {
        // Reset the view on 'R'
        if (Input.GetKeyDown(KeyCode.R) == true)
        {
            // Reset tracker position.
            // We assume that the CameraController is at the desired neck location
            Vector3 eyeOffset = Vector3.zero;

            if (CameraController != null)
            {
                CameraController.GetEyeCenterPosition(ref eyeOffset);
            }

            OVRCamera.ResetCameraPositionOrientation(ref eyeOffset, true, false, false);
        }
    }
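
Start() above only creates the scaled CameraTexture; to be useful it still has to be bound as the camera's render target each frame so the distortion pass has a larger source to sample from. One plausible way to wire that up with standard Unity callbacks, stated as an assumption rather than the actual OVRCamera implementation:

    // Hypothetical sketch: bind the scaled render texture before culling and
    // release it after rendering so a later distortion blit can target the screen.
    // This is an assumption about how CameraTexture gets used, not SDK code.
    void OnPreCull()
    {
        if (CameraTexture != null)
        {
            camera.targetTexture = CameraTexture;
        }
    }

    void OnPostRender()
    {
        if (CameraTexture != null)
        {
            camera.targetTexture = null;
        }
    }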
Example #8
        /// <summary>
        /// Sets various parameters on the Oculus scripts.
        /// </summary>
        private static void SetOVRParameters(GameObject arCameraGameObject)
        {
#if ENABLE_VUFORIA_OCULUS_INTEGRATION
            QCARAbstractBehaviour qcarBehaviour = arCameraGameObject.GetComponent <QCARAbstractBehaviour>();
            // set black background color
            OVRCameraController ovrCameraController = arCameraGameObject.GetComponent <OVRCameraController>();
            ovrCameraController.BackgroundColor   = Color.black;
            ovrCameraController.FlipCorrectionInY = true;

            // set up cameras as Oculus expects them to be:
            qcarBehaviour.PrimaryCamera.depth = 1;
            OVRCamera leftOVRCamera = qcarBehaviour.PrimaryCamera.GetComponent <OVRCamera>();
            leftOVRCamera.RightEye = false;

            qcarBehaviour.SecondaryCamera.depth = 0;
            OVRCamera rightOVRCamera = qcarBehaviour.SecondaryCamera.GetComponent <OVRCamera>();
            rightOVRCamera.RightEye = true;
#endif
        }
 /// <summary>
 /// Start this instance.
 /// </summary>
 void Start()
 {
     if (CameraController != null)
     {
         // Set the GUI target
         VisionGuide = GameObject.Instantiate(Resources.Load("OVRVisionGuideMessage")) as GameObject;
         // Grab transform of GUI object
         Transform t = VisionGuide.transform;
         // Attach the GUI object to the camera
         CameraController.AttachGameObjectToCamera(ref VisionGuide);
         // Reset the transform values
         OVRUtils.SetLocalTransform(ref VisionGuide, ref t);
         // Deactivate the object
         VisionGuide.SetActive(false);
         // Set layer on object
         VisionGuide.layer = LayerMask.NameToLayer(LayerName);
         // Set initial location of offset to be the players center eye location
         Vector3 eyeOffset = Vector3.zero;
         CameraController.GetEyeCenterPosition(ref eyeOffset);
         OVRCamera.SetCameraPositionOffset(ref eyeOffset);
     }
 }
    /// <summary>
    /// Configures the camera.
    /// </summary>
    /// <returns><c>true</c>, if camera was configured, <c>false</c> otherwise.</returns>
    /// <param name="camera">Camera.</param>
    /// <param name="eyePositionOffset">Eye position offset.</param>
    bool ConfigureCamera(Camera camera, float eyePositionOffset)
    {
#if (!UNITY_ANDROID || UNITY_EDITOR)
        OVRCamera cam = camera.GetComponent <OVRCamera>();

        if (UpdateDistortionDirtyFlag)
        {
            // Always set camera fov and aspect ratio
            camera.fieldOfView = VerticalFOV;
            camera.aspect      = AspectRatio;

            // Push params also into the mesh distortion instance (if there is one)
            OVRLensCorrection lc = camera.GetComponent <OVRLensCorrection>();
            cam.UpdateDistortionMeshParams(ref lc, (camera == CameraRight), FlipCorrectionInY);

            if (!UseCameraTexture && SystemInfo.graphicsDeviceVersion.Contains("Direct3D"))                     // this doesn't work with graphics emulation enabled (ie for Android)
            {
                lc._DMScale = new Vector2(lc._DMScale.x, -lc._DMScale.y);
            }
        }

        if (UpdateCamerasDirtyFlag)
        {
            // Background color
            camera.backgroundColor = BackgroundColor;

            // Clip Planes
            camera.nearClipPlane = NearClipPlane;
            camera.farClipPlane  = FarClipPlane;

            // Set camera variables that pertain to the neck and eye position
            // NOTE: We will want to add a scale value here in the event that the player
            // grows or shrinks in the world. This keeps head modelling behaviour
            // accurate
            cam.NeckPosition = NeckPosition;
            cam.EyePosition  = new Vector3(eyePositionOffset, 0f, 0f);
        }
#else
        // Camera.targetTexture will be set each frame in Update to a different buffer
        // to allow overlap with async time warp.
        camera.fieldOfView = 90.0f;
        camera.aspect      = 1.0f;
        camera.rect        = new Rect(0, 0, 1, 1);      // Does this matter when targetTexture is set?

        if (UpdateCamerasDirtyFlag)
        {
            // Background color
            camera.backgroundColor = BackgroundColor;

            // Clip Planes
            camera.nearClipPlane = NearClipPlane;
            camera.farClipPlane  = FarClipPlane;

            // If we don't clear the color buffer with a glClear, tiling GPUs
            // will be forced to do an "unresolve" and read back the color buffer information.
            // The clear is free on PowerVR, and possibly Mali, but it is a performance cost
            // on Adreno, and we would be better off if we had the ability to discard/invalidate
            // the color buffer instead of clearing.

            // NOTE: The color buffer is not being invalidated in skybox mode, forcing an additional,
            // wasted color buffer read before the skybox is drawn.
            camera.clearFlags = (HasSkybox) ? CameraClearFlags.Skybox : CameraClearFlags.SolidColor;
        }
#endif

        return(true);
    }
Example #11
    /// <summary>
    /// Start this instance.
    /// </summary>
    void Start()
    {
        AlphaFadeValue = 1.0f;
        CurrentLevel   = 0;
        PrevStartDown  = false;
        PrevHatDown    = false;
        PrevHatUp      = false;
        ShowVRVars     = false;
        OldSpaceHit    = false;
        strFPS         = "FPS: 0";
        LoadingLevel   = false;
        ScenesVisible  = false;

        // Ensure that camera controller variables have been properly
        // initialized before we start reading them
        if (CameraController != null)
        {
            CameraController.InitCameraControllerVariables();
        }

        // Set the GUI target
        GUIRenderObject = GameObject.Instantiate(Resources.Load("OVRGUIObjectMain")) as GameObject;

        if (GUIRenderObject != null)
        {
            // Change the layer
            GUIRenderObject.layer = LayerMask.NameToLayer(LayerName);

            if (GUIRenderTexture == null)
            {
                int w = Screen.width;
                int h = Screen.height;

                // We don't need a depth buffer on this texture
                GUIRenderTexture = new RenderTexture(w, h, 0);
                GuiHelper.SetPixelResolution(w, h);
                // NOTE: All GUI elements are being written with pixel values based
                // from DK1 (1280x800). These should change to normalized locations so
                // that we can scale more cleanly with varying resolutions
                //GuiHelper.SetDisplayResolution(OVRDevice.HResolution,
                //								 OVRDevice.VResolution);
                GuiHelper.SetDisplayResolution(1280.0f, 800.0f);
            }
        }

        // Attach GUI texture to GUI object and GUI object to Camera
        if (GUIRenderTexture != null && GUIRenderObject != null)
        {
            GUIRenderObject.renderer.material.mainTexture = GUIRenderTexture;

            if (CameraController != null)
            {
                // Grab transform of GUI object
                Transform t = GUIRenderObject.transform;
                // Attach the GUI object to the camera
                CameraController.AttachGameObjectToCamera(ref GUIRenderObject);
                // Reset the transform values (we will be maintaining state of the GUI object
                // in local state)
                OVRUtils.SetLocalTransform(ref GUIRenderObject, ref t);
                // Deactivate object until we have completed the fade-in
                // Also, we may want to deactivate the render object if there is nothing being rendered
                // into the UI
                // we will move the position of everything over to the left, so get
                // IPD / 2 and position camera towards negative X
                Vector3 lp  = GUIRenderObject.transform.localPosition;
                float   ipd = 0.0f;
                CameraController.GetIPD(ref ipd);
                lp.x -= ipd * 0.5f;
                GUIRenderObject.transform.localPosition = lp;

                GUIRenderObject.SetActive(false);
            }
        }

        // Save default values initially
        StoreSnapshot("DEFAULT");

        // Make sure to hide cursor
        if (Application.isEditor == false)
        {
            Screen.showCursor = false;
            Screen.lockCursor = true;
        }

        // Add delegates to update; useful for ordering menu tasks, if required
        UpdateFunctions += UpdateFPS;

        // CameraController updates
        if (CameraController != null)
        {
            UpdateFunctions += UpdateIPD;
            UpdateFunctions += UpdatePrediction;
            // Set LPM on by default
            UpdateFunctions += UpdateLowPersistanceMode;
            OVRDevice.SetLowPersistanceMode(LowPersistanceMode);
            UpdateFunctions += UpdateVisionMode;
            UpdateFunctions += UpdateFOV;
            UpdateFunctions += UpdateEyeHeightOffset;

            // Add a GridCube component to this object
            GridCube = gameObject.AddComponent <OVRGridCube>();
            GridCube.SetOVRCameraController(ref CameraController);

            // Add a VisionGuide component to this object
            VisionGuide = gameObject.AddComponent <OVRVisionGuide>();
            VisionGuide.SetOVRCameraController(ref CameraController);
            VisionGuide.SetFadeTexture(ref FadeInTexture);
            VisionGuide.SetVisionGuideLayer(ref LayerName);
        }

        // PlayerController updates
        if (PlayerController != null)
        {
            UpdateFunctions += UpdateSpeedAndRotationScaleMultiplier;
            UpdateFunctions += UpdatePlayerControllerMovement;
        }

        // MainMenu updates
        UpdateFunctions += UpdateSelectCurrentLevel;
        UpdateFunctions += UpdateHandleSnapshots;

        // Device updates
        UpdateFunctions += UpdateDeviceDetection;

        // Add a callback that fires when a sensor is attached or detached
        OVRMessenger.AddListener <Device, bool>("Sensor_Attached", UpdateDeviceDetectionMsgCallback);

        // Mag Yaw-Drift correction
        // We will test to see if we are already calibrated by the
        // Calibration tool
        MagCal.SetInitialCalibarationState();
        UpdateFunctions += MagCal.UpdateMagYawDriftCorrection;
        MagCal.SetOVRCameraController(ref CameraController);

        // Crosshair functionality
        Crosshair.Init();
        Crosshair.SetCrosshairTexture(ref CrosshairImage);
        Crosshair.SetOVRCameraController(ref CameraController);
        Crosshair.SetOVRPlayerController(ref PlayerController);
        UpdateFunctions += Crosshair.UpdateCrosshair;

        // Check for HMD and sensor
        CheckIfRiftPresent();

        // Reset tracker position.
        // We assume that the CameraController is at the desired neck location
        Vector3 eyeOffset = Vector3.zero;

        if (CameraController != null)
        {
            CameraController.GetEyeCenterPosition(ref eyeOffset);
        }

        OVRCamera.ResetCameraPositionOrientation(ref eyeOffset, true, false, false);
    }
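
Start() above builds its per-frame work by appending handlers such as UpdateFPS and UpdateIPD to UpdateFunctions, so registration order in Start() is the order they run each frame. The assumed delegate plumbing, which the example does not show, would look roughly like this:

    // Hypothetical sketch: UpdateFunctions is assumed to be a multicast delegate
    // that Update() invokes once per frame, so the handlers added in Start()
    // run in registration order.
    public delegate void OnUpdate();
    private OnUpdate UpdateFunctions = null;

    void Update()
    {
        if (UpdateFunctions != null)
        {
            UpdateFunctions();
        }
    }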
Example #12
    // Start
    new void Start()
    {
        base.Start ();

        // Get the OVRCameraController
        CameraController = gameObject.transform.parent.GetComponent<OVRCameraController>();

        // Get the cameras
        OVRCamera[] cameras = gameObject.transform.parent.GetComponentsInChildren<OVRCamera>();
        for (int i = 0; i < cameras.Length; i++)
        {
            if(cameras[i].name == "CameraRight")
                CameraRight = cameras[i];
        }

        if(CameraController == null)
            Debug.LogWarning("WARNING: OVRCameraController not found!");

        // Set CameraTextureScale (increases the size of the texture we are rendering into
        // for a better pixel match when post processing the image through lens distortion)

        // CameraTextureScale = OVRDevice.DistortionScale();

        // If CameraTextureScale is not 1.0f, create a new texture and assign to target texture
        // Otherwise, fall back to normal camera rendering
        if((CameraTexture == null) && (CameraTextureScale != 1.0f))
        {
            int w = (int)(Screen.width / 2.0f * CameraTextureScale);
            int h = (int)(Screen.height * CameraTextureScale);

            if ( camera.hdr )
                CameraTexture = new RenderTexture(  w, h, 24, RenderTextureFormat.ARGBFloat );
            else
                CameraTexture = new RenderTexture(  w, h, 24 );

            // Use MSAA settings in QualitySettings for new RenderTexture
            CameraTexture.antiAliasing = ( QualitySettings.antiAliasing == 0 ) ? 1 : QualitySettings.antiAliasing;
        }
    }
Example #13
    void Start()
    {
        AlphaFadeValue = 1.0f;
        CurrentLevel   = 0;
        PrevStartDown  = false;
        PrevHatDown    = false;
        PrevHatUp      = false;
        ShowVRVars     = false;
        OldOpenMenu    = false;
        strFPS         = "FPS: 0";
        LoadingLevel   = false;
        ScenesVisible  = false;

        // Ensure that camera controller variables have been properly
        // initialized before we start reading them
        if (CameraController != null)
        {
            CameraController.InitCameraControllerVariables();
        }

        // Set the GUI target
        GUIRenderObject = GameObject.Instantiate(Resources.Load("OVRGUIObjectMain")) as GameObject;

        if (GUIRenderObject != null)
        {
            // Change the layer
            GUIRenderObject.layer = LayerMask.NameToLayer(LayerName);

            if (GUIRenderTexture == null)
            {
                int w = Screen.width;
                int h = Screen.height;

                // We don't need a depth buffer on this texture
                GUIRenderTexture = new RenderTexture(w, h, 0);
                GuiHelper.SetPixelResolution(w, h);
                // NOTE: All GUI elements are being written with pixel values based
                // from DK1 (1280x800). These should change to normalized locations so
                // that we can scale more cleanly with varying resolutions
                //GuiHelper.SetDisplayResolution(OVRDevice.HResolution,
                //								 OVRDevice.VResolution);
                GuiHelper.SetDisplayResolution(1280.0f, 800.0f);
            }
        }

        // Attach GUI texture to GUI object and GUI object to Camera
        if (GUIRenderTexture != null && GUIRenderObject != null)
        {
            GUIRenderObject.renderer.material.mainTexture = GUIRenderTexture;

            if (CameraController != null)
            {
                // Grab transform of GUI object
                Vector3    ls = GUIRenderObject.transform.localScale;
                Vector3    lp = GUIRenderObject.transform.localPosition;
                Quaternion lr = GUIRenderObject.transform.localRotation;

                // Attach the GUI object to the camera
                CameraController.AttachGameObjectToCamera(ref GUIRenderObject);
                // Reset the transform values (we will be maintaining state of the GUI object
                // in local state)

                GUIRenderObject.transform.localScale    = ls;
                GUIRenderObject.transform.localRotation = lr;

                // Deactivate object until we have completed the fade-in
                // Also, we may want to deactivate the render object if there is nothing being rendered
                // into the UI
                // we will move the position of everything over to account for the IPD camera offset.
                float     ipdOffsetDirection = 1.0f;
                Transform guiParent          = GUIRenderObject.transform.parent;
                if (guiParent != null)
                {
                    OVRCamera ovrCamera = guiParent.GetComponent <OVRCamera>();
                    if (ovrCamera != null && ovrCamera.RightEye)
                    {
                        ipdOffsetDirection = -1.0f;
                    }
                }

                float ipd = 0.0f;
                CameraController.GetIPD(ref ipd);
                lp.x += ipd * 0.5f * ipdOffsetDirection;
                GUIRenderObject.transform.localPosition = lp;

                GUIRenderObject.SetActive(false);
            }
        }

        // Save default values initially
        //StoreSnapshot("DEFAULT");

        // Make sure to hide cursor
        if (Application.isEditor == false)
        {
            Screen.showCursor = false;
            Screen.lockCursor = true;
        }

        // CameraController updates
        if (CameraController != null)
        {
            // Set LPM on by default
            OVRDevice.SetLowPersistenceMode(LowPersistenceMode);

            // Add a GridCube component to this object
            GridCube = gameObject.AddComponent <OVRGridCube>();
            GridCube.SetOVRCameraController(ref CameraController);

            // Add a VisionGuide component to this object
            VisionGuide = gameObject.AddComponent <OVRVisionGuide>();
            VisionGuide.SetOVRCameraController(ref CameraController);
            VisionGuide.SetFadeTexture(ref FadeInTexture);
            VisionGuide.SetVisionGuideLayer(ref LayerName);
        }

        // Crosshair functionality
        Crosshair.Init();
        Crosshair.SetCrosshairTexture(ref CrosshairImage);
        Crosshair.SetOVRCameraController(ref CameraController);
        Crosshair.SetOVRPlayerController(ref PlayerController);

        // Check for HMD and sensor
        CheckIfRiftPresent();
    }
Example #14
 // Use this for initialization
 void Start()
 {
     //		Debug.Log ("pops = " + pops.transform.position + ", " + pops.transform.rotation);
     cameraRight = GameObject.Find ("CameraRight").GetComponent<OVRCamera> ();
 }