Code example #1
    /// <summary>
    /// Initializes the camera controller variables.
    /// Made public so that it can be called by classes that require camera information
    /// to be available when initializing their own variables in 'Start'.
    /// </summary>
    public void InitCameraControllerVariables()
    {
        // Get the IPD value (distance between eyes in meters)
        OVRDevice.GetIPD(ref ipd);

        // Using the calculated FOV, based on distortion parameters, yields the best results.
        // However, public functions allow the FOV to be overridden if desired
        VerticalFOV = CameraLeft.GetComponent<OVRCamera>().GetIdealVFOV();
        // Get aspect ratio as well
        AspectRatio = CameraLeft.GetComponent<OVRCamera>().CalculateAspectRatio();

        // Get our initial world orientation of the cameras from the scene (we can grab it from
        // the set FollowOrientation object or this OVRCameraController gameObject)
        if (FollowOrientation != null)
        {
            OrientationOffset = FollowOrientation.rotation;
        }
        else
        {
            OrientationOffset = transform.rotation;
        }

        // Set initial head model
//		OVRDevice.SetHeadModel(EyeCenterPosition.x, EyeCenterPosition.y, EyeCenterPosition.z);
    }
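
The comment above notes that the method is public so other scripts can make sure these values exist before reading them in their own Start. Below is a minimal sketch of that pattern; the HudSetup class and its cameraController field are illustrative assumptions, not part of the SDK, while InitCameraControllerVariables and GetVerticalFOV are taken from the examples in this listing.

using UnityEngine;

// Hypothetical consumer script: makes sure the controller's values are initialized
// before this component reads them, even if its own Start runs first.
public class HudSetup : MonoBehaviour
{
    public OVRCameraController cameraController;   // assumed to be assigned in the Inspector

    void Start()
    {
        if (cameraController == null)
            return;

        // Ensure FOV, aspect ratio and orientation offset have been initialized
        cameraController.InitCameraControllerVariables();

        float verticalFOV = 0.0f;
        cameraController.GetVerticalFOV(ref verticalFOV);   // getter used in code example #9
        Debug.Log("Vertical FOV reported by OVRCameraController: " + verticalFOV);
    }
}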
Code example #2
    // InitCameraControllerVariables
    // Made public so that it can be called by classes that require camera information
    // to be available when initializing their own variables in 'Start'
    public void InitCameraControllerVariables()
    {
        // Get the IPD value (distance between eyes in meters)
        OVRDevice.GetIPD(ref IPD);

        // Get the values for both IPD and lens distortion correction shift. We don't normally
        // need to set the PhysicalLensOffset once it's been set here.
        OVRDevice.GetPhysicalLensOffsetsFromIPD(IPD, ref IPDOffsetLeft, ref IPDOffsetRight);
        LensOffsetLeft  = IPDOffsetLeft;
        LensOffsetRight = IPDOffsetRight;

        // Using the calculated FOV, based on distortion parameters, yields the best results.
        // However, public functions allow the FOV to be overridden if desired
        VerticalFOV = OVRDevice.VerticalFOV();

        // Store aspect ratio as well
        AspectRatio = OVRDevice.CalculateAspectRatio();

        OVRDevice.GetDistortionCorrectionCoefficients(ref DistK0, ref DistK1, ref DistK2, ref DistK3);

        // Get our initial world orientation of the cameras from the scene (we can grab it from
        // the set FollowOrientation object or this OVRCameraController gameObject)
        if (FollowOrientation != null)
        {
            OrientationOffset = FollowOrientation.rotation;
        }
        else
        {
            OrientationOffset = transform.rotation;
        }
    }
Code example #3
    // * * * * * * * * * * * * *

    // SetInitialCalibrationState
    // We call this before we start the Update loop to see if the
    // magnetometer has been calibrated by the Calibration tool
    public void SetInitialCalibarationState()
    {
        if (OVRDevice.IsMagCalibrated(0) && OVRDevice.IsYawCorrectionEnabled(0))
        {
            MagCalState = MagCalibrationState.MagReady;
        }
    }
Code example #4
    /// <summary>
    /// Configures the camera.
    /// </summary>
    /// <param name="camera">Camera.</param>
    /// <param name="eyePositionOffset">Eye position offset.</param>
    void ConfigureCamera(Camera camera, float eyePositionOffset)
    {
        OVRCamera cam = camera.GetComponent<OVRCamera>();

        // Always set camera FOV and aspect ratio
        camera.fieldOfView = VerticalFOV;
        camera.aspect      = AspectRatio;

        // Background color
        camera.backgroundColor = BackgroundColor;

        // Clip Planes
        camera.nearClipPlane = NearClipPlane;
        camera.farClipPlane  = FarClipPlane;

#if OVR_USE_PROJ_MATRIX
        // Projection Matrix
        Matrix4x4 camMat = Matrix4x4.identity;
        OVRDevice.GetCameraProjection(cam.EyeId, NearClipPlane, FarClipPlane, ref camMat);
        camera.projectionMatrix = camMat;
        OVR_ForceSymmetricProj(false);
#else
        OVR_ForceSymmetricProj(true);
#endif

        // Set camera variables that pertain to the neck and eye position
        // NOTE: We will want to add a scale value here in the event that the player
        // grows or shrinks in the world. This keeps head modelling behaviour
        // accurate
        cam.NeckPosition = NeckPosition;
        cam.EyePosition  = new Vector3(eyePositionOffset, 0f, 0f);
    }
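
ConfigureCamera is applied once per eye with a horizontal eye position offset. The sketch below is a hedged guess at a call site living inside OVRCameraController next to the method above: CameraLeft and IPD appear in code examples #1 and #2, while CameraRight, the helper name, and the sign convention are assumptions.

    // Hypothetical helper inside OVRCameraController: configure both eye cameras,
    // pushing each eye half the IPD away from the center (sign convention assumed).
    void ConfigureBothCameras()
    {
        float halfIPD = IPD * 0.5f;
        ConfigureCamera(CameraLeft,  -halfIPD);   // left eye offset to the left of center
        ConfigureCamera(CameraRight,  halfIPD);   // right eye offset to the right of center
    }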
Code example #5
    // Start
    new void Start()
    {
        base.Start();

        // NOTE: MSAA TEXTURES NOT AVAILABLE YET
        // Set CameraTextureScale (increases the size of the texture we are rendering into
        // for a better pixel match when post processing the image through lens distortion)
#if MSAA_ENABLED
        CameraTextureScale = OVRDevice.DistortionScale();
#endif
        // If CameraTextureScale is greater than 1.0f, create a new texture and assign it as the target texture
        // Otherwise, fall back to normal camera rendering
        if ((CameraTexture == null) && (CameraTextureScale > 1.0f))
        {
            int w = (int)(Screen.width / 2.0f * CameraTextureScale);
            int h = (int)(Screen.height * CameraTextureScale);
            CameraTexture = new RenderTexture(w, h, 24);               // 24 bit colorspace

            // NOTE: MSAA TEXTURES NOT AVAILABLE YET
            // This value should be the default for MSAA textures
            //CameraTexture.antiAliasing = 2;
            // Set it within the project
#if MSAA_ENABLED
            CameraTexture.antiAliasing = QualitySettings.antiAliasing;
#endif
        }
    }
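
The width is halved because each eye renders into half of the display, and both dimensions grow with CameraTextureScale. A worked sketch with illustrative numbers (a 1280x800 screen and a scale of 1.25), not values taken from the SDK:

    // Worked example with illustrative numbers only.
    void CameraTextureSizeExample()
    {
        float cameraTextureScale = 1.25f;                   // illustrative scale
        int w = (int)(1280 / 2.0f * cameraTextureScale);    // 800 pixels wide per eye
        int h = (int)(800 * cameraTextureScale);            // 1000 pixels tall
        Debug.Log("Per-eye render texture: " + w + " x " + h);
    }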
Code example #6
File: OVRCamera.cs Project: MaximKloster/WakeUp
    ///////////////////////////////////////////////////////////
    // VISION FUNCTIONS
    ///////////////////////////////////////////////////////////

    /// <summary>
    /// Mainly used to reset the camera position and orientation.
    /// camOffset moves the center eye position to an optimal location;
    /// clampX/Y/Z zero out the offset in the given axis (restricting the offset there)
    /// </summary>
    /// <param name="camOffset">Cam offset.</param>
    /// <param name="clampX">If set to <c>true</c> clamp x.</param>
    /// <param name="clampY">If set to <c>true</c> clamp y.</param>
    /// <param name="clampZ">If set to <c>true</c> clamp z.</param>
    static public void ResetCameraPositionOrientation(ref Vector3 camOffset,
                                                      bool clampX, bool clampY, bool clampZ)
    {
        Vector3    camPos = Vector3.zero;
        Quaternion camO   = Quaternion.identity;

        OVRDevice.GetCameraPositionOrientation(ref camPos, ref camO);

        // Set position offset
        CameraPositionOffset = camPos;

        // restrict offset in the desired axis
        if (clampX == true)
        {
            CameraPositionOffset.x = 0.0f;
        }
        if (clampY == true)
        {
            CameraPositionOffset.y = 0.0f;
        }
        if (clampZ == true)
        {
            CameraPositionOffset.z = 0.0f;
        }

        // Adjust for optimal offset from zero (for eye position from neck etc.)
        CameraPositionOffset -= camOffset;
    }
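
A hedged usage sketch for the overload above: the clamp flags zero out the tracked position in the chosen axes before camOffset is subtracted. The method name and the eye offset values are illustrative only.

    // Hypothetical usage: keep only the Y component of the tracked camera position
    // (clampX/clampZ zero out the other axes), then subtract an illustrative eye offset.
    void ResetKeepingHeightOnly()
    {
        Vector3 eyeOffset = new Vector3(0.0f, 0.15f, 0.09f);   // illustrative values
        OVRCamera.ResetCameraPositionOrientation(ref eyeOffset, true, false, true);
    }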
Code example #7
    // FormatCalibratingString
    void FormatCalibratingString(ref string str)
    {
        if (MagAutoCalibrate == true)
        {
            str = System.String.Format("Mag Calibrating (AUTO)... Point {0} set",
                                       OVRDevice.MagNumberOfSamples(0));
        }
        else
        {
            // Manual Calibration: Make sure to get proper direction
            str = "Mag Calibrating (MANUAL)... LOOK ";

            switch (OVRDevice.MagManualCalibrationState(0))
            {
            case (0): str += "FORWARD"; break;

            case (1): str += "UP"; break;

            case (2): str += "LEFT"; break;

            case (3): str += "RIGHT"; break;

            case (4): str += "UPPER-RIGHT"; break;

            // failure case; the user will need to reset mag calibration manually
            case (5): str = "MANUAL CALIBRATION FAILED. PLEASE TRY AGAIN."; break;
            }
        }
    }
Code example #8
    // Start
    new void Start()
    {
        base.Start();

        // Get the OVRCameraController
        CameraController = gameObject.transform.parent.GetComponent<OVRCameraController>();

        if (CameraController == null)
        {
            Debug.LogWarning("WARNING: OVRCameraController not found!");
        }

        // NOTE: MSAA TEXTURES NOT AVAILABLE YET
        // Set CameraTextureScale (increases the size of the texture we are rendering into
        // for a better pixel match when post processing the image through lens distortion)
#if MSAA_ENABLED
        CameraTextureScale = OVRDevice.DistortionScale();
#endif
        // If CameraTextureScale is greater than 1.0f, create a new texture and assign it as the target texture
        // Otherwise, fall back to normal camera rendering
        if ((CameraTexture == null) && (CameraTextureScale > 1.0f))
        {
            int w = (int)(Screen.width / 2.0f * CameraTextureScale);
            int h = (int)(Screen.height * CameraTextureScale);
            CameraTexture = new RenderTexture(w, h, 24);

#if MSAA_ENABLED
            // NOTE: AA on RenderTexture not available yet
            //CameraTexture.antiAliasing = QualitySettings.antiAliasing;
#endif
        }
    }
Code example #9
    /// <summary>
    /// Stores the snapshot.
    /// </summary>
    /// <returns><c>true</c>, if snapshot was stored, <c>false</c> otherwise.</returns>
    /// <param name="snapshotName">Snapshot name.</param>
    bool StoreSnapshot(string snapshotName)
    {
        float f = 0;

        PresetManager.SetCurrentPreset(snapshotName);

        if (CameraController != null)
        {
            CameraController.GetIPD(ref f);
            PresetManager.SetPropertyFloat("IPD", ref f);

            f = OVRDevice.GetPredictionTime();
            PresetManager.SetPropertyFloat("PREDICTION", ref f);

            CameraController.GetVerticalFOV(ref f);
            PresetManager.SetPropertyFloat("FOV", ref f);

            Vector3 neckPosition = Vector3.zero;
            CameraController.GetNeckPosition(ref neckPosition);
            PresetManager.SetPropertyFloat("HEIGHT", ref neckPosition.y);
        }

        if (PlayerController != null)
        {
            PlayerController.GetMoveScaleMultiplier(ref f);
            PresetManager.SetPropertyFloat("SPEEDMULT", ref f);

            PlayerController.GetRotationScaleMultiplier(ref f);
            PresetManager.SetPropertyFloat("ROTMULT", ref f);
        }

        return(true);
    }
Code example #10
#pragma warning restore 414             // The private field 'x' is assigned but its value is never used

    // * * * * * * * * * * * * *

    /// <summary>
    /// Awake this instance.
    /// </summary>
    void Awake()
    {
        // Init
        theDevice = this;
        OVRInit   = OVR_Initialize();
        if (OVRInit == false)
        {
            return;
        }

#if (UNITY_ANDROID && !UNITY_EDITOR)
        // Don't allow the application to run if it's not in landscape left.
        if (Screen.orientation != ScreenOrientation.LandscapeLeft)
        {
            // make the error nice and big so you can see it in logcat or in the Unity Editor
            Debug.LogError("**************************************************************************************************************\n" +
                           "**************************************************************************************************************\n");
            Debug.LogError("***** Default screen orientation must be set to landscape left for VR.\n" +
                           "***** Stopping application.\n");
            Debug.LogError("**************************************************************************************************************\n" +
                           "**************************************************************************************************************");

            Debug.Break();
            Application.Quit();
        }
#endif

#if (UNITY_ANDROID && !UNITY_EDITOR)
        // We want to set up our touchpad messaging system
        OVRTouchpad.Create();
#endif

        // Set initial prediction time
        SetPredictionTime(PredictionTime);
    }
Code example #11
	// UpdatePrediction
	void UpdatePrediction()
	{
		// Turn prediction on/off
		if(Input.GetKeyDown (KeyCode.P))
		{		
			if( CameraController.PredictionOn == false) 
				CameraController.PredictionOn = true;
			else
				CameraController.PredictionOn = false;
		}
		
		// Update prediction value (only if prediction is on)
		if(CameraController.PredictionOn == true)
		{
			float pt = OVRDevice.GetPredictionTime(0); 
			if(Input.GetKeyDown (KeyCode.Comma))
				pt -= PredictionIncrement;
			else if(Input.GetKeyDown (KeyCode.Period))
				pt += PredictionIncrement;
			
			OVRDevice.SetPredictionTime(0, pt);
			
			// re-get the prediction time to make sure it took
			pt = OVRDevice.GetPredictionTime(0) * 1000.0f;
			
			if(ShowVRVars == true)// limit gc
				strPrediction = System.String.Format ("Pred (ms): {0:F3}", pt);								 
		}
		else
		{
			strPrediction = "Pred: OFF";
		}
	}
Code example #12
    // GUIStereoBox - Values based on pixels in DK1 resolution of W: (1280 / 2) H: 800
    void GUIStereoBox(int X, int Y, int wX, int wY, ref string text, Color color)
    {
        float ploLeft = 0, ploRight = 0;
        float sSX = (float)Screen.width / 1280.0f;

        float sSY = ((float)Screen.height / 800.0f);

        OVRDevice.GetPhysicalLensOffsets(ref ploLeft, ref ploRight);
        int xL       = (int)((float)X * sSX);
        int sSpreadX = (int)((float)StereoSpreadX * sSX);
        int xR       = (Screen.width / 2) + xL + sSpreadX -
                       // required to adjust for physical lens shift
                       (int)(ploLeft * (float)Screen.width / 2);
        int y = (int)((float)Y * sSY);

        GUI.contentColor = color;

        int sWX = (int)((float)wX * sSX);
        int sWY = (int)((float)wY * sSY);

        // Change font size based on screen scale
        if (Screen.height > 800)
        {
            GUI.skin.font = FontReplaceLarge;
        }
        else
        {
            GUI.skin.font = FontReplaceSmall;
        }

        GUI.Box(new Rect(xL, y, sWX, sWY), text);
        GUI.Box(new Rect(xR, y, sWX, sWY), text);
    }
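
The box positions are design-time pixel coordinates rescaled from the DK1 reference of 1280x800, with the right-eye copy shifted by half the screen width, the stereo spread, and the physical lens offset. A worked sketch with illustrative numbers (1920x1080 screen, X = 100, StereoSpreadX = 20, left lens offset 0.03):

    // Worked example with illustrative numbers only.
    void StereoBoxPositionExample()
    {
        float sSX    = 1920.0f / 1280.0f;                  // horizontal scale = 1.5
        int   xL     = (int)(100 * sSX);                   // 150
        int   spread = (int)(20 * sSX);                    // 30
        int   xR     = (1920 / 2) + xL + spread
                       - (int)(0.03f * 1920.0f / 2.0f);    // 960 + 150 + 30 - 28 = 1112
        Debug.Log("Left box x: " + xL + ", right box x: " + xR);
    }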
Code example #13
    // RIFT RESET ORIENTATION

    // UpdateResetOrientation
    void UpdateResetOrientation()
    {
        if (((sShowLevels == false) && (OVRGamepadController.GetDPadDown() == true)) ||
            (Input.GetKeyDown(KeyCode.B) == true))
        {
            OVRDevice.ResetOrientation(0);
        }
    }
Code example #14
    /// <summary>
    /// Call this in CameraController to set up the ideal FOV as
    /// defined by the SDK
    /// </summary>
    /// <returns>The ideal FOV.</returns>
    public float GetIdealVFOV()
    {
        int resH = 0; int resV = 0; float fovH = 0; float fovV = 0;

        OVRDevice.GetImageInfo(ref resH, ref resV, ref fovH, ref fovV);

        return(fovV);
    }
Code example #15
    /// <summary>
    /// Calculates the aspect ratio.
    /// </summary>
    /// <returns>The aspect ratio.</returns>
    public float CalculateAspectRatio()
    {
        int resH = 0; int resV = 0; float fovH = 0; float fovV = 0;

        OVRDevice.GetImageInfo(ref resH, ref resV, ref fovH, ref fovV);

        return((float)resH / (float)resV);
    }
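
These two helpers feed Camera.fieldOfView and Camera.aspect, as the controller does in code example #4. A minimal sketch of applying them directly, assuming references to a Unity Camera and the OVRCamera component are available; the method name is illustrative.

    // Hypothetical usage: apply the SDK-derived vertical FOV and aspect ratio
    // to a Unity camera, mirroring what ConfigureCamera does in code example #4.
    void ApplyIdealProjection(Camera targetCamera, OVRCamera ovrCamera)
    {
        targetCamera.fieldOfView = ovrCamera.GetIdealVFOV();
        targetCamera.aspect      = ovrCamera.CalculateAspectRatio();
    }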
Code example #16
 /// <summary>
 /// Updates resolution of eye texture
 /// </summary>
 void UpdateResolutionEyeTexture()
 {
     if (ShowVRVars == true) // limit gc
     {
         int w = 0, h = 0;
         OVRDevice.GetResolutionEyeTexture(ref w, ref h);
         strResolutionEyeTexture = System.String.Format("Resolution : {0} x {1}", w, h);
     }
 }
Code example #17
    // RIFT RESET ORIENTATION

    // UpdateResetOrientation
    void UpdateResetOrientation()
    {
        if (((ScenesVisible == false) &&
             (OVRGamepadController.GPC_GetButton((int)OVRGamepadController.Button.Down) == true)) ||
            (Input.GetKeyDown(KeyCode.B) == true))
        {
            OVRDevice.ResetOrientation(0);
        }
    }
Code example #18
 // Update is called once per frame
 void Update()
 {
     if (Input.GetKeyDown(KeyCode.Space))
     {
         if (OVRDevice.IsHMDPresent())
         {
             OVRDevice.ResetOrientation(0);
         }
     }
 }
Code example #19
	// LoadSnapshot
	bool LoadSnapshot(string snapshotName)
	{
		float f = 0;
		
		PresetManager.SetCurrentPreset(snapshotName);
		
		if(CameraController != null)
		{
			if(PresetManager.GetPropertyFloat("IPD", ref f) == true)
				CameraController.SetIPD(f);
		
			if(PresetManager.GetPropertyFloat("PREDICTION", ref f) == true)
				OVRDevice.SetPredictionTime(0, f);
		
			if(PresetManager.GetPropertyFloat("FOV", ref f) == true)
				CameraController.SetVerticalFOV(f);
		
			if(PresetManager.GetPropertyFloat("HEIGHT", ref f) == true)
			{
				Vector3 neckPosition = Vector3.zero;
				CameraController.GetNeckPosition(ref neckPosition);
				neckPosition.y = f;
				CameraController.SetNeckPosition(neckPosition);
			}

			float Dk0 = 0.0f;
			float Dk1 = 0.0f;
			float Dk2 = 0.0f;
			float Dk3 = 0.0f;
			CameraController.GetDistortionCoefs(ref Dk0, ref Dk1, ref Dk2, ref Dk3);
		
			if(PresetManager.GetPropertyFloat("DISTORTIONK0", ref f) == true)
				Dk0 = f;
			if(PresetManager.GetPropertyFloat("DISTORTIONK1", ref f) == true)
				Dk1 = f;
			if(PresetManager.GetPropertyFloat("DISTORTIONK2", ref f) == true)
				Dk2 = f;
			if(PresetManager.GetPropertyFloat("DISTORTIONK3", ref f) == true)
				Dk3 = f;
		
			CameraController.SetDistortionCoefs(Dk0, Dk1, Dk2, Dk3);
		
		}
		
		if(PlayerController != null)
		{
			if(PresetManager.GetPropertyFloat("SPEEDMULT", ref f) == true)
				PlayerController.SetMoveScaleMultiplier(f);

			if(PresetManager.GetPropertyFloat("ROTMULT", ref f) == true)
				PlayerController.SetRotationScaleMultiplier(f);
		}
			
		return true;
	}
Code example #20
    // StereoBox - Values based on pixels in DK1 resolution of W: (1280 / 2) H: 800
    // TODO: Create overloaded function to take normalized float values from 0 - 1 on screen
    public void StereoBox(int X, int Y, int wX, int wY, ref string text, Color color)
    {
        Font prevFont = GUI.skin.font;

        if (Draw3D == true)
        {
            GUI.contentColor = color;

            if (GUI.skin.font != FontReplace)
            {
                GUI.skin.font = FontReplace;
            }

            float sSX = (float)Screen.width / PixelWidth;
            float sSY = (float)Screen.height / PixelHeight;

            int x  = (int)((float)X * sSX * 1.75f);
            int wx = (int)((float)wX * sSY * 1.0f);

            GUI.Box(new Rect(x, Y, wx, wY), text);
        }
        else
        {
            // Deprecate this part of the code; we will want to do everything in 3D space
            // on the RIFT (especially when HD versions of the Rift are available)
            float ploLeft = 0, ploRight = 0;
            float sSX = (float)Screen.width / PixelWidth;
            float sSY = (float)Screen.height / PixelHeight;

            OVRDevice.GetPhysicalLensOffsets(ref ploLeft, ref ploRight);

            int xL       = (int)((float)X * sSX);
            int sSpreadX = (int)(StereoSpreadX * sSX);
            int xR       = (Screen.width / 2) + xL + sSpreadX -
                           // required to adjust for physical lens shift
                           (int)(ploLeft * (float)Screen.width / 2);
            int y = (int)((float)Y * sSY);

            GUI.contentColor = color;

            int sWX = (int)((float)wX * sSX);
            int sWY = (int)((float)wY * sSY);

            if (FontReplace != null)
            {
                GUI.skin.font = FontReplace;
            }

            GUI.Box(new Rect(xL, y, sWX, sWY), text);
            GUI.Box(new Rect(xR, y, sWX, sWY), text);
        }

        GUI.skin.font = prevFont;
    }
Code example #21
 /// <summary>
 /// Check input and reset orientation if necessary
 /// See the input mapping setup in the Unity Integration guide
 /// </summary>
 void Update()
 {
     // NOTE: some of the buttons defined in OVRGamepadController.Button are not available on the Android game pad controller
     if (Input.GetButtonDown(OVRGamepadController.ButtonNames[(int)resetButton]))
     {
         //*************************
         // reset orientation
         //*************************
         OVRDevice.ResetOrientation();
     }
 }
Code example #22
 void Start()
 {
     SetupUi();
     OVRDevice.ResetOrientation();
     // gets the renderer of the child, parent is an empty controller
     selectionPlaneRenderer = selectionPlane.GetChild(0).renderer;
     // get instance of output box.
     manager      = (uiManager)manageSource.GetComponent(typeof(uiManager));
     textVal      = textBox.GetComponent<Text>();
     textVal.text = "";
     eqInfo       = GraphData.gd;
     transform.parent.gameObject.SetActive(false);
 }
Code example #23
    /// <summary>
    /// Mainly used to reset the camera position and orientation.
    /// posScale/posOffset scale and offset the positional change;
    /// ortScale/ortOffset scale and offset the orientation change
    /// </summary>
    /// <param name="posScale">Scale for positional change.</param>
    /// <param name="posOffset">Positional offset.</param>
    /// <param name="ortScale">Scale for orientation change.</param>
    /// <param name="ortOffset">Orientation offset.</param>
    static public void ResetCameraPositionOrientation(Vector3 posScale, Vector3 posOffset, Vector3 ortScale, Vector3 ortOffset)
    {
        Vector3    camPos = Vector3.zero;
        Quaternion camO   = Quaternion.identity;

        OVRDevice.GetCameraPositionOrientation(ref camPos, ref camO, OVRDevice.PredictionTime);

        CameraPositionOffset = Vector3.Scale(camPos, posScale) - posOffset;

        Vector3 euler = Quaternion.Inverse(camO).eulerAngles;

        CameraOrientationOffset = Quaternion.Euler(Vector3.Scale(euler, ortScale) - ortOffset);
    }
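
A hedged usage sketch for the overload above, assuming it lives on OVRCamera like the ref-based overload in code example #6. Unit scales and zero offsets mirror the tracked pose directly; scaling a component toward zero damps it. The values below are illustrative.

    // Hypothetical usage: take the tracked position as-is, but only half of the
    // tracked yaw/pitch/roll, with no additional offsets (values are illustrative).
    void ResetWithDampedOrientation()
    {
        OVRCamera.ResetCameraPositionOrientation(Vector3.one,                      // posScale
                                                 Vector3.zero,                     // posOffset
                                                 new Vector3(0.5f, 0.5f, 0.5f),    // ortScale
                                                 Vector3.zero);                    // ortOffset
    }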
Code example #24
 public void Update()
 {
     if (s_OculusRiftActive)
     {
         if (Input.GetKeyDown(KeyCode.F2))
         {
             for (int i = 0; i < OVRDevice.SensorCount; ++i)
             {
                 OVRDevice.ResetOrientation(i);
             }
         }
     }
 }
Code example #25
    private void Start()
    {
        _initialEyePos = _controller.EyeCenterPosition;
        OVRDevice.GetIPD(ref _initialIPD);
        CameraScale = _defaultSettings.Scale;

        _lastTargetPos = transform.position;
        _lastJump      = -DirtyJumpTime;

        RefreshModeInfo();

        _currentTrackPos      = transform.position;
        _currentTrackRotation = transform.rotation;
    }
Code example #26
    // Start
    void Start()
    {
        DisplayCrosshair      = false;
        CollisionWithGeometry = false;
        FadeVal = 0.0f;
        MainCam = Camera.main;

        // Initialize screen location of cursor
        XL = Screen.width * 0.25f;
        YL = Screen.height * 0.5f;

        // Get the values for both IPD and lens distortion correction shift
        OVRDevice.GetPhysicalLensOffsets(ref LensOffsetLeft, ref LensOffsetRight);
    }
Code example #27
    // UpdatePlayerEyeHeight
    void UpdatePlayerEyeHeight()
    {
        if ((UsePlayerEyeHeight == true) && (PrevUsePlayerEyeHeight == false))
        {
            // Calculate neck position to use based on Player configuration
            float peh = 0.0f;

            if (OVRDevice.GetPlayerEyeHeight(ref peh) != false)
            {
                NeckPosition.y = peh - CameraRootPosition.y - EyeCenterPosition.y;
            }
        }

        PrevUsePlayerEyeHeight = UsePlayerEyeHeight;
    }
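
The neck height computed above is the player's configured eye height minus the camera root height and the eye-center height. A worked sketch with illustrative numbers (not values from the SDK):

    // Worked example: eye height 1.675 m, camera root at y = 1.0 m,
    // eye center 0.15 m above the neck pivot -> neck sits 0.525 m above the root.
    void NeckHeightExample()
    {
        float playerEyeHeight = 1.675f;
        float cameraRootY     = 1.0f;
        float eyeCenterY      = 0.15f;
        float neckY = playerEyeHeight - cameraRootY - eyeCenterY;   // = 0.525f
        Debug.Log("Neck height above camera root: " + neckY);
    }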
Code example #28
    /// <summary>
    /// Enables the yaw correction.
    /// </summary>
    void EnableYawCorrection()
    {
        OVRDevice.EnableMagYawCorrection(true);

        // All set, we can update the geometry with camera and position values
        Quaternion q = Quaternion.identity;
        Vector3    o = Vector3.zero;         // This is not used

        if (CameraController != null)
        {
            OVRDevice.GetCameraPositionOrientation(ref o, ref q);
        }

        CurEulerRef = q.eulerAngles;
    }
Code example #29
    /// <summary>
    /// Updates the cube grid.
    /// </summary>
    void UpdateCubeGrid()
    {
        // Toggle the grid cube display on 'G'
        if (Input.GetKeyDown(KeyCode.G))
        {
            if (CubeGridOn == false)
            {
                CubeGridOn = true;
                Debug.LogWarning("CubeGrid ON");
                if (CubeGrid != null)
                {
                    CubeGrid.SetActive(true);
                }
                else
                {
                    CreateCubeGrid();
                }

                // Add the CameraCubeGrid to the camera list for update
                OVRCamera.AddToLocalCameraSetList(ref CameraCubeGrid);
            }
            else
            {
                CubeGridOn = false;
                Debug.LogWarning("CubeGrid OFF");

                if (CubeGrid != null)
                {
                    CubeGrid.SetActive(false);
                }

                // Remove the CameraCubeGrid from the camera list
                OVRCamera.RemoveFromLocalCameraSetList(ref CameraCubeGrid);
            }
        }

        if (CubeGrid != null)
        {
            // Set cube colors to let user know if camera is tracking
            CubeSwitchColor = !OVRDevice.IsCameraTracking();

            if (CubeSwitchColor != CubeSwitchColorOld)
            {
                CubeGridSwitchColor(CubeSwitchColor);
            }
            CubeSwitchColorOld = CubeSwitchColor;
        }
    }
Code example #30
 /// <summary>
 /// Updates latency values
 /// </summary>
 void UpdateLatencyValues()
 {
     if (ShowVRVars == true) // limit gc
     {
         float Ren = 0.0f, TWrp = 0.0f, PostPresent = 0.0f;
         OVRDevice.GetLatencyValues(ref Ren, ref TWrp, ref PostPresent);
         if (Ren < 0.000001f && TWrp < 0.000001f && PostPresent < 0.000001f)
         {
             strLatencies = System.String.Format("Ren : N/A TWrp: N/A PostPresent: N/A");
         }
         else
         {
             strLatencies = System.String.Format("Ren : {0:F3} TWrp: {1:F3} PostPresent: {2:F3}", Ren, TWrp, PostPresent);
         }
     }
 }
Code example #31
 // Use this for initialization
 void Start()
 {
     ovrDevice = this.GetComponent<OVRDevice>();
 }
Code example #32
File: OVRDevice.cs Project: iveos/SIG-ET-2015-OR
#pragma warning restore 414		// The private field 'x' is assigned but its value is never used

	// * * * * * * * * * * * * *

	/// <summary>
	/// Awake this instance.
	/// </summary>
	void Awake () 
	{	
		// Init
		theDevice = this;
		OVRInit = OVR_Initialize();
		if(OVRInit == false) return;

#if (UNITY_ANDROID && !UNITY_EDITOR)
		// Don't allow the application to run if it's not in landscape left.
		if ( Screen.orientation != ScreenOrientation.LandscapeLeft )
		{
			// make the error nice and big so you can see it in logcat or in the Unity Editor
			Debug.LogError( "**************************************************************************************************************\n" +
			               "**************************************************************************************************************\n" );
			Debug.LogError( "***** Default screen orientation must be set to landscape left for VR.\n" +
			                "***** Stopping application.\n" );
			Debug.LogError( "**************************************************************************************************************\n" +
			               "**************************************************************************************************************" );

			Debug.Break();
			Application.Quit();
		}
#endif

#if (UNITY_ANDROID && !UNITY_EDITOR)
        // We want to set up our touchpad messaging system
        OVRTouchpad.Create();
#endif

		// Set initial prediction time
		SetPredictionTime(PredictionTime);
	}
Code example #33
    // * * * * * * * * * * * * *
    // Awake
    void Awake()
    {
        Instance = this;
        // Initialize static Dictionary lists first
        InitSensorList(false);
        InitOrientationSensorList();

        OVRInit = OVR_Initialize();

        if(OVRInit == false)
            return;

        // * * * * * * *
        // DISPLAY SETUP

        // We will get the HMD so that we can eventually target it within Unity
        DisplayDeviceName += Marshal.PtrToStringAnsi(OVR_GetDisplayDeviceName());

        OVR_GetScreenResolution (ref HResolution, ref VResolution);
        OVR_GetScreenSize (ref HScreenSize, ref VScreenSize);
        OVR_GetEyeToScreenDistance(ref EyeToScreenDistance);
        OVR_GetLensSeparationDistance(ref LensSeparationDistance);
        OVR_GetEyeOffset (ref LeftEyeOffset, ref RightEyeOffset);
        OVR_GetScreenVCenter (ref ScreenVCenter);
        OVR_GetDistortionCoefficients( ref DistK0, ref DistK1, ref DistK2, ref DistK3);

        // Distortion fit parameters based on whether we are using a 5" (Prototype, DK2+) or 7" (DK1) screen
        if (HScreenSize < 0.140f)   // 5.5"
        {
            DistortionFitX = 0.0f;
            DistortionFitY = 1.0f;
        }
        else                        // 7" (DK1)
        {
            DistortionFitX     = -1.0f;
            DistortionFitY     =  0.0f;
            DistortionFitScale =  0.7f;
        }

        // Calculate the lens offsets for each eye and store
        CalculatePhysicalLensOffsets(ref LensOffsetLeft, ref LensOffsetRight);

        // * * * * * * *
        // SENSOR SETUP

        SensorCount = OVR_GetSensorCount();

        // If PredictionTime is already set, initialize the sensor prediction time directly
        if(PredictionTime > 0.0f)
            OVR_SetSensorPredictionTime(SensorList[0], PredictionTime);
        else
            SetPredictionTime(SensorList[0], InitialPredictionTime);
    }
Code example #34
File: OVRDeviceEditor.cs Project: KimTaehee/vdesk
	// OnEnable
	void OnEnable()
	{
		m_Component = (OVRDevice)target;
	}