Inheritance: MonoBehaviour
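Most of the examples below follow the same discovery pattern: locate exactly one OVRCameraRig (typically with GetComponentsInChildren or FindObjectOfType), warn when zero or more than one are found, and cache the single instance so its anchors and tracking space can be used later. A minimal sketch of that pattern, assuming a hypothetical MonoBehaviour with a CameraRig field (the class and field names are illustrative, not taken from any one example):

    using UnityEngine;

    public class CameraRigLocator : MonoBehaviour
    {
        // Cached reference, filled in Awake and reused by the rest of the behaviour.
        public OVRCameraRig CameraRig;

        void Awake()
        {
            // Search the children of this GameObject for camera rigs.
            OVRCameraRig[] rigs = gameObject.GetComponentsInChildren<OVRCameraRig>();

            if (rigs.Length == 0)
                Debug.LogWarning("CameraRigLocator: No OVRCameraRig attached.");
            else if (rigs.Length > 1)
                Debug.LogWarning("CameraRigLocator: More than 1 OVRCameraRig attached.");
            else
                CameraRig = rigs[0];
        }
    }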
Code Example #1
File: MouseLookJoy.cs Project: Tarwine/Ziggurat
    void Awake()
    {
        Controller = gameObject.GetComponent<CharacterController>();

        if(Controller == null)
            Debug.LogWarning("OVRPlayerController: No CharacterController attached.");

        // We use OVRCameraRig to set rotations to cameras,
        // and to be influenced by rotation
        OVRCameraRig[] CameraControllers;
        CameraControllers = gameObject.GetComponentsInChildren<OVRCameraRig>();

        if(CameraControllers.Length == 0)
            Debug.LogWarning("OVRPlayerController: No OVRCameraRig attached.");
        else if (CameraControllers.Length > 1)
            Debug.LogWarning("OVRPlayerController: More then 1 OVRCameraRig attached.");
        else
            CameraController = CameraControllers[0];

        DirXform = transform.Find("ForwardDirection");

        if(DirXform == null)
            Debug.LogWarning("OVRPlayerController: ForwardDirection game object not found. Do not use.");

        #if UNITY_ANDROID && !UNITY_EDITOR
        OVRManager.display.RecenteredPose += ResetOrientation;
        #endif
    }
Code Example #2
File: SharkPC.cs Project: MrIThompson/Dino
		// Use this for initialization
		void Start () {
			ovrRig = GetComponent<OVRCameraRig> ();
			controller = GameObject.Find ("GM");
			endIt = true;
			InvokeRepeating ("StartUI", 1, 1);
		#if UNITY_EDITOR
		power = 50;
		#endif
		}
Code Example #3
	void Awake()
	{
		// locate the camera rig so we can use it to get the current camera transform each frame
		OVRCameraRig[] CameraRigs = gameObject.GetComponentsInChildren<OVRCameraRig>();
		
		if( CameraRigs.Length == 0 )
			Debug.LogWarning("OVRCamParent: No OVRCameraRig attached.");
		else if (CameraRigs.Length > 1)
			Debug.LogWarning("OVRCamParent: More then 1 OVRCameraRig attached.");
		else
			CameraRig = CameraRigs[0];
	}
Code Example #4
    void Awake()
    {
        // We use OVRCameraRig to set rotations to cameras,
        // and to be influenced by rotation
        OVRCameraRig[] CameraControllers;
        CameraControllers = gameObject.GetComponents<OVRCameraRig>();

        if (CameraControllers.Length == 0)
            Debug.LogWarning("PlayerController : No OVRCameraRig attached.");
        else if (CameraControllers.Length > 1)
            Debug.LogWarning("PlayerController : More then 1 OVRCameraRig attached.");
        else
            CameraController = CameraControllers[0];

        YRotation = transform.rotation.eulerAngles.y;

    }
Code Example #5
    private IEnumerator DelayedDeath()
    {
        AudioSource snd = TheCellGameMgr.instance.Audio_DeathScream[0];

        snd.Play();

        yield return(new WaitForSecondsRealtime(3.0f));

        Debug.Log($"[OneCellClass] Kill the player sub {cellSubType}, go back at start. DeathTime = {Time.fixedTime - TheCellGameMgr.instance.GetGameStartTime()}");

        OVRCameraRig  rig = FindObjectOfType <OVRCameraRig>();
        OVRScreenFade _screenFadeScript = rig.GetComponent <OVRScreenFade>();

        if (_screenFadeScript != null)
        {
            _screenFadeScript.fadeColor = new Color(0.5f, 0.0f, 0.0f);
            _screenFadeScript.FadeOut();
        }

        StartCoroutine(TeleportToStart());
    }
Code Example #6
		public static TrackedController FindOrCreate(HandednessId handedness)
		{
			TrackedController[] array = UnityEngine.Object.FindObjectsOfType<TrackedController>();
			foreach (TrackedController trackedController in array)
			{
				if (trackedController.Handedness == handedness)
				{
					return trackedController;
				}
			}
			GameObject gameObject = new GameObject("TrackedController");
			TrackedController trackedController2 = gameObject.AddComponent<TrackedController>();
			Transform trackedTransform = null;
			OVRCameraRig ovrcameraRig = UnityEngine.Object.FindObjectOfType<OVRCameraRig>();
			if (ovrcameraRig != null)
			{
				trackedTransform = ((handedness != HandednessId.Left) ? ovrcameraRig.rightHandAnchor : ovrcameraRig.leftHandAnchor);
			}
			trackedController2.Initialize(handedness, trackedTransform);
			return trackedController2;
		}
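FindOrCreate above is a look-up-or-lazy-create helper: it returns an existing TrackedController for the requested hand, or creates one and initializes it with the matching hand anchor of the scene's OVRCameraRig. A hypothetical one-line usage, assuming the same HandednessId enum:

    // Get (or lazily create) the controller bound to the left-hand anchor.
    TrackedController leftController = TrackedController.FindOrCreate(HandednessId.Left);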
Code Example #7
    void Start()
    {
        if (CharacterController == null)
        {
            CharacterController = GetComponentInParent <CharacterController>();
        }
        Assert.IsNotNull(CharacterController);
        if (PlayerController == null)
        {
            PlayerController = GetComponentInParent <OVRPlayerController>();
        }
        Assert.IsNotNull(PlayerController);
        if (CameraRig == null)
        {
            CameraRig = FindObjectOfType <OVRCameraRig>();
        }
        Assert.IsNotNull(CameraRig);
#if UNITY_EDITOR
        OVRPlugin.SendEvent("locomotion_controller", (SceneManager.GetActiveScene().name == "Locomotion").ToString(), "sample_framework");
#endif
    }
Code Example #8
    // Use this for initialization
    void Start()
    {
        OVRCameraRig OculusCamera = gameObject.GetComponent <OVRCameraRig>();

        _handController = gameObject.AddComponent <HandController> ();
        _handController.isHeadMounted = true;
        _handController.destroyHands  = true;


        LeapImageRetriever.EYE[] eyes = new LeapImageRetriever.EYE[] { LeapImageRetriever.EYE.RIGHT, IsStereo? LeapImageRetriever.EYE.LEFT:LeapImageRetriever.EYE.RIGHT };
        if (OculusCamera != null)
        {
            Camera[] cams = new Camera[] { OculusCamera.rightEyeCamera, OculusCamera.leftEyeCamera };

            for (int i = 0; i < cams.Length; ++i)
            {
                LeapImageRetriever lret = cams[i].gameObject.AddComponent <LeapImageRetriever>();
                lret.eye             = eyes[i];
                lret.syncMode        = LeapImageRetriever.SYNC_MODE.LOW_LATENCY;
                lret.gammaCorrection = 1.0f;

                _retrivals[i] = lret;

                cams[i].gameObject.AddComponent <EnableDepthBuffer>();
            }

            //OculusCamera.centerEyeAnchor.gameObject;

            GameObject HandsRenderer = GameObject.CreatePrimitive(PrimitiveType.Quad);
            HandsRenderer.name                    = "LeapMotionHandsRenderer";
            HandsRenderer.transform.parent        = OculusCamera.centerEyeAnchor.transform;
            HandsRenderer.transform.localPosition = new Vector3(0, 0, 0.137f);
            HandsRenderer.transform.localRotation = Quaternion.identity;
            LeapImageBasedMaterial lmat = HandsRenderer.AddComponent <LeapImageBasedMaterial>();
            lmat.imageMode = IsStereo?LeapImageBasedMaterial.ImageMode.STEREO:LeapImageBasedMaterial.ImageMode.RIGHT_ONLY;
            HandsRenderer.GetComponent <MeshRenderer>().material = HandsMaterial;
            _Hands = HandsRenderer;
            _Hands.GetComponent <MeshRenderer>().enabled = IsActive;
        }
    }
Code Example #9
    protected virtual void InitializeCamera()
    {
#if USES_OPEN_VR
        OVRCameraRig rig = GameObject.FindObjectOfType <OVRCameraRig>();
        Assert.IsNotNull(rig, "To use Open VR Portal mode you need to have an OVRCameraRig in your scene.");
        mainCamera  = rig.leftEyeCamera;
        rightCamera = rig.rightEyeCamera;
#else
        mainCamera = Camera.main;
#endif
        Assert.IsNotNull(mainCamera, "Pocket Portal could not find a main camera in your scene.");

        gameObject.layer = FromDimension().layer;

        Vector3 convertedPoint = transform.InverseTransformPoint(mainCamera.transform.position);
        triggerZDirection = (convertedPoint.z > 0);

        if (!mainCamera.GetComponent <MainCameraLayerManager>())
        {
            mainCamera.gameObject.AddComponent <MainCameraLayerManager>();             // this allows us to alter layers before / after a render!
        }
    }
Code Example #10
        bool InitializeTrackingReference()
        {
            if (_initialized)
            {
                return(true);
            }

            if (_handedness != Handedness.Left && _handedness != Handedness.Right)
            {
                return(false);
            }

            _cameraRig   = FindObjectOfType <OVRCameraRig>();
            _initialized = _cameraRig != null;
            if (_initialized)
            {
                _cameraRig.EnsureGameObjectIntegrity();
                _controllerAnchor = _handedness == Handedness.Left ? _cameraRig.leftControllerAnchor : _cameraRig.rightControllerAnchor;
            }

            return(_initialized);
        }
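Because the OVRCameraRig may not exist yet when this component wakes up, InitializeTrackingReference is written to be retried until it succeeds. A caller would typically invoke it each frame before reading the cached anchor; a minimal sketch under that assumption (the Update method below is illustrative and not part of the original snippet):

        void Update()
        {
            // Keep retrying until an OVRCameraRig has been found and the anchor is cached.
            if (!InitializeTrackingReference())
                return;

            // Follow the controller anchor that matches this component's handedness.
            transform.SetPositionAndRotation(_controllerAnchor.position, _controllerAnchor.rotation);
        }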
Code Example #11
        protected virtual void Awake()
        {
            // TODO: find a proper way to disable the controller that is not being used. disabling a controller doesn't work.

            m_anchorOffsetPosition = transform.localPosition;
            m_anchorOffsetRotation = transform.localRotation;

            // If we are being used with an OVRCameraRig, let it drive input updates, which may come from Update or FixedUpdate.

            OVRCameraRig rig = null;

            if (transform.parent != null && transform.parent.parent != null)
            {
                rig = transform.parent.parent.GetComponent <OVRCameraRig>();
            }

            if (rig != null)
            {
                rig.UpdatedAnchors          += (r) => { OnUpdatedAnchors(); };
                operatingWithoutOVRCameraRig = false;
            }
        }
Code Example #12
    protected virtual void Awake()
    {
        m_parentTransform = GameObject.Find("OVRPlayerController/OVRCameraRig").transform;
        //-----------------
        m_anchorOffsetPosition = transform.localPosition;
        m_anchorOffsetRotation = transform.localRotation;

        // If we are being used with an OVRCameraRig, let it drive input updates, which may come from Update or FixedUpdate.

        OVRCameraRig rig = null;

        if (transform.parent != null && transform.parent.parent != null)
        {
            rig = transform.parent.parent.GetComponent <OVRCameraRig>();
        }

        if (rig != null)
        {
            rig.UpdatedAnchors          += (r) => { OnUpdatedAnchors(); };
            operatingWithoutOVRCameraRig = false;
        }
    }
Code Example #13
    /// <summary>
    /// Invoked by OVRCameraRig's UpdatedAnchors callback. Allows the Hmd rotation to update the facing direction of the player.
    /// </summary>
    public void UpdateTransform(OVRCameraRig rig)
    {
        Transform root      = CameraRig.trackingSpace;
        Transform centerEye = CameraRig.centerEyeAnchor;

        if (HmdRotatesY && !Teleported)
        {
            Vector3    prevPos = root.position;
            Quaternion prevRot = root.rotation;

            transform.rotation = Quaternion.Euler(0.0f, centerEye.rotation.eulerAngles.y, 0.0f);

            root.position = prevPos;
            root.rotation = prevRot;
        }

        UpdateController();
        if (TransformUpdated != null)
        {
            TransformUpdated(root);
        }
    }
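The summary above says UpdateTransform is invoked by OVRCameraRig's UpdatedAnchors callback, but the subscription itself is not part of this snippet. A minimal sketch of how that hookup is usually made, assuming the same CameraRig field (the Start method is illustrative):

    void Start()
    {
        // Let the rig call back into this controller after it updates its anchors each frame.
        CameraRig.UpdatedAnchors += UpdateTransform;
    }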
Code Example #14
    protected virtual void Awake()
    {
        DMS = dummyManager.GetComponent <DummyManagerScript>();

        m_anchorOffsetPosition = transform.localPosition;
        m_anchorOffsetRotation = transform.localRotation;

        // If we are being used with an OVRCameraRig, let it drive input updates, which may come from Update or FixedUpdate.

        OVRCameraRig rig = null;

        if (transform.parent != null && transform.parent.parent != null)
        {
            rig = transform.parent.parent.GetComponent <OVRCameraRig>();
        }

        if (rig != null)
        {
            rig.UpdatedAnchors          += (r) => { OnUpdatedAnchors(); };
            operatingWithoutOVRCameraRig = false;
        }
    }
Code Example #15
    protected virtual void Awake()
    {
        //Debug.Log("OVRGrabber detected in object "+gameObject.name);

        m_anchorOffsetPosition = transform.localPosition;
        m_anchorOffsetRotation = transform.localRotation;

        // If we are being used with an OVRCameraRig, let it drive input updates, which may come from Update or FixedUpdate.

        OVRCameraRig rig = null;

        if (transform.parent != null && transform.parent.parent != null)
        {
            rig = transform.parent.parent.GetComponent <OVRCameraRig>();
        }

        if (rig != null)
        {
            rig.UpdatedAnchors          += (r) => { OnUpdatedAnchors(); };
            operatingWithoutOVRCameraRig = false;
        }
    }
Code Example #16
        /// <inheritdoc />
        public override void Initialize(Transform playerRoot, VusrEventSystem eventSystem)
        {
            OVRCameraRig cameraRig = Instantiate(_ovrCameraRigPrefab, playerRoot);

            _laserInputModule = eventSystem.gameObject.AddComponent <MotionControllerInputModule>();

            _laserInputModule.LeftController = cameraRig.leftHandAnchor.gameObject.AddComponent <GearVRController>();
            (_laserInputModule.LeftController as GearVRController).ControllerType = Application.isEditor ? OVRInput.Controller.LTouch : OVRInput.Controller.LTrackedRemote;
            OVRTrackedRemote lModel = Instantiate(_trackedRemotePrefab, _laserInputModule.LeftController.transform).GetComponent <OVRTrackedRemote>();

            lModel.m_controller = (_laserInputModule.LeftController as GearVRController).ControllerType;
            _laserInputModule.LeftController.ControllerModel = lModel.gameObject;

            _laserInputModule.RightController = cameraRig.rightHandAnchor.gameObject.AddComponent <GearVRController>();
            (_laserInputModule.RightController as GearVRController).ControllerType = Application.isEditor ? OVRInput.Controller.RTouch : OVRInput.Controller.RTrackedRemote;
            OVRTrackedRemote rModel = Instantiate(_trackedRemotePrefab, _laserInputModule.RightController.transform).GetComponent <OVRTrackedRemote>();

            rModel.m_controller = (_laserInputModule.RightController as GearVRController).ControllerType;
            _laserInputModule.RightController.ControllerModel = rModel.gameObject;

            StartOVRSettings();
        }
Code Example #17
    protected override void Start()
    {
        // get the Animator
        m_animator = this.gameObject.GetComponent <Animator>();

        // get stereo dialog
        stereoDialog = GameObject.Find("StereoDialog").GetComponent <StereoDialog> ();

        // check if HMD is on
#if !UNITY_WEBPLAYER
        HMDPresent = OVRManager.display.isPresent;
#else
        HMDPresent = false;
#endif

        // find all objects with mouse look script
        ovrCameraRig     = transform.root.GetComponentInChildren <OVRCameraRig> ();
        mouseLookObjects = ovrCameraRig.GetComponentsInChildren <MouseLook> ();
        hydraLookObjects = ovrCameraRig.GetComponentsInChildren <HydraLook> ();

        base.Start();
    }
Code Example #18
    void Awake()
    {
        Controller = gameObject.GetComponent <CharacterController>();

        if (Controller == null)
        {
            Debug.LogWarning("OVRPlayerController: No CharacterController attached.");
        }

        // We use OVRCameraRig to set rotations to cameras,
        // and to be influenced by rotation
        List <OVRCameraRig> cameraRigs = new List <OVRCameraRig>();

        foreach (Transform child in transform)
        {
            OVRCameraRig childCameraRig = child.gameObject.GetComponent <OVRCameraRig>();
            if (childCameraRig != null)
            {
                cameraRigs.Add(childCameraRig);
            }
        }

        if (cameraRigs.Count == 0)
        {
            Debug.LogWarning("OVRPlayerController: No OVRCameraRig attached.");
        }
        else if (cameraRigs.Count > 1)
        {
            Debug.LogWarning("OVRPlayerController: More then 1 OVRCameraRig attached.");
        }
        else
        {
            CameraRig = cameraRigs[0];
        }

        InitialYRotation_ = transform.rotation.eulerAngles.y;

        footstepSource = GetComponent <AudioSource>();
    }
Code Example #19
        protected override void OnEnable()
        {
            base.OnEnable();

            _checkDeviceRequiered = true;

            _rig = GetComponent <OVRCameraRig>();

            if (_rig != null)
            {
                _rig.UpdatedAnchors += OnUpdatedAnchors;
            }
            else
            {
                Debug.LogError("OVRCameraRig not found");
            }

            if (OVRManager.boundary != null)
            {
                OVRManager.boundary.SetVisible(false);
            }
        }
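The OnEnable above subscribes to UpdatedAnchors, but the matching teardown is not shown; a component like this usually unsubscribes when it is disabled so the rig does not keep calling into an inactive object. A minimal sketch, assuming the same _rig field and a base class that also declares a virtual OnDisable:

        protected override void OnDisable()
        {
            base.OnDisable();

            // Stop receiving anchor updates while this component is disabled.
            if (_rig != null)
            {
                _rig.UpdatedAnchors -= OnUpdatedAnchors;
            }
        }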
Code Example #20
    void Awake()
    {
        Controller = gameObject.GetComponent <CharacterController>();

        if (Controller == null)
        {
            Debug.LogWarning("OVRPlayerController: No CharacterController attached.");
        }

        // We use OVRCameraRig to set rotations to cameras,
        // and to be influenced by rotation
        OVRCameraRig[] CameraControllers;
        CameraControllers = gameObject.GetComponentsInChildren <OVRCameraRig>();

        if (CameraControllers.Length == 0)
        {
            Debug.LogWarning("OVRPlayerController: No OVRCameraRig attached.");
        }
        else if (CameraControllers.Length > 1)
        {
            Debug.LogWarning("OVRPlayerController: More then 1 OVRCameraRig attached.");
        }
        else
        {
            CameraController = CameraControllers[0];
        }

        DirXform = transform.Find("ForwardDirection");

        if (DirXform == null)
        {
            Debug.LogWarning("OVRPlayerController: ForwardDirection game object not found. Do not use.");
        }

#if UNITY_ANDROID && !UNITY_EDITOR
        OVRManager.display.RecenteredPose += ResetOrientation;
#endif
    }
Code Example #21
        /// <inheritdoc />
        public override void Initialize(Transform playerRoot, VusrEventSystem eventSystem)
        {
            _laserInputModule = eventSystem.gameObject.AddComponent <MotionControllerInputModule>();

            DontDestroyOnLoad(OvrAvatarSDKManager.Instance.gameObject);

            OVRCameraRig cameraRig = Instantiate(_ovrCameraRigPrefab, playerRoot);

            _localAvatarPrefab.StartWithControllers = true;
            if (_localAvatarPrefab.SurfaceShader == null)
            {
                _localAvatarPrefab.SurfaceShader = _localAvatarPrefab.SurfaceShaderPBS;
            }
            if (_localAvatarPrefab.SurfaceShaderSelfOccluding == null)
            {
                _localAvatarPrefab.SurfaceShaderSelfOccluding = _localAvatarPrefab.SurfaceShaderPBS;
            }
            _ovrAvatar = Instantiate(_localAvatarPrefab, playerRoot);
            _laserInputModule.LeftController = cameraRig.leftHandAnchor.gameObject.AddComponent <OculusTouchController>();
            _ovrAvatar.ShowControllers(true);
            GameObject leftParent = new GameObject("LeftVisuals");

            leftParent.transform.parent = _ovrAvatar.ControllerLeft.transform.parent;
            _ovrAvatar.ControllerLeft.transform.parent       = leftParent.transform;
            _ovrAvatar.HandLeft.transform.parent             = leftParent.transform;
            _laserInputModule.LeftController.ControllerModel = leftParent;
            (_laserInputModule.LeftController as OculusTouchController).ControllerType = OVRInput.Controller.LTouch;

            _laserInputModule.RightController = cameraRig.rightHandAnchor.gameObject.AddComponent <OculusTouchController>();
            GameObject rightParent = new GameObject("RightVisuals");

            rightParent.transform.parent = _ovrAvatar.ControllerRight.transform.parent;
            _ovrAvatar.ControllerRight.transform.parent       = rightParent.transform;
            _ovrAvatar.HandRight.transform.parent             = rightParent.transform;
            _laserInputModule.RightController.ControllerModel = rightParent;
            (_laserInputModule.RightController as OculusTouchController).ControllerType = OVRInput.Controller.RTouch;
            StartCoroutine(DelayedApply());
        }
Code Example #22
    /// <summary>
    /// Awake this instance.
    /// </summary>
    void Awake()
    {
        // Find camera controller
        OVRCameraRig[] cameraControllers;
        cameraControllers = gameObject.GetComponentsInChildren<OVRCameraRig>();

        if (cameraControllers.Length == 0)
        {
            Debug.LogWarning("OVRMainMenu: No OVRCameraRig attached.");
        }
        else if (cameraControllers.Length > 1)
        {
            Debug.LogWarning("OVRMainMenu: More then 1 OVRCameraRig attached.");
        }
        else
        {
            cameraController = cameraControllers[0];
        }

        // Find player controller
        OVRPlayerController[] playerControllers;
        playerControllers = gameObject.GetComponentsInChildren<OVRPlayerController>();

        if (playerControllers.Length == 0)
        {
            Debug.LogWarning("OVRMainMenu: No OVRPlayerController attached.");
        }
        else if (playerControllers.Length > 1)
        {
            Debug.LogWarning("OVRMainMenu: More then 1 OVRPlayerController attached.");
        }
        else
        {
            playerController = playerControllers[0];
        }

        StartCoroutine(hideGUIOnTime());
    }
Code Example #23
    // Use this for initialization
    void  Awake()
    {
        motor        = GetComponent <CharacterMotor>();
        playerHealth = gameObject.GetComponent <PlayerHealth> ();
#if !UNITY_WEBPLAYER
        HMDPresent = OVRManager.display.isPresent;
#else
        HMDPresent = false;
#endif
        enableMovement         = true;
        defaultMaxForwardSpeed = motor.movement.maxForwardSpeed;
        defaultMaxForwardSpeed = motor.movement.maxBackwardsSpeed; // NOTE: overwrites the value cached on the previous line; a separate backwards-speed default was presumably intended
        hydraLookObjects       = gameObject.GetComponentsInChildren <HydraLook> ();

        // Camera rig
        ovrCameraRig = gameObject.GetComponentInChildren <OVRCameraRig> ();
        ovrManager   = gameObject.GetComponentInChildren <OVRManager> ();

        // Cameras
        ovrCameraLeft  = transform.Find("OVRCameraRig/TrackingSpace/LeftEyeAnchor").gameObject;
        ovrCameraRight = transform.Find("OVRCameraRig/TrackingSpace/RightEyeAnchor").gameObject;
        generalCamera  = transform.Find("OVRCameraRig/MonoEyeAnchor").gameObject;
    }
Code Example #24
File: BoundaryCheck.cs Project: nietongyu/RDW
    // Start is called before the first frame update
    void Start()
    {
        // Detect all the boundaries
        rawBoundary    = OVRManager.boundary;
        boundaryPoints = rawBoundary.GetGeometry(OVRBoundary.BoundaryType.PlayArea);
        boundarySize   = rawBoundary.GetDimensions(OVRBoundary.BoundaryType.PlayArea);
        // Shrink the boundary to something like a 3 m x 3 m space

        // get camera postion and directions
        OVRCameraRig rig = cameraObject.GetComponent <OVRCameraRig>();

        cameraPostion   = rig.trackingSpace.position;
        cameraDirection = rig.trackingSpace.rotation * new Vector3(1.0f, 1.0f, 1.0f);

        // Make the world disappear until the user walks to the way pointer
        SetObjInvisible(world, false);

        // Get RDWTest component
        rdw = cameraObject.GetComponent <RDW>();

        initWayPointers = new List <GameObject>();
        FetchInitWayPointers();
    }
Code Example #25
    protected void Awake()
    {
        // Objects
        objectLogo      = GameObject.Find("Logo");
        objectMarioHead = GameObject.Find("Mario Head");
        objectRift      = GameObject.Find("Rift");

        // Camera rig
        cameraController = GameObject.Find("CameraController").gameObject;
        ovrCameraRig     = cameraController.GetComponentInChildren <OVRCameraRig> ();

        // Cameras
        ovrCameraLeft  = GameObject.Find("OVRCameraRig/TrackingSpace/LeftEyeAnchor").gameObject;
        ovrCameraRight = GameObject.Find("OVRCameraRig/TrackingSpace/RightEyeAnchor").gameObject;
        generalCamera  = GameObject.Find("OVRCameraRig/MonoEyeAnchor").gameObject;

        // Transition to Mario's viewpoint
        startMarkerTransitionCamera = new GameObject();
        endMarkerTransitionCamera   = new GameObject();

        PlayerPrefs.SetString("previousSceneName", null);
        PlayerPrefs.SetString("previousSceneExitAction", null);
    }
Code Example #26
        void InitializeRuntime()
        {
            if (!Application.isPlaying || ovrRig != null)
            {
                return;
            }

            ovrRig = FindObjectOfType <OVRCameraRig>();
            ovrRig.EnsureGameObjectIntegrity();

            foreach (var skeleton in ovrRig.GetComponentsInChildren <OVRSkeleton>())
            {
                if (skeleton.GetSkeletonType() != GetSkeletonTypeFromHandedness(handedness))
                {
                    continue;
                }

                skeletonReference = skeleton;
                break;
            }

            Debug.Log($"Hand Runtime {handedness} initilized");
        }
Code Example #27
    /// <summary>
    /// Awake this instance.
    /// </summary>
    void Awake()
    {
        OVRTouchpad.Update();         // added later
        // Find camera controller
        OVRCameraRig[] cameraControllers;
        cameraControllers = gameObject.GetComponentsInChildren <OVRCameraRig>();

        if (cameraControllers.Length == 0)
        {
            Debug.LogWarning("OVRMainMenu: No OVRCameraRig attached.");
        }
        else if (cameraControllers.Length > 1)
        {
            Debug.LogWarning("OVRMainMenu: More then 1 OVRCameraRig attached.");
        }
        else
        {
            cameraController = cameraControllers[0];
        }

        // Find player controller
        OVRPlayerController[] playerControllers;
        playerControllers = gameObject.GetComponentsInChildren <OVRPlayerController>();

        if (playerControllers.Length == 0)
        {
            Debug.LogWarning("OVRMainMenu: No OVRPlayerController attached.");
        }
        else if (playerControllers.Length > 1)
        {
            Debug.LogWarning("OVRMainMenu: More then 1 OVRPlayerController attached.");
        }
        else
        {
            playerController = playerControllers[0];
        }
    }
Code Example #28
    private Camera FindMainCamera()
    {
        GameObject[]  objects = GameObject.FindGameObjectsWithTag("MainCamera");
        List <Camera> cameras = new List <Camera>(4);

        foreach (GameObject obj in objects)
        {
            Camera camera = obj.GetComponent <Camera>();
            if (camera != null && camera.enabled)
            {
                OVRCameraRig cameraRig = camera.GetComponentInParent <OVRCameraRig>();
                if (cameraRig != null && cameraRig.trackingSpace != null)
                {
                    cameras.Add(camera);
                }
            }
        }
        if (cameras.Count == 0)
        {
            return(Camera.main);                        // pick one of the cameras tagged as "MainCamera"
        }
        else if (cameras.Count == 1)
        {
            return(cameras[0]);
        }
        else
        {
            if (!multipleMainCameraWarningPresented)
            {
                Debug.LogWarning("Multiple MainCamera found. Assume the real MainCamera is the camera with the least depth");
                multipleMainCameraWarningPresented = true;
            }
            // return the camera with least depth
            cameras.Sort((Camera c0, Camera c1) => { return(c0.depth < c1.depth ? -1 : (c0.depth > c1.depth ? 1 : 0)); });
            return(cameras[0]);
        }
    }
Code Example #29
    //TCPImage tcpImage;

    // Use this for initialization
    void Start()
    {
        connectTCP = true;
        //connectUDP = false;
        dataList = new List <string>();

        log("->Start()");

        String Host = "10.255.24.97"; //"10.255.24.140";
        Int32  Port = 5113;

        if (connectTCP)
        {
            tcp.setupSocket(Host, Port);
        }

        //if (connectUDP)
        //{
        //    Debug.Log("Setting UDP");
        //    udp = new UDP();
        //    udp.setupUDP(Port);
        //    Debug.Log("UDP set");
        //}

        //tcpImage = new TCPImage();
        //tcpImage.setupSocket(Host, Port);


        camera_rig        = GameObject.FindObjectOfType <OVRCameraRig>();
        player_controller = GameObject.FindObjectOfType <OVRPlayerController>();
        manager           = GameObject.FindObjectOfType <OVRManager>();

        log("Start()->");

        // haptics
        haptics = new OculusHaptics();
    }
Code Example #30
    private bool HasIntersection()
    {
        MyPlane = new Plane(transform.TransformDirection(Vector3.forward), transform.position);
        // Eyal Maoz
        //Ray ray = Camera.main.ScreenPointToRay(Input.mousePosition);

        OVRCameraRig rig = FindObjectOfType <OVRCameraRig>();
        Ray          ray = new Ray(rig.rightControllerAnchor.transform.position, rig.rightControllerAnchor.transform.forward);
        //
        float rayDistance;

        if (MyPlane.Raycast(ray, out rayDistance))
        {
            Vector3 p = ray.GetPoint(rayDistance);
            CurLocalPos = transform.worldToLocalMatrix.MultiplyPoint(p);
            if (CurLocalPos.magnitude > MainRadius)
            {
                return(false);
            }
            return(true);
        }

        return(false);
    }
Code Example #31
        /// <summary>
        /// Applies tracking data to CameraRig. We use %Oculus native rotation as a base and then smoothly correct it with our tracking data
        /// to avoid glitches that can appear because the %Oculus Asynchronous TimeWarp system uses only the native rotation data provided by the headset.
        /// </summary>
        /// <param name="rig">Pointer to OVRCameraRig</param>
        protected virtual void OnUpdatedAnchors(OVRCameraRig rig)
        {
            if (_rig != rig)
            {
                return;
            }

            Alt.Tracking.State trackingState;
            var trackingActive = GetTrackingState(out trackingState);

            if (trackingActive)
            {
                var oculusRotation = _rig.centerEyeAnchor.localRotation;

                var altEnvSpaceRotation = trackingState.pose.rotation;
                var pa   = altEnvSpaceRotation * Quaternion.Inverse(oculusRotation);
                var diff = Quaternion.Inverse(_rig.trackingSpace.localRotation) * pa;
                diff = Quaternion.Lerp(Quaternion.identity, diff, 1.0f - Fx(Quaternion.Angle(Quaternion.identity, diff), _k));
                _rig.trackingSpace.localRotation = _rig.trackingSpace.localRotation * diff;

                if (_deviceType == OculusDeviceType.Quest || _deviceType == OculusDeviceType.RiftS)
                {
                    var oculusPosition      = _rig.centerEyeAnchor.position;
                    var altEnvSpacePosition = transform.TransformPoint(trackingState.pose.position);

                    var altToOculus = altEnvSpacePosition - oculusPosition;
                    var posDiff     = Vector3.Lerp(Vector3.zero, altToOculus, 1.0f - Fx(altToOculus.magnitude, 0.05f));

                    _rig.trackingSpace.position = _rig.trackingSpace.position + posDiff;
                }
                else
                {
                    _rig.trackingSpace.localPosition = trackingState.pose.position;
                }
            }
        }
Code Example #32
    /// <summary>
    /// Awake this instance.
    /// </summary>
    void Awake()
    {
        // Find camera controller
        OVRCameraRig[] cameraControllers;
        cameraControllers = gameObject.GetComponentsInChildren<OVRCameraRig>();

        if (cameraControllers.Length == 0)
        {
            Debug.LogWarning("OVRMainMenu: No OVRCameraRig attached.");
        }
        else if (cameraControllers.Length > 1)
        {
            Debug.LogWarning("OVRMainMenu: More than 1 OVRCameraRig attached.");
        }
        else
        {
            cameraController = cameraControllers[0];
        }

        // Find player controller
        OVRPlayerController[] playerControllers;
        playerControllers = gameObject.GetComponentsInChildren<OVRPlayerController>();

        if (playerControllers.Length == 0)
        {
            Debug.LogWarning("OVRMainMenu: No OVRPlayerController attached.");
        }
        else if (playerControllers.Length > 1)
        {
            Debug.LogWarning("OVRMainMenu: More than 1 OVRPlayerController attached.");
        }
        else
        {
            playerController = playerControllers[0];
        }
        }
    }
Code Example #33
    void Awake()
    {
        m_anchorOffsetPosition = transform.localPosition;
        m_anchorOffsetRotation = transform.localRotation;

        // If we are being used with an OVRCameraRig, let it drive input updates, which may come from Update or FixedUpdate.

        OVRCameraRig rig = null;

        if (transform.parent != null && transform.parent.parent != null)
        {
            rig = transform.parent.parent.GetComponent <OVRCameraRig>();
        }

        if (rig != null)
        {
            rig.UpdatedAnchors          += (r) => { OnUpdatedAnchors(); };
            operatingWithoutOVRCameraRig = false;
        }

        /**/
        hand = m_handTransform.transform;
        /**/
    }
Code Example #34
File: Look_Teleport.cs Project: nerd3d/CrossRoadVR
    // Use this for initialization
    void Start()
    {
        if (_TargetPrefab == null)
        {
            Debug.LogError(name + ": Missing Teleport Preview Prefab");
            enabled = false;
            return;
        }

        OVRCameraRig[] CameraRigs = gameObject.GetComponentsInChildren <OVRCameraRig>();

        if (CameraRigs.Length == 0)
        {
            Debug.LogWarning("OVRPlayerController: No OVRCameraRig attached.");
        }
        else if (CameraRigs.Length > 1)
        {
            Debug.LogWarning("OVRPlayerController: More then 1 OVRCameraRig attached.");
        }
        else
        {
            _CameraRig = CameraRigs[0];
        }
    }
Code Example #35
    void Awake()
    {
        Controller = gameObject.GetComponent <CharacterController>();

        if (Controller == null)
        {
            Debug.LogWarning("OVRPlayerController: No CharacterController attached.");
        }

        // We use OVRCameraRig to set rotations to cameras,
        // and to be influenced by rotation
        OVRCameraRig[] CameraRigs = gameObject.GetComponentsInChildren <OVRCameraRig>();

        if (CameraRigs.Length == 0)
        {
            Debug.LogWarning("OVRPlayerController: No OVRCameraRig attached.");
        }
        else
        {
            CameraRig = CameraRigs[0];
        }

        InitialYRotation = transform.rotation.eulerAngles.y;
    }
Code Example #36
    void Awake()
    {
        Controller = gameObject.GetComponent<CharacterController>();
        holdRotationValue = RotationAmount;
        if(Controller == null)
            Debug.LogWarning("OVRPlayerController: No CharacterController attached.");

        // We use OVRCameraRig to set rotations to cameras,
        // and to be influenced by rotation
        OVRCameraRig[] CameraControllers;
        CameraControllers = gameObject.GetComponentsInChildren<OVRCameraRig>();

        if(CameraControllers.Length == 0)
            Debug.LogWarning("OVRPlayerController: No OVRCameraRig attached.");
        else if (CameraControllers.Length > 1)
            Debug.LogWarning("OVRPlayerController: More then 1 OVRCameraRig attached.");
        else
            CameraController = CameraControllers[0];

        YRotation = transform.rotation.eulerAngles.y;

        #if UNITY_ANDROID && !UNITY_EDITOR
        OVRManager.display.RecenteredPose += ResetOrientation;
        #endif
    }
Code Example #37
    // Use this for initialization
    void Start()
    {
        Application.targetFrameRate = TargetFrameRate;
        if(OculusCamera==null)	//Try to find OVRCameraRig component
            OculusCamera = GameObject.FindObjectOfType<OVRCameraRig> ();

        if (Configuration == null)
            Configuration = gameObject.AddComponent<TELUBeeConfiguration>();

        if(TargetMaterial!=null)
            Init();

        RobotConnector.AddDependencyNode (this);

        GStreamerCore.Ref ();
    }
Code Example #38
    void Awake()
    {
        networkView = gameObject.GetComponent<NetworkView>();
        bullet = Resources.Load<HomingBullet>(bulletPath);
        Controller = gameObject.GetComponent<CharacterController>();

        //if(Controller == null)
            //Debug.LogWarning("OVRPlayerController: No CharacterController attached.");

        // We use OVRCameraRig to set rotations to cameras,
        // and to be influenced by rotation
        OVRCameraRig[] CameraControllers;
        CameraControllers = gameObject.GetComponentsInChildren<OVRCameraRig>();

        //if(CameraControllers.Length == 0)
            //Debug.LogWarning("OVRPlayerController: No OVRCameraRig attached.");
        //else if (CameraControllers.Length > 1)
            //Debug.LogWarning("OVRPlayerController: More then 1 OVRCameraRig attached.");
        //else
            CameraController = CameraControllers[0];

        DirXform = transform;

        //if(DirXform == null)
            //Debug.LogWarning("OVRPlayerController: ForwardDirection game object not found. Do not use.");
        networkView.RPC("Respawn", RPCMode.All, Network.player);
        #if UNITY_ANDROID && !UNITY_EDITOR
        OVRManager.display.RecenteredPose += ResetOrientation;
        #endif
    }
Code Example #39
    public override void OnInspectorGUI()
    {
        serializedObject.Update();

        EditorGUILayout.PropertyField(defaultPosition, new GUIContent("Default Position (meters)", "Head position before tracking starts"));
        //EditorGUILayout.PropertyField(skeletonManager, new GUIContent("skeletonManager", "Can be None"));

		if(serializedObject.targetObject is RUISTracker)
		{
			trackerScript = (RUISTracker) serializedObject.targetObject;
			if(trackerScript)
				ovrCameraRig = trackerScript.gameObject.GetComponentInChildren<OVRCameraRig>();
			if(ovrCameraRig)
			{
				riftFound = true;
			}
			else
			{
				riftFound = false;
			}
		}
		
		if(!riftFound)
		{
			EditorGUILayout.PropertyField(pickRotationSource, new GUIContent( "Pick Rotation Source", "If disabled, then the Rotation "
																			+ "Tracker is same as Position Tracker"));
		}
		
        EditorGUILayout.Space();
        EditorGUILayout.PropertyField(headPositionInput, new GUIContent("Position Tracker", "Device that tracks the head position"));
		
        EditorGUI.indentLevel += 2;
        switch (headPositionInput.enumValueIndex)
        {
			case (int)RUISTracker.HeadPositionSource.OculusDK2:
				EditorGUILayout.PropertyField(positionOffsetOculus, new GUIContent("Position Offset (meters)", "Adds an position offset to Oculus Rift's "
				                                                                   + "tracked position. This should be zero when using Oculus Rift positional "
				                                                                   + "tracking together with Kinect skeleton tracking."));

				break;
            case (int)RUISTracker.HeadPositionSource.Kinect1:
			case (int)RUISTracker.HeadPositionSource.Kinect2:
				positionPlayerID.intValue = Mathf.Clamp(positionPlayerID.intValue, 0, maxKinectSkeletons - 1);
				if(positionNoiseCovarianceKinect.floatValue < minNoiseCovariance)
					positionNoiseCovarianceKinect.floatValue = minNoiseCovariance;
                EditorGUILayout.PropertyField(positionPlayerID, new GUIContent("Kinect Player Id", "Between 0 and 3"));

                EditorGUILayout.PropertyField(positionJoint, new GUIContent("Joint", "Head is the best joint for tracking head position"));
                EditorGUILayout.PropertyField(positionOffsetKinect, new GUIContent("Position Offset (meters)", "Kinect joint's position in "
                															+ "the tracked object's local coordinate system. Set these values "
																			+ "according to the joint's offset from the tracked object's "
																			+ "origin (head etc.). When using Kinect for head tracking, then zero " 
																			+ "vector is the best choice if head is the position Joint."));
		        EditorGUILayout.PropertyField(filterPositionKinect, new GUIContent("Filter Position", "Enables simple Kalman filtering for position "
																			+ "tracking. Recommended for Kinect."));
				if(filterPositionKinect.boolValue)
			        EditorGUILayout.PropertyField(positionNoiseCovarianceKinect, new GUIContent("Filter Strength", "Noise covariance of Kalman filtering: " 
																			+ "a bigger value means smoother results but a slower "
																			+ "response to changes."));
                break;
            case (int)RUISTracker.HeadPositionSource.PSMove:
				positionPSMoveID.intValue = Mathf.Clamp(positionPSMoveID.intValue, 0, maxPSMoveControllers - 1);
				if(positionNoiseCovariancePSMove.floatValue < minNoiseCovariance)
					positionNoiseCovariancePSMove.floatValue = minNoiseCovariance;
                EditorGUILayout.PropertyField(positionPSMoveID, new GUIContent("PS Move ID", "Between 0 and 3"));
                EditorGUILayout.PropertyField(positionOffsetPSMove, new GUIContent("Position Offset (meters)", "PS Move controller's position in "
                															+ "the tracked object's local coordinate system. Set these values "
																			+ "according to the controller's offset from the tracked object's "
																			+ "origin (head etc.)."));
		        EditorGUILayout.PropertyField(filterPositionPSMove, new GUIContent("Filter Position", "Enables simple Kalman filtering for position "
																			+ "tracking. Best left disabled for PS Move."));
				if(filterPositionPSMove.boolValue)
			        EditorGUILayout.PropertyField(positionNoiseCovariancePSMove, new GUIContent("Filter Strength", "Noise covariance of Kalman filtering: " 
																			+ "a bigger value means smoother results but a slower "
																			+ "response to changes."));
                break;
            case (int)RUISTracker.HeadPositionSource.RazerHydra:
				if(positionNoiseCovarianceHydra.floatValue < minNoiseCovariance)
					positionNoiseCovarianceHydra.floatValue = minNoiseCovariance;
			
		        EditorGUILayout.PropertyField(isRazerBaseMobile, new GUIContent("Moving Base Station", "Enable this if the Razer Hydra base station is "
																			+ "attached to something that is moving (e.g. Kinect tracked player's belt)"));
				
			    EditorGUILayout.PropertyField(positionRazerID, new GUIContent("Razer Hydra ID", "Either LEFT or RIGHT"));
                EditorGUILayout.PropertyField(positionOffsetHydra, new GUIContent("Position Offset (meters)", "Razer Hydra controller's position in "
                															+ "the tracked object's local coordinate system. Set these values "
																			+ "according to the controller's offset from the tracked object's "
																			+ "origin (head etc.)."));
		        EditorGUILayout.PropertyField(filterPositionHydra, new GUIContent("Filter Position", "Enables simple Kalman filtering for position "
																			+ "tracking. Best left disabled for Razer Hydra."));
				if(filterPositionHydra.boolValue)
			        EditorGUILayout.PropertyField(positionNoiseCovarianceHydra, new GUIContent("Filter Strength", "Noise covariance of Kalman filtering: " 
																			+ "a bigger value means smoother results but a slower "
																			+ "response to changes."));
				break;
            case (int)RUISTracker.HeadPositionSource.InputTransform:
				if(positionNoiseCovarianceTransform.floatValue < minNoiseCovariance)
					positionNoiseCovarianceTransform.floatValue = minNoiseCovariance;
                EditorGUILayout.PropertyField(positionInput, new GUIContent("Input Transform", "All other position trackers are supported "
																			+ "through this transform. Drag and drop here a transform "
																			+ "whose position is controlled by a tracking device."));
		        EditorGUILayout.PropertyField(filterPositionTransform, new GUIContent("Filter Position", "Enables simple Kalman filtering for position "
																			+ "tracking."));
				if(filterPositionTransform.boolValue)
			        EditorGUILayout.PropertyField(positionNoiseCovarianceTransform, new GUIContent("Filter Strength", "Noise covariance of Kalman filtering: " 
																			+ "a bigger value means smoother results but a slower "
																			+ "response to changes."));
				break;
        }
		
//		if(headPositionInput.enumValueIndex != (int)RUISTracker.HeadPositionSource.None)
//		{
//	        EditorGUILayout.PropertyField(filterPosition, new GUIContent("Filter Position", "Enables simple Kalman filtering for position "
//																					+ "tracking. Only recommended for Kinect."));
//	        EditorGUILayout.PropertyField(positionNoiseCovariance, new GUIContent("Filter Strength", "Noise covariance of Kalman filtering: " 
//																					+ "a bigger value means smoother results but a slower "
//																					+ "response to changes."));
//		}
				
        EditorGUI.indentLevel -= 2;

		
        EditorGUILayout.Space();
		
		if(riftFound)
		{
			
			EditorGUILayout.LabelField("Rotation Tracker:    Oculus Rift", EditorStyles.boldLabel);
        	EditorGUI.indentLevel += 2;
			
        	EditorGUILayout.PropertyField(oculusID, new GUIContent("Oculus Rift ID", "Choose which Rift is the source of the head tracking. "
																	+ "Leave this to 0 (multiple Rifts are not supported yet)."));
			
			
			EditorStyles.textField.wordWrap = true;
			EditorGUILayout.TextArea(typeof(OVRCameraRig) + " script detected in a child object of this " + trackerScript.gameObject.name
										+ ". Assuming that you want to use rotation from Oculus Rift. Disabling other Rotation Tracker "
										+ "options. You can access other rotation trackers when you remove or disable the child object "
			                         + "that has the " + typeof(OVRCameraRig) + " component.", GUILayout.Height(120));
			
			EditorGUILayout.LabelField( new GUIContent("Reset Orientation Button(s):", "The button(s) that reset Oculus Rift's yaw "
										+ "rotation to zero."), 
										EditorStyles.boldLabel);
        	EditorGUI.indentLevel += 1;
			EditorGUILayout.PropertyField(resetKey, new GUIContent("KeyCode", "The button that resets Oculus Rift's yaw rotation to zero."));
			
			if(externalDriftCorrection.boolValue && compass.enumValueIndex == (int)RUISTracker.CompassSource.RazerHydra)
				EditorGUILayout.LabelField(new GUIContent(("BUMPER+START Razer Hydra " + compassRazerID.enumNames[compassRazerID.enumValueIndex]),
										   "BUMPER and START of the Razer Hydra controller that you use for Yaw Drift Correction."),
										   EditorStyles.label);
			else if(headPositionInput.enumValueIndex == (int)RUISTracker.HeadPositionSource.RazerHydra)
				EditorGUILayout.LabelField(new GUIContent(("BUMPER+START Razer Hydra " + positionRazerID.enumNames[positionRazerID.enumValueIndex]),
										   "BUMPER and START of the Razer Hydra controller that you use for position tracking."),
										   EditorStyles.label);
			if(externalDriftCorrection.boolValue && compass.enumValueIndex == (int)RUISTracker.CompassSource.PSMove)
				EditorGUILayout.LabelField(new GUIContent(("MOVE button on PS Move #" + compassPSMoveID.intValue),
										   "MOVE button of the PS Move controller that you use for Yaw Drift Correction."),
										   EditorStyles.label);
			else if(headPositionInput.enumValueIndex == (int)RUISTracker.HeadPositionSource.PSMove)
				EditorGUILayout.LabelField(new GUIContent(("MOVE button on PS Move #" + positionPSMoveID.intValue),
										   "MOVE button of the PS Move controller that you use for position tracking."),
										   EditorStyles.label);
        	EditorGUI.indentLevel -= 1;
		}
		else
		{
			if(!pickRotationSource.boolValue)
			{
				switch (headPositionInput.enumValueIndex)
        		{	
					case (int)RUISTracker.HeadPositionSource.Kinect1:
					{
						headRotationInput.enumValueIndex = (int)RUISTracker.HeadRotationSource.Kinect1;
						rotationPlayerID.intValue = positionPlayerID.intValue;
						rotationJoint.enumValueIndex = positionJoint.enumValueIndex;
						break;
					}
					case (int)RUISTracker.HeadPositionSource.Kinect2:
					{
						headRotationInput.enumValueIndex = (int)RUISTracker.HeadRotationSource.Kinect2;
						rotationPlayerID.intValue = positionPlayerID.intValue;
						rotationJoint.enumValueIndex = positionJoint.enumValueIndex;
						break;
					}
					case (int)RUISTracker.HeadPositionSource.PSMove:
					{
						headRotationInput.enumValueIndex = (int)RUISTracker.HeadRotationSource.PSMove;
						rotationPSMoveID.intValue = positionPSMoveID.intValue;
						break;
					}
					case (int)RUISTracker.HeadPositionSource.RazerHydra:
					{
						headRotationInput.enumValueIndex = (int)RUISTracker.HeadRotationSource.RazerHydra;
						rotationRazerID.intValue = positionRazerID.intValue;
						break;
					}
					case (int)RUISTracker.HeadPositionSource.InputTransform:
					{
						headRotationInput.enumValueIndex = (int)RUISTracker.HeadRotationSource.InputTransform;
						rotationInput.objectReferenceValue = positionInput.objectReferenceValue;
						break;
					}
					case (int)RUISTracker.HeadPositionSource.None:
					{
						headRotationInput.enumValueIndex = (int)RUISTracker.HeadRotationSource.None;
						break;
					}
				}
			}
			
			EditorGUI.BeginDisabledGroup(!pickRotationSource.boolValue);
        	EditorGUILayout.PropertyField(headRotationInput, new GUIContent("Rotation Tracker", "Device that tracks the head rotation"));
			EditorGUI.EndDisabledGroup();
			
        	EditorGUI.indentLevel += 2;
			
	        switch (headRotationInput.enumValueIndex)
	        {
	            case (int)RUISTracker.HeadRotationSource.Kinect1:
				case (int)RUISTracker.HeadRotationSource.Kinect2:
					rotationPlayerID.intValue = Mathf.Clamp(rotationPlayerID.intValue, 0, maxKinectSkeletons - 1);
					if(rotationNoiseCovarianceKinect.floatValue < minNoiseCovariance)
						rotationNoiseCovarianceKinect.floatValue = minNoiseCovariance;
					EditorGUI.BeginDisabledGroup(!pickRotationSource.boolValue);
	                EditorGUILayout.PropertyField(rotationPlayerID, new GUIContent("Kinect Player ID", "Between 0 and 3"));
	                EditorGUILayout.PropertyField(rotationJoint, new GUIContent("Joint", "ATTENTION: Torso has most stable joint rotation "
																				+ "for head tracking! Currently OpenNI's head joint rotation "
																				+ "is always the same as torso rotation, except its tracking "
																				+ "fails more often."));
					EditorGUI.EndDisabledGroup();
	                EditorGUILayout.PropertyField(rotationOffsetKinect, new GUIContent("Rotation Offset", "Tracked joint's rotation in tracked "
																				+ "object's local coordinate system. If using Kinect for head "
																				+ "tracking, then zero vector is usually the best choice if "
																				+ "torso or head is the Rotation Joint."));
			        EditorGUILayout.PropertyField(filterRotationKinect, new GUIContent("Filter Rotation", "Enables simple Kalman filtering for rotation "
																				+ "tracking. Recommended for Kinect."));
					if(filterRotationKinect.boolValue)
				        EditorGUILayout.PropertyField(rotationNoiseCovarianceKinect, new GUIContent("Filter Strength", "Noise covariance of Kalman filtering: " 
																				+ "a bigger value means smoother results but a slower "
																				+ "response to changes."));
	                break;
	            case (int)RUISTracker.HeadRotationSource.PSMove:
					rotationPSMoveID.intValue = Mathf.Clamp(rotationPSMoveID.intValue, 0, maxPSMoveControllers - 1);
					if(rotationNoiseCovariancePSMove.floatValue < minNoiseCovariance)
						rotationNoiseCovariancePSMove.floatValue = minNoiseCovariance;
					EditorGUI.BeginDisabledGroup(!pickRotationSource.boolValue);
	                EditorGUILayout.PropertyField(rotationPSMoveID, new GUIContent("PS Move ID", "Between 0 and 3"));
					EditorGUI.EndDisabledGroup();
	                EditorGUILayout.PropertyField(rotationOffsetPSMove, new GUIContent("Rotation Offset", "Tracked PS Move controller's "
																				+ "rotation in tracked object's local coordinate system. "
																				+ "Set these euler angles according to the orientation in "
																				+ "which Move is attached to the tracked object (head etc.)."));
			        EditorGUILayout.PropertyField(filterRotationPSMove, new GUIContent("Filter Rotation", "Enables simple Kalman filtering for rotation "
																				+ "tracking. Best left disabled for PS Move."));
					if(filterRotationPSMove.boolValue)
				        EditorGUILayout.PropertyField(rotationNoiseCovariancePSMove, new GUIContent("Filter Strength", "Noise covariance of Kalman filtering: " 
																				+ "a bigger value means smoother results but a slower "
																				+ "response to changes."));
	                break;
	            case (int)RUISTracker.HeadRotationSource.RazerHydra:
					if(rotationNoiseCovarianceHydra.floatValue < minNoiseCovariance)
						rotationNoiseCovarianceHydra.floatValue = minNoiseCovariance;
					
					EditorGUI.BeginDisabledGroup(!pickRotationSource.boolValue);
			        EditorGUILayout.PropertyField(isRazerBaseMobile, new GUIContent("Moving Base Station", "Enable this if the Razer Hydra base station is "
																				+ "attached to something that is moving (e.g. Kinect tracked player's "
																				+ "belt)"));
	                EditorGUILayout.PropertyField(rotationRazerID, new GUIContent("Razer Hydra ID", "Either LEFT or RIGHT"));
					EditorGUI.EndDisabledGroup();
	                EditorGUILayout.PropertyField(rotationOffsetHydra, new GUIContent("Rotation Offset", "Tracked Razer Hydra controller's "
																				+ "rotation in tracked object's local coordinate system. "
																				+ "Set these euler angles according to the orientation in which "
																				+ "the Razer Hydra is attached to the tracked object (head etc.)."));
			        EditorGUILayout.PropertyField(filterRotationHydra, new GUIContent("Filter Rotation", "Enables simple Kalman filtering for rotation "
																				+ "tracking. Best left disabled for Razer Hydra."));
					if(filterRotationHydra.boolValue)
				        EditorGUILayout.PropertyField(rotationNoiseCovarianceHydra, new GUIContent("Filter Strength", "Noise covariance of Kalman filtering: " 
																				+ "a bigger value means smoother results but a slower "
																				+ "response to changes."));
					break;
	            case (int)RUISTracker.HeadRotationSource.InputTransform:
					if(rotationNoiseCovarianceTransform.floatValue < minNoiseCovariance)
						rotationNoiseCovarianceTransform.floatValue = minNoiseCovariance;
					EditorGUI.BeginDisabledGroup(!pickRotationSource.boolValue);
	                EditorGUILayout.PropertyField(rotationInput, new GUIContent("Input Transform", "All other rotation trackers are supported "
																				+ "through this transform. Drag and drop here a transform "
																				+ "whose rotation is controlled by a tracking device."));
					EditorGUI.EndDisabledGroup();
			        EditorGUILayout.PropertyField(filterRotationTransform, new GUIContent("Filter Rotation", "Enables simple Kalman filtering "
																				+ "for rotation tracking."));
					if(filterRotationTransform.boolValue)
				        EditorGUILayout.PropertyField(rotationNoiseCovarianceTransform, new GUIContent("Filter Strength", "Noise covariance of Kalman " 
																				+ "filtering: a bigger value means smoother results but a slower "
																				+ "response to changes."));
					break;
	        }
			
//			if(headRotationInput.enumValueIndex != (int)RUISTracker.HeadRotationSource.None)
//			{
//		        EditorGUILayout.PropertyField(filterRotation, new GUIContent("Filter Rotation", "Enables simple Kalman filtering for rotation "
//																			+ "tracking. Only recommended for Kinect."));
//		        EditorGUILayout.PropertyField(rotationNoiseCovariance, new GUIContent("Filter Strength", "Noise covariance of Kalman filtering: " 
//																			+ "a bigger value means smoother results but a slower "
//																			+ "response to changes."));
//			}
		}
		
        EditorGUI.indentLevel -= 2;
				
        EditorGUILayout.Space();

		
		if(!riftFound && headRotationInput.enumValueIndex != (int)RUISTracker.HeadRotationSource.InputTransform)
		{
			EditorGUI.BeginDisabledGroup(true);
	        EditorGUILayout.PropertyField(externalDriftCorrection, new GUIContent("Yaw Drift Correction", "Enables external yaw drift correction "
																		+ "using Kinect, PS Move, or some other device"));
			if(	  headRotationInput.enumValueIndex == (int)RUISTracker.HeadRotationSource.Kinect1
			   || headRotationInput.enumValueIndex == (int)RUISTracker.HeadRotationSource.Kinect2)
				EditorGUILayout.LabelField("Kinect joints don't need drift correction");
			if(headRotationInput.enumValueIndex == (int)RUISTracker.HeadRotationSource.PSMove)
				EditorGUILayout.LabelField("PS Move doesn't need drift correction");
			if(headRotationInput.enumValueIndex == (int)RUISTracker.HeadRotationSource.RazerHydra)
				EditorGUILayout.LabelField("Razer Hydra doesn't need drift correction");
			if(headRotationInput.enumValueIndex == (int)RUISTracker.HeadRotationSource.None)
				EditorGUILayout.LabelField("No Rotation Tracker: Drift correction disabled");
			EditorGUI.EndDisabledGroup();
		}
		else
		{
	        EditorGUILayout.PropertyField(externalDriftCorrection, new GUIContent("Yaw Drift Correction", "Enables external yaw drift correction "
																				+ "using Kinect, PS Move, or some other device"));
			if(externalDriftCorrection.boolValue)
			{
			
		        EditorGUI.indentLevel += 2;
				
				EditorGUILayout.PropertyField(compassIsPositionTracker, new GUIContent("Use Position Tracker", "If enabled, rotation from the "
																					+ "above Position Tracker will act as a compass that "
																					+ "corrects yaw drift of the Rotation Tracker"));
				
				if(compassIsPositionTracker.boolValue)
				{
					switch(headPositionInput.enumValueIndex)
					{
						case (int)RUISTracker.HeadPositionSource.Kinect1:
						{
							compass.enumValueIndex = (int)RUISTracker.CompassSource.Kinect1;
							compassPlayerID.intValue = positionPlayerID.intValue;
							compassJoint.enumValueIndex = positionJoint.enumValueIndex;
							break;
						}
						case (int)RUISTracker.HeadPositionSource.Kinect2:
						{
							compass.enumValueIndex = (int)RUISTracker.CompassSource.Kinect2;
							compassPlayerID.intValue = positionPlayerID.intValue;
							compassJoint.enumValueIndex = positionJoint.enumValueIndex;
							break;
						}
						case (int)RUISTracker.HeadPositionSource.PSMove:
						{
							compass.enumValueIndex = (int)RUISTracker.CompassSource.PSMove;
							compassPSMoveID.intValue = positionPSMoveID.intValue;
							break;
						}
						case (int)RUISTracker.HeadPositionSource.RazerHydra:
						{
							compass.enumValueIndex = (int)RUISTracker.CompassSource.RazerHydra;
							compassRazerID.enumValueIndex = positionRazerID.enumValueIndex;
							break;
						}
						case (int)RUISTracker.HeadPositionSource.InputTransform:
						{
							compass.enumValueIndex = (int)RUISTracker.CompassSource.InputTransform;
							compassTransform.objectReferenceValue = positionInput.objectReferenceValue;
							break;
						}
						case (int)RUISTracker.HeadPositionSource.None:
						{
							compass.enumValueIndex = (int)RUISTracker.CompassSource.None;
							break;
						}
					}
				}
				
				
				EditorGUI.BeginDisabledGroup(compassIsPositionTracker.boolValue);
		        EditorGUILayout.PropertyField(compass, new GUIContent("Compass Tracker", "Tracker that will be used to correct the yaw drift of "
																	+ "Rotation Tracker"));
				EditorGUI.EndDisabledGroup();
				
				if(compassIsPositionTracker.boolValue && headPositionInput.enumValueIndex == (int)RUISTracker.HeadPositionSource.None)
					EditorGUILayout.LabelField("Position Tracker is set to None!");
				else
			        switch (compass.enumValueIndex)
			        {
						case (int)RUISTracker.CompassSource.Kinect1:
						case (int)RUISTracker.CompassSource.Kinect2:
							compassPlayerID.intValue = Mathf.Clamp(compassPlayerID.intValue, 0, maxKinectSkeletons - 1);
							driftCorrectionRateKinect.floatValue = Mathf.Clamp(driftCorrectionRateKinect.floatValue, minDriftCorrectionRate, 
																													 maxDriftCorrectionRate );
							EditorGUI.BeginDisabledGroup(compassIsPositionTracker.boolValue);
			                EditorGUILayout.PropertyField(compassPlayerID, new GUIContent("Kinect Player ID", "Between 0 and 3"));
			                EditorGUILayout.PropertyField(compassJoint, new GUIContent("Compass Joint", "ATTENTION: Torso has the most stable "
																				+ "joint rotation for drift correction! Currently OpenNI's "
																				+ "head joint rotation is always the same as the torso rotation, "
																				+ "except that its tracking fails more often."));
							EditorGUI.EndDisabledGroup();
			                EditorGUILayout.PropertyField(compassRotationOffsetKinect, new GUIContent("Compass Rotation Offset", "Kinect joint's "
																				+ "rotation in tracked object's local coordinate system. If using "
																				+ "Kinect for head tracking yaw drift correction, then zero vector is "
																				+ "usually the best choice if torso or head is the Compass Joint. "
																				+ "IT IS IMPORTANT THAT THIS PARAMETER IS CORRECT. "
																				+ "Use 'Optional visualizers' to help find the right values."));
			                EditorGUILayout.PropertyField(correctOnlyWhenFacingForward, new GUIContent("Only Forward Corrections", "Allows drift "
																				+ "correction to occur only when the player is detected as "
																				+ "standing towards Kinect (+-90 degrees). This is useful when "
																				+ "you know that the players will be mostly facing towards Kinect "
																				+ "and you want to improve drift correction by ignoring OpenNI's "
																				+ "tracking errors where the player is detected falsely "
																				+ "standing backwards (happens often)."));
					        EditorGUILayout.PropertyField(driftCorrectionRateKinect, new GUIContent("Correction Rate", "Positive values only. How fast "
																				+ "the drifting rotation is shifted towards the compass' "
																				+ "rotation. The Kinect-tracked skeleton is quite inaccurate as a " 
																				+ "compass, so the default of 0.08 is a good starting point; "
																				+ "adjust it to suit your needs."));
			                break;
			            case (int)RUISTracker.CompassSource.PSMove:
							compassPSMoveID.intValue = Mathf.Clamp(compassPSMoveID.intValue, 0, maxPSMoveControllers - 1);
							driftCorrectionRatePSMove.floatValue = Mathf.Clamp(driftCorrectionRatePSMove.floatValue, minDriftCorrectionRate, 
																													 maxDriftCorrectionRate );
							EditorGUI.BeginDisabledGroup(compassIsPositionTracker.boolValue);
		                	EditorGUILayout.PropertyField(compassPSMoveID, new GUIContent("PS Move ID", "Between 0 and 3"));
							EditorGUI.EndDisabledGroup();
			                EditorGUILayout.PropertyField(compassRotationOffsetPSMove, new GUIContent("Compass Rotation Offset", "Tracked PS Move "
																				+ "controller's rotation in tracked object's local coordinate "
																				+ "system. Set these euler angles according to the orientation "
																				+ "in which Move Compass is attached to your tracked object "
																				+ "(head etc.). IT IS IMPORTANT THAT THIS PARAMETER IS CORRECT. "
																				+ "Use 'Optional visualizers' to help find the right values."));
					        EditorGUILayout.PropertyField(driftCorrectionRatePSMove, new GUIContent("Correction Rate", "Positive values only. How fast "
																				+ "the drifting rotation is shifted towards the compass' "
																				+ "rotation. Default of 0.1 is good."));
			                break;
			            case (int)RUISTracker.CompassSource.RazerHydra:
							driftCorrectionRateHydra.floatValue = Mathf.Clamp(driftCorrectionRateHydra.floatValue,  minDriftCorrectionRate, 
																													maxDriftCorrectionRate );

							EditorGUI.BeginDisabledGroup(compassIsPositionTracker.boolValue);
					        EditorGUILayout.PropertyField(isRazerBaseMobile, new GUIContent("Moving Base Station", "Enable this if the Razer Hydra "
																				+ "base station is attached to something that is "
																				+ "moving (e.g. Kinect tracked player's belt)"));
			
			                EditorGUILayout.PropertyField(compassRazerID, new GUIContent("Razer Hydra ID", "Either LEFT or RIGHT"));
							EditorGUI.EndDisabledGroup();
			                EditorGUILayout.PropertyField(compassRotationOffsetHydra, new GUIContent("Compass Rotation Offset", "Tracked Razer Hydra "
																				+ "controller's rotation in tracked object's local "
																				+ "coordinate system. Set these euler angles according "
																				+ "to the orientation in which the Razer Hydra is "
																				+ "attached to your tracked object (head etc.). "
																				+ "IT IS IMPORTANT THAT THIS PARAMETER IS CORRECT. "
																				+ "Use 'Optional visualizers' to help find the right values."));
					        EditorGUILayout.PropertyField(driftCorrectionRateHydra, new GUIContent("Correction Rate", "Positive values only. How fast "
																				+ "the drifting rotation is shifted towards the compass' "
																				+ "rotation. Default of 0.1 is good."));
			                break;
			            case (int)RUISTracker.CompassSource.InputTransform:
							driftCorrectionRateTransform.floatValue = Mathf.Clamp(driftCorrectionRateTransform.floatValue,  minDriftCorrectionRate, 
																															maxDriftCorrectionRate );
						
							EditorGUI.BeginDisabledGroup(compassIsPositionTracker.boolValue);
			                EditorGUILayout.PropertyField(compassTransform, new GUIContent("Input Transform", "Drift correction via all other "
																				+ "trackers is supported through this transform. Drag "
																				+ "and drop here a transform whose rotation cannot drift."));
							EditorGUI.EndDisabledGroup();
					        EditorGUILayout.PropertyField(driftCorrectionRateTransform, new GUIContent("Correction Rate", "Positive values only. "
																				+ "How fast the drifting rotation is shifted towards the "
																				+ "compass' rotation."));
							break;
			        }
				
	       		EditorGUILayout.Space();
				EditorGUILayout.LabelField("Optional visualizers:");
		        EditorGUILayout.PropertyField(enableVisualizers, new GUIContent("Enable Visualizers", "The visualizers below are optional and meant to "
																	+ "illustrate the performance of the drift correction."));
				if(enableVisualizers.boolValue)
				{
					
		        	EditorGUI.indentLevel += 1;
			        EditorGUILayout.PropertyField(driftingDirectionVisualizer, new GUIContent("Drifter Rotation Visualizer", "Drag and drop a Game "
																		+ "Object here to visualize rotation from Rotation Tracker"));
			        EditorGUILayout.PropertyField(compassDirectionVisualizer, new GUIContent("Compass Yaw Visualizer", "Drag and drop a Game Object "
																		+ "here to visualize yaw rotation from Compass Tracker"));
			        EditorGUILayout.PropertyField(correctedDirectionVisualizer, new GUIContent("Corrected Rotation Visualizer", "Drag and drop a Game "
																		+ "Object here to visualize the final, corrected rotation"));
			        EditorGUILayout.PropertyField(driftVisualizerPosition, new GUIContent("Visualizer Position", "Drag and drop a Transform here "
																		+ "that defines the position where the above three "
																		+ "visualizers will appear"));
	        		EditorGUI.indentLevel -= 1;
				}
				
	        	EditorGUI.indentLevel -= 2;
			}
		
		}
		
		if(isRazerBaseMobile.boolValue)
		{
			if(headPositionInput.enumValueIndex == (int)RUISTracker.HeadPositionSource.RazerHydra)
				movingBaseAnnouncement = "Razer Hydra base station set as moving in Position Tracker";
			else if(headRotationInput.enumValueIndex == (int)RUISTracker.HeadRotationSource.RazerHydra && !riftFound)
				movingBaseAnnouncement = "Razer Hydra base station set as moving in Rotation Tracker";
			else if(	compass.enumValueIndex == (int)RUISTracker.CompassSource.RazerHydra && externalDriftCorrection.boolValue
					&&	(riftFound || headRotationInput.enumValueIndex == (int)RUISTracker.HeadRotationSource.InputTransform)	)
				movingBaseAnnouncement = "Razer Hydra base station set as moving in Yaw Drift Correction";
			else
			{
				movingBaseAnnouncement = "";
				isRazerBaseMobile.boolValue = false; // When all the ifs fail, we can set isRazerBaseMobile to false
			}
		}
		
		if(isRazerBaseMobile.boolValue)
		{
			
    		EditorGUILayout.PropertyField(mobileRazerBase, new GUIContent("Razer Base Tracker", "The tracker to which the Razer Hydra "
																		+ "base station is attached"));
			EditorGUI.indentLevel += 2;
			
			if(movingBaseAnnouncement.Length > 0)
			{
				EditorStyles.textField.wordWrap = true;
				EditorGUILayout.TextArea(movingBaseAnnouncement, GUILayout.Height(30));
			}
			switch(mobileRazerBase.enumValueIndex)
			{
				case (int) RUISTracker.RazerHydraBase.Kinect1:
					hydraBaseKinectPlayerID.intValue = Mathf.Clamp(hydraBaseKinectPlayerID.intValue, 0, maxKinectSkeletons - 1);
					if(hydraBasePositionCovarianceKinect.floatValue < minNoiseCovariance)
						hydraBasePositionCovarianceKinect.floatValue = minNoiseCovariance;
					if(hydraBaseRotationCovarianceKinect.floatValue < minNoiseCovariance)
						hydraBaseRotationCovarianceKinect.floatValue = minNoiseCovariance;
					EditorGUILayout.PropertyField(hydraBaseKinectPlayerID, new GUIContent("Kinect Player ID", "Between 0 and 3"));
					EditorGUILayout.PropertyField(hydraBaseJoint, new GUIContent("Joint", "Kinect joint to which the Razer Hydra base station "
																		+ "is attached"));
			        EditorGUILayout.PropertyField(hydraBasePositionOffsetKinect, new GUIContent("Base Position Offset (meters)", "Razer Hydra "
            															+ "base station's position in the tracked joint's local coordinate "
																		+ "system. Set these values according to the base station's position "
																		+ "offset from the tracked joint's origin."));
			        EditorGUILayout.PropertyField(hydraBaseRotationOffsetKinect, new GUIContent("Base Rotation Offset", "Razer Hydra "
																		+ "base station's rotation in the tracked joint's local coordinate "
																		+ "system. Set these euler angles according to the orientation in which "
																		+ "Razer Hydra base station is attached to the tracked joint. "
																		+ "IT IS IMPORTANT THAT THIS PARAMETER IS CORRECT."));
					EditorGUILayout.PropertyField(inferBaseRotationFromRotationTrackerKinect, new GUIContent("Use Rotation Tracker", "If the "
																		+ "above Position Tracker or Compass Razer Hydra is attached to the "
																		+ "Rotation Tracker (e.g. Oculus Rift), then use them together to "
																		+ "calculate the base station's rotation. Recommended for "
																		+ "Kinect."));
					if(inferBaseRotationFromRotationTrackerKinect.boolValue)
					{
						EditorGUI.indentLevel += 1;
						EditorGUILayout.PropertyField(hydraAtRotationTrackerOffset, new GUIContent("Razer Hydra Rotation Offset", "Tracked "
																		+ "Razer Hydra controller's rotation in tracked object's local coordinate "
																		+ "system. Set these euler angles according to the orientation in which "
																		+ "the Razer Hydra is attached to the tracked object (head etc.). "
																		+ "IT IS IMPORTANT THAT THIS PARAMETER IS CORRECT."));
						EditorGUI.indentLevel -= 1;
					
					}
					EditorGUILayout.PropertyField(filterHydraBasePoseKinect, new GUIContent("Filter Tracking", "Enables simple "
																		+ "Kalman filtering for position and rotation tracking of the Razer "
																		+ "Hydra base station. Recommended for Kinect."));
					if(filterHydraBasePoseKinect.boolValue)
					{
						EditorGUILayout.PropertyField(hydraBasePositionCovarianceKinect, new GUIContent("Filter Position Strength", "Position " 
																		+ "noise covariance of Kalman filtering: a bigger value means "
																		+ "smoother results but a slower response to changes."));
						EditorGUILayout.PropertyField(hydraBaseRotationCovarianceKinect, new GUIContent("Filter Rotation Strength", "Rotation " 
																		+ "noise covariance of Kalman filtering: a bigger value means "
																		+ "smoother results but a slower response to changes."));
					}
				break;
			
				case (int) RUISTracker.RazerHydraBase.InputTransform:
					if(hydraBasePositionCovarianceTransform.floatValue < minNoiseCovariance)
						hydraBasePositionCovarianceTransform.floatValue = minNoiseCovariance;
					if(hydraBaseRotationCovarianceTransform.floatValue < minNoiseCovariance)
						hydraBaseRotationCovarianceTransform.floatValue = minNoiseCovariance;
					EditorGUILayout.PropertyField(hydraBaseInput, new GUIContent("Input Transform", "All other trackers are supported "
																		+ "through this transform. Drag and drop here a transform "
																		+ "whose position and rotation is controlled by a tracking device."));
				
					EditorGUILayout.PropertyField(inferBaseRotationFromRotationTrackerTransform, new GUIContent("Use Rotation Tracker", "If the "
																		+ "above Position Tracker or Compass Razer Hydra is attached to the "
																		+ "Rotation Tracker (e.g. Oculus Rift), then use them together to "
																		+ "calculate the base station's rotation."));
					if(inferBaseRotationFromRotationTrackerTransform.boolValue)
					{
						EditorGUI.indentLevel += 1;
						EditorGUILayout.PropertyField(hydraAtRotationTrackerOffset, new GUIContent("Razer Hydra Rotation Offset", "Tracked "
																		+ "Razer Hydra controller's rotation in tracked object's local coordinate "
																		+ "system. Set these euler angles according to the orientation in which "
																		+ "the Razer Hydra is attached to the tracked object (head etc.). "
																		+ "IT IS IMPORTANT THAT THIS PARAMETER IS CORRECT."));
						EditorGUI.indentLevel -= 1;
					
					}
					EditorGUILayout.PropertyField(filterHydraBasePoseTransform, new GUIContent("Filter Tracking", "Enables simple "
																		+ "Kalman filtering for position and rotation tracking of the Razer "
																		+ "Hydra base station."));
					if(filterHydraBasePoseTransform.boolValue)
					{
						EditorGUILayout.PropertyField(hydraBasePositionCovarianceTransform, new GUIContent("Filter Position Strength", "Position " 
																		+ "noise covariance of Kalman filtering: a bigger value means "
																		+ "smoother results but a slower response to changes."));
						EditorGUILayout.PropertyField(hydraBaseRotationCovarianceTransform, new GUIContent("Filter Rotation Strength", "Rotation " 
																		+ "noise covariance of Kalman filtering: a bigger value means "
																		+ "smoother results but a slower response to changes."));
					}
				break;
			}
			EditorGUI.indentLevel -= 2;
			
		}
		
		
        serializedObject.ApplyModifiedProperties();
    }
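The inspector fields above expose Kalman "Filter Strength" values that are really noise covariances: a larger covariance trusts new measurements less, which smooths the output but makes it lag behind changes. The sketch below is not the RUIS KalmanFilter class used by the tracker; it is a minimal, self-contained scalar filter written only to illustrate that trade-off, and all names in it are made up for the example.

    // Minimal scalar Kalman filter sketch. measurementNoise plays the same role as the
    // "Filter Strength" covariance above: larger values give smoother but slower estimates.
    public class ScalarKalmanSketch
    {
        private float estimate;                   // current state estimate
        private float errorCovariance = 1f;       // estimate uncertainty P
        private readonly float processNoise;      // Q: how fast the true value is expected to change
        private readonly float measurementNoise;  // R: the "Filter Strength" analogue

        public ScalarKalmanSketch(float initialValue, float processNoise = 0.001f, float measurementNoise = 0.5f)
        {
            estimate = initialValue;
            this.processNoise = processNoise;
            this.measurementNoise = measurementNoise;
        }

        public float Update(float measurement)
        {
            // Predict step: the model assumes a constant value, so only the uncertainty grows.
            errorCovariance += processNoise;

            // Correct step: blend prediction and measurement by the Kalman gain.
            float gain = errorCovariance / (errorCovariance + measurementNoise);
            estimate += gain * (measurement - estimate);
            errorCovariance *= (1f - gain);
            return estimate;
        }
    }

Feeding this once per frame with a noisy yaw reading and raising measurementNoise visibly smooths the curve at the cost of latency, which is exactly the behaviour the tooltips describe.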
Code example #40
0
    /// <summary>
    /// Awake this instance.
    /// </summary>
    void Awake()
    {
        // Find camera controller
        OVRCameraRig[] CameraControllers;
        CameraControllers = gameObject.GetComponentsInChildren<OVRCameraRig>();

        if(CameraControllers.Length == 0)
            Debug.LogWarning("OVRMainMenu: No OVRCameraRig attached.");
        else if (CameraControllers.Length > 1)
            Debug.LogWarning("OVRMainMenu: More than 1 OVRCameraRig attached.");
        else{
            CameraController = CameraControllers[0];
        #if USE_NEW_GUI
            OVRUGUI.CameraController = CameraController;
        #endif
        }

        // Find player controller
        OVRPlayerController[] PlayerControllers;
        PlayerControllers = gameObject.GetComponentsInChildren<OVRPlayerController>();

        if(PlayerControllers.Length == 0)
            Debug.LogWarning("OVRMainMenu: No OVRPlayerController attached.");
        else if (PlayerControllers.Length > 1)
            Debug.LogWarning("OVRMainMenu: More than 1 OVRPlayerController attached.");
        else{
            PlayerController = PlayerControllers[0];
        #if USE_NEW_GUI
            OVRUGUI.PlayerController = PlayerController;
        #endif
        }

        #if USE_NEW_GUI
            // Create canvas for using new GUI
            NewGUIObject = new GameObject();
            NewGUIObject.name = "OVRGUIMain";
            NewGUIObject.transform.parent = GameObject.Find("LeftEyeAnchor").transform;
            RectTransform r = NewGUIObject.AddComponent<RectTransform>();
            r.sizeDelta = new Vector2(100f, 100f);
            r.localScale = new Vector3(0.001f, 0.001f, 0.001f);
            r.localPosition = new Vector3(0.01f, 0.17f, 0.53f);
            r.localEulerAngles = Vector3.zero;

            Canvas c = NewGUIObject.AddComponent<Canvas>();
            c.renderMode = RenderMode.WorldSpace; // world-space canvas (shipping enum name is WorldSpace; early 4.6 betas called it World)
            c.pixelPerfect = false;
        #endif
    }
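The Awake above only creates an empty world-space canvas under LeftEyeAnchor. As a hedged follow-up, a helper like the one below could put a simple label on that canvas; NewGUIObject is the field from the snippet, while AddDebugLabel and everything inside it are hypothetical and rely on the standard UnityEngine.UI Text component (the built-in Arial.ttf font exists in the Unity versions this code targets, but not in the newest releases).

    #if USE_NEW_GUI
        // Hypothetical helper, not part of OVRMainMenu: attaches a UnityEngine.UI.Text label
        // to the world-space canvas created in Awake above.
        void AddDebugLabel(string message)
        {
            GameObject labelObject = new GameObject("DebugLabel");
            labelObject.transform.SetParent(NewGUIObject.transform, false);

            UnityEngine.UI.Text label = labelObject.AddComponent<UnityEngine.UI.Text>();
            label.text = message;
            label.font = Resources.GetBuiltinResource<Font>("Arial.ttf"); // built-in font in classic Unity versions
            label.alignment = TextAnchor.MiddleCenter;

            RectTransform rect = labelObject.GetComponent<RectTransform>();
            rect.sizeDelta = new Vector2(100f, 20f);
        }
    #endif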
Code example #41
0
	/// <summary>
	/// Invoked by OVRCameraRig's UpdatedAnchors callback. Allows the Hmd rotation to update the facing direction of the player.
	/// </summary>
	public void UpdateTransform(OVRCameraRig rig)
	{
		Transform root = CameraRig.trackingSpace;
		Transform centerEye = CameraRig.centerEyeAnchor;

		if (HmdRotatesY)
		{
			Vector3 prevPos = root.position;
			Quaternion prevRot = root.rotation;

			transform.rotation = Quaternion.Euler(0.0f, centerEye.rotation.eulerAngles.y, 0.0f);

			root.position = prevPos;
			root.rotation = prevRot;
		}
	}
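For context, UpdateTransform is not called by Unity directly; in the Oculus Utilities pattern these snippets follow, the player controller subscribes it to the camera rig's UpdatedAnchors callback. Below is a minimal sketch of that wiring, assuming the CameraRig field from the snippet and the System.Action<OVRCameraRig> UpdatedAnchors event exposed by OVRCameraRig in recent SDK versions; verify the member names against the SDK in your project.

	// Sketch of the typical wiring, assuming CameraRig exposes the UpdatedAnchors event.
	void Start()
	{
		if (CameraRig != null)
			CameraRig.UpdatedAnchors += UpdateTransform; // invoked after the rig updates its anchors each frame
	}

	void OnDestroy()
	{
		if (CameraRig != null)
			CameraRig.UpdatedAnchors -= UpdateTransform;
	}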
Code example #42
0
	private void Awake()
	{
		m_cameraRig = GetComponentInChildren<OVRCameraRig>();
		m_positionProvider = GetComponent<PositionProvider>();
	}
Code example #43
0
	/// <summary>
	/// Sets the OVR camera controller.
	/// </summary>
	/// <param name="cameraController">Camera controller.</param>
	public void SetOVRCameraController(ref OVRCameraRig cameraController)
	{
		CameraController = cameraController;
		UIAnchor = CameraController.centerEyeAnchor;
	}
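Because the setter above takes the rig by ref, a caller has to hold the rig in a local variable or field first. A hypothetical caller sketch (the guiHelper field and its owning type are made up; only SetOVRCameraController comes from the snippet):

	// Hypothetical caller; 'guiHelper' is whatever component exposes SetOVRCameraController.
	void WireUpGui()
	{
		OVRCameraRig rig = FindObjectOfType<OVRCameraRig>();
		if (rig != null)
			guiHelper.SetOVRCameraController(ref rig);
	}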
Code example #44
0
File: Move_Pong.cs Project: MrIThompson/Dino
	void Start(){
		gm = GameObject.Find ("GM").GetComponent<GM_Pong> ();
		ovrRig = GetComponent<OVRCameraRig> ();
	}
Code example #45
0
    /// <summary>
    /// Invoked by OVRCameraRig's UpdatedAnchors callback. Allows the Hmd rotation to update the facing direction of the player.
    /// </summary>
    public void UpdateTransform(OVRCameraRig rig)
    {
        Transform root = CameraRig.trackingSpace;
        Transform centerEye = CameraRig.centerEyeAnchor;

        Vector3 euler = transform.rotation.eulerAngles;

        float rotateInfluence = SimulationRate * Time.deltaTime * RotationAmount * RotationScaleMultiplier;

        euler.y +=  rotateInfluence;

        if (HmdRotatesY)
        {
            Vector3 prevPos = root.position;
            Quaternion prevRot = root.rotation;

            print("X: " + centerEye.rotation.eulerAngles.x + "  Y: " + centerEye.rotation.eulerAngles.y + "  Z: " + centerEye.rotation.eulerAngles.z);

            // NOTE: the yaw-follow logic below is currently disabled; both branches contain only commented-out rotation updates.
            if ((centerEye.rotation.eulerAngles.y > 20.0f && centerEye.rotation.eulerAngles.y < 160.0f))
            {
                //transform.rotation = Quaternion.Euler(euler);
                //transform.rotation = Quaternion.Euler(0.0f, 0.05f, 0.0f);
            }
            else
            {

                //transform.rotation = Quaternion.Euler(0.0f, centerEye.rotation.eulerAngles.y, 0.0f);
            }

            root.position = prevPos;
            root.rotation = prevRot;
        }

        //if (HmdRotatesY)
        //{
        //	Vector3 prevPos = root.position;
        //	Quaternion prevRot = root.rotation;

        //	transform.rotation = Quaternion.Euler(0.0f, centerEye.rotation.eulerAngles.y, 0.0f);

        //	root.position = prevPos;
        //	root.rotation = prevRot;
        //}
    }
Code example #46
0
File: MovementFlight.cs Project: MrIThompson/Dino
	// Use this for initialization
	void Start () {
		ovrRig = GetComponent<OVRCameraRig> ();
		
	}
Code example #47
0
	void Awake()
	{
		Controller = gameObject.GetComponent<CharacterController>();

		if(Controller == null)
			Debug.LogWarning("OVRPlayerController: No CharacterController attached.");

		// We use OVRCameraRig to set rotations to cameras,
		// and to be influenced by rotation
		OVRCameraRig[] CameraRigs = gameObject.GetComponentsInChildren<OVRCameraRig>();

		if(CameraRigs.Length == 0)
			Debug.LogWarning("OVRPlayerController: No OVRCameraRig attached.");
		else if (CameraRigs.Length > 1)
			Debug.LogWarning("OVRPlayerController: More than 1 OVRCameraRig attached.");
		else
			CameraRig = CameraRigs[0];

		InitialYRotation = transform.rotation.eulerAngles.y;
	}
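InitialYRotation captured at the end of Awake is typically consumed later when the HMD pose is recentered. A sketch of such a reset, modeled on what OVRPlayerController's ResetOrientation does in recent Oculus Utilities (verify the exact member names against the SDK version in your project):

	// Sketch: restore the player's original yaw when the HMD is recentered.
	public void ResetOrientation()
	{
		Vector3 euler = transform.rotation.eulerAngles;
		euler.y = InitialYRotation;
		transform.rotation = Quaternion.Euler(euler);
	}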
Code example #48
0
	void Awake()
	{
		coordinateSystem = MonoBehaviour.FindObjectOfType(typeof(RUISCoordinateSystem)) as RUISCoordinateSystem;
		localPosition = Vector3.zero;
		localRotation = Quaternion.identity;
		rawRotation = Quaternion.identity;
		
		filterPos = new KalmanFilter();
		filterPos.initialize(3,3);
		filterPos.skipIdenticalMeasurements = true;
		//		filterRot = new KalmanFilter();
		//		filterRot.initialize(4,4);
		
		// Mobile Razer Hydra base filtering
		hydraBaseFilterPos = new KalmanFilter();
		hydraBaseFilterPos.initialize(3,3);
		hydraBaseFilterPos.skipIdenticalMeasurements = true;
		//		hydraBaseFilterRot = new KalmanFilter();
		//		hydraBaseFilterRot.initialize(4,4);
		
		// NOTE: filterRot is assumed to be initialized at its field declaration, since the in-method initialization above is commented out.
		filterRot.skipIdenticalMeasurements = true;
		
		// Yaw Drift Corrector invocations in Awake()
		filterDrift = new KalmanFilter();
		filterDrift.initialize(2,2);
		
		transform.localPosition = defaultPosition;
		eyeCenterPosition = defaultPosition;
		measuredHeadPosition = defaultPosition;
		
		hydraBasePosition = new Vector3(0, 0, 0);
		hydraBaseRotation = Quaternion.identity;
		
		ovrCameraRig = GetComponentInChildren<OVRCameraRig>();
		if(ovrCameraRig != null && OVRManager.display != null && OVRManager.display.isPresent)
		{
			useOculusRiftRotation = true;
		}
		else
		{
			useOculusRiftRotation = false;
		}
		
		// Enforce rotation settings if rotation source is set to be same as position source
		if (!pickRotationSource) 
		{
			switch (headPositionInput)
			{	
			case HeadPositionSource.Kinect1:
			{
				headRotationInput = HeadRotationSource.Kinect1;
				rotationPlayerID = positionPlayerID;
				rotationJoint = positionJoint;
				break;
			}
			case HeadPositionSource.Kinect2:
			{
				headRotationInput = HeadRotationSource.Kinect2;
				rotationPlayerID = positionPlayerID;
				rotationJoint = positionJoint;
				break;
			}
			case HeadPositionSource.PSMove:
			{
				headRotationInput = HeadRotationSource.PSMove;
				rotationPSMoveID = positionPSMoveID;
				break;
			}
			case HeadPositionSource.RazerHydra:
			{
				headRotationInput = HeadRotationSource.RazerHydra;
				rotationRazerID = positionRazerID;
				break;
			}
			case HeadPositionSource.InputTransform:
			{
				headRotationInput = HeadRotationSource.InputTransform;
				rotationInput = positionInput;
				break;
			}
			case HeadPositionSource.None:
			{
				headRotationInput = HeadRotationSource.None;
				break;
			}
			}
		}
		
		filterPosition = false;
	}
Code example #49
0
 /// <summary>
 /// Sets the OVR camera controller.
 /// </summary>
 /// <param name="cameraController">Camera controller.</param>
 public void SetOVRCameraController(ref OVRCameraRig cameraController)
 {
     CameraController = cameraController;
 }
Code example #50
0
File: Movement.cs Project: MrIThompson/Dino
	void Start(){
		posI = 2;
		gm = GameObject.Find ("GM").GetComponent<GM_Doors> ();
		ovrRig = GetComponent<OVRCameraRig> ();
	}