// InitCameraControllerVariables
// Made public so that it can be called by classes that require information about the
// camera to be present when initializing variables in 'Start'
public void InitCameraControllerVariables()
{
    // Get the IPD value (distance between eyes in meters)
    //IVRDevice.GetIPD(ref ipd);

    // Get the values for both IPD and lens distortion correction shift. We don't normally
    // need to set the PhysicalLensOffset once it's been set here.
    //IVRDevice.CalculatePhysicalLensOffsets(ref LensOffsetLeft, ref LensOffsetRight);

    // Using the calculated FOV, based on distortion parameters, yields the best results.
    // However, public functions allow the FOV to be overridden if desired.
    //VerticalFOV = IVRDevice.VerticalFOV();

    // Store aspect ratio as well
    //AspectRatio = IVRDevice.CalculateAspectRatio();

    // Since we haven't set this up, use constant numbers instead.
    IVRDevice.GetDistortionCorrectionCoefficients(ref DistK0, ref DistK1, ref DistK2, ref DistK3);

    // Check to see if we should render in portrait mode
    //if (PortraitMode != true)
    //    PortraitMode = OVRDevice.RenderPortraitMode();
    //PrevPortraitMode = false;

    // Get our initial world orientation of the cameras from the scene (we can grab it from
    // the set FollowOrientation object or this OVRCameraController gameObject)
    //if (FollowOrientation != null)
    //    OrientationOffset = FollowOrientation.rotation;
    //else
    //    OrientationOffset = transform.rotation;
}
private void OnInputDeviceDisconnected(IVRDevice vrDevice, IVRInputDevice inputDevice)
{
    if (!vrDevice.InputDevices.Any(x => x is UnityXRController))
    {
        // No controllers are connected
        // Enable gaze controls
        DisableAllControllerVisuals();
    }
}
// Note: a device connecting is different from a controller being active
private void OnInputDeviceConnected(IVRDevice vrDevice, IVRInputDevice inputDevice)
{
    var unityController = inputDevice as UnityXRController;
    if (unityController != null)
    {
        // A controller was connected
        // Disable gaze controls
        EnableControllerVisual(unityController);
    }
}
// Note: a device connecting is different from a controller being active
private void OnInputDeviceConnected(IVRDevice vrDevice, IVRInputDevice inputDevice)
{
    var gearController = inputDevice as GearVRController;
    if (gearController != null)
    {
        // A controller was connected
        // Disable gaze controls
        EnableController(gearController);
    }
}
#pragma warning restore 0649

#endregion

/// <summary>
/// Creates a new emulator device.
/// </summary>
/// <param name="device">The emulator device type.</param>
public EmulatorDevice(VREmulatorDevice device)
{
    if (!Enum.IsDefined(typeof(VREmulatorDevice), device))
    {
        UnityEngine.Debug.LogFormat("[EmulatorDevice] No VREmulatorSetup supplied, using default device ({0}).", _defaultDevice.ToString());
        mImpl = CreateDeviceImplementation(_defaultDevice);
    }
    else
    {
        mImpl = CreateDeviceImplementation(device);
    }

    UnityEngine.Debug.LogFormat("[EmulatorDevice] Using device implementation: {0}", mImpl.GetType().Name);
}
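// Usage sketch (illustrative only, not part of the original file): a value outside the
// VREmulatorDevice enum falls back to _defaultDevice, while any defined value is used as-is.
// The enum member name below is an assumption for illustration.
var emulator = new EmulatorDevice(VREmulatorDevice.GearVR);          // assumed enum member; used directly
var fallback = new EmulatorDevice((VREmulatorDevice)int.MaxValue);   // undefined value; logs and uses the default implementation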
// Update is called once per frame
void Update()
{
    //InitCameraControllerVariables();
    //ConfigureCameraLensCorrection(ref _leftCamera);
    //ConfigureCameraLensCorrection(ref _rightCamera);
    //IVRDevice.GetOrientation (0, ref _gyroAttitude);

    if (Input.GetKey(KeyCode.Alpha3))
    {
        IVRDevice.ResetOrientation(0);
    }

#if !UNITY_EDITOR_OSX
    IVRDevice.GetPredictedOrientation(0, ref _gyroAttitude);
    transform.localRotation = _gyroAttitude;
#endif
}
// Update is called once per frame
void Update()
{
    //InitCameraControllerVariables();
    //ConfigureCameraLensCorrection(ref _leftCamera);
    //ConfigureCameraLensCorrection(ref _rightCamera);
    //IVRDevice.GetOrientation (0, ref _gyroAttitude);

#if CHILDREN_VR
    if (Input.GetKey(KeyCode.Alpha3) && rotation_fix == false)
    {
        IVRDevice.ResetOrientation(0);
        rotation_fix = true;
    }
#endif

    IVRDevice.GetPredictedOrientation(0, ref _gyroAttitude);
    transform.localRotation = _gyroAttitude;
    // print(transform.localRotation);
}
void Update()
{
    if (IVRDevice.IsHMDAttached())
    {
        return;
    }

#if UNITY_EDITOR_OSX
    if (Input.GetKey(KeyCode.Alpha3))
    {
        _bodyTransform.localRotation = Quaternion.identity;
        _mouseX = 0;
        _mouseY = 0;
    }
#endif

    if (Input.GetKey(KeyCode.LeftControl))
    {
        _mouseZ += Input.GetAxis("Mouse X") * kSensitivity;
        _mouseZ = Mathf.Clamp(_mouseZ, -85, 85);
    }
    else
    {
        _mouseX += Input.GetAxis("Mouse X") * kSensitivity;
        if (_mouseX <= -180)
        {
            _mouseX += 360;
        }
        else if (_mouseX > 180)
        {
            _mouseX -= 360;
        }

        _mouseY -= Input.GetAxis("Mouse Y") * kSensitivity;
        _mouseY = Mathf.Clamp(_mouseY, -85, 85);
    }

    _bodyTransform.localRotation = Quaternion.Euler(0, this.transform.localRotation.eulerAngles.y, 0.0f) * Quaternion.Euler(_mouseY, _mouseX, _mouseZ);
}
void ConfigureCameraLensCorrection(ref Camera camera)
{
    // Get the distortion scale and aspect ratio to use when configuring the distortion shader
    float distortionScale = 1.0f / IVRDevice.DistortionScale();
    float aspectRatio = IVRDevice.CalculateAspectRatio();

    // These values are different in the SDK World Demo; Unity renders each camera to a buffer
    // that is normalized, so we will respect this rule when calculating the distortion inputs
    float NormalizedWidth = 1.0f;
    float NormalizedHeight = 1.0f;

    IVRLensCorrection lc = camera.GetComponent<IVRLensCorrection>();

    lc.enabled = true;
    lc._Scale.x = (NormalizedWidth / 2.0f) * distortionScale;
    lc._Scale.y = (NormalizedHeight / 2.0f) * distortionScale * aspectRatio;
    lc._ScaleIn.x = (2.0f / NormalizedWidth);
    lc._ScaleIn.y = (2.0f / NormalizedHeight) / aspectRatio;
    lc._HmdWarpParam.x = DistK0;
    lc._HmdWarpParam.y = DistK1;
    lc._HmdWarpParam.z = DistK2;
}
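// Illustrative sketch (an assumption, not the actual IVRLensCorrection shader): parameters like
// _Scale, _ScaleIn and _HmdWarpParam typically drive a radial barrel distortion of the form
// r' = r * (k0 + k1*r^2 + k2*r^4 + k3*r^6). In the four-coefficient form, DistK3 (fetched in
// InitCameraControllerVariables) would occupy _HmdWarpParam.w, which the snippet above does not set.
// Uses UnityEngine vector types; the method name and signature are hypothetical.
Vector2 HmdWarp(Vector2 texIn01, Vector2 lensCenter, Vector2 scale, Vector2 scaleIn, Vector4 warpParam)
{
    // Map from normalized texture coordinates into lens-centred, scaled coordinates
    Vector2 theta = Vector2.Scale(texIn01 - lensCenter, scaleIn);
    float rSq = theta.x * theta.x + theta.y * theta.y;

    // Radial polynomial: k0 + k1*r^2 + k2*r^4 + k3*r^6
    float factor = warpParam.x + warpParam.y * rSq + warpParam.z * rSq * rSq + warpParam.w * rSq * rSq * rSq;
    Vector2 rvector = theta * factor;

    // Map the warped coordinates back into texture space
    return lensCenter + Vector2.Scale(scale, rvector);
}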
/// <summary>
/// Initializes the avatar extension.
/// </summary>
/// <param name="avatar">The <see cref="IVRAvatar"/> the extension is bound to.</param>
public void Initialize(IVRAvatar avatar)
{
    mAvatar = avatar;
    mDevice = VRDevice.Device;
    mPointer = mDevice.Headset.Pointer;
    mPointerVisual = m_PointerVisual;

    if (mPointerVisual == null)
    {
        if (mPointer.Transform == transform)
        {
            mPointer.Transform = null;
        }
    }
    else
    {
        mPointerVisual.Bind(mPointer);
        mPointer.Transform = mPointerVisual.transform;
    }

    ApplyTimedPointerProperties();

    Debug.Log("Initialized");
}
public void Initialize()
{
    mAvatar = GetComponentInParent<IVRAvatar>();
    mAvatar.InitializeExtensions();

    mDevice = VRDevice.Device;
    mGazeInput = GetComponent<GazeInput>();

    // Load controller visuals for any VRAvatarController objects attached to the avatar
    var avatarControllers = GetComponentsInChildren<VRAvatarController>(includeInactive: true);
    foreach (var controller in avatarControllers)
    {
        AttachControllerVisual(controller);
    }

    // Add event listeners
    mDevice.InputDeviceConnected += OnInputDeviceConnected;
    mDevice.InputDeviceDisconnected += OnInputDeviceDisconnected;
    mAvatar.Head.ActiveCameraChanged += OnActiveCameraChanged;

    SetupInitialControllerState();
    UpdateHandedness();
}
private void Awake()
{
    mAvatar = GetComponentInParent<IVRAvatar>();
    mAvatar.InitializeExtensions();

    mPrimaryControllerTracker = new GearVRTrackedControllerProxy(mAvatar, VRAvatarLimbType.RightHand);
    mSecondaryControllerTracker = new GearVRTrackedControllerProxy(mAvatar, VRAvatarLimbType.LeftHand);

    mDevice = VRDevice.Device;
    mSettings = gameObject.GetOrAddComponent<GearVRAvatarSettings>();
    mGazeInput = GetComponent<GazeInput>();

    // Setup auxiliary systems
    SetupManager();
    SetupCameraRig();

    // Activate OVRManager once everything is set up
    mManager.gameObject.SetActive(true);

    // Load controller visuals for any VRAvatarController objects attached to the avatar
    {
        var avatarControllers = GetComponentsInChildren<VRAvatarController>(includeInactive: true);
        foreach (var controller in avatarControllers)
        {
            AttachControllerVisual(controller);
        }
    }

    // Add event listeners
    mDevice.InputDeviceConnected += OnInputDeviceConnected;
    mDevice.InputDeviceDisconnected += OnInputDeviceDisconnected;
    mAvatar.Head.ActiveCameraChanged += OnActiveCameraChanged;

    SetupInitialControllerState();
    UpdateHandedness();
}
private void OnDestroy()
{
    mDevice = null;
}
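// Sketch only (an assumption, not part of the original snippet): the handlers registered in
// Awake/Initialize above keep references alive through mDevice and mAvatar, so a teardown that
// detaches them before clearing the field would typically look like this.
private void OnDestroy()
{
    if (mDevice != null)
    {
        mDevice.InputDeviceConnected -= OnInputDeviceConnected;
        mDevice.InputDeviceDisconnected -= OnInputDeviceDisconnected;
    }

    if (mAvatar != null)
    {
        mAvatar.Head.ActiveCameraChanged -= OnActiveCameraChanged;
    }

    mDevice = null;
}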
private IVRInputDevice GetInput(VRInputDeviceHand hand)
{
    IVRDevice device = VRDevice.Device;
    return hand == VRInputDeviceHand.Left
        ? device.SecondaryInputDevice
        : device.PrimaryInputDevice;
}
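// Usage sketch (illustrative only): GetInput maps the left hand to the device's SecondaryInputDevice
// and every other hand value to PrimaryInputDevice, i.e. it assumes a right-handed primary device.
// VRInputDeviceHand.Right is assumed to exist; only Left appears in the snippet above.
IVRInputDevice leftInput = GetInput(VRInputDeviceHand.Left);    // -> device.SecondaryInputDevice
IVRInputDevice rightInput = GetInput(VRInputDeviceHand.Right);  // -> device.PrimaryInputDevice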