/****************************************************************************************************\
 * Private functions to set up multi-camera-based rendering
\****************************************************************************************************/
private GameObject SetupDummyViewCameraObject(float ipd, EFVR_Eye whichEye)
{
    GameObject temp = new GameObject();

    temp.name = string.Format("FOVE Eye ({0})", whichEye);
    temp.transform.parent = transform;
    temp.transform.localPosition = new Vector3(ipd, eyeHeight, eyeForward) * worldScale;
    temp.transform.localRotation = UnityEngine.Quaternion.identity;

    return temp;
}
/// <summary>
/// Returns the position of the supplied Vector3 in normalized viewport space for whichever
/// eye is specified. This is a convenience function wrapping Unity's built-in
/// Camera.WorldToViewportPoint without the need to acquire references to each camera by hand.
///
/// <para>In most cases it is sufficient to query only one eye at a time; however, both are
/// accessible for advanced use cases.</para>
/// </summary>
/// <param name="pos">The position in 3D world space to project to viewport space.</param>
/// <param name="eye">Which Fove.Eye (Fove.Eye.Left or Fove.Eye.Right) to project onto.</param>
/// <returns>The vector indicating where the 3D world point appears in the specified eye viewport.</returns>
public Vector3 GetNormalizedViewportPointForEye(Vector3 pos, EFVR_Eye eye)
{
    if (eye == EFVR_Eye.Left)
    {
        return _leftCamera.WorldToViewportPoint(pos);
    }
    if (eye == EFVR_Eye.Right)
    {
        return _rightCamera.WorldToViewportPoint(pos);
    }
    return Vector3.zero;
}
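// A minimal usage sketch, not part of the original source; the `fove` parameter and
// `IsVisibleToLeftEye` helper are illustrative names. Viewport coordinates are
// normalized, so a point is on-screen for an eye when x and y fall within [0, 1]
// and z (the distance in front of the camera) is positive.
private bool IsVisibleToLeftEye(FoveInterface fove, Vector3 worldPos)
{
    Vector3 vp = fove.GetNormalizedViewportPointForEye(worldPos, EFVR_Eye.Left);
    return vp.z > 0f && vp.x >= 0f && vp.x <= 1f && vp.y >= 0f && vp.y <= 1f;
}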
public bool CanUseCamera(EFVR_Eye whichEye, FoveEyeCamera cam)
{
    switch (whichEye)
    {
        case EFVR_Eye.Left:
            return left == null || left == cam;
        case EFVR_Eye.Right:
            return right == null || right == cam;
    }
    return false;
}
public void SetCamera(EFVR_Eye whichEye, FoveEyeCamera cam)
{
    switch (whichEye)
    {
        case EFVR_Eye.Left:
            left = cam;
            break;
        case EFVR_Eye.Right:
            right = cam;
            break;
    }
}
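// A sketch of the claim-then-assign pattern these two methods support (the `pair`
// and `eyeCam` variables are illustrative): a FoveEyeCamera should only be
// registered on a pair whose slot for that eye is free, which is exactly what
// CanUseCamera checks.
if (pair.CanUseCamera(EFVR_Eye.Left, eyeCam))
{
    pair.SetCamera(EFVR_Eye.Left, eyeCam);
}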
// Runs every frame: moves this object to the point hit by the user's right-eye
// gaze ray, so it does not lag behind the user's head motion.
void Update()
{
    // Sample the most recent gaze rays and eye-closed state
    rays = fove.GetGazeRays_Immediate();
    myEyeStruct = FoveInterface.CheckEyesClosed();

    // Only move the object on an actual hit; checking the bool return of
    // Physics.Raycast avoids reading a stale or default RaycastHit.
    if (Physics.Raycast(rays.right, out hit, Mathf.Infinity))
    //&& (myEyeStruct == EFVR_Eye.Left || myEyeStruct == EFVR_Eye.Both)
    {
        transform.position = hit.point;
    }
}
// Runs in FixedUpdate: moves this object to the gaze hit point and starts/stops
// timers on the gazed-at object via SendMessage.
void FixedUpdate()
{
    // Sample the most recent gaze rays and eye-closed state
    rays = fove.GetGazeRays_Immediate();
    myEyeStruct = FoveInterface.CheckEyesClosed();
    Debug.Log(myEyeStruct.ToString());

    // Raycast along the left-eye gaze ray; bail out when nothing is hit so we
    // never read a stale or default RaycastHit
    if (!Physics.Raycast(rays.left, out hit, Mathf.Infinity))
    {
        return;
    }

    if (fove.Gazecast(hit.collider))
    //&& (myEyeStruct == EFVR_Eye.Right || myEyeStruct == EFVR_Eye.Both)
    {
        transform.position = hit.point;

        // Send a message to the gazed-at GameObject
        if (prev == null)
        {
            // Remember the current hit collider and start its timer
            prev = hit.collider;
            Debug.Log("hit collider set collider to my object " + prev.GetType());
            if (prev.GetComponent<timerScript>() != null)
            {
                prev.SendMessage("StartTimer");
            }
        }
        else if (prev.name != hit.collider.name)
        {
            // Gaze moved to a new collider: stop the old timer, start the new one
            if (prev.GetComponent<timerScript>() != null)
            {
                prev.SendMessage("StopTimer");
            }
            if (hit.collider.GetComponent<timerScript>() != null)
            {
                hit.collider.SendMessage("StartTimer");
            }
            prev = hit.collider;
        }
    }
}
/****************************************************************************************************\
 * Legacy instance methods
\****************************************************************************************************/
/// <summary>
/// Get a reference to the camera used to render the specified eye's view.
/// This object remains consistent between frames unless deleted or the scene changes.
/// </summary>
/// <param name="whichEye">Which eye's camera to retrieve. This must be either "Left" or "Right".</param>
/// <returns>The Camera object for the specified eye, or `null` if passed an argument other than
/// "Left" or "Right".</returns>
public Camera GetEyeCamera(EFVR_Eye whichEye)
{
    EnsureLocalDataConcurrency();
    switch (whichEye)
    {
        case EFVR_Eye.Left:
            return _leftCamera;
        case EFVR_Eye.Right:
            return _rightCamera;
        default:
            Debug.LogWarning("GetEyeCamera called with a non-left/right argument, which makes no sense.");
            break;
    }
    return null;
}
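// A minimal usage sketch, assuming a `_fove` reference to a FoveInterface
// (an illustrative name, not from the original source): fetch the left-eye
// camera and project a world point into its screen space, guarding against
// the null return documented above.
Camera leftCam = _fove.GetEyeCamera(EFVR_Eye.Left);
if (leftCam != null)
{
    Vector3 screenPoint = leftCam.WorldToScreenPoint(transform.position);
}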
private static FoveCameraPair GetNextLayerPair(EFVR_Eye whichEye, FoveEyeCamera cam)
{
    if (whichEye != EFVR_Eye.Left && whichEye != EFVR_Eye.Right)
    {
        return null;
    }

    // Reuse the first existing pair with a free slot for this eye
    foreach (var pair in _layerCameraPairs)
    {
        if (pair.CanUseCamera(whichEye, cam))
        {
            return pair;
        }
    }

    // No usable pair exists yet, so create and register a new one
    var p = new FoveCameraPair(cam._compositor, cam.foveInterfaceBase);
    _layerCameraPairs.Add(p);
    return p;
}
/// <summary>
/// Sets the _rayOrigin variable based on the VR HMD in use.
/// </summary>
private void FindGazeOrigin()
{
    switch (TestController.Instance.TestBlockData.SelectedVRHMD)
    {
        case TestBlock.VRHMD.VIVE:
            _rayOrigin = transform.position;
            break;
        case TestBlock.VRHMD.FOVE:
            if (CurrentControlMethod == TestBlock.ControlMethod.Eyetracking)
            {
                FoveInterface.EyeRays rays = _foveInterface.GetGazeRays();
                EFVR_Eye eyeClosed = FoveInterface.CheckEyesClosed();
                switch (eyeClosed)
                {
                    // Use the midpoint of both ray origins when both eyes are
                    // open; fall back to the open eye when one is closed
                    case EFVR_Eye.Neither:
                        _rayOrigin = (rays.left.origin + rays.right.origin) * 0.5f;
                        break;
                    case EFVR_Eye.Left:
                        _rayOrigin = rays.right.origin;
                        break;
                    case EFVR_Eye.Right:
                        _rayOrigin = rays.left.origin;
                        break;
                    case EFVR_Eye.Both:
                        _rayOrigin = Vector3.zero;
                        break;
                }
            }
            else if (CurrentControlMethod == TestBlock.ControlMethod.Headtracking)
            {
                _rayOrigin = transform.position;
            }
            break;
    }
}
private void Update()
{
    RaycastHit hit;
    myEyeStruct = FoveInterface.CheckEyesClosed();
    gcd = FoveInterface.GetGazeConvergence();

    // Only re-raycast when the convergence data has actually changed
    if (!prevgcd.Equals(gcd) || prev == "")
    {
        // Only move the object when the convergence ray hits something
        if (Physics.Raycast(gcd.ray, out hit, Mathf.Infinity))
        {
            transform.position = hit.point;
        }
        prevgcd = gcd;
        //Debug.Log("(accuracy,distance,ray) " + gcd.accuracy + " " + gcd.distance + " " + gcd.ray);
    }

    // Track eye-closed state transitions
    if (prev == "" || prev != myEyeStruct.ToString())
    {
        prev = myEyeStruct.ToString();
        //Debug.Log(prev);
    }
}
// Gets the point where the user is looking every frame and interacts with any
// intersecting gaze objects where possible.
void Update()
{
    if (!_initialized)
    {
        return;
    }
    if (Input.GetKeyDown(KeyCode.H))
    {
        CenterHead();
    }
    if (Input.GetKeyDown(KeyCode.Escape))
    {
        Application.Quit();
    }

    Ray ray = new Ray();
    switch (_selectedControlType)
    {
        case StreamController.ControlType.Head:
            ray = new Ray(Head.position, Head.forward * 1000);
            break;
        case StreamController.ControlType.Eyes_Mouse:
        case StreamController.ControlType.Mouse:
            if (Input.GetMouseButton(1))
            {
                // Dragging with the right mouse button rotates the head
                Head.Rotate(Vector3.up, Input.GetAxis("Mouse X") * _mouseRotationSpeed, Space.Self);
                Head.Rotate(Vector3.right, -Input.GetAxis("Mouse Y") * _mouseRotationSpeed, Space.Self);
                Head.localRotation = Quaternion.Euler(Head.localEulerAngles.x, Head.localEulerAngles.y, 0);
            }
            if (Input.GetMouseButton(0) || _selectedControlType == StreamController.ControlType.Eyes_Mouse)
            {
                ray = Camera.main.ScreenPointToRay(Input.mousePosition);
            }
            else
            {
                ResetHoveredObject();
                return;
            }
            break;
        // The per-case code for the following two inputs was moved further down,
        // since we want gaze data to be recorded for both input types.
        case StreamController.ControlType.Eyes:
            break;
        case StreamController.ControlType.Joystick:
            // Joystick commands are sent while gazing at the control panel (see below)
            break;
    }

    //--Eye direction calculation for all occasions
    List<Vector3> eyeDirections = new List<Vector3>();
    FoveInterfaceBase.EyeRays rays = _foveInterface.GetGazeRays();
    EFVR_Eye eyeClosed = FoveInterface.CheckEyesClosed();
    if (eyeClosed != EFVR_Eye.Both && eyeClosed != EFVR_Eye.Left)
    {
        eyeDirections.Add(rays.left.direction);
    }
    if (eyeClosed != EFVR_Eye.Both && eyeClosed != EFVR_Eye.Right)
    {
        eyeDirections.Add(rays.right.direction);
    }
    // Average the open-eye directions; guard against both eyes being closed,
    // which would otherwise divide by zero
    if (eyeDirections.Count > 0)
    {
        Vector3 direction = Vector3.zero;
        foreach (Vector3 eyeDirection in eyeDirections)
        {
            direction += eyeDirection;
        }
        direction = direction / eyeDirections.Count;
        ray = new Ray(Head.transform.position, direction * 1000);
    }
    //---------------------------------------------------------

    // Position the cursor along the gaze ray
    _cursorCanvas.position = Head.position + ray.direction * _cursorDistance;
    Debug.DrawRay(ray.origin, ray.direction);

    RaycastHit hit;
    if (Physics.Raycast(ray, out hit))
    {
        GazeObject gazeObject = hit.collider.GetComponent<GazeObject>();
        if (gazeObject == null)
        {
            ResetHoveredObject();
            return;
        }

        // A gaze object is not necessarily a control panel, so we also check
        // that its tag is the correct one.
        RobotControlTrackPad robotControl = gazeObject.GetComponent<RobotControlTrackPad>();
        if (robotControl != null && gazeObject.CompareTag("EyeControlPanel"))
        {
            // The control result is computed on hit and updated for both input types
            controlResult = robotControl.GetControlResult(hit.point);

            // If the robot controls are activated and eye tracking drives motion,
            // send the command to the appropriate controller
            if (robotControl.IsActivated && !robotControl.IsExternallyDisabled() &&
                _selectedControlType == StreamController.ControlType.Eyes)
            {
                // If the virtual environment is on, send the command to the VirtualUnityController
                if (StreamController.Instance.VirtualEnvironment)
                {
                    if (VirtualUnityController.Instance.IsActive)
                    {
                        // Debug.Log("Sending gaze command to robot");
                        VirtualUnityController.Instance.GazeCommand(controlResult);
                    }
                    else
                    {
                        Debug.Log("VirtualUnityController is not connected");
                    }
                }
                // Otherwise send it to the RobotInterface
                else
                {
                    if (RobotInterface.Instance.IsConnected)
                    {
                        RobotInterface.Instance.SendCommand(controlResult);
                    }
                    else
                    {
                        Debug.Log("RobotInterface controller is not connected");
                    }
                }
            }
            //---Joystick input---
            else if (robotControl.IsActivated && !robotControl.IsExternallyDisabled() &&
                     _selectedControlType == StreamController.ControlType.Joystick)
            {
                Vector2 JoyInput = new Vector2(Input.GetAxis("Horizontal"), Input.GetAxis("Vertical"));
                // If the virtual environment is on, send the command to the VirtualUnityController
                if (StreamController.Instance.VirtualEnvironment)
                {
                    if (VirtualUnityController.Instance.IsActive)
                    {
                        VirtualUnityController.Instance.JoystickCommand(JoyInput);
                    }
                }
                // Otherwise send it to the RobotInterface
                else
                {
                    if (RobotInterface.Instance.IsConnected)
                    {
                        RobotInterface.Instance.DirectCommandRobot(JoyInput);
                    }
                }
            }
        }
        else
        {
            // This result means the user is not staring at the panel
            controlResult = new Vector2(-2, -2);
            // TODO: send a stop command to the robot instead of a zero vector.
            // The zero vector is filtered and still adds movement to the robot.
            // RobotInterface.Instance.SendCommand(Vector2.zero);
        }

        if (gazeObject == _hoveredGazeObject)
        {
            return;
        }
        if (_hoveredGazeObject != null)
        {
            _hoveredGazeObject.OnUnhover();
        }
        gazeObject.OnHover();
        _hoveredGazeObject = gazeObject;
    }
    else
    {
        ResetHoveredObject();
    }
}
/// <summary>
/// Updates the cursor position on screen based on the eye-gaze ray from the VR HMD
/// (when eye tracking is the control method). This function is also called when
/// RecordGazePosition is true, in which case the ray is still handled but the cursor
/// position is not updated, so that gaze can be recorded regardless of the control method.
/// </summary>
private void TrackEyes()
{
    List<Vector3> eyeDirections = new List<Vector3>();
    switch (TestController.Instance.TestBlockData.SelectedVRHMD)
    {
        case TestBlock.VRHMD.VIVE:
            Vector3 gaze = Pupil.values.GazePoint3D;
            // Transform and correct the eye-tracking gaze direction
            gaze = (transform.rotation * gaze).normalized;
            Vector3 delta = transform.forward.normalized - gaze;
            gaze = gaze + delta * 2;
            //float eyeConfidence = (Pupil.values.Confidences[0] + Pupil.values.Confidences[1]) / 2.0f;
            //if (eyeConfidence > 0.7f)
            //{
            eyeDirections.Add(gaze);
            //}
            break;
        case TestBlock.VRHMD.FOVE:
            FoveInterface.EyeRays rays = _foveInterface.GetGazeRays();
            EFVR_Eye eyeClosed = FoveInterface.CheckEyesClosed();
            switch (eyeClosed)
            {
                // Average both rays when both eyes are open; fall back to the
                // open eye when one is closed
                case EFVR_Eye.Neither:
                    eyeDirections.Add(rays.left.direction);
                    eyeDirections.Add(rays.right.direction);
                    break;
                case EFVR_Eye.Left:
                    eyeDirections.Add(rays.right.direction);
                    break;
                case EFVR_Eye.Right:
                    eyeDirections.Add(rays.left.direction);
                    break;
                case EFVR_Eye.Both:
                    eyeDirections.Add(Vector3.zero);
                    break;
            }
            break;
    }

    Vector3 direction = Vector3.zero;
    foreach (Vector3 eyeDirection in eyeDirections)
    {
        direction += eyeDirection;
    }
    direction = direction / eyeDirections.Count;

    Ray ray = new Ray(_rayOrigin, direction);
    ray = GetAverageEyeRay(ray);
    CurrentEyeGazeScreenPoint = VRCamera.WorldToScreenPoint(_rayOrigin + ray.direction * POINT_CALCULATION_DISTANCE);

    if (CurrentControlMethod == TestBlock.ControlMethod.Eyetracking)
    {
        HandleRay(ray);
    }
    else
    {
        HandleGazeTrackingRay(ray);
    }
    Debug.DrawRay(ray.origin, ray.direction * 100);
}
/// <summary>
/// Set up one FOVE view camera in the live scene at runtime.
/// </summary>
/// <param name="ipd">How far apart the cameras should be to create the stereoscopic effect.</param>
/// <param name="whichEye">Which eye is being created this time.</param>
/// <param name="go">The Unity GameObject instance to which the FOVE camera is attached.</param>
/// <param name="ec">The FoveEyeCamera instance which is attached to the GameObject.</param>
/// <returns>The Unity Camera instance which is attached to the GameObject.</returns>
/// <remarks>We try to support a number of options to be flexible for different users' needs.
/// As such, the process goes through some checks and differing setup tasks in corresponding
/// cases. The three paths are: 1) no camera prefabs are referenced, 2) a single camera prefab is
/// referenced, and 3) both cameras are overridden by objects already placed in the scene.
/// The third option should only be used by people who *really* need two separate cameras
/// with separate effects for each eye, and who accept that showing different images to each
/// eye can very easily cause disorientation and sickness in most people.
///
/// In no case is it strictly necessary to add your own FoveEyeCamera behaviour to a game object,
/// and we recommend not doing so, as it can be easy to accidentally get your effects in the wrong
/// order. (FoveEyeCamera should be the last behaviour in the list in order to get all the image
/// effects reliably.)</remarks>
private Camera SetupFoveViewCamera(float ipd, EFVR_Eye whichEye, out GameObject go, out FoveEyeCamera ec)
{
    GameObject temp = null;
    Camera cam = null;
    Camera mirror = GetComponent<Camera>();

    if (useCameraOverride && useCameraPrefab)
    {
        Debug.LogError("You cannot use a prefab and override cameras at the same time!");
    }

    if (useCameraOverride)
    {
        if (leftEyeOverride != null && rightEyeOverride != null)
        {
            if (whichEye == EFVR_Eye.Left)
            {
                cam = leftEyeOverride;
            }
            else if (whichEye == EFVR_Eye.Right)
            {
                cam = rightEyeOverride;
            }
            else
            {
                Debug.LogError("Camera override in unforeseen state");
            }
            temp = cam.gameObject;
            _nearClip = leftEyeOverride.nearClipPlane;
            _farClip = leftEyeOverride.farClipPlane;

            // Sanity check: both overrides should share the same clip planes
            if (leftEyeOverride.nearClipPlane != rightEyeOverride.nearClipPlane ||
                leftEyeOverride.farClipPlane != rightEyeOverride.farClipPlane)
            {
                Debug.LogWarning("Left and right eye clip planes differ; using the left planes for VR!");
            }

            // The mirror camera is the portal/preview view for Unity etc.;
            // useful when there is no compositor.
            if (mirror != null)
            {
                mirror.nearClipPlane = _nearClip;
                mirror.farClipPlane = _farClip;
            }
        }
        else
        {
            Debug.LogError("Both camera overrides must be assigned when using override mode.");
        }
    }

    // Use a camera prefab if set to do so and one is available
    if (useCameraPrefab)
    {
        if (eyeCameraPrototype != null)
        {
            if (eyeCameraPrototype.GetComponent<FoveInterface>() != null)
            {
                Debug.LogError("FoveInterface's eye camera prototype has another FoveInterface component attached. " + whichEye);
                go = null;
                ec = null;
                return null;
            }
            cam = Instantiate(eyeCameraPrototype);
            _nearClip = cam.nearClipPlane;
            _farClip = cam.farClipPlane;
            temp = cam.gameObject;
            if (mirror != null)
            {
                mirror.nearClipPlane = _nearClip;
                mirror.farClipPlane = _farClip;
            }
        }
    }

    // Fallback: neither an override nor a prefab produced a camera, so create
    // one and copy the mirror camera's settings
    if (cam == null)
    {
        temp = new GameObject();
        cam = temp.AddComponent<Camera>();

        if (mirror != null)
        {
            _nearClip = mirror.nearClipPlane;
            _farClip = mirror.farClipPlane;

            // Copy over camera properties
            cam.cullingMask = mirror.cullingMask;
            cam.depth = mirror.depth;
            cam.renderingPath = mirror.renderingPath;
            cam.useOcclusionCulling = mirror.useOcclusionCulling;
#if UNITY_5_6_OR_NEWER
            cam.allowHDR = mirror.allowHDR;
#else
            cam.hdr = mirror.hdr;
#endif
            cam.backgroundColor = mirror.backgroundColor;
            cam.clearFlags = mirror.clearFlags;
        }
        cam.nearClipPlane = _nearClip;
        cam.farClipPlane = _farClip;
    }

    cam.fieldOfView = 95.0f;

    ec = temp.GetComponent<FoveEyeCamera>();
    if (ec == null)
    {
        ec = temp.AddComponent<FoveEyeCamera>();
    }
    ec.whichEye = whichEye;
    ec.suppressProjectionUpdates = suppressProjectionUpdates;
    ec.foveInterfaceBase = this;

    temp.name = string.Format("FOVE Eye ({0})", whichEye);
    temp.transform.parent = transform;

    UpdateCameraSettings(temp, ec, ipd);

    go = temp;
    return cam;
}