void FixedUpdate()
{
    RaycastHit hit_r, hit_l, hit;

    // Ensure both eye rays hit something (used as a proxy for both eyes being open)
    EyeRays eyes = m_foveInterface.GetGazeRays();
    if (Physics.Raycast(eyes.right.origin, eyes.right.direction, out hit_r)) // Right eye hit
    {
        if (Physics.Raycast(eyes.left.origin, eyes.left.direction, out hit_l)) // Left eye hit
        {
            if (Physics.Raycast(transform.position, transform.forward, out hit))
            {
                if (hit.collider && hit.collider.CompareTag(desired_tag))
                {
                    Vector3 hit_point = hit.point;
                    Extract(hit_point, hit_point, desired);
                }
            }
        }
    }
}
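// A raycast hit is only a rough proxy for "both eyes open": an open eye looking at empty space
// also fails the check. A minimal hedged sketch of an explicit openness test, reusing
// FoveInterface.CheckEyesClosed() as the other snippets in this section do; m_foveInterface,
// desired_tag, desired and Extract are assumed from the snippet above.
void FixedUpdateWithEyeCheck()
{
    if (FoveInterface.CheckEyesClosed() != EFVR_Eye.Neither)
    {
        return; // at least one eye is reported closed, skip this frame
    }

    RaycastHit hit;
    if (Physics.Raycast(transform.position, transform.forward, out hit)
        && hit.collider.CompareTag(desired_tag))
    {
        Extract(hit.point, hit.point, desired);
    }
}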
// Gets the point where the user is looking every frame and interacts with any intersecting gaze objects if possible
void Update()
{
    if (!_initialized)
    {
        return;
    }
    if (Input.GetKeyDown(KeyCode.H))
    {
        CenterHead();
    }
    if (Input.GetKeyDown(KeyCode.Escape))
    {
        Application.Quit();
    }
    Ray ray = new Ray();
    switch (_selectedControlType)
    {
        case StreamController.ControlType.Head:
            ray = new Ray(Head.position, Head.forward * 1000);
            break;
        case StreamController.ControlType.Eyes_Mouse:
        case StreamController.ControlType.Mouse:
            if (Input.GetMouseButtonDown(1)) { }
            if (Input.GetMouseButton(1))
            {
                Head.Rotate(Vector3.up, Input.GetAxis("Mouse X") * _mouseRotationSpeed, Space.Self);
                Head.Rotate(Vector3.right, -Input.GetAxis("Mouse Y") * _mouseRotationSpeed, Space.Self);
                Head.localRotation = Quaternion.Euler(Head.localEulerAngles.x, Head.localEulerAngles.y, 0);
            }
            if (Input.GetMouseButton(0) || _selectedControlType == StreamController.ControlType.Eyes_Mouse)
            {
                ray = Camera.main.ScreenPointToRay(Input.mousePosition);
            }
            else
            {
                ResetHoveredObject();
                return;
            }
            break;
        // The code for the Eyes and Joystick cases was moved further down, since we want gaze data to be recorded for both inputs.
        case StreamController.ControlType.Eyes:
            //List<Vector3> eyeDirections = new List<Vector3>();
            //FoveInterfaceBase.EyeRays rays = _foveInterface.GetGazeRays();
            //EFVR_Eye eyeClosed = FoveInterface.CheckEyesClosed();
            //if (eyeClosed != EFVR_Eye.Both && eyeClosed != EFVR_Eye.Left)
            //    eyeDirections.Add(rays.left.direction);
            //if (eyeClosed != EFVR_Eye.Both && eyeClosed != EFVR_Eye.Right)
            //    eyeDirections.Add(rays.right.direction);
            //Vector3 direction = Vector3.zero;
            //foreach (Vector3 eyeDirection in eyeDirections)
            //{
            //    direction += eyeDirection;
            //}
            //direction = direction / eyeDirections.Count;
            //ray = new Ray(Head.transform.position, direction * 1000);
            break;
        case StreamController.ControlType.Joystick:
        {
            //// Joystick input
            //Vector2 JoyInput = new Vector2(Input.GetAxis("Horizontal"), Input.GetAxis("Vertical"));
            ////If the virtual environment is on, send the command to the VirtualUnityController
            //if (StreamController.Instance.VirtualEnvironment)
            //{
            //    if (VirtualUnityController.Instance.IsActive)
            //    {
            //        VirtualUnityController.Instance.JoystickCommand(JoyInput);
            //    }
            //}
            //// Otherwise send it to the RobotInterface
            //else
            //{
            //    if (RobotInterface.Instance.IsConnected)
            //    {
            //        RobotInterface.Instance.DirectCommandRobot(JoyInput);
            //    }
            //}
            break;
        }
    }

    //--Eye direction calculation for all occasions
    List<Vector3> eyeDirections = new List<Vector3>();
    FoveInterfaceBase.EyeRays rays = _foveInterface.GetGazeRays();
    EFVR_Eye eyeClosed = FoveInterface.CheckEyesClosed();
    if (eyeClosed != EFVR_Eye.Both && eyeClosed != EFVR_Eye.Left)
    {
        eyeDirections.Add(rays.left.direction);
    }
    if (eyeClosed != EFVR_Eye.Both && eyeClosed != EFVR_Eye.Right)
    {
        eyeDirections.Add(rays.right.direction);
    }
    Vector3 direction = Vector3.zero;
    foreach (Vector3 eyeDirection in eyeDirections)
    {
        direction += eyeDirection;
    }
    direction = direction / eyeDirections.Count;
    ray = new Ray(Head.transform.position, direction * 1000);
    //---------------------------------------------------------

    // Positioning of the cursor
    _cursorCanvas.position = Head.position + ray.direction * _cursorDistance;
    Debug.DrawRay(ray.origin, ray.direction);
    RaycastHit hit;
    if (Physics.Raycast(ray, out hit))
    {
        GazeObject gazeObject = hit.collider.GetComponent<GazeObject>();
        if (gazeObject == null)
        {
            ResetHoveredObject();
            return;
        }
        // The hit object may carry a GazeObject that is not the control panel, so we also check that its tag is the correct one
        RobotControlTrackPad robotControl = gazeObject.GetComponent<RobotControlTrackPad>();
        if (robotControl != null && gazeObject.CompareTag("EyeControlPanel"))
        {
            // The control result is computed at the hit point. It is updated for both kinds of input.
            controlResult = robotControl.GetControlResult(hit.point);

            // If the robot controls are activated and eye tracking is used for motion, send the command to the appropriate controller
            if (robotControl.IsActivated && !robotControl.IsExternallyDisabled() && _selectedControlType == StreamController.ControlType.Eyes)
            {
                if (StreamController.Instance.VirtualEnvironment)
                {
                    if (VirtualUnityController.Instance.IsActive)
                    {
                        // Debug.Log("Sending gaze command to robot");
                        VirtualUnityController.Instance.GazeCommand(controlResult);
                    }
                    else
                    {
                        Debug.Log("VirtualUnityController is not connected");
                    }
                }
                // Otherwise send it to the RobotInterface
                else
                {
                    if (RobotInterface.Instance.IsConnected)
                    {
                        RobotInterface.Instance.SendCommand(controlResult);
                    }
                    else
                    {
                        Debug.Log("RobotInterface controller is not connected");
                    }
                } // Instead of RobotInterface here
            }
            //---Joystick input---
            else if (robotControl.IsActivated && !robotControl.IsExternallyDisabled() && _selectedControlType == StreamController.ControlType.Joystick)
            {
                // Joystick input
                Vector2 JoyInput = new Vector2(Input.GetAxis("Horizontal"), Input.GetAxis("Vertical"));
                // If the virtual environment is on, send the command to the VirtualUnityController
                if (StreamController.Instance.VirtualEnvironment)
                {
                    if (VirtualUnityController.Instance.IsActive)
                    {
                        VirtualUnityController.Instance.JoystickCommand(JoyInput);
                    }
                }
                // Otherwise send it to the RobotInterface
                else
                {
                    if (RobotInterface.Instance.IsConnected)
                    {
                        RobotInterface.Instance.DirectCommandRobot(JoyInput);
                    }
                }
            }
        }
        else
        {
            // This result means the user is not staring at the panel.
            controlResult = new Vector2(-2, -2);
            // TODO: Send a stop command to the robot instead of a zero vector. The zero vector is filtered and still adds movement to the robot.
            // RobotInterface.Instance.SendCommand(Vector2.zero);
        }
        if (gazeObject == _hoveredGazeObject)
        {
            return;
        }
        if (_hoveredGazeObject != null)
        {
            _hoveredGazeObject.OnUnhover();
        }
        gazeObject.OnHover();
        _hoveredGazeObject = gazeObject;
    }
    else
    {
        ResetHoveredObject();
    }
}
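// The "average the open-eye directions" pattern above also appears in other snippets in this
// section, and it divides by eyeDirections.Count, which is zero (yielding NaN components) when
// both eyes are closed. A minimal hedged sketch of a reusable helper with a guard; _foveInterface
// and Head are the fields used above, and falling back to Head.forward is an assumption, not the
// original behaviour:
private Ray GetAveragedGazeRay()
{
    FoveInterfaceBase.EyeRays rays = _foveInterface.GetGazeRays();
    EFVR_Eye eyeClosed = FoveInterface.CheckEyesClosed();

    Vector3 direction = Vector3.zero;
    int openEyes = 0;
    if (eyeClosed != EFVR_Eye.Both && eyeClosed != EFVR_Eye.Left)
    {
        direction += rays.left.direction;
        openEyes++;
    }
    if (eyeClosed != EFVR_Eye.Both && eyeClosed != EFVR_Eye.Right)
    {
        direction += rays.right.direction;
        openEyes++;
    }

    // Guard: with both eyes closed there is no gaze sample, so fall back to the head direction.
    direction = openEyes > 0 ? direction / openEyes : Head.forward;
    return new Ray(Head.transform.position, direction * 1000);
}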
void Update()
{
    // get gaze
    FoveInterface.EyeRays eyeRays = f.GetGazeRays();
    Ray left = eyeRays.left;
    Ray right = eyeRays.right;
    leftEyeSphere.transform.position = left.origin + left.direction.normalized * 2.0f;
    rightEyeSphere.transform.position = right.origin + right.direction.normalized * 2.0f;

    // get angles and rotation of left, right, head
    Vector2 leftAngles = GetAngles(left.direction, f.transform.forward, f.transform.up);
    Vector2 rightAngles = GetAngles(right.direction, f.transform.forward, f.transform.up);
    Vector3 headAngles = GetAngles(f.transform.forward, Vector3.forward, Vector3.up);
    Quaternion leftRotation = Quaternion.FromToRotation(left.direction, f.transform.forward);
    Quaternion rightRotation = Quaternion.FromToRotation(right.direction, f.transform.forward);
    Quaternion headRotation = f.transform.rotation;

    // move
    if (Input.GetKey(KeyCode.W))
    {
        MoveFoveCamera(f.transform.rotation * Vector3.forward);
    }
    if (Input.GetKey(KeyCode.S))
    {
        MoveFoveCamera(f.transform.rotation * -Vector3.forward);
    }
    if (Input.GetKey(KeyCode.A))
    {
        MoveFoveCamera(f.transform.rotation * -Vector3.right);
    }
    if (Input.GetKey(KeyCode.D))
    {
        MoveFoveCamera(f.transform.rotation * Vector3.right);
    }
    if (true)
    {
        float rx = Input.GetAxis("Mouse X");
        float ry = Input.GetAxis("Mouse Y");
        f.transform.parent.transform.Rotate(f.transform.localRotation * Vector3.up, rx * ROTATE_SPEED, Space.World);
        f.transform.parent.transform.Rotate(f.transform.localRotation * Vector3.left, ry * ROTATE_SPEED, Space.World);
    }

    // enable calibration
    if (Input.GetKeyDown(KeyCode.Space))
    {
        calibrationSphere.SetActive(true);
    }

    // record the data
    if (Input.GetKeyDown(KeyCode.F))
    {
        if (recording)
        {
            recorder.Close();
            recordingLight.GetComponent<Renderer>().material.color = Color.white;
            recording = false;
        }
        else
        {
            recorder = new StreamWriter(new FileStream("record.txt", FileMode.OpenOrCreate));
            recordingLight.GetComponent<Renderer>().material.color = Color.red;
            recording = true;
        }
    }
    DateTime now = DateTime.Now;
    if (recording && (now - lastRecordTime).TotalMilliseconds > 100)
    {
        Record(recorder, "left", leftAngles, leftRotation);
        Record(recorder, "right", rightAngles, rightRotation);
        Record(recorder, "head", headAngles, headRotation);
        lastRecordTime = now;
    }

    // render calibration point
    if (calibrationSphere.activeSelf)
    {
        /*RaycastHit hit;
         * Ray ray = new Ray(f.transform.position, f.transform.forward);
         * if (Physics.Raycast(ray, out hit, 10.0f) && false) {
         *     calibSphere.transform.position = hit.point;
         * }*/
        calibrationSphere.transform.position = f.transform.position + f.transform.forward * 3.0f;
    }

    // calibrate
    if (Input.GetKeyUp(KeyCode.Space))
    {
        calibrationSphere.SetActive(false);
        f.ManualDriftCorrection3D(calibrationSphere.transform.localPosition);
    }
}
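// The snippet above relies on a GetAngles(direction, forward, up) helper that is not shown.
// A hedged sketch of one plausible implementation, returning (yaw, pitch) in degrees of a
// direction relative to a reference forward/up frame; the sign and axis conventions here are
// assumptions, not necessarily the original ones:
private static Vector2 GetAngles(Vector3 direction, Vector3 referenceForward, Vector3 referenceUp)
{
    // Express the direction in the reference frame spanned by (right, up, forward).
    Vector3 right = Vector3.Cross(referenceUp, referenceForward).normalized;
    Vector3 local = new Vector3(
        Vector3.Dot(direction, right),
        Vector3.Dot(direction, referenceUp.normalized),
        Vector3.Dot(direction, referenceForward.normalized));

    // Yaw: rotation around the up axis; pitch: elevation out of the forward/right plane.
    float yaw = Mathf.Atan2(local.x, local.z) * Mathf.Rad2Deg;
    float pitch = Mathf.Atan2(local.y, new Vector2(local.x, local.z).magnitude) * Mathf.Rad2Deg;
    return new Vector2(yaw, pitch);
}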
/// <summary>
/// Updates the cursor position on screen based on the eye gaze ray from the VR HMD (if eye tracking is used as the control method).
/// This function is also called if RecordGazePosition is true, in which case the ray is still handled but the cursor position is not updated.
/// This is in case we want to record gaze position regardless of whether eye tracking is used as the control method.
/// </summary>
private void TrackEyes()
{
    List<Vector3> eyeDirections = new List<Vector3>();
    switch (TestController.Instance.TestBlockData.SelectedVRHMD)
    {
        case TestBlock.VRHMD.VIVE:
            Vector3 gaze = Pupil.values.GazePoint3D;
            // Transform and correct the eye-tracking gaze
            gaze = (transform.rotation * gaze).normalized;
            Vector3 delta = transform.forward.normalized - gaze;
            gaze = gaze + delta * 2;
            //float eyeConfidence = (Pupil.values.Confidences[0] + Pupil.values.Confidences[1]) / 2.0f;
            //if (eyeConfidence > 0.7f)
            //{
            eyeDirections.Add(gaze);
            //}
            break;
        case TestBlock.VRHMD.FOVE:
            FoveInterface.EyeRays rays = _foveInterface.GetGazeRays();
            EFVR_Eye eyeClosed = FoveInterface.CheckEyesClosed();
            switch (eyeClosed)
            {
                case EFVR_Eye.Neither:
                    eyeDirections.Add(rays.left.direction);
                    eyeDirections.Add(rays.right.direction);
                    break;
                case EFVR_Eye.Left:
                    eyeDirections.Add(rays.right.direction);
                    break;
                case EFVR_Eye.Right:
                    eyeDirections.Add(rays.left.direction);
                    break;
                case EFVR_Eye.Both:
                    eyeDirections.Add(Vector3.zero);
                    break;
            }
            break;
    }
    Vector3 direction = Vector3.zero;
    foreach (Vector3 eyeDirection in eyeDirections)
    {
        direction += eyeDirection;
    }
    direction = direction / eyeDirections.Count;
    Ray ray = new Ray(_rayOrigin, direction);
    ray = GetAverageEyeRay(ray);
    CurrentEyeGazeScreenPoint = VRCamera.WorldToScreenPoint(_rayOrigin + ray.direction * POINT_CALCULATION_DISTANCE);
    if (CurrentControlMethod == TestBlock.ControlMethod.Eyetracking)
    {
        HandleRay(ray);
    }
    else
    {
        HandleGazeTrackingRay(ray);
    }
    Debug.DrawRay(ray.origin, ray.direction * 100);
}
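// The VIVE branch's "correct the eye-tracking gaze" step is easier to read when expanded:
// gaze + (forward - gaze) * 2 == forward + (forward - gaze), i.e. the gaze's deviation from the
// head-forward direction is mirrored to the opposite side of forward. A hedged equivalent of
// that step, with forward and gaze as in the snippet above (why the mirroring is needed is not
// stated in the original, presumably an axis or sign convention of the Pupil data):
private static Vector3 MirrorAroundForward(Vector3 gaze, Vector3 forward)
{
    // forward - (gaze - forward): flip the offset of gaze relative to forward.
    return 2.0f * forward - gaze;
}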
// Update is called once per frame
void Update()
{
    Vector3 pos = transform.position;
    SFVR_Quaternion orientation = FoveInterface.GetLastPose().orientation;
    Quaternion quat = new Quaternion(orientation.x, orientation.y, orientation.z, orientation.w);
    Vector3 forward = quat * Vector3.forward;
    if (_shouldStop)
    {
        if (_shouldLeftEyeDark)
        {
            foveInterface.LeftFoveEye.ShouldDraw = false;
            foveInterface.RightFoveEye.ShouldDraw = true;
        }
        else
        {
            foveInterface.LeftFoveEye.ShouldDraw = true;
            foveInterface.RightFoveEye.ShouldDraw = false;
        }
        _shouldEyeDarkTimer += Time.deltaTime;
        if (_shouldEyeDarkTimer > 3.0f)
        {
            _shouldEyeDarkTimer = 0.0f;
            _shouldLeftEyeDark = !_shouldLeftEyeDark;
            if (_shouldLeftEyeDark)
            {
                print("left flick");
            }
            else
            {
                print("right flick");
            }
        }
    }
    else
    {
        float speed = 20;
        // distanceThreshold accumulates distance travelled; speed * 5 corresponds to about five seconds of motion
        distanceThreshold += speed * Time.deltaTime;
        if (goForward && distanceThreshold > speed * 5)
        {
            distanceThreshold = 0.0f;
            goForward = false;
            _shouldStop = true;
        }
        else if (!goForward && distanceThreshold > speed * 5)
        {
            distanceThreshold = 0.0f;
            goForward = true;
        }
        if (goForward)
        {
            pos += forward * speed * Time.deltaTime;
        }
        else
        {
            pos -= forward * speed * Time.deltaTime;
        }
        transform.position = pos;
    }

    FoveInterfaceBase.EyeRays eyeRays = foveInterface.GetGazeRays();
    RaycastHit leftRaycastHit, rightRaycastHit;

    // Physics.Raycast leaves hit.point at Vector3.zero on a miss, so a zero point means "no hit"
    Physics.Raycast(eyeRays.left, out leftRaycastHit, Mathf.Infinity);
    if (leftRaycastHit.point != Vector3.zero)
    {
        leftEye.transform.position = leftRaycastHit.point;
    }
    else
    {
        leftEye.transform.position = eyeRays.left.GetPoint(3.0f);
    }
    _leftRecords.Add(new EyeTrackingRecord(Timer, eyeRays.left.origin, eyeRays.left.direction, leftEye.transform.position));

    Physics.Raycast(eyeRays.right, out rightRaycastHit, Mathf.Infinity);
    if (rightRaycastHit.point != Vector3.zero)
    {
        rightEye.transform.position = rightRaycastHit.point;
    }
    else
    {
        rightEye.transform.position = eyeRays.right.GetPoint(3.0f);
    }
    _rightRecords.Add(new EyeTrackingRecord(Timer, eyeRays.right.origin, eyeRays.right.direction, rightEye.transform.position));

    if (SaveTimer > 1.0f)
    {
        string leftPath = LoggingManager.GetPath(dataFileID + "_left.csv");
        System.IO.FileInfo file = new System.IO.FileInfo(leftPath);
        file.Directory.Create();
        using (var writer = new StreamWriter(leftPath, append: true))
        {
            foreach (EyeTrackingRecord record in _leftRecords)
            {
                writer.WriteLine("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9}",
                    record.Timer,
                    record.Origin.x, record.Origin.y, record.Origin.z,
                    record.Direction.x, record.Direction.y, record.Direction.z,
                    record.TouchedPosition.x, record.TouchedPosition.y, record.TouchedPosition.z);
            }
            writer.Flush();
            _leftRecords.Clear();
        }

        string rightPath = LoggingManager.GetPath(dataFileID + "_right.csv");
        file = new System.IO.FileInfo(rightPath);
        file.Directory.Create();
        using (var writer = new StreamWriter(rightPath, append: true))
        {
            foreach (EyeTrackingRecord record in _rightRecords)
            {
                writer.WriteLine("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9}",
                    record.Timer,
                    record.Origin.x, record.Origin.y, record.Origin.z,
                    record.Direction.x, record.Direction.y, record.Direction.z,
                    record.TouchedPosition.x, record.TouchedPosition.y, record.TouchedPosition.z);
            }
            writer.Flush();
            _rightRecords.Clear();
        }
        SaveTimer = 0.0f;
    }
    SaveTimer += Time.deltaTime;
    Timer += Time.deltaTime;
}
// Update is called once per frame
void Update()
{
    FoveInterfaceBase.EyeRays eyeRays = foveInterface.GetGazeRays();
    RaycastHit leftRaycastHit, rightRaycastHit;

    Physics.Raycast(eyeRays.left, out leftRaycastHit, Mathf.Infinity);
    if (leftRaycastHit.point != Vector3.zero)
    {
        leftEye = leftRaycastHit.point;
    }
    else
    {
        leftEye = eyeRays.left.GetPoint(3.0f);
    }
    _leftRecords.Add(new EyeTrackingRecord(Timer, eyeRays.left.origin, eyeRays.left.direction, leftEye));

    Physics.Raycast(eyeRays.right, out rightRaycastHit, Mathf.Infinity);
    if (rightRaycastHit.point != Vector3.zero)
    {
        rightEye = rightRaycastHit.point;
    }
    else
    {
        rightEye = eyeRays.right.GetPoint(3.0f);
    }
    _rightRecords.Add(new EyeTrackingRecord(Timer, eyeRays.right.origin, eyeRays.right.direction, rightEye));

    if (SaveTimer > 1.0f)
    {
        string leftPath = LoggingManager.GetPath(dataFileID + "_left.csv");
        System.IO.FileInfo file = new System.IO.FileInfo(leftPath);
        file.Directory.Create();
        using (var writer = new StreamWriter(leftPath, append: true))
        {
            foreach (EyeTrackingRecord record in _leftRecords)
            {
                writer.WriteLine("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9}",
                    record.Timer,
                    record.Origin.x, record.Origin.y, record.Origin.z,
                    record.Direction.x, record.Direction.y, record.Direction.z,
                    record.TouchedPosition.x, record.TouchedPosition.y, record.TouchedPosition.z);
            }
            writer.Flush();
            _leftRecords.Clear();
        }

        string rightPath = LoggingManager.GetPath(dataFileID + "_right.csv");
        file = new System.IO.FileInfo(rightPath);
        file.Directory.Create();
        using (var writer = new StreamWriter(rightPath, append: true))
        {
            foreach (EyeTrackingRecord record in _rightRecords)
            {
                writer.WriteLine("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9}",
                    record.Timer,
                    record.Origin.x, record.Origin.y, record.Origin.z,
                    record.Direction.x, record.Direction.y, record.Direction.z,
                    record.TouchedPosition.x, record.TouchedPosition.y, record.TouchedPosition.z);
            }
            writer.Flush();
            _rightRecords.Clear();
        }
        SaveTimer = 0.0f;
    }
    SaveTimer += Time.deltaTime;
    Timer += Time.deltaTime;
}
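// Both logging snippets above assume an EyeTrackingRecord type that is not shown. A minimal
// sketch inferred from its usage (the constructor order and field names follow the calls and the
// WriteLine arguments; the original definition may differ). Each CSV row is then
// Timer, Origin.xyz, Direction.xyz, TouchedPosition.xyz:
public class EyeTrackingRecord
{
    public float Timer;              // time since the scene started, in seconds
    public Vector3 Origin;           // gaze ray origin in world space
    public Vector3 Direction;        // gaze ray direction in world space
    public Vector3 TouchedPosition;  // raycast hit point, or a point 3 m along the ray on a miss

    public EyeTrackingRecord(float timer, Vector3 origin, Vector3 direction, Vector3 touchedPosition)
    {
        Timer = timer;
        Origin = origin;
        Direction = direction;
        TouchedPosition = touchedPosition;
    }
}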