/// <summary>
/// Applies SRanipal eye-tracking data to the avatar: rotates the left and right
/// eye bones from pupil position and, when enabled, drives the eyelid ("winkle")
/// blend shapes from eye openness.
/// </summary>
/// <param name="eyeData">Per-frame verbose eye data from the SRanipal runtime.</param>
private void UpdateEye(EyeData eyeData)
{
    float totalScale = _settings.eyeTrackingScale;
    // When an eye manager is present it owns the movement strength, and the
    // half-close scaling below is disabled.
    bool adjustEyeWhenHalfClose = true;
    if (_eyeMgr)
    {
        adjustEyeWhenHalfClose = false;
        totalScale *= _eyeMgr.EyeMoveStrength;
    }

    SingleEyeData leftEyeData = eyeData.verbose_data.left;
    ApplyEyeRotation(leftEyeData, HumanBodyBones.LeftEye, basicLeftEyeRot.eulerAngles, totalScale, adjustEyeWhenHalfClose);

    SingleEyeData rightEyeData = eyeData.verbose_data.right;
    ApplyEyeRotation(rightEyeData, HumanBodyBones.RightEye, basicRightEyeRot.eulerAngles, totalScale, adjustEyeWhenHalfClose);

    // Drive eyelid blend shapes from eye openness when enabled.
    if (_eyeMgr && _mesh && _eyeMgr.winkleEnable)
    {
        float leftOpeness = NormalizedOpenness(leftEyeData.eye_openness);
        float rightOpeness = NormalizedOpenness(rightEyeData.eye_openness);
        _mesh.SetBlendShapeWeight(_eyeMgr.leftEyeWinkleBlendShapeIndex, (1.0f - leftOpeness) * 100.0f);
        _mesh.SetBlendShapeWeight(_eyeMgr.rightEyeWinkleBlendShapeIndex, (1.0f - rightOpeness) * 100.0f);
    }
}

// Rotates a single eye bone from pupil position; shared by left and right eye
// (previously duplicated inline for each eye).
private void ApplyEyeRotation(SingleEyeData eyeData, HumanBodyBones bone, Vector3 baseAngles, float totalScale, bool adjustEyeWhenHalfClose)
{
    // Skip mostly-closed eyes and invalid sensor samples.
    if (eyeData.eye_openness <= 0.3f ||
        !eyeData.GetValidity(SingleEyeDataValidity.SINGLE_EYE_DATA_PUPIL_POSITION_IN_SENSOR_AREA_VALIDITY))
    {
        return;
    }

    Vector2 rotation = EyeDataToRotation(eyeData.pupil_position_in_sensor_area.x, eyeData.pupil_position_in_sensor_area.y);
    float scale = 1.0f;
    if (adjustEyeWhenHalfClose)
    {
        // scale = 1.0 for openness <= 0.5, ramping up to 2.25 at openness 1.0:
        // eye movement is amplified as the lid opens past half.
        scale = (0.2f - Math.Min(0.0f, 0.5f - eyeData.eye_openness)) * 2.5f + 0.5f;
    }
    rotation *= scale * totalScale;

    Transform boneTransform = _animator.GetBoneTransform(bone);
    if (boneTransform == null)
    {
        return; // Avatar has no eye bone mapped; nothing to rotate.
    }
    boneTransform.localRotation = Quaternion.Euler(baseAngles.x + rotation.y, baseAngles.y + rotation.x, baseAngles.z);
}

// Clamps raw openness into [0.1, 0.8] and rescales that band to [0, 1].
private static float NormalizedOpenness(float openness)
{
    return Math.Min(Math.Max(openness, 0.1f), 0.8f) / 0.7f - 0.1f / 0.7f;
}
/// <summary>
/// Extracts a Unity ray from the eye data for a single eye, representing its gaze ray.
/// </summary>
/// <param name="singleEyeData">The (single) eye data to build the ray from.</param>
/// <returns>A ray in Unity coordinates, starting at the gaze origin, pointing into the gaze direction.</returns>
private static Ray GetWorldRay(ref SingleEyeData singleEyeData)
{
    // We receive the origin and direction in a right-handed coordinate system,
    // but Unity is left-handed, so the X axis is mirrored.
    Vector3 origin = singleEyeData.gaze_origin_mm * 0.001f; // mm -> m
    origin.x *= -1;
    Vector3 direction = singleEyeData.gaze_direction_normalized;
    direction.x *= -1;

    // Cache the camera transform: each Camera.main access performs a scene lookup,
    // and the original dereferenced it twice per call.
    Transform cameraTransform = Camera.main.transform;
    return new Ray(cameraTransform.TransformPoint(origin), cameraTransform.TransformDirection(direction));
}
/// <summary>
/// Gets the openness value of an eye when the eye callback function is enabled.
/// </summary>
/// <param name="eye">The index of an eye.</param>
/// <param name="openness">The openness value of an eye, clamped between 0 (fully closed) and 1 (fully open).</param>
/// <param name="eye_data">ViveSR.anipal.Eye.EyeData_v2.</param>
/// <returns>Indicates whether the openness value received is valid.</returns>
public static bool GetEyeOpenness(EyeIndex eye, out float openness, EyeData_v2 eye_data)
{
    bool valid = true;
    if (SRanipal_Eye_Framework.Status == SRanipal_Eye_Framework.FrameworkStatus.WORKING)
    {
        SingleEyeData eyeData = eye == EyeIndex.LEFT ? eye_data.verbose_data.left : eye_data.verbose_data.right;
        valid = eyeData.GetValidity(SingleEyeDataValidity.SINGLE_EYE_DATA_EYE_OPENNESS_VALIDITY);
        openness = valid ? eyeData.eye_openness : 0;
    }
    else
    {
        // If eye tracking is not supported, default to open and treat as valid.
        openness = 1;
    }
    // BUG FIX: the previous version always returned true, contradicting the
    // documented contract; now the validity flag is actually propagated.
    return valid;
}
/// <summary>
/// Copies the latest SRanipal sample into this eye's state: gaze direction
/// (only when valid), openness, and — when per-eye expression data is supplied —
/// the widen and squeeze values.
/// </summary>
public void Update(SingleEyeData eyeData, SingleEyeExpression? expression = null)
{
    bool gazeValid = eyeData.GetValidity(SingleEyeDataValidity.SINGLE_EYE_DATA_GAZE_DIRECTION_VALIDITY);
    if (gazeValid)
    {
        Look = eyeData.gaze_direction_normalized.Invert();
    }

    Openness = eyeData.eye_openness;

    // A combined eye carries no per-eye expression data, so stop here in that case.
    if (!expression.HasValue)
    {
        return;
    }

    SingleEyeExpression exp = expression.Value;
    Widen = exp.eye_wide;
    Squeeze = exp.eye_squeeze;
}
/// <summary>
/// Gets the 2D position of a selected pupil when the eye callback function is enabled.
/// </summary>
/// <param name="eye">The index of an eye.</param>
/// <param name="postion">The 2D position of a selected pupil clamped between -1 and 1.
/// Position (0, 0) indicates that the pupil is looking forward;
/// position (1, 1) up-rightward; and
/// position (-1, -1) left-downward.</param>
/// <param name="eye_data">ViveSR.anipal.Eye.EyeData.</param>
/// <returns>Indicates whether the pupil position received is valid.</returns>
public static bool GetPupilPosition(EyeIndex eye, out Vector2 postion, EyeData eye_data)
{
    bool valid = false;
    if (SRanipal_Eye_Framework.Status == SRanipal_Eye_Framework.FrameworkStatus.WORKING)
    {
        SingleEyeData eyeData = eye == EyeIndex.LEFT ? eye_data.verbose_data.left : eye_data.verbose_data.right;
        valid = eyeData.GetValidity(SingleEyeDataValidity.SINGLE_EYE_DATA_PUPIL_POSITION_IN_SENSOR_AREA_VALIDITY);
        // Remap sensor-area coordinates ([0,1], y pointing down) to [-1,1] with y up.
        // (Fixed: removed the redundant nested assignment "postion = postion = ...".)
        postion = valid
            ? new Vector2(eyeData.pupil_position_in_sensor_area.x * 2 - 1,
                          eyeData.pupil_position_in_sensor_area.y * -2 + 1)
            : Vector2.zero;
    }
    else
    {
        // If eye tracking is not supported, default to centered and report valid.
        postion = Vector2.zero;
        valid = true;
    }
    return valid;
}
/// <summary>
/// Gets the gaze ray of a source of eye gaze data.
/// </summary>
/// <param name="gazeIndex">The index of a source of eye gaze data.</param>
/// <param name="origin">The starting point of the ray in local coordinates.</param>
/// <param name="direction">The direction of the ray.</param>
/// <returns>Indicates whether the eye gaze data received is valid.</returns>
public static bool GetGazeRay(GazeIndex gazeIndex, out Vector3 origin, out Vector3 direction)
{
    bool valid = false;
    origin = Vector3.zero;
    direction = Vector3.forward;
    if (SRanipal_Eye_Framework.Status != SRanipal_Eye_Framework.FrameworkStatus.WORKING)
    {
        // Framework not running: fall back to a forward ray from the camera.
        origin = Camera.main.transform.position;
        valid = true;
    }
    else
    {
        UpdateData();
        SingleEyeData[] eyesData = new SingleEyeData[(int)GazeIndex.COMBINE + 1];
        eyesData[(int)GazeIndex.LEFT] = EyeData_.verbose_data.left;
        eyesData[(int)GazeIndex.RIGHT] = EyeData_.verbose_data.right;
        eyesData[(int)GazeIndex.COMBINE] = EyeData_.verbose_data.combined.eye_data;

        // The COMBINE branch and the LEFT/RIGHT branch were duplicated code:
        // gazeIndex maps straight into eyesData, so one branch covers all three.
        if (gazeIndex == GazeIndex.LEFT || gazeIndex == GazeIndex.RIGHT || gazeIndex == GazeIndex.COMBINE)
        {
            valid = eyesData[(int)gazeIndex].GetValidity(SingleEyeDataValidity.SINGLE_EYE_DATA_GAZE_DIRECTION_VALIDITY);
            if (valid)
            {
                origin = eyesData[(int)gazeIndex].gaze_origin_mm * 0.001f; // mm -> m
                direction = eyesData[(int)gazeIndex].gaze_direction_normalized;
                direction.x *= -1; // right-handed source -> left-handed Unity
            }
        }
    }
    return valid;
}
/// <summary>
/// Samples one frame of eye-tracking data from both the Tobii XR API and the
/// SRanipal API and appends it as one 40-column CSV row to <paramref name="csvLogger"/>.
/// NOTE(review): a row is only written when SRanipal returns Error.WORK — the
/// Tobii data sampled this frame is discarded otherwise; confirm that is intended.
/// </summary>
/// <param name="time">Timestamp string written into the first CSV column.</param>
/// <param name="frameCount">Frame counter written into the second CSV column.</param>
/// <param name="csvLogger">CSV accumulator; the same instance is returned.</param>
/// <returns>The csvLogger passed in, with one row appended on success.</returns>
public StringBuilder StartTracking(String time, int frameCount, StringBuilder csvLogger)
{
    // Get Eye data from TOBI API
    TobiiXR_EyeTrackingData localEyeData = TobiiXR.GetEyeTrackingData(TobiiXR_TrackingSpace.Local);
    TobiiXR_EyeTrackingData worldEyeData = TobiiXR.GetEyeTrackingData(TobiiXR_TrackingSpace.World);
    // Get Eye data from SRNipal
    ViveSR.Error error = SRanipal_Eye_API.GetEyeData(ref _eyeData);
    if (error == Error.WORK)
    {
        _sRanipalEyeVerboseData = _eyeData.verbose_data;
        // Left Eye Data
        SingleEyeData sRleftEyeData = _sRanipalEyeVerboseData.left;
        // Right Eye
        SingleEyeData sRrightEyeData = _sRanipalEyeVerboseData.right;
        // Write in the CSV file
        // NOTE(review): the format string below defines the column order; it must
        // stay in sync with wherever the CSV header is written.
        csvLogger.AppendFormat(
            "{0}, {1}, {2}, {3}, {4}, {5}, {6}, {7}, {8}, {9}, {10}, {11}, {12}, {13}, {14}, {15}, {16}, {17}, {18}," +
            "{19}, {20}, {21}, {22}, {23}, {24}, {25},{26}, {27}, {28}, {29}, {30}, {31}, {32}, {33}, {34}, {35}," +
            "{36}, {37}, {38}, {39}",
            // Time and Frame count
            time, frameCount,
            // Convergence Distance
            worldEyeData.ConvergenceDistanceIsValid, worldEyeData.ConvergenceDistance,
            // Eye Openness
            sRleftEyeData.eye_openness, sRrightEyeData.eye_openness,
            // Eye blinking
            localEyeData.IsLeftEyeBlinking, localEyeData.IsRightEyeBlinking,
            // Pupil Diameter
            sRleftEyeData.pupil_diameter_mm, sRrightEyeData.pupil_diameter_mm,
            // Pupil Position in Sensor area (x, y)
            sRleftEyeData.pupil_position_in_sensor_area.x, sRleftEyeData.pupil_position_in_sensor_area.y,
            sRrightEyeData.pupil_position_in_sensor_area.x, sRrightEyeData.pupil_position_in_sensor_area.y,
            // IS local Gaze Valid
            localEyeData.GazeRay.IsValid,
            // Local Space Gaze Origin Combined
            localEyeData.GazeRay.Origin.x, localEyeData.GazeRay.Origin.y, localEyeData.GazeRay.Origin.z,
            // Local Space Gaze Direction Combined
            localEyeData.GazeRay.Direction.x, localEyeData.GazeRay.Direction.y, localEyeData.GazeRay.Direction.z,
            // IS World Gaze Valid
            worldEyeData.GazeRay.IsValid,
            //world space Gaze Origin Combined
            worldEyeData.GazeRay.Origin.x, worldEyeData.GazeRay.Origin.y, worldEyeData.GazeRay.Origin.z,
            // world space Gaze Direction Combined
            worldEyeData.GazeRay.Direction.x, worldEyeData.GazeRay.Direction.y, worldEyeData.GazeRay.Direction.z,
            // Gaze Origin in mm
            sRleftEyeData.gaze_origin_mm.x, sRleftEyeData.gaze_origin_mm.y, sRleftEyeData.gaze_origin_mm.z,
            sRrightEyeData.gaze_origin_mm.x, sRrightEyeData.gaze_origin_mm.y, sRrightEyeData.gaze_origin_mm.z,
            // Normalized Gaze direction
            sRleftEyeData.gaze_direction_normalized.x, sRleftEyeData.gaze_direction_normalized.y, sRleftEyeData.gaze_direction_normalized.z,
            sRrightEyeData.gaze_direction_normalized.x, sRrightEyeData.gaze_direction_normalized.y, sRrightEyeData.gaze_direction_normalized.z
        );
        csvLogger.AppendLine();
    }
    return(csvLogger);
}