Example #1
0
 /// <summary>
 /// Copy-constructs a snapshot from an SRanipal <c>EyeData_v2</c> sample,
 /// stamping it with the current wall-clock time relative to the J2000 epoch.
 /// </summary>
 /// <param name="eyeDataV2">Source sample from the SRanipal Eye module.</param>
 public EmteqEyeData(EyeData_v2 eyeDataV2)
 {
     // Milliseconds between the Unix epoch (1970-01-01) and J2000 (2000-01-01).
     const ulong unixToJ2000OffsetMs = 946684800000UL;

     timestampJ2000  = (ulong)DateTimeOffset.Now.ToUnixTimeMilliseconds() - unixToJ2000OffsetMs;
     no_user         = eyeDataV2.no_user;
     frame_sequence  = eyeDataV2.frame_sequence;
     timestamp       = eyeDataV2.timestamp;
     verbose_data    = new VerboseData(eyeDataV2.verbose_data);
     expression_data = new EyeExpression(eyeDataV2.expression_data);
 }
Example #2
0
 /// <summary>
 /// Feeds the latest SRanipal gaze directions into the 2D tracking state.
 /// An eye whose gaze-direction validity bit is not set is passed as null.
 /// </summary>
 public void UpdateEyeTrack(EyeData_v2 eyeData)
 {
     // SRanipal reports gaze in a right-handed frame; negate X for Unity.
     Vector3 mirrorX = new Vector3(-1, 1, 1);

     Vector3? leftGazeDir  = null;
     Vector3? rightGazeDir = null;

     if (eyeData.verbose_data.left.GetValidity(SingleEyeDataValidity.SINGLE_EYE_DATA_GAZE_DIRECTION_VALIDITY))
     {
         leftGazeDir = Vector3.Scale(eyeData.verbose_data.left.gaze_direction_normalized, mirrorX);
     }

     if (eyeData.verbose_data.right.GetValidity(SingleEyeDataValidity.SINGLE_EYE_DATA_GAZE_DIRECTION_VALIDITY))
     {
         rightGazeDir = Vector3.Scale(eyeData.verbose_data.right.gaze_direction_normalized, mirrorX);
     }

     UpdateXY(leftGazeDir, rightGazeDir);
 }
Example #3
0
 /// <summary>
 /// Gets the openness value of an eye when enable eye callback function.
 /// </summary>
 /// <param name="eye">The index of an eye.</param>
 /// <param name="openness">The openness value of an eye, clamped between 0 (fully closed) and 1 (fully open). </param>
 /// <param name="eye_data">ViveSR.anipal.Eye.EyeData_v2. </param>
 /// <returns>Indicates whether the openness value received is valid.</returns>
 public static bool GetEyeOpenness(EyeIndex eye, out float openness, EyeData_v2 eye_data)
 {
     // BUG FIX: this method previously returned true unconditionally, which
     // contradicted the documented contract above. It now reports the SDK's
     // openness-validity flag, matching the pattern used by GetPupilPosition.
     bool valid = true;

     if (SRanipal_Eye_Framework.Status == SRanipal_Eye_Framework.FrameworkStatus.WORKING)
     {
         SingleEyeData eyeData = eye == EyeIndex.LEFT ? eye_data.verbose_data.left : eye_data.verbose_data.right;
         valid    = eyeData.GetValidity(SingleEyeDataValidity.SINGLE_EYE_DATA_EYE_OPENNESS_VALIDITY);
         openness = valid ? eyeData.eye_openness : 0;
     }
     else
     {
         // If not support eye tracking, set default to open (and report valid,
         // consistent with the other fallback paths in this wrapper).
         openness = 1;
     }
     return(valid);
 }
Example #4
0
 /// <summary>
 /// SRanipal eye-data callback: caches the sample, tracks inter-frame timing,
 /// and pushes timestamp / combined gaze / pupil diameters as one LSL sample.
 /// </summary>
 private static void EyeCallback(ref EyeData_v2 eye_data)
 {
     eyeData   = eye_data;
     lastTime  = nextTime;
     nextTime  = eyeData.timestamp;
     buffer[0] = eyeData.timestamp;
     buffer[1] = eyeData.verbose_data.combined.eye_data.gaze_origin_mm.x;
     buffer[2] = eyeData.verbose_data.combined.eye_data.gaze_origin_mm.y;
     buffer[3] = eyeData.verbose_data.combined.eye_data.gaze_origin_mm.z;
     buffer[4] = eyeData.verbose_data.combined.eye_data.gaze_direction_normalized.x;
     buffer[5] = eyeData.verbose_data.combined.eye_data.gaze_direction_normalized.y;
     buffer[6] = eyeData.verbose_data.combined.eye_data.gaze_direction_normalized.z;
     buffer[7] = eyeData.verbose_data.left.pupil_diameter_mm;
     // BUG FIX: buffer[8] previously duplicated the LEFT pupil diameter;
     // it now carries the RIGHT eye as the two-channel layout implies.
     buffer[8] = eyeData.verbose_data.right.pupil_diameter_mm;
     soutlet.push_sample(buffer);
 }
Example #5
0
                /// <summary>
                /// Gets the gaze ray data of a source eye gaze data when enable eye callback function.
                /// </summary>
                /// <param name="gazeIndex">The index of a source of eye gaze data.</param>
                /// <param name="ray">The starting point and direction of the ray.</param>
                /// <param name="eye_data">ViveSR.anipal.Eye.EyeData_v2. </param>
                /// <returns>Indicates whether the gaze ray data received is valid.</returns>
                public static bool GetGazeRay(GazeIndex gazeIndex, out Ray ray, EyeData_v2 eye_data)
                {
                    Vector3 rayOrigin    = Vector3.zero;
                    Vector3 rayDirection = Vector3.forward;
                    bool    isValid      = false;

                    switch (SRanipal_Eye_Framework.Status)
                    {
                        case SRanipal_Eye_Framework.FrameworkStatus.WORKING:
                            isValid = GetGazeRay(gazeIndex, out rayOrigin, out rayDirection, eye_data);
                            break;
                        case SRanipal_Eye_Framework.FrameworkStatus.NOT_SUPPORT:
                            // No eye tracking on this device: report a valid
                            // forward ray anchored at the main camera.
                            rayOrigin = Camera.main.transform.position;
                            isValid   = true;
                            break;
                    }

                    ray = new Ray(rayOrigin, rayDirection);
                    return isValid;
                }
        /// <summary>
        /// It's called at 120 fps to get more accurate data
        /// </summary>
        private static void EyeCallback(ref EyeData_v2 eye_data)
        {
            Debug.Log("callback hogehoge");
            // Gets data from anipal's Eye module
            eyeData = eye_data;

            // The time when the frame was capturing. in millisecond.
            timeStamp = eyeData.timestamp;

            // The point in the eye from which the gaze ray originates, in
            // millimeters (right-handed coordinate system).
            gazeOriginLeft  = eyeData.verbose_data.left.gaze_origin_mm;
            // BUG FIX: was `verbose_data.Right` (capital R), which does not exist
            // on VerboseData and fails to compile — every other access in this
            // file uses the lower-case `right` field.
            gazeOriginRight = eyeData.verbose_data.right.gaze_origin_mm;
            Debug.Log("gazeOriginLeft: " + gazeOriginLeft);

            // The normalized gaze direction of the eye (right-handed coordinate system).
            gazeDirectionLeft     = eyeData.verbose_data.left.gaze_direction_normalized;
            gazeDirectionRight    = eyeData.verbose_data.right.gaze_direction_normalized;
            gazeDirectionCombined = eyeData.verbose_data.combined.eye_data.gaze_direction_normalized;
            Debug.Log("gaze_direction_left: " + gazeDirectionLeft);

            // The diameter of the pupil in milli meter
            pupilDiameterLeft     = eyeData.verbose_data.left.pupil_diameter_mm;
            pupilDiameterRight    = eyeData.verbose_data.right.pupil_diameter_mm;
            pupilDiameterCombined = eyeData.verbose_data.combined.eye_data.pupil_diameter_mm;
            Debug.Log("pupilDiameterLeft: " + pupilDiameterLeft);

            // A value representing how open the eye is in [0,1]
            eyeOpenLeft     = eyeData.verbose_data.left.eye_openness;
            eyeOpenRight    = eyeData.verbose_data.right.eye_openness;
            eyeOpenCombined = eyeData.verbose_data.combined.eye_data.eye_openness;
            Debug.Log("eyeOpenLeft: " + eyeOpenLeft);

            // The normalized position of a pupil in [0,1]
            pupilPositionLeft     = eyeData.verbose_data.left.pupil_position_in_sensor_area;
            pupilPositionRight    = eyeData.verbose_data.right.pupil_position_in_sensor_area;
            pupilPositionCombined = eyeData.verbose_data.combined.eye_data.pupil_position_in_sensor_area;
            Debug.Log("pupilPositionLeft: " + pupilPositionLeft);

            // Serialize the sample; the lock guards the shared writer against
            // concurrent access.
            lock (DebugWriter)
            {
                CSVwriter.Write();
            }
        }
Example #7
0
    /// <summary>
    /// SRanipal eye-data callback: packages pupil diameter and position (with
    /// their validity bits) into a Data snapshot, optionally logs it, and
    /// raises DataChanged.
    /// </summary>
    private void EyeCallback(ref EyeData_v2 eye_data)
    {
        SingleEyeData leftEye  = eye_data.verbose_data.left;
        SingleEyeData rightEye = eye_data.verbose_data.right;

        Data data = new Data
        {
            hasUser                   = !eye_data.no_user,
            leftPupilDiameterMm       = leftEye.pupil_diameter_mm,
            rightPupilDiameterMm      = rightEye.pupil_diameter_mm,
            isLeftPupilDiameterValid  = leftEye.GetValidity(SingleEyeDataValidity.SINGLE_EYE_DATA_PUPIL_DIAMETER_VALIDITY),
            isRightPupilDiameterValid = rightEye.GetValidity(SingleEyeDataValidity.SINGLE_EYE_DATA_PUPIL_DIAMETER_VALIDITY),
            leftPupilPosition         = leftEye.pupil_position_in_sensor_area,
            rightPupilPosition        = rightEye.pupil_position_in_sensor_area,
            isLeftPupilPositionValid  = leftEye.GetValidity(SingleEyeDataValidity.SINGLE_EYE_DATA_PUPIL_POSITION_IN_SENSOR_AREA_VALIDITY),
            isRightPupilPositionValid = rightEye.GetValidity(SingleEyeDataValidity.SINGLE_EYE_DATA_PUPIL_POSITION_IN_SENSOR_AREA_VALIDITY),
        };

        if (sLogChanges)
        {
            Debug.Log("Pupilometry data: " + data);
        }

        DataChanged?.Invoke(this, data);
    }
    /// <summary>
    /// SRanipal eye-data callback: builds a pupil-diameter snapshot (with
    /// validity bits), optionally logs it, and notifies DataChanged subscribers.
    /// </summary>
    private void EyeCallback(ref EyeData_v2 eye_data)
    {
        SingleEyeData leftEye  = eye_data.verbose_data.left;
        SingleEyeData rightEye = eye_data.verbose_data.right;

        Data data = new Data
        {
            hasUser                   = !eye_data.no_user,
            leftPupilDiameterMm       = leftEye.pupil_diameter_mm,
            rightPupilDiameterMm      = rightEye.pupil_diameter_mm,
            isLeftPupilDiameterValid  = leftEye.GetValidity(SingleEyeDataValidity.SINGLE_EYE_DATA_PUPIL_DIAMETER_VALIDITY),
            isRightPupilDiameterValid = rightEye.GetValidity(SingleEyeDataValidity.SINGLE_EYE_DATA_PUPIL_DIAMETER_VALIDITY),
        };

        if (sLogChanges)
        {
            Debug.Log("Pupilometry data: " + data);
        }

        DataChanged?.Invoke(this, data);
    }
Example #9
0
                /// <summary>
                /// Gets the gaze ray of a source of eye gaze data when enable eye callback function.
                /// </summary>
                /// <param name="gazeIndex">The index of a source of eye gaze data.</param>
                /// <param name="origin">The starting point of the ray in local coordinates.</param>
                /// <param name="direction">The direction of the ray.</param>
                /// <param name="eye_data">ViveSR.anipal.Eye.EyeData_v2. </param>
                /// <returns>Indicates whether the eye gaze data received is valid.</returns>
                public static bool GetGazeRay(GazeIndex gazeIndex, out Vector3 origin, out Vector3 direction, EyeData_v2 eye_data)
                {
                    origin    = Vector3.zero;
                    direction = Vector3.forward;

                    if (SRanipal_Eye_Framework.Status != SRanipal_Eye_Framework.FrameworkStatus.WORKING)
                    {
                        // Tracker not running: report a valid forward ray from the HMD.
                        origin = Camera.main.transform.position;
                        return true;
                    }

                    // Pick the requested gaze source directly instead of staging
                    // all three in a temporary array.
                    SingleEyeData source;
                    switch (gazeIndex)
                    {
                        case GazeIndex.LEFT:    source = eye_data.verbose_data.left;              break;
                        case GazeIndex.RIGHT:   source = eye_data.verbose_data.right;             break;
                        case GazeIndex.COMBINE: source = eye_data.verbose_data.combined.eye_data; break;
                        default:                return false; // unknown index: invalid, defaults kept
                    }

                    bool valid = source.GetValidity(SingleEyeDataValidity.SINGLE_EYE_DATA_GAZE_DIRECTION_VALIDITY);
                    if (valid)
                    {
                        origin       = source.gaze_origin_mm * 0.001f;   // millimeters -> meters
                        direction    = source.gaze_direction_normalized;
                        direction.x *= -1;                               // right-handed -> Unity
                    }
                    return valid;
                }
Example #10
0
 /// <summary>
 /// Gets the VerboseData of anipal's Eye module when enable eye callback function.
 /// </summary>
 /// <param name="data">ViveSR.anipal.Eye.VerboseData</param>
 /// <param name="eye_data">ViveSR.anipal.Eye.EyeData_v2</param>
 /// <returns>Indicates whether the data received is new.</returns>
 // NOTE(review): always reports true — the "is new" return is not actually
 // computed from the sample; confirm callers do not rely on it.
 public static bool GetVerboseData(out VerboseData data, EyeData_v2 eye_data)
 {
     data = eye_data.verbose_data;
     return(true);
 }
Example #11
0
                /// <summary>
                /// Gets the 2D position of a selected pupil when enable eye callback function.
                /// </summary>
                /// <param name="eye">The index of an eye.</param>
                /// <param name="postion">The 2D position of a selected pupil clamped between -1 and 1.
                /// Position (0, 0) indicates that the pupil is looking forward;
                /// position (1, 1) up-rightward; and
                /// position (-1, -1) left-downward.</param>
                /// <param name="eye_data">ViveSR.anipal.Eye.EyeData_v2. </param>
                /// <returns>Indicates whether the position value received is valid.</returns>
                public static bool GetPupilPosition(EyeIndex eye, out Vector2 postion, EyeData_v2 eye_data)
                {
                    bool valid = false;

                    if (SRanipal_Eye_Framework.Status == SRanipal_Eye_Framework.FrameworkStatus.WORKING)
                    {
                        SingleEyeData eyeData = eye == EyeIndex.LEFT ? eye_data.verbose_data.left : eye_data.verbose_data.right;
                        valid = eyeData.GetValidity(SingleEyeDataValidity.SINGLE_EYE_DATA_PUPIL_POSITION_IN_SENSOR_AREA_VALIDITY);
                        // Map sensor-area coordinates from [0,1] to [-1,1] with (0,0)
                        // at the center; the y axis is flipped so +y means upward.
                        // (Removed a redundant nested assignment — the original read
                        // `postion = valid ? postion = new Vector2(...) : ...`, where
                        // the inner `postion =` was a no-op.)
                        postion = valid ? new Vector2(eyeData.pupil_position_in_sensor_area.x * 2 - 1,
                                                      eyeData.pupil_position_in_sensor_area.y * -2 + 1) : Vector2.zero;
                    }
                    else
                    {
                        // If not support eye tracking, set default in middle.
                        postion = Vector2.zero;
                        valid   = true;
                    }
                    return(valid);
                }
Example #12
0
 /// <summary>
 /// Casts a ray against all colliders when enable eye callback function.
 /// </summary>
 /// <param name="index">A source of eye gaze data.</param>
 /// <param name="ray">The starting point and direction of the ray.</param>
 /// <param name="focusInfo">Information about where the ray focused on.</param>
 /// <param name="eye_data">ViveSR.anipal.Eye.EyeData_v2. </param>
 /// <returns>Indicates whether the ray hits a collider.</returns>
 public static bool Focus(GazeIndex index, out Ray ray, out FocusInfo focusInfo, EyeData_v2 eye_data)
 {
     // Delegate to the fully-parameterized overload: zero radius (thin ray),
     // unlimited distance, and all layers (-1).
     return Focus(index, out ray, out focusInfo, 0, float.MaxValue, -1, eye_data);
 }
Example #13
0
 /// <summary>
 /// Casts a ray against all colliders.
 /// </summary>
 /// <param name="index">A source of eye gaze data.</param>
 /// <param name="ray">The starting point and direction of the ray.</param>
 /// <param name="focusInfo">Information about where the ray focused on.</param>
 /// <param name="radius">The radius of the gaze ray</param>
 /// <param name="maxDistance">The max length of the ray.</param>
 /// <param name="eye_data">ViveSR.anipal.Eye.EyeData_v2. </param>
 /// <returns>Indicates whether the ray hits a collider.</returns>
 public static bool Focus(GazeIndex index, out Ray ray, out FocusInfo focusInfo, float radius, float maxDistance, EyeData_v2 eye_data)
 {
     // Forward to the fully-parameterized overload with all layers enabled (-1).
     return Focus(index, out ray, out focusInfo, radius, maxDistance, -1, eye_data);
 }
Example #14
0
                /// <summary>
                /// Casts a ray against all colliders when enable eye callback function.
                /// </summary>
                /// <param name="index">A source of eye gaze data.</param>
                /// <param name="ray">The starting point and direction of the ray.</param>
                /// <param name="focusInfo">Information about where the ray focused on.</param>
                /// <param name="radius">The radius of the gaze ray</param>
                /// <param name="maxDistance">The max length of the ray.</param>
                /// <param name="focusableLayer">A layer id that is used to selectively ignore object.</param>
                /// <param name="eye_data">ViveSR.anipal.Eye.EyeData_v2. </param>
                /// <returns>Indicates whether the ray hits a collider.</returns>
                public static bool Focus(GazeIndex index, out Ray ray, out FocusInfo focusInfo, float radius, float maxDistance, int focusableLayer, EyeData_v2 eye_data)
                {
                    if (!GetGazeRay(index, out ray, eye_data))
                    {
                        // No valid gaze ray: nothing to cast against.
                        focusInfo = new FocusInfo();
                        return false;
                    }

                    // Re-anchor the local gaze ray at the camera and rotate it into world space.
                    Ray globalRay = new Ray(Camera.main.transform.position,
                                            Camera.main.transform.TransformDirection(ray.direction));

                    // A zero radius means a thin raycast; otherwise sweep a sphere.
                    RaycastHit hit;
                    bool didHit = radius == 0
                        ? Physics.Raycast(globalRay, out hit, maxDistance, focusableLayer)
                        : Physics.SphereCast(globalRay, radius, out hit, maxDistance, focusableLayer);

                    // On a miss, `hit` holds default values — mirrored into focusInfo
                    // exactly as the caller-visible contract has always been.
                    focusInfo = new FocusInfo
                    {
                        point     = hit.point,
                        normal    = hit.normal,
                        distance  = hit.distance,
                        collider  = hit.collider,
                        rigidbody = hit.rigidbody,
                        transform = hit.transform
                    };
                    return didHit;
                }
 /// <summary>
 /// Native entry point that fills <paramref name="data"/> with the latest v2
 /// eye sample from the SRanipal runtime.
 /// </summary>
 /// <returns>ViveSR.Error status code (callers in this file check for Error.WORK).</returns>
 // NOTE(review): declared `extern`, so a [DllImport] attribute is presumably
 // attached just outside this visible chunk — confirm before relying on it.
 public static extern Error GetEyeData_v2(ref EyeData_v2 data);
Example #16
0
 /// <summary>
 /// Tests eye gaze data when enable eye callback function.
 /// </summary>
 /// <param name="validity">A type of eye gaze data to test with.</param>
 /// <param name="gazeIndex">The index of a source of eye gaze data.</param>
 /// <param name="eye_data">ViveSR.anipal.Eye.EyeData_v2. </param>
 /// <returns>Indicates whether a source of eye gaze data is found.</returns>
 public static bool TryGaze(SingleEyeDataValidity validity, out GazeIndex gazeIndex, EyeData_v2 eye_data)
 {
     // Validity flags indexed by gaze source (left, right, combined).
     bool[] validByIndex = new bool[(int)GazeIndex.COMBINE + 1] {
         eye_data.verbose_data.left.GetValidity(validity),
         eye_data.verbose_data.right.GetValidity(validity),
         eye_data.verbose_data.combined.eye_data.GetValidity(validity)
     };

     gazeIndex = GazeIndex.COMBINE;

     // Prefer the combined gaze, then fall back toward the single eyes.
     int candidate = (int)GazeIndex.COMBINE;
     while (candidate >= 0)
     {
         if (validByIndex[candidate])
         {
             gazeIndex = (GazeIndex)candidate;
             return true;
         }
         candidate--;
     }
     return false;
 }
Example #17
0
                /// <summary>
                /// Gets weighting values from anipal's Eye module when enable eye callback function.
                /// </summary>
                /// <param name="shapes">Weighting values obtained from anipal's Eye module.</param>
                /// <param name="eye_data">ViveSR.anipal.Eye.EyeData_v2. </param>
                /// <returns>Indicates whether the values received are new.</returns>
                // NOTE(review): this method mixes the `eye_data` parameter (gaze/openness)
                // with the static `EyeData_` field (expression data) — presumably both hold
                // the same frame; confirm they cannot go out of sync.
                public static bool GetEyeWeightings(out Dictionary <EyeShape_v2, float> shapes, EyeData_v2 eye_data)
                {
                    // Per-eye openness, pupil-position validity, and normalized pupil position.
                    float[]   openness      = new float[2];
                    bool[]    valid         = new bool[2];
                    Vector2[] pupilPosition = new Vector2[2];

                    foreach (EyeIndex index in (EyeIndex[])Enum.GetValues(typeof(EyeIndex)))
                    {
                        GetEyeOpenness(index, out openness[(int)index], eye_data);
                        // NOTE(review): this GetPupilPosition overload takes no eye_data
                        // argument, unlike the rest of the method — verify it reads the
                        // same frame as `eye_data`.
                        valid[(int)index] = GetPupilPosition(index, out pupilPosition[(int)index]);
                    }

                    // Directional weightings per gaze source (left, right, combined); each
                    // component is clamped at zero so a direction only contributes when the
                    // pupil is actually displaced toward it.
                    float[] weighting_up = new float[3] {
                        Mathf.Max(pupilPosition[(int)GazeIndex.LEFT].y, 0f), Mathf.Max(pupilPosition[(int)GazeIndex.RIGHT].y, 0f), 0
                    };
                    float[] weighting_down = new float[3] {
                        Mathf.Max(-pupilPosition[(int)GazeIndex.LEFT].y, 0f), Mathf.Max(-pupilPosition[(int)GazeIndex.RIGHT].y, 0f), 0
                    };
                    float[] weighting_left = new float[3] {
                        Mathf.Max(-pupilPosition[(int)GazeIndex.LEFT].x, 0f), Mathf.Max(-pupilPosition[(int)GazeIndex.RIGHT].x, 0f), 0
                    };
                    float[] weighting_right = new float[3] {
                        Mathf.Max(pupilPosition[(int)GazeIndex.LEFT].x, 0f), Mathf.Max(pupilPosition[(int)GazeIndex.RIGHT].x, 0f), 0
                    };
                    // Combined weighting is the average of the two eyes.
                    weighting_up[(int)GazeIndex.COMBINE]    = (weighting_up[(int)GazeIndex.LEFT] + weighting_up[(int)GazeIndex.RIGHT]) / 2;
                    weighting_down[(int)GazeIndex.COMBINE]  = (weighting_down[(int)GazeIndex.LEFT] + weighting_down[(int)GazeIndex.RIGHT]) / 2;
                    weighting_left[(int)GazeIndex.COMBINE]  = (weighting_left[(int)GazeIndex.LEFT] + weighting_left[(int)GazeIndex.RIGHT]) / 2;
                    weighting_right[(int)GazeIndex.COMBINE] = (weighting_right[(int)GazeIndex.LEFT] + weighting_right[(int)GazeIndex.RIGHT]) / 2;

                    // Reset every blend-shape weight before filling in this frame's values.
                    foreach (EyeShape_v2 index in (EyeShape_v2[])Enum.GetValues(typeof(EyeShape_v2)))
                    {
                        Weightings[index] = 0;
                    }
                    // Blink is the complement of openness.
                    Weightings[EyeShape_v2.Eye_Left_Blink]  = 1 - openness[(int)EyeIndex.LEFT];
                    Weightings[EyeShape_v2.Eye_Right_Blink] = 1 - openness[(int)EyeIndex.RIGHT];
                    Weightings[EyeShape_v2.Eye_Left_Wide]   = EyeData_.expression_data.left.eye_wide;
                    Weightings[EyeShape_v2.Eye_Right_Wide]  = EyeData_.expression_data.right.eye_wide;

                    Weightings[EyeShape_v2.Eye_Left_Squeeze]  = EyeData_.expression_data.left.eye_squeeze;
                    Weightings[EyeShape_v2.Eye_Right_Squeeze] = EyeData_.expression_data.right.eye_squeeze;

                    // Gaze-direction weightings: prefer the combined gaze when both eyes
                    // are valid, otherwise fall back to whichever single eye is valid.
                    if (valid[(int)EyeIndex.LEFT] && valid[(int)EyeIndex.RIGHT])
                    {
                        Ray gaze_ray = new Ray();
                        GetGazeRay(GazeIndex.COMBINE, out gaze_ray, eye_data);
                        // NOTE(review): direction minus origin is an unusual way to derive
                        // a gaze vector (Ray.direction is already a direction); x is zeroed
                        // so only the vertical component feeds y_weight — confirm intent.
                        Vector3 gaze_direction = gaze_ray.direction - gaze_ray.origin;
                        gaze_direction.x = 0.0f;
                        Vector3 gaze_direction_normalized = gaze_direction.normalized;
                        Vector3 gaze_axis_z = Vector3.forward;
                        // Angle (radians) between the flattened gaze and straight ahead.
                        float   y_weight    = Mathf.Acos(Vector3.Dot(gaze_direction_normalized, gaze_axis_z));

                        // NOTE(review): Eye_*_Up reuses the eye_wide expression rather than
                        // the computed y_weight (which only drives Eye_*_Down) — confirm.
                        Weightings[EyeShape_v2.Eye_Left_Up]    = EyeData_.expression_data.left.eye_wide;
                        Weightings[EyeShape_v2.Eye_Left_Down]  = gaze_direction_normalized.y < 0 ? y_weight : 0;
                        Weightings[EyeShape_v2.Eye_Left_Left]  = weighting_left[(int)GazeIndex.COMBINE];
                        Weightings[EyeShape_v2.Eye_Left_Right] = weighting_right[(int)GazeIndex.COMBINE];

                        Weightings[EyeShape_v2.Eye_Right_Up]    = EyeData_.expression_data.right.eye_wide;
                        Weightings[EyeShape_v2.Eye_Right_Down]  = gaze_direction_normalized.y < 0 ? y_weight : 0;
                        Weightings[EyeShape_v2.Eye_Right_Left]  = weighting_left[(int)GazeIndex.COMBINE];
                        Weightings[EyeShape_v2.Eye_Right_Right] = weighting_right[(int)GazeIndex.COMBINE];
                    }
                    else if (valid[(int)EyeIndex.LEFT])
                    {
                        Ray gaze_ray = new Ray();
                        GetGazeRay(GazeIndex.LEFT, out gaze_ray, eye_data);
                        Vector3 gaze_direction = gaze_ray.direction - gaze_ray.origin;
                        gaze_direction.x = 0.0f;
                        Vector3 gaze_direction_normalized = gaze_direction.normalized;
                        Vector3 gaze_axis_z = Vector3.forward;
                        float   y_weight    = Mathf.Acos(Vector3.Dot(gaze_direction_normalized, gaze_axis_z));

                        Weightings[EyeShape_v2.Eye_Left_Up]    = EyeData_.expression_data.left.eye_wide;
                        Weightings[EyeShape_v2.Eye_Left_Down]  = gaze_direction_normalized.y < 0 ? y_weight : 0;
                        Weightings[EyeShape_v2.Eye_Left_Left]  = weighting_left[(int)GazeIndex.LEFT];
                        Weightings[EyeShape_v2.Eye_Left_Right] = weighting_right[(int)GazeIndex.LEFT];
                    }
                    else if (valid[(int)EyeIndex.RIGHT])
                    {
                        Ray gaze_ray = new Ray();
                        GetGazeRay(GazeIndex.RIGHT, out gaze_ray, eye_data);
                        Vector3 gaze_direction = gaze_ray.direction - gaze_ray.origin;
                        gaze_direction.x = 0.0f;
                        Vector3 gaze_direction_normalized = gaze_direction.normalized;
                        Vector3 gaze_axis_z = Vector3.forward;
                        float   y_weight    = Mathf.Acos(Vector3.Dot(gaze_direction_normalized, gaze_axis_z));

                        Weightings[EyeShape_v2.Eye_Right_Up]    = EyeData_.expression_data.right.eye_wide;
                        Weightings[EyeShape_v2.Eye_Right_Down]  = gaze_direction_normalized.y < 0 ? y_weight : 0;
                        Weightings[EyeShape_v2.Eye_Right_Left]  = weighting_left[(int)GazeIndex.RIGHT];
                        Weightings[EyeShape_v2.Eye_Right_Right] = weighting_right[(int)GazeIndex.RIGHT];
                    }
                    shapes = Weightings;
                    return(true);
                }
Example #18
0
 /// <summary>
 /// SRanipal eye-data callback: caches the latest sample into the static
 /// EyeData field for later consumption.
 /// </summary>
 // NOTE(review): the write is unsynchronized — presumably readers tolerate a
 // torn/stale struct or the callback and readers never overlap; confirm.
 private static void EyeCallback(ref EyeData_v2 eye_data)
 {
     EyeData = eye_data;
 }
Example #19
0
    // ********************************************************************************************************************
    //
    //  Callback function to record the eye movement data.
    //  Note that SRanipal_Eye_v2 does not work in the function below. It only works under UnityEngine.
    //
    // ********************************************************************************************************************
    // NOTE(review): this callback deliberately blocks inside a while loop until
    // maxframe_count samples are captured or the 3-second post-saccade timeout
    // fires — confirm that occupying the callback for the whole recording is
    // intended, since no further callback invocations can be serviced meanwhile.
    private static void EyeCallback(ref EyeData_v2 eye_data)
    {
        // Fetch the current gaze-ray parameters (sensitivity factor) once per recording.
        EyeParameter eye_parameter = new EyeParameter();

        SRanipal_Eye_API.GetEyeParameter(ref eye_parameter);
        eyeData = eye_data;

        // ----------------------------------------------------------------------------------------------------------------
        //  Measure eye movements at the frequency of 120Hz until framecount reaches the maxframe count set.
        // ----------------------------------------------------------------------------------------------------------------
        while (cnt_callback < maxframe_count)
        {
            // Poll the SDK directly for the freshest sample instead of waiting
            // for the next callback invocation.
            ViveSR.Error error = SRanipal_Eye_API.GetEyeData_v2(ref eyeData);

            if (error == ViveSR.Error.WORK)
            {
                // --------------------------------------------------------------------------------------------------------
                //  Measure each parameter of eye data that are specified in the guideline of SRanipal SDK.
                // --------------------------------------------------------------------------------------------------------
                MeasureTime      = DateTime.Now.Ticks;
                time_stamp       = eyeData.timestamp;
                frame            = eyeData.frame_sequence;
                eye_valid_L      = eyeData.verbose_data.left.eye_data_validata_bit_mask;
                eye_valid_R      = eyeData.verbose_data.right.eye_data_validata_bit_mask;
                openness_L       = eyeData.verbose_data.left.eye_openness;
                openness_R       = eyeData.verbose_data.right.eye_openness;
                pupil_diameter_L = eyeData.verbose_data.left.pupil_diameter_mm;
                pupil_diameter_R = eyeData.verbose_data.right.pupil_diameter_mm;
                pos_sensor_L     = eyeData.verbose_data.left.pupil_position_in_sensor_area;
                pos_sensor_R     = eyeData.verbose_data.right.pupil_position_in_sensor_area;
                gaze_origin_L    = eyeData.verbose_data.left.gaze_origin_mm;
                gaze_origin_R    = eyeData.verbose_data.right.gaze_origin_mm;
                gaze_direct_L    = eyeData.verbose_data.left.gaze_direction_normalized;
                gaze_direct_R    = eyeData.verbose_data.right.gaze_direction_normalized;
                gaze_sensitive   = eye_parameter.gaze_ray_parameter.sensitive_factor;
                frown_L          = eyeData.expression_data.left.eye_frown;
                frown_R          = eyeData.expression_data.right.eye_frown;
                squeeze_L        = eyeData.expression_data.left.eye_squeeze;
                squeeze_R        = eyeData.expression_data.right.eye_squeeze;
                wide_L           = eyeData.expression_data.left.eye_wide;
                wide_R           = eyeData.expression_data.right.eye_wide;
                distance_valid_C = eyeData.verbose_data.combined.convergence_distance_validity;
                distance_C       = eyeData.verbose_data.combined.convergence_distance_mm;
                track_imp_cnt    = eyeData.verbose_data.tracking_improvements.count;
                ////track_imp_item = eyeData.verbose_data.tracking_improvements.items;

                //  Convert the measured data to string data to write in a text file.
                // NOTE(review): per-sample string concatenation plus File.AppendAllText
                // (open/write/close on every iteration) is expensive at 120 Hz —
                // consider a buffered StreamWriter if samples are being dropped.
                string value =
                    MeasureTime.ToString() + "," +
                    time_stamp.ToString() + "," +
                    frame.ToString() + "," +
                    eye_valid_L.ToString() + "," +
                    eye_valid_R.ToString() + "," +
                    openness_L.ToString() + "," +
                    openness_R.ToString() + "," +
                    pupil_diameter_L.ToString() + "," +
                    pupil_diameter_R.ToString() + "," +
                    pos_sensor_L.x.ToString() + "," +
                    pos_sensor_L.y.ToString() + "," +
                    pos_sensor_R.x.ToString() + "," +
                    pos_sensor_R.y.ToString() + "," +
                    gaze_origin_L.x.ToString() + "," +
                    gaze_origin_L.y.ToString() + "," +
                    gaze_origin_L.z.ToString() + "," +
                    gaze_origin_R.x.ToString() + "," +
                    gaze_origin_R.y.ToString() + "," +
                    gaze_origin_R.z.ToString() + "," +
                    gaze_direct_L.x.ToString() + "," +
                    gaze_direct_L.y.ToString() + "," +
                    gaze_direct_L.z.ToString() + "," +
                    gaze_direct_R.x.ToString() + "," +
                    gaze_direct_R.y.ToString() + "," +
                    gaze_direct_R.z.ToString() + "," +
                    gaze_sensitive.ToString() + "," +
                    frown_L.ToString() + "," +
                    frown_R.ToString() + "," +
                    squeeze_L.ToString() + "," +
                    squeeze_R.ToString() + "," +
                    wide_L.ToString() + "," +
                    wide_R.ToString() + "," +
                    distance_valid_C.ToString() + "," +
                    distance_C.ToString() + "," +
                    track_imp_cnt.ToString() +
                    //track_imp_item.ToString() +
                    Environment.NewLine;

                File.AppendAllText("StandardSaccade_" + UserID + ".txt", value);

                cnt_callback++;
            }

            //  Break while loop 3 seconds after the saccade tasks are completed. We know the timing at this point by time information.
            CurrentTime    = DateTime.Now.Ticks;
            MeasureEndTime = GetSaccadeEndTime();

            // 3 * 10,000,000 ticks = 3 seconds (DateTime ticks are 100 ns each).
            if ((CurrentTime - MeasureEndTime > 3 * 10000000) && MeasureEndTime != 0)
            {
                break;
            }
        }
    }