Example #1
        public StringBuilder StartTracking(String time, int frameCount, StringBuilder csvLogger)
        {
            // Get eye tracking data from the Tobii XR API (local and world tracking space)
            TobiiXR_EyeTrackingData localEyeData = TobiiXR.GetEyeTrackingData(TobiiXR_TrackingSpace.Local);
            TobiiXR_EyeTrackingData worldEyeData = TobiiXR.GetEyeTrackingData(TobiiXR_TrackingSpace.World);


            // Get eye data from SRanipal
            ViveSR.Error error = SRanipal_Eye_API.GetEyeData(ref _eyeData);

            if (error == ViveSR.Error.WORK)
            {
                _sRanipalEyeVerboseData = _eyeData.verbose_data;

                // Left Eye Data
                SingleEyeData sRleftEyeData = _sRanipalEyeVerboseData.left;
                // Right Eye Data
                SingleEyeData sRrightEyeData = _sRanipalEyeVerboseData.right;

                // Append one row of values to the CSV log
                csvLogger.AppendFormat(
                    "{0}, {1}, {2}, {3}, {4}, {5}, {6}, {7}, {8}, {9}, {10}, {11}, {12}, {13}, {14}, {15}, {16}, {17}, {18}," +
                    "{19}, {20}, {21}, {22}, {23}, {24}, {25},{26}, {27}, {28}, {29}, {30}, {31}, {32}, {33}, {34}, {35}," +
                    "{36}, {37}, {38}, {39}",
                    // Time and Frame count
                    time, frameCount,

                    // Convergence Distance
                    worldEyeData.ConvergenceDistanceIsValid,
                    worldEyeData.ConvergenceDistance,

                    // Eye Openness
                    sRleftEyeData.eye_openness,
                    sRrightEyeData.eye_openness,

                    // Eye blinking
                    localEyeData.IsLeftEyeBlinking,
                    localEyeData.IsRightEyeBlinking,

                    // Pupil Diameter
                    sRleftEyeData.pupil_diameter_mm,
                    sRrightEyeData.pupil_diameter_mm,

                    // Pupil Position in Sensor area (x, y)
                    sRleftEyeData.pupil_position_in_sensor_area.x,
                    sRleftEyeData.pupil_position_in_sensor_area.y,
                    sRrightEyeData.pupil_position_in_sensor_area.x,
                    sRrightEyeData.pupil_position_in_sensor_area.y,

                    // Is Local Gaze Valid
                    localEyeData.GazeRay.IsValid,

                    // Local Space Gaze Origin Combined
                    localEyeData.GazeRay.Origin.x,
                    localEyeData.GazeRay.Origin.y,
                    localEyeData.GazeRay.Origin.z,

                    // Local Space Gaze Direction Combined
                    localEyeData.GazeRay.Direction.x,
                    localEyeData.GazeRay.Direction.y,
                    localEyeData.GazeRay.Direction.z,

                    // Is World Gaze Valid
                    worldEyeData.GazeRay.IsValid,

                    // World Space Gaze Origin Combined
                    worldEyeData.GazeRay.Origin.x,
                    worldEyeData.GazeRay.Origin.y,
                    worldEyeData.GazeRay.Origin.z,

                    // World Space Gaze Direction Combined
                    worldEyeData.GazeRay.Direction.x,
                    worldEyeData.GazeRay.Direction.y,
                    worldEyeData.GazeRay.Direction.z,

                    // Gaze Origin in mm
                    sRleftEyeData.gaze_origin_mm.x,
                    sRleftEyeData.gaze_origin_mm.y,
                    sRleftEyeData.gaze_origin_mm.z,
                    sRrightEyeData.gaze_origin_mm.x,
                    sRrightEyeData.gaze_origin_mm.y,
                    sRrightEyeData.gaze_origin_mm.z,

                    // Normalized Gaze direction
                    sRleftEyeData.gaze_direction_normalized.x,
                    sRleftEyeData.gaze_direction_normalized.y,
                    sRleftEyeData.gaze_direction_normalized.z,
                    sRrightEyeData.gaze_direction_normalized.x,
                    sRrightEyeData.gaze_direction_normalized.y,
                    sRrightEyeData.gaze_direction_normalized.z
                    );

                csvLogger.AppendLine();
            }

            return csvLogger;
        }
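
A minimal usage sketch for the method above: it is called once per frame and the accumulated log is flushed to disk when the application quits. The field names, the time format, and the output path are illustrative assumptions, not part of the original example; it presumes the usual using directives (System, System.IO, System.Text, UnityEngine) and that StartTracking lives in a MonoBehaviour.

        // Usage sketch (assumption: this sits in the same MonoBehaviour as StartTracking).
        private StringBuilder _csvLogger = new StringBuilder();
        private int _frameCount;

        private void Update()
        {
            // Log one row of eye tracking data per rendered frame.
            _frameCount++;
            _csvLogger = StartTracking(DateTime.Now.ToString("HH:mm:ss.fff"), _frameCount, _csvLogger);
        }

        private void OnApplicationQuit()
        {
            // Flush the accumulated rows to a CSV file when the application closes.
            File.WriteAllText(Path.Combine(Application.persistentDataPath, "eye_tracking_log.csv"),
                              _csvLogger.ToString());
        }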
Example #2
    // ********************************************************************************************************************
    //
    //  Callback function to record the eye movement data.
    //  Note that the SRanipal_Eye_v2 wrapper class cannot be used inside this callback, because the callback
    //  runs outside Unity's main thread; only SRanipal_Eye_API calls are made here.
    //
    // ********************************************************************************************************************
    private static void EyeCallback(ref EyeData_v2 eye_data)
    {
        EyeParameter eye_parameter = new EyeParameter();

        SRanipal_Eye_API.GetEyeParameter(ref eye_parameter);
        eyeData = eye_data;

        // ----------------------------------------------------------------------------------------------------------------
        //  Record eye movement data at 120 Hz until the frame counter reaches the preset maximum (maxframe_count).
        // ----------------------------------------------------------------------------------------------------------------
        while (cnt_callback < maxframe_count)
        {
            ViveSR.Error error = SRanipal_Eye_API.GetEyeData_v2(ref eyeData);

            if (error == ViveSR.Error.WORK)
            {
                // --------------------------------------------------------------------------------------------------------
                //  Read each eye data parameter specified in the SRanipal SDK documentation.
                // --------------------------------------------------------------------------------------------------------
                MeasureTime      = DateTime.Now.Ticks;
                time_stamp       = eyeData.timestamp;
                frame            = eyeData.frame_sequence;
                eye_valid_L      = eyeData.verbose_data.left.eye_data_validata_bit_mask;
                eye_valid_R      = eyeData.verbose_data.right.eye_data_validata_bit_mask;
                openness_L       = eyeData.verbose_data.left.eye_openness;
                openness_R       = eyeData.verbose_data.right.eye_openness;
                pupil_diameter_L = eyeData.verbose_data.left.pupil_diameter_mm;
                pupil_diameter_R = eyeData.verbose_data.right.pupil_diameter_mm;
                pos_sensor_L     = eyeData.verbose_data.left.pupil_position_in_sensor_area;
                pos_sensor_R     = eyeData.verbose_data.right.pupil_position_in_sensor_area;
                gaze_origin_L    = eyeData.verbose_data.left.gaze_origin_mm;
                gaze_origin_R    = eyeData.verbose_data.right.gaze_origin_mm;
                gaze_direct_L    = eyeData.verbose_data.left.gaze_direction_normalized;
                gaze_direct_R    = eyeData.verbose_data.right.gaze_direction_normalized;
                gaze_sensitive   = eye_parameter.gaze_ray_parameter.sensitive_factor;
                frown_L          = eyeData.expression_data.left.eye_frown;
                frown_R          = eyeData.expression_data.right.eye_frown;
                squeeze_L        = eyeData.expression_data.left.eye_squeeze;
                squeeze_R        = eyeData.expression_data.right.eye_squeeze;
                wide_L           = eyeData.expression_data.left.eye_wide;
                wide_R           = eyeData.expression_data.right.eye_wide;
                distance_valid_C = eyeData.verbose_data.combined.convergence_distance_validity;
                distance_C       = eyeData.verbose_data.combined.convergence_distance_mm;
                track_imp_cnt    = eyeData.verbose_data.tracking_improvements.count;
                ////track_imp_item = eyeData.verbose_data.tracking_improvements.items;

                //  Convert the measured data to a comma-separated line and append it to the text file.
                string value =
                    MeasureTime.ToString() + "," +
                    time_stamp.ToString() + "," +
                    frame.ToString() + "," +
                    eye_valid_L.ToString() + "," +
                    eye_valid_R.ToString() + "," +
                    openness_L.ToString() + "," +
                    openness_R.ToString() + "," +
                    pupil_diameter_L.ToString() + "," +
                    pupil_diameter_R.ToString() + "," +
                    pos_sensor_L.x.ToString() + "," +
                    pos_sensor_L.y.ToString() + "," +
                    pos_sensor_R.x.ToString() + "," +
                    pos_sensor_R.y.ToString() + "," +
                    gaze_origin_L.x.ToString() + "," +
                    gaze_origin_L.y.ToString() + "," +
                    gaze_origin_L.z.ToString() + "," +
                    gaze_origin_R.x.ToString() + "," +
                    gaze_origin_R.y.ToString() + "," +
                    gaze_origin_R.z.ToString() + "," +
                    gaze_direct_L.x.ToString() + "," +
                    gaze_direct_L.y.ToString() + "," +
                    gaze_direct_L.z.ToString() + "," +
                    gaze_direct_R.x.ToString() + "," +
                    gaze_direct_R.y.ToString() + "," +
                    gaze_direct_R.z.ToString() + "," +
                    gaze_sensitive.ToString() + "," +
                    frown_L.ToString() + "," +
                    frown_R.ToString() + "," +
                    squeeze_L.ToString() + "," +
                    squeeze_R.ToString() + "," +
                    wide_L.ToString() + "," +
                    wide_R.ToString() + "," +
                    distance_valid_C.ToString() + "," +
                    distance_C.ToString() + "," +
                    track_imp_cnt.ToString() +
                    //track_imp_item.ToString() +
                    Environment.NewLine;

                File.AppendAllText("StandardSaccade_" + UserID + ".txt", value);

                cnt_callback++;
            }

            //  Break out of the while loop 3 seconds after the saccade task has been completed
            //  (MeasureEndTime is only set once the task is done).
            CurrentTime    = DateTime.Now.Ticks;
            MeasureEndTime = GetSaccadeEndTime();

            if ((CurrentTime - MeasureEndTime > 3 * 10000000) && MeasureEndTime != 0)  // 3 s in DateTime ticks (10,000,000 ticks per second)
            {
                break;
            }
        }
    }
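
The callback above only fires after it has been registered with the SRanipal runtime. Below is a minimal registration sketch following the pattern of the SRanipal Unity samples; the Update/OnDisable placement and the eye_callback_registered flag are illustrative assumptions, and the wrapper API names should be verified against the installed SDK version (Marshal requires using System.Runtime.InteropServices).

    // Registration sketch (assumption: this sits in the same MonoBehaviour class as EyeCallback).
    private static bool eye_callback_registered = false;

    private void Update()
    {
        // Wait until the SRanipal framework reports that eye tracking is working.
        if (SRanipal_Eye_Framework.Status != SRanipal_Eye_Framework.FrameworkStatus.WORKING) return;

        if (SRanipal_Eye_Framework.Instance.EnableEyeDataCallback && !eye_callback_registered)
        {
            // Hand the native runtime a function pointer to EyeCallback.
            SRanipal_Eye_v2.WrapperRegisterEyeDataCallback(
                Marshal.GetFunctionPointerForDelegate((SRanipal_Eye_v2.CallbackBasic)EyeCallback));
            eye_callback_registered = true;
        }
    }

    private void OnDisable()
    {
        // Unregister the callback so the native runtime no longer calls into this object.
        if (eye_callback_registered)
        {
            SRanipal_Eye_v2.WrapperUnRegisterEyeDataCallback(
                Marshal.GetFunctionPointerForDelegate((SRanipal_Eye_v2.CallbackBasic)EyeCallback));
            eye_callback_registered = false;
        }
    }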