Example #1
0
    /// <summary>
    /// Initializes the scene and gaze render textures for the given eye.
    /// </summary>
    /// <param name="index">The base texture index within the eye texture arrays.</param>
    /// <param name="eye">The eye whose textures are created.</param>
    private void InitEyeTexture(int index, Eyes eye)
    {
        EyeParameter eyeParameter = this.eyeParameters[(int)eye];

        // Two consecutive slots per eye: the scene texture first, the gaze texture second.
        int eyeIndex = index + ((int)eye * 2);

        // SceneTexture
        this.CreateEyeTexture(eyeIndex, eyeParameter);

        // GazeTexture
        this.CreateEyeTexture(eyeIndex + 1, eyeParameter);
    }

    /// <summary>
    /// Creates one render texture at the given slot and caches its native texture id.
    /// </summary>
    /// <param name="eyeIndex">Slot in the eye texture arrays.</param>
    /// <param name="eyeParameter">Source of the texture resolution.</param>
    private void CreateEyeTexture(int eyeIndex, EyeParameter eyeParameter)
    {
        this.eyeTextures[eyeIndex] = new RenderTexture(
            (int)eyeParameter.Resolution.x,
            (int)eyeParameter.Resolution.y,
            (int)VrManager.Instance.EyeTextureDepth,
            VrManager.Instance.EyeTextureFormat)
        {
            antiAliasing = (int)VrManager.Instance.EyeTextureAntiAliasing
        };
        this.eyeTextures[eyeIndex].Create();

        // Cache the native pointer as an int so the native plugin can address the texture.
        this.eyeTextureIds[eyeIndex] = this.eyeTextures[eyeIndex].GetNativeTexturePtr().ToInt32();
    }
Example #2
0
        /// <summary>
        /// Initializes the render texture for one eye and stores its native id.
        /// </summary>
        /// <param name="index">The index.</param>
        /// <param name="eye">The eye.</param>
        private void InitEyeTexture(int index, Eyes eye)
        {
            EyeParameter parameters = this.eyeParameters[(int)eye];

            // SceneTexture: one array slot per eye.
            int slot = index + (int)eye;

#if UNITY_5_6
            // it will occur native crash when using SetTargetBuffers function in 5.6.5
            int depthBits = (int)VrManager.Instance.EyeTextureDepth;
#else
            // use single depth buffer for each eye, create depth buffer only in first eye texture
            int depthBits = index == 0 ? (int)VrManager.Instance.EyeTextureDepth : 0;
#endif

            RenderTexture texture = new RenderTexture(
                (int)parameters.Resolution.x,
                (int)parameters.Resolution.y,
                depthBits,
                VrManager.Instance.EyeTextureFormat);
            texture.antiAliasing = (int)VrManager.Instance.EyeTextureAntiAliasing;
            texture.Create();

            this.eyeTextures[slot] = texture;
            this.eyeTextureIds[slot] = texture.GetNativeTexturePtr().ToInt32();
        }
Example #3
0
    // ********************************************************************************************************************
    //
    //  Measure eye movements in a callback function that HTC SRanipal provides.
    //
    // ********************************************************************************************************************
    void Measurement()
    {
        // Queried before measurement starts; the struct itself is not read here.
        // NOTE(review): if the query has no required side effect in the SDK, this
        // call and local could be removed — confirm against the SRanipal docs.
        EyeParameter eye_parameter = new EyeParameter();
        SRanipal_Eye_API.GetEyeParameter(ref eye_parameter);

        Data_txt();

        // Keep the registration state in sync with the framework's EnableEyeDataCallback flag.
        if (SRanipal_Eye_Framework.Instance.EnableEyeDataCallback && !eye_callback_registered)
        {
            // NOTE(review): the delegate is created inline; if the GC collects it the native
            // callback pointer dangles — consider caching the delegate in a static field.
            SRanipal_Eye_v2.WrapperRegisterEyeDataCallback(Marshal.GetFunctionPointerForDelegate((SRanipal_Eye_v2.CallbackBasic)EyeCallback));
            eye_callback_registered = true;
        }
        else if (!SRanipal_Eye_Framework.Instance.EnableEyeDataCallback && eye_callback_registered)
        {
            SRanipal_Eye_v2.WrapperUnRegisterEyeDataCallback(Marshal.GetFunctionPointerForDelegate((SRanipal_Eye_v2.CallbackBasic)EyeCallback));
            eye_callback_registered = false;
        }
    }
Example #4
0
                /// <summary>
                /// Debug GUI: one button toggles the gaze-ray sensitivity, another launches calibration.
                /// </summary>
                private void OnGUI()
                {
                    if (GUILayout.Button("Set Parameter"))
                    {
                        // Read the current gaze-ray settings from the runtime.
                        EyeParameter parameter = new EyeParameter();
                        parameter.gaze_ray_parameter = new GazeRayParameter();
                        Error error = SRanipal_Eye.GetEyeParameter(ref parameter);
                        Debug.Log("GetEyeParameter: " + error + "\n" +
                                  "sensitive_factor: " + parameter.gaze_ray_parameter.sensitive_factor);

                        // Flip the sensitivity between the two sample values and write it back.
                        if (parameter.gaze_ray_parameter.sensitive_factor == 1)
                        {
                            parameter.gaze_ray_parameter.sensitive_factor = 0.015f;
                        }
                        else
                        {
                            parameter.gaze_ray_parameter.sensitive_factor = 1;
                        }
                        error = SRanipal_Eye.SetEyeParameter(parameter);
                        Debug.Log("SetEyeParameter: " + error + "\n" +
                                  "sensitive_factor: " + parameter.gaze_ray_parameter.sensitive_factor);
                    }

                    if (GUILayout.Button("Launch Calibration"))
                    {
                        SRanipal_Eye.LaunchEyeCalibration();
                    }
                }
 // SRanipal native export: reads the current eye parameters into 'parameter'.
 // NOTE(review): the [DllImport] attribute for these two externs is not visible in
 // this chunk — presumably declared just above; confirm before relying on them.
 public static extern Error GetEyeParameter(ref EyeParameter parameter);
 // SRanipal native export: writes 'parameter' back to the runtime.
 public static extern Error SetEyeParameter(EyeParameter parameter);
 // Pimax wrapper: forwards directly to the native GetEyeParameter export below.
 public float GetEyeParameter(Eye eye, EyeParameter param) => _GetEyeParameter(eye, param);
 [DllImport("PimaxEyeTracker", EntryPoint = "GetEyeParameter")] private static extern float _GetEyeParameter(Eye eye, EyeParameter param);
Example #9
0
    // ********************************************************************************************************************
    //
    //  Callback function to record the eye movement data.
    //  Note that SRanipal_Eye_v2 does not work in the function below. It only works under UnityEngine.
    //
    // ********************************************************************************************************************
    private static void EyeCallback(ref EyeData_v2 eye_data)
    {
        EyeParameter eye_parameter = new EyeParameter();

        SRanipal_Eye_API.GetEyeParameter(ref eye_parameter);
        eyeData = eye_data;

        // ----------------------------------------------------------------------------------------------------------------
        //  Measure eye movements at the frequency of 120Hz until framecount reaches the maxframe count set.
        // ----------------------------------------------------------------------------------------------------------------
        while (cnt_callback < maxframe_count)
        {
            ViveSR.Error error = SRanipal_Eye_API.GetEyeData_v2(ref eyeData);

            if (error == ViveSR.Error.WORK)
            {
                // --------------------------------------------------------------------------------------------------------
                //  Measure each parameter of eye data that are specified in the guideline of SRanipal SDK.
                // --------------------------------------------------------------------------------------------------------
                // NOTE(review): DateTime.Now (local ticks) is kept because GetSaccadeEndTime()
                // is presumably on the same clock — confirm before switching to UtcNow.
                MeasureTime      = DateTime.Now.Ticks;
                time_stamp       = eyeData.timestamp;
                frame            = eyeData.frame_sequence;
                eye_valid_L      = eyeData.verbose_data.left.eye_data_validata_bit_mask;
                eye_valid_R      = eyeData.verbose_data.right.eye_data_validata_bit_mask;
                openness_L       = eyeData.verbose_data.left.eye_openness;
                openness_R       = eyeData.verbose_data.right.eye_openness;
                pupil_diameter_L = eyeData.verbose_data.left.pupil_diameter_mm;
                pupil_diameter_R = eyeData.verbose_data.right.pupil_diameter_mm;
                pos_sensor_L     = eyeData.verbose_data.left.pupil_position_in_sensor_area;
                pos_sensor_R     = eyeData.verbose_data.right.pupil_position_in_sensor_area;
                gaze_origin_L    = eyeData.verbose_data.left.gaze_origin_mm;
                gaze_origin_R    = eyeData.verbose_data.right.gaze_origin_mm;
                gaze_direct_L    = eyeData.verbose_data.left.gaze_direction_normalized;
                gaze_direct_R    = eyeData.verbose_data.right.gaze_direction_normalized;
                gaze_sensitive   = eye_parameter.gaze_ray_parameter.sensitive_factor;
                frown_L          = eyeData.expression_data.left.eye_frown;
                frown_R          = eyeData.expression_data.right.eye_frown;
                squeeze_L        = eyeData.expression_data.left.eye_squeeze;
                squeeze_R        = eyeData.expression_data.right.eye_squeeze;
                wide_L           = eyeData.expression_data.left.eye_wide;
                wide_R           = eyeData.expression_data.right.eye_wide;
                distance_valid_C = eyeData.verbose_data.combined.convergence_distance_validity;
                distance_C       = eyeData.verbose_data.combined.convergence_distance_mm;
                track_imp_cnt    = eyeData.verbose_data.tracking_improvements.count;
                ////track_imp_item = eyeData.verbose_data.tracking_improvements.items;

                //  Build one CSV row; string.Join replaces the long '+' concatenation chain
                //  and produces identical output (each element is formatted via ToString()).
                string value = string.Join(",", new object[]
                {
                    MeasureTime, time_stamp, frame,
                    eye_valid_L, eye_valid_R,
                    openness_L, openness_R,
                    pupil_diameter_L, pupil_diameter_R,
                    pos_sensor_L.x, pos_sensor_L.y,
                    pos_sensor_R.x, pos_sensor_R.y,
                    gaze_origin_L.x, gaze_origin_L.y, gaze_origin_L.z,
                    gaze_origin_R.x, gaze_origin_R.y, gaze_origin_R.z,
                    gaze_direct_L.x, gaze_direct_L.y, gaze_direct_L.z,
                    gaze_direct_R.x, gaze_direct_R.y, gaze_direct_R.z,
                    gaze_sensitive,
                    frown_L, frown_R,
                    squeeze_L, squeeze_R,
                    wide_L, wide_R,
                    distance_valid_C, distance_C,
                    track_imp_cnt
                    //track_imp_item
                }) + Environment.NewLine;

                File.AppendAllText("StandardSaccade_" + UserID + ".txt", value);

                cnt_callback++;
            }

            //  Break while loop 3 seconds after the saccade tasks are completed. We know the timing at this point by time information.
            CurrentTime    = DateTime.Now.Ticks;
            MeasureEndTime = GetSaccadeEndTime();

            // 3 seconds expressed in ticks (TimeSpan.TicksPerSecond == 10,000,000).
            if ((CurrentTime - MeasureEndTime > 3 * TimeSpan.TicksPerSecond) && MeasureEndTime != 0)
            {
                break;
            }
        }
    }