Пример #1
0
    /// <summary>
    /// Replays one recorded trial frame per Update tick: pops the oldest
    /// frame from the queue, moves the main camera to the recorded pose,
    /// converts the recorded gaze sample from degrees to screen pixels,
    /// and hands it to the gaze processor.
    /// </summary>
    private void Update()
    {
        if (playTrialData && frames.Count >= 1)
        {
            // Consume the oldest queued frame (frames acts as a FIFO;
            // one frame is replayed per Update call).
            double[] tmp = frames[0];
            frames.RemoveAt(0);

            // Data is: pos x, pos y, pos z, rot, gaze X, gaze Y, trial state
            // IMPORTANT there is a 0.8 unit offset between the player controller and the camera, manually added here.
            Camera.main.transform.position = new Vector3 {
                x = (float)tmp[0], y = (float)(tmp[1] + 0.8), z = (float)tmp[2]
            };
            // Recorded rotation is yaw-only: heading about the Y axis.
            Camera.main.transform.rotation = Quaternion.Euler(0f, (float)tmp[3], 0f);
            // TODO Trialstate and gaze
            float x = (float)tmp[4];
            float y = (float)tmp[5];
            // Degrees -> pixels, re-centered so 0 deg maps to screen center
            // (0.5 * resolution). Assumes tmp[4]/tmp[5] are in visual degrees
            // — consistent with the EyeLink branch of the later revision.
            x = (pix_per_deg * x) + (0.5f * XRes);
            y = (pix_per_deg * y) + (0.5f * YRes);

            Vector2 _eyePix = new Vector2 {
                x = x, y = y
            };

            // manually convert to pixels
            gp.ProcessGaze(_eyePix, out float[] gazeTargets, out float[] gazeCounts, out Vector3[] hitPoints);
Пример #2
0
    /// <summary>
    /// Replays one recorded trial frame per Update tick: pops the oldest
    /// frame from the queue, moves the main camera to the recorded pose,
    /// converts the recorded gaze sample to screen pixels (conversion
    /// depends on which eye tracker produced the recording), and hands it
    /// to the gaze processor.
    /// </summary>
    private void Update()
    {
        if (playTrialData && frames.Count >= 1)
        {
            // Consume the oldest queued frame (frames acts as a FIFO;
            // one frame is replayed per Update call).
            double[] tmp = frames[0];
            frames.RemoveAt(0);

            // Data is: pos x, pos y, pos z, rot, gaze X, gaze Y, trial state
            // IMPORTANT there is a 0.8 unit offset between the player controller and the camera, manually added here.
            Camera.main.transform.position = new Vector3 {
                x = (float)tmp[0], y = (float)(tmp[1] + 0.8), z = (float)tmp[2]
            };
            // Recorded rotation is yaw-only: heading about the Y axis.
            Camera.main.transform.rotation = Quaternion.Euler(0f, (float)tmp[3], 0f);
            // TODO Trialstate and gaze
            Vector2 _eyePix;
            switch (ExperimentConfiguration.Eye_Tracker)
            {
            case ExperimentConfiguration.EyeTrackers.EyeLink:
                // gaze data is in degrees; re-centered so 0 deg maps to
                // screen center (0.5 * resolution).
                _eyePix = new Vector2
                {
                    x = (pix_per_deg * (float)tmp[4]) + (0.5f * XRes),
                    y = (pix_per_deg * (float)tmp[5]) + (0.5f * YRes)
                };
                break;

            case ExperimentConfiguration.EyeTrackers.TobiiProFusion:
                // gaze data is in relative screen position
                // (presumably normalized [0,1] with a top-left origin, hence
                // the 1 - y flip — NOTE(review): confirm against recorder).
                _eyePix = new Vector2
                {
                    x = (float)tmp[4] * XRes,
                    y = (1f - (float)tmp[5]) * YRes
                };
                break;

            default:
                // Unknown tracker: sentinel coordinates (-Infinity, -Infinity).
                // NOTE(review): assumes downstream ProcessGaze treats this as
                // an invalid sample — verify.
                _eyePix = Vector2.negativeInfinity;
                break;
            }

            // manually convert to pixels
            gp.ProcessGaze(_eyePix, out float[] gazeTargets, out float[] gazeCounts, out Vector3[] hitPoints);
Пример #3
0
    // Update is called once per frame
    /// <summary>
    /// Drives the EyeLink connection state machine each frame:
    /// (1) not connected, checker idle, calibration present -> configure the
    ///     tracked eye and start a background connection-checker thread;
    /// (2) not connected, checker reports tracker online -> open a broadcast
    ///     connection and enable data streaming;
    /// (3) connected, calibrated, tracker in mode 14 -> fetch the newest
    ///     sample, convert it to screen pixels and forward it to the gaze
    ///     processor.
    /// </summary>
    void Update()
    {
        Sample s;
        //Sample s;
        // NOTE(review): lastSampleTime is a local re-initialized to 0.0 on
        // every frame, so the `s.time != lastSampleTime` check below only
        // rejects samples whose time is exactly 0 — it never deduplicates
        // samples across frames. This looks like it was meant to be a field;
        // confirm intent before changing.
        double lastSampleTime = 0.0;

        // if not connected, has eye calibration and no thread: Initialize thread
        if (!el.isConnected() && !checker.RunCheck() && !checker.CheckELOnline() && eyecal.has_calibration)
        {
            // Configure eye
            // Map the calibration's tracked-eye index onto the EyeLink enum;
            // anything other than 0/1 means no usable eye.
            switch (eyecal.GetTrackedEye())
            {
            case 0:
                el_Eye = EL_EYE.EL_LEFT;
                break;

            case 1:
                el_Eye = EL_EYE.EL_RIGHT;
                break;

            default:
                el_Eye = EL_EYE.EL_EYE_NONE;
                break;
            }

            // Spawn checker thread to test connection
            checker.StartThread(eyecal.GetEyeLinkIP());

            s = null;
        }
        // If checker created
        else if (!el.isConnected() && checker.CheckELOnline() && eyecal.has_calibration)
        {
            // Eyelink Online
            // Connect
            el.setEyelinkAddress(eyecal.GetEyeLinkIP(), -1);
            el.broadcastOpen();
            if (el.isConnected())
            {
                Debug.Log("EyeLink Connected");
                // Connection established: the polling thread is no longer needed.
                checker.StopThread();

                //
                // Clear stale buffered data, then enable streaming.
                // NOTE(review): 4 | 8 are EyeLink data-switch flags — confirm
                // their meaning against the SR Research SDK constants.
                el.resetData();
                el.dataSwitch(4 | 8);
            }
            s = null;
        }

        // If connected, has calibration and tracker is in record mode: get sample
        // NOTE(review): 14 is assumed to be the tracker's record-mode code —
        // confirm against the EyeLink SDK mode enumeration.
        else if (el.isConnected() && eyecal.has_calibration && el.getTrackerMode() == 14)
        {
            try
            {
                s = el.getNewestSample();
            }
            catch
            {
                // Swallow sample-fetch failures deliberately: treat as
                // "no sample this frame" rather than crashing Update.
                //Debug.Log(e.ToString());
                s = null;
            }
        }
        else
        {
            s = null;
        }

        // Get position on screen in pixels
        if (s != null && s.time != lastSampleTime)
        {
            if (el_Eye != EL_EYE.EL_EYE_NONE)
            {
                // Binocular recordings fall back to the left eye's data.
                if (el_Eye == EL_EYE.EL_BINOCULAR)
                {
                    el_Eye = EL_EYE.EL_LEFT;
                }

                _eyeRaw.x = s.get_px(el_Eye);
                _eyeRaw.y = s.get_py(el_Eye);

                // Raw tracker coordinates -> degrees and screen pixels.
                eyecal.RawToPix(_eyeRaw, out _eyeDeg, out _eyePix);

                gazeProcess.ProcessGaze(_eyePix, out float[] gazeTargets, out float[] gazeCounts, out Vector3[] hitPoints);