Example #1
        /// <summary>
        /// Forwards 2D gaze points to the fixation detector. Gaze points are only forwarded
        /// if the CPU and eyetracker clocks are synchronized and validity is &lt; 2. If both eyes are valid, the gaze point coordinates are averaged;
        /// if only one eye is valid, only that eye's gaze point is used.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e">GazeDataItem to process</param>
        protected override void GazeDataReceivedSynchronized(object sender, GazeDataEventArgs e)
        {
            // process only samples where at least one eye has an acceptable validity code (< 2)
            if (e.GazeDataItem.LeftValidity < 2 || e.GazeDataItem.RightValidity < 2)
            {
                // convert timestamp
                long microseconds = e.GazeDataItem.TimeStamp;
                int  milliseconds = (int)(microseconds / 1000);
                milliseconds -= getTimestampOffset(milliseconds);
                int time = milliseconds;
                if (((microseconds / 100) % 10) >= 5)
                {
                    time++;                                   // round up to the nearest millisecond
                }
                // convert normalized screen coordinates (floats in [0, 1]) to pixel coordinates;
                // (0, 0) designates the top-left corner
                double leftX  = e.GazeDataItem.LeftGazePoint2D.X * SCREEN_WIDTH;
                double leftY  = e.GazeDataItem.LeftGazePoint2D.Y * SCREEN_HEIGHT;
                double rightX = e.GazeDataItem.RightGazePoint2D.X * SCREEN_WIDTH;
                double rightY = e.GazeDataItem.RightGazePoint2D.Y * SCREEN_HEIGHT;

                if (e.GazeDataItem.LeftValidity < 2 && e.GazeDataItem.RightValidity < 2)
                {
                    // average left and right eyes
                    int x = (int)((leftX + rightX) / 2);
                    int y = (int)((leftY + rightY) / 2);
                    fixationDetector.addPoint(time, x, y);
                }
                else if (e.GazeDataItem.LeftValidity < 2)
                {
                    // use only left eye
                    fixationDetector.addPoint(time, (int)leftX, (int)leftY);
                }
                else if (e.GazeDataItem.RightValidity < 2)
                {
                    // use only right eye
                    fixationDetector.addPoint(time, (int)rightX, (int)rightY);
                }
            }
        }
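
The timestamp rounding and the normalized-to-pixel conversion above can be exercised on their own. The sketch below is illustrative only: the class name GazeConversionSketch, the SCREEN_WIDTH/SCREEN_HEIGHT values and the zero clock offset are assumptions made for the example (in the real handler the offset comes from getTimestampOffset).

        using System;

        static class GazeConversionSketch
        {
            // Assumed display resolution for this sketch; the real constants belong to the tracked screen.
            const int SCREEN_WIDTH  = 1920;
            const int SCREEN_HEIGHT = 1080;

            // Convert an eyetracker timestamp in microseconds to milliseconds,
            // applying the same rounding rule as GazeDataReceivedSynchronized.
            static int ToMilliseconds(long microseconds, int offsetMilliseconds)
            {
                int milliseconds = (int)(microseconds / 1000) - offsetMilliseconds;
                if (((microseconds / 100) % 10) >= 5)
                {
                    milliseconds++;   // round up when the sub-millisecond part is >= 500 µs
                }
                return milliseconds;
            }

            // Scale a normalized gaze point (floats in [0, 1], (0, 0) at the top-left corner) to pixels.
            static (int X, int Y) ToPixels(double normX, double normY)
            {
                return ((int)(normX * SCREEN_WIDTH), (int)(normY * SCREEN_HEIGHT));
            }

            static void Main()
            {
                Console.WriteLine(ToMilliseconds(1234567, 0));   // 1234.567 ms rounds to 1235
                Console.WriteLine(ToPixels(0.5, 0.5));           // center of the assumed 1920x1080 screen: (960, 540)
            }
        }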
Example #2
        int count = 0; // testing: limits the commented-out debug dump below to a few samples

        /// <summary>
        /// Forwards 2D gaze points to the fixation detector. Gaze points are only forwarded
        /// if the CPU and eyetracker clocks are synchronized.
        ///
        /// A detailed explanation of the synchronization is available in the Tobii SDK 3.0 Developer Guide:
        /// http://www.tobii.com/Global/Analysis/Downloads/User_Manuals_and_Guides/Tobii%20SDK%203.0%20Release%20Candidate%201%20Developers%20Guide.pdf
        /// </summary>
        /// <param name="sender">Event source (unused)</param>
        /// <param name="e">GazeDataItem to process</param>
        public override void GazeDataReceived(object sender, GazeDataEventArgs e)
        {
            if (syncManager.SyncState.StateFlag == SyncStateFlag.Synchronized && count < 3) // testing
            {
                int time = (int)syncManager.RemoteToLocal(e.GazeDataItem.TimeStamp);
                int x    = (int)e.GazeDataItem.LeftGazePoint3D.X; // TODO Determine which of LeftGazePoint2D, LeftGazePoint3D, RightGazePoint2D, RightGazePoint3D or combination thereof is same as Tobii Studio
                int y    = (int)e.GazeDataItem.LeftGazePoint3D.Y;
                fixationDetector.addPoint(time, x, y);

                // testing

                /*GazeDataItem gdi = e.GazeDataItem;
                 * Console.WriteLine("GazeDataItem:\r\n" +
                 *  "LeftEyePosition3D         (" + gdi.LeftEyePosition3D.X + ", " + gdi.LeftEyePosition3D.Y + ", " + gdi.LeftEyePosition3D.Z + ")\r\n" +
                 *  "LeftEyePosition3DRelative (" + gdi.LeftEyePosition3DRelative.X + ", " + gdi.LeftEyePosition3DRelative.Y + ", " + gdi.LeftEyePosition3DRelative.Z + ")\r\n" +
                 *  "LeftGazePoint2D           (" + gdi.LeftGazePoint2D.X + ", " + gdi.LeftGazePoint2D.Y + ")\r\n" +
                 *  "LeftGazePoint3D           (" + gdi.LeftGazePoint3D.X + ", " + gdi.LeftGazePoint3D.Y + ", " + gdi.LeftGazePoint3D.Z + ")\r\n" +
                 *  "RightEyePosition3D         (" + gdi.RightEyePosition3D.X + ", " + gdi.RightEyePosition3D.Y + ", " + gdi.RightEyePosition3D.Z + ")\r\n" +
                 *  "RightEyePosition3DRelative (" + gdi.RightEyePosition3DRelative.X + ", " + gdi.RightEyePosition3DRelative.Y + ", " + gdi.RightEyePosition3DRelative.Z + ")\r\n" +
                 *  "RightGazePoint2D           (" + gdi.RightGazePoint2D.X + ", " + gdi.RightGazePoint2D.Y + ")\r\n" +
                 *  "RightGazePoint3D           (" + gdi.RightGazePoint3D.X + ", " + gdi.RightGazePoint3D.Y + ", " + gdi.RightGazePoint3D.Z + ")");
                 * count++;*/
            }
        }
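
The TODO on the LeftGazePoint3D line is still open. One possible resolution, sketched here purely as an assumption carried over from Example #1 (the helper name forwardGazePoint2D is made up for this sketch, and the mapping has not been verified against what Tobii Studio records), is to forward the normalized 2D gaze points scaled to screen pixels, averaging both eyes when both are valid:

        // Sketch only: mirrors Example #1's handling of the 2D gaze points.
        // SCREEN_WIDTH, SCREEN_HEIGHT and the "validity code < 2 means usable" convention
        // are assumptions taken from Example #1, not confirmed against Tobii Studio.
        private void forwardGazePoint2D(GazeDataEventArgs e, int time)
        {
            double leftX  = e.GazeDataItem.LeftGazePoint2D.X  * SCREEN_WIDTH;
            double leftY  = e.GazeDataItem.LeftGazePoint2D.Y  * SCREEN_HEIGHT;
            double rightX = e.GazeDataItem.RightGazePoint2D.X * SCREEN_WIDTH;
            double rightY = e.GazeDataItem.RightGazePoint2D.Y * SCREEN_HEIGHT;

            if (e.GazeDataItem.LeftValidity < 2 && e.GazeDataItem.RightValidity < 2)
            {
                // both eyes usable: average them
                fixationDetector.addPoint(time, (int)((leftX + rightX) / 2), (int)((leftY + rightY) / 2));
            }
            else if (e.GazeDataItem.LeftValidity < 2)
            {
                // only the left eye is usable
                fixationDetector.addPoint(time, (int)leftX, (int)leftY);
            }
            else if (e.GazeDataItem.RightValidity < 2)
            {
                // only the right eye is usable
                fixationDetector.addPoint(time, (int)rightX, (int)rightY);
            }
        }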