Exemplo n.º 1
0
        /// <summary>
        /// Handles a freshly computed IPA sample by writing its raw, normalized and
        /// smoothed values (4 decimal places each) to the IPA logger.
        /// </summary>
        private void OnNewIPAValueAvailable(object sender, GazeDataEventArgs e)
        {
            // This callback fires on an SDK thread, so the (non-thread-safe)
            // DebugLogger is deliberately not used here.
            string raw        = e.RawIPA.ToString("F4");
            string normalized = e.NormalizedIPA.ToString("F4");
            string smoothed   = e.SmoothIPA.ToString("F4");

            IPALogger.Log(raw, normalized, smoothed);
        }
 // This method will be called on a thread belonging to the SDK, and can not safely change values
 // that will be read from the main thread.
 // It therefore only appends the sample to the shared queue; the main thread is
 // expected to drain _queue under the same lock.
 private void EnqueueEyeData(object sender, GazeDataEventArgs e)
 {
     lock (_queue)
     {
         _queue.Enqueue(e);
     }
 }
Exemplo n.º 3
0
 /// <summary>
 /// Prints the left eye's 3D position (X, Y) to the console once the CPU and
 /// eye-tracker clocks report a synchronized state.
 ///
 /// Synchronization is explained in the Tobii SDK 3.0 Developer Guide:
 /// http://www.tobii.com/Global/Analysis/Downloads/User_Manuals_and_Guides/Tobii%20SDK%203.0%20Release%20Candidate%201%20Developers%20Guide.pdf
 /// </summary>
 /// <param name="sender"></param>
 /// <param name="e"></param>
 public override void GazeDataReceived(object sender, GazeDataEventArgs e)
 {
     // Samples that arrive before the clocks are synchronized are dropped.
     if (syncManager.SyncState.StateFlag != SyncStateFlag.Synchronized)
     {
         return;
     }

     var leftEye = e.GazeDataItem.LeftEyePosition3D;
     Console.WriteLine("GazeData - (" + leftEye.X + ", " + leftEye.Y + ")");
 }
Exemplo n.º 4
0
 /// <summary>
 /// Ensures CPU/eye-tracker clock synchronization before forwarding the received
 /// gaze data item to <see cref="GazeDataReceivedSynchronized"/>.
 /// </summary>
 /// <param name="sender"></param>
 /// <param name="e">Contains the gaze data item</param>
 public override sealed void GazeDataReceived(object sender, GazeDataEventArgs e)
 {
     // Unsynchronized samples are discarded silently.
     if (syncManager.SyncState.StateFlag != SyncStateFlag.Synchronized)
     {
         return;
     }

     GazeDataReceivedSynchronized(sender, e.GazeDataItem);
 }
Exemplo n.º 5
0
        /// <summary>
        /// Calculates the angular gaze velocity, in visual degrees per second,
        /// between two consecutive gaze samples.
        /// </summary>
        /// <param name="prevGaze">The previous sample; may be null on the first call, in which case 0 is returned.</param>
        /// <param name="currentGaze">The current sample.</param>
        /// <param name="option">Averaging mode; only "Average" is currently handled (anything else yields 0).</param>
        /// <returns>The velocity in degrees per second, or 0 when it cannot be computed.</returns>
        public static double CalculateVelocity(GazeDataEventArgs prevGaze, GazeDataEventArgs currentGaze, String option)
        {
            double velocity = 0;

            if (prevGaze == null)
            {
                return(velocity);
            }

            double visualAngleDegrees = 0;

            // Null-safe ordinal comparison: the original option.Equals("Average")
            // threw NullReferenceException when option was null, and used a
            // culture-sensitive comparison for a fixed keyword.
            if (string.Equals(option, "Average", StringComparison.Ordinal))
            {
                Point3D prevAveragePoint3D    = GetAveragePoint(prevGaze);
                Point3D currentAveragePoint3D = GetAveragePoint(currentGaze);
                visualAngleDegrees = GetVisualAngle(prevAveragePoint3D, currentAveragePoint3D);
                Console.Write("------ Visual angle ");
                Console.WriteLine(visualAngleDegrees);
            }

            // DeviceTimeStamp deltas are divided by 1e6, i.e. treated as microseconds.
            double durationSeconds = (currentGaze.DeviceTimeStamp - prevGaze.DeviceTimeStamp) / 1000000d;

            // Guard against zero or negative deltas (duplicate/out-of-order samples).
            if (durationSeconds > 0d)
            {
                velocity = visualAngleDegrees / durationSeconds;
            }
            return(velocity);
        }
Exemplo n.º 6
0
 /// <summary>
 /// Writes the left eye's 3D position (X, Y) to the console, but only while the
 /// CPU and eye-tracker clocks are synchronized.
 ///
 /// Synchronization details: Tobii SDK 3.0 Developer Guide,
 /// http://www.tobii.com/Global/Analysis/Downloads/User_Manuals_and_Guides/Tobii%20SDK%203.0%20Release%20Candidate%201%20Developers%20Guide.pdf
 /// </summary>
 /// <param name="sender"></param>
 /// <param name="e"></param>
 public override void GazeDataReceived(object sender, GazeDataEventArgs e)
 {
     bool clocksInSync = syncManager.SyncState.StateFlag == SyncStateFlag.Synchronized;
     if (clocksInSync)
     {
         var pos = e.GazeDataItem.LeftEyePosition3D;
         Console.WriteLine("GazeData - (" + pos.X + ", " + pos.Y + ")");
     }
 }
Exemplo n.º 7
0
        /// <summary>
        /// Converts a raw SDK gaze sample to a single normalized display point
        /// (two-eye average when both eyes are tracked, otherwise the tracked eye)
        /// and raises <c>GazeEvent</c>. Samples with any other tracking status are dropped.
        /// </summary>
        private void OnEyeTrackerGazeData(object sender, GazeDataEventArgs e)
        {
            var handler = GazeEvent;
            if (handler == null)
            {
                return; // nobody is listening
            }

            var gazeData = e.GazeData;
            var status   = gazeData.TrackingStatus;
            Point2D gazePoint;

            if (status == TrackingStatus.BothEyesTracked)
            {
                var left  = gazeData.Left.GazePointOnDisplayNormalized;
                var right = gazeData.Right.GazePointOnDisplayNormalized;
                gazePoint = new Point2D((left.X + right.X) / 2.0, (left.Y + right.Y) / 2.0);
            }
            else if (status == TrackingStatus.OnlyLeftEyeTracked ||
                     status == TrackingStatus.OneEyeTrackedProbablyLeft)
            {
                gazePoint = gazeData.Left.GazePointOnDisplayNormalized;
            }
            else if (status == TrackingStatus.OnlyRightEyeTracked ||
                     status == TrackingStatus.OneEyeTrackedProbablyRight)
            {
                gazePoint = gazeData.Right.GazePointOnDisplayNormalized;
            }
            else
            {
                return; // untracked or unknown status — skip this sample
            }

            // Timestamp is divided by 1000 to match the unit GazeEventArgs expects.
            var gazeEventArgs = new GazeEventArgs(gazePoint.X, gazePoint.Y, gazeData.Timestamp / 1000, Fixation.Unknown, true);
            handler(this, gazeEventArgs);
        }
Exemplo n.º 8
0
 /// <summary>
 /// Forwards the gaze data item to <see cref="GazeDataReceivedSynchronized"/>
 /// once CPU and eye-tracker clock synchronization has been established.
 /// </summary>
 /// <param name="sender"></param>
 /// <param name="e">Contains the gaze data item</param>
 public sealed override void GazeDataReceived(object sender, GazeDataEventArgs e)
 {
     bool synchronized = syncManager.SyncState.StateFlag == SyncStateFlag.Synchronized;
     if (synchronized)
     {
         GazeDataReceivedSynchronized(sender, e.GazeDataItem);
     }
 }
Exemplo n.º 9
0
        /// <summary>
        /// Extends the left (blue) and right (green) gaze traces on the UI thread.
        /// For each eye with a valid gaze point, a segment is drawn from the last
        /// known point (when one exists) and the last point is advanced.
        /// </summary>
        private void EyeTracker_GazeDataReceived(object sender, GazeDataEventArgs e)
        {
            // All drawing and _lastLeftPoint/_lastRightPoint updates happen on the
            // dispatcher thread, so no extra synchronization is needed here.
            Application.Current.Dispatcher.Invoke(new Action(() =>
            {
                var leftGaze = e.LeftEye.GazePoint;
                if (leftGaze.Validity == Validity.Valid)
                {
                    var next = ToPoint(leftGaze.PositionOnDisplayArea);
                    if (_lastLeftPoint != null)
                    {
                        DrawLine(_lastLeftPoint.Value, next, _blueBrush);
                    }
                    _lastLeftPoint = next;
                }

                var rightGaze = e.RightEye.GazePoint;
                if (rightGaze.Validity == Validity.Valid)
                {
                    var next = ToPoint(rightGaze.PositionOnDisplayArea);
                    if (_lastRightPoint != null)
                    {
                        DrawLine(_lastRightPoint.Value, next, _greenBrush);
                    }
                    _lastRightPoint = next;
                }
            }));
        }
Exemplo n.º 10
0
 /// <summary>
 /// Wraps one raw SDK gaze sample: captures its system timestamp, builds the
 /// per-eye views, and keeps a reference to the original event.
 /// </summary>
 internal GazeData(GazeDataEventArgs originalGaze)
 {
     OriginalGaze = originalGaze;
     TimeStamp = originalGaze.SystemTimeStamp;
     Left = new GazeDataEye(originalGaze.LeftEye);
     Right = new GazeDataEye(originalGaze.RightEye);
 }
Exemplo n.º 11
0
        /// <summary>
        /// Raises a normalized gaze point for the current sample: the average of
        /// both eyes when both are tracked (or the tracked eye is unknown),
        /// otherwise the single tracked eye. Other statuses are ignored.
        /// </summary>
        private void OnGazeData(object sender, GazeDataEventArgs gazeDataEventArgs)
        {
            var gazeData = gazeDataEventArgs.GazeData;

            switch (gazeData.TrackingStatus)
            {
            case TrackingStatus.BothEyesTracked:
            case TrackingStatus.OneEyeTrackedUnknownWhich:
                RaiseGazePoint(new Point2D(
                    (gazeData.Left.GazePointOnDisplayNormalized.X +
                     gazeData.Right.GazePointOnDisplayNormalized.X) / 2,
                    (gazeData.Left.GazePointOnDisplayNormalized.Y +
                     gazeData.Right.GazePointOnDisplayNormalized.Y) / 2));
                break;

            case TrackingStatus.OnlyLeftEyeTracked:
            case TrackingStatus.OneEyeTrackedProbablyLeft:
                RaiseGazePoint(gazeData.Left.GazePointOnDisplayNormalized);
                break;

            case TrackingStatus.OnlyRightEyeTracked:
            case TrackingStatus.OneEyeTrackedProbablyRight:
                RaiseGazePoint(gazeData.Right.GazePointOnDisplayNormalized);
                break;
            }
        }
Exemplo n.º 12
0
        /// <summary>
        /// Dumps one gaze sample to the console as a single line: timestamp in
        /// seconds, tracking status, then the normalized left and right gaze points
        /// (an untracked eye is printed as "[-,-]").
        /// </summary>
        private static void EyeTrackerGazeData(object sender, GazeDataEventArgs e)
        {
            var gazeData = e.GazeData;
            var status   = gazeData.TrackingStatus;

            Console.Write("{0} ", gazeData.Timestamp / 1e6); // in seconds
            Console.Write("{0} ", status);

            bool leftTracked = status == TrackingStatus.BothEyesTracked ||
                               status == TrackingStatus.OnlyLeftEyeTracked ||
                               status == TrackingStatus.OneEyeTrackedProbablyLeft;
            if (leftTracked)
            {
                Console.Write("[{0:N4},{1:N4}] ", gazeData.Left.GazePointOnDisplayNormalized.X, gazeData.Left.GazePointOnDisplayNormalized.Y);
            }
            else
            {
                Console.Write("[-,-] ");
            }

            bool rightTracked = status == TrackingStatus.BothEyesTracked ||
                                status == TrackingStatus.OnlyRightEyeTracked ||
                                status == TrackingStatus.OneEyeTrackedProbablyRight;
            if (rightTracked)
            {
                Console.Write("[{0:N4},{1:N4}] ", gazeData.Right.GazePointOnDisplayNormalized.X, gazeData.Right.GazePointOnDisplayNormalized.Y);
            }
            else
            {
                Console.Write("[-,-] ");
            }

            Console.WriteLine();
        }
Exemplo n.º 13
0
        // OnGazeDataReceive handler
        /// <summary>
        /// Copies one gaze sample into the node's output pins (timestamp, per-eye
        /// 3D positions, gaze points, pupil diameters and validities) when at least
        /// one slice is requested, and maps the averaged gaze point — when one is
        /// available — into the target coordinate space.
        /// </summary>
        void GazeDataReceive(object sender, GazeDataEventArgs e)
        {
            if (FSliceCount > 0)
            {
                // Wrap the raw item to get the pre-split per-eye values.
                CustomGazeData cgd = new CustomGazeData(e.GazeDataItem);
                FOutputTimeStamp[0]            = e.GazeDataItem.TimeStamp;
                FOutputLeftEyePos[0]           = cgd.LEPos3D;
                FOutputLeftEyePosRel[0]        = cgd.LEPos3DRel;
                FOutputLeftEyeGazePoint[0]     = cgd.LEGazePoint;
                FOutputLeftEyePupilDiameter[0] = cgd.LEPupilDiameter;
                FOutputLeftEyeValidity[0]      = cgd.LEValidity;

                FOutputRightEyePos[0]           = cgd.REPos3D;
                FOutputRightEyePosRel[0]        = cgd.REPos3DRel;
                FOutputRightEyeGazePoint[0]     = cgd.REGazePoint;
                FOutputRightEyePupilDiameter[0] = cgd.REPupilDiameter;
                FOutputRightEyeValidity[0]      = cgd.REValidity;

                // GetAverageGazePoint returns null when no usable gaze is present;
                // in that case the previous output value is left untouched.
                Vector2D?gazeVec = GetAverageGazePoint(e.GazeDataItem);
                if (gazeVec.HasValue)
                {
                    FOutputGazePoint2D[0] = MapValue_ETToVVVV(gazeVec.Value);
                }
            }
        }
Exemplo n.º 14
0
        /// <summary>
        /// Returns the gaze point in 3D user coordinates: the average of both eyes
        /// when both are valid, the single valid eye otherwise, or (0, 0, 0) when
        /// neither eye is valid.
        /// </summary>
        /// <param name="gazeEvent">Sample containing per-eye gaze points and validities.</param>
        /// <returns>The averaged (or fallback) 3D gaze point.</returns>
        private static Point3D GetAveragePoint(GazeDataEventArgs gazeEvent)
        {
            float gazeX = 0;
            float gazeY = 0;
            float gazeZ = 0;

            if (gazeEvent.LeftEye.GazePoint.Validity == Validity.Valid &&
                gazeEvent.RightEye.GazePoint.Validity == Validity.Valid)
            {
                gazeX = (gazeEvent.LeftEye.GazePoint.PositionInUserCoordinates.X + gazeEvent.RightEye.GazePoint.PositionInUserCoordinates.X) / 2;
                gazeY = (gazeEvent.LeftEye.GazePoint.PositionInUserCoordinates.Y + gazeEvent.RightEye.GazePoint.PositionInUserCoordinates.Y) / 2;
                // Fixed: this previously overwrote gazeY with the Z average,
                // leaving gazeZ at 0 and corrupting the Y component.
                gazeZ = (gazeEvent.LeftEye.GazePoint.PositionInUserCoordinates.Z + gazeEvent.RightEye.GazePoint.PositionInUserCoordinates.Z) / 2;
            }
            else if (gazeEvent.LeftEye.GazePoint.Validity == Validity.Valid)
            {
                gazeX = gazeEvent.LeftEye.GazePoint.PositionInUserCoordinates.X;
                gazeY = gazeEvent.LeftEye.GazePoint.PositionInUserCoordinates.Y;
                // Fixed: gazeZ was previously assigned from the Y component.
                gazeZ = gazeEvent.LeftEye.GazePoint.PositionInUserCoordinates.Z;
            }
            else if (gazeEvent.RightEye.GazePoint.Validity == Validity.Valid)
            {
                gazeX = gazeEvent.RightEye.GazePoint.PositionInUserCoordinates.X;
                gazeY = gazeEvent.RightEye.GazePoint.PositionInUserCoordinates.Y;
                // Fixed: gazeZ was previously assigned from the Y component.
                gazeZ = gazeEvent.RightEye.GazePoint.PositionInUserCoordinates.Z;
            }

            return(new Point3D(gazeX, gazeY, gazeZ));
        }
        /// <summary>
        /// Maps both eyes' positions in the normalized track box to mirrored 2D
        /// points plus a depth value and posts them to the UI thread. Eyes that are
        /// not tracked are reported as NaN points.
        /// </summary>
        private void OnGazeData(object sender, GazeDataEventArgs e)
        {
            var leftBox  = e.GazeData.Left.EyePositionInTrackBoxNormalized;
            var rightBox = e.GazeData.Right.EyePositionInTrackBoxNormalized;

            // Mirror the x coordinate so the visualization matches the user's view.
            var left  = new Point2D(1 - leftBox.X, leftBox.Y);
            var right = new Point2D(1 - rightBox.X, rightBox.Y);
            var z     = 1.1; // default depth used when no per-eye Z is available

            var status = e.GazeData.TrackingStatus;
            if (status == TrackingStatus.BothEyesTracked)
            {
                z = (leftBox.Z + rightBox.Z) / 2;
            }
            else if (status == TrackingStatus.OnlyLeftEyeTracked)
            {
                z     = leftBox.Z;
                right = new Point2D(double.NaN, double.NaN);
            }
            else if (status == TrackingStatus.OnlyRightEyeTracked)
            {
                z    = rightBox.Z;
                left = new Point2D(double.NaN, double.NaN);
            }
            else
            {
                left  = new Point2D(double.NaN, double.NaN);
                right = new Point2D(double.NaN, double.NaN);
            }

            _dispatcher.BeginInvoke(new Action(() =>
            {
                SetEyePositions(left, right, z);
            }));
        }
Exemplo n.º 16
0
        /// <summary>
        /// Stores one gaze sample, in screen pixels, into the ring buffer: the
        /// average of both eyes when both are valid, otherwise the single valid
        /// eye. Sets hasNoGaze when both eyes are invalid.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e">Sample with per-eye validity and display positions.</param>
        private void EyeTracker_GazeDataReceived(object sender, GazeDataEventArgs e)
        {
            int w = Screen.PrimaryScreen.Bounds.Width;
            int h = Screen.PrimaryScreen.Bounds.Height;

            var leftValidity  = e.LeftEye.GazePoint.Validity;
            var rightValidity = e.RightEye.GazePoint.Validity;

            if (leftValidity == Validity.Valid && rightValidity == Validity.Valid)
            {
                // Combine both eyes by averaging their (truncated) pixel positions.
                int x = ((int)(e.LeftEye.GazePoint.PositionOnDisplayArea.X * w) + (int)(e.RightEye.GazePoint.PositionOnDisplayArea.X * w)) / 2;
                int y = ((int)(e.LeftEye.GazePoint.PositionOnDisplayArea.Y * h) + (int)(e.RightEye.GazePoint.PositionOnDisplayArea.Y * h)) / 2;
                RecordGazeSample(x, y);
            }
            else if (leftValidity == Validity.Valid && rightValidity == Validity.Invalid)
            {
                int x = (int)(e.LeftEye.GazePoint.PositionOnDisplayArea.X * w);
                int y = (int)(e.LeftEye.GazePoint.PositionOnDisplayArea.Y * h);
                RecordGazeSample(x, y);
            }
            else if (leftValidity == Validity.Invalid && rightValidity == Validity.Valid)
            {
                int x = (int)(e.RightEye.GazePoint.PositionOnDisplayArea.X * w);
                int y = (int)(e.RightEye.GazePoint.PositionOnDisplayArea.Y * h);
                RecordGazeSample(x, y);
            }
            else if (leftValidity == Validity.Invalid && rightValidity == Validity.Invalid)
            {
                hasNoGaze = true;
            }
        }

        // Advances the ring buffer and stores one screen-pixel gaze sample.
        // Extracted from the handler above, where this bookkeeping was
        // duplicated in all three valid-eye branches.
        private void RecordGazeSample(int x, int y)
        {
            sampleCount++;
            sampleIndex++;
            if (sampleIndex >= samples.Length)
            {
                sampleIndex = 0;
            }
            samples[sampleIndex] = new Point(x, y);

            hasNoGaze = false;
        }
 // Intentionally a no-op: the commented block below shows how a gaze sample
 // could be surfaced in the UI via the dispatcher.
 private void EyeTracker_GazeDataReceived(object sender, GazeDataEventArgs e)
 {
     //this.Dispatcher.Invoke(() =>
     //{
     //    Connect.Text = $"Got gaze data with {e.LeftEye.GazeOrigin.Validity} left eye origin at point ({e.LeftEye.GazeOrigin.PositionInUserCoordinates.X}," +
     //               $" {e.LeftEye.GazeOrigin.PositionInUserCoordinates.Y}, {e.LeftEye.GazeOrigin.PositionInUserCoordinates.Z}) in the user coordinate system.";
     //});
 }
Exemplo n.º 18
0
        //--------------------------------------------------------------------
        // Low Level Functions
        //--------------------------------------------------------------------

        /// <summary>
        /// Updates the current gaze point and the derived gaze rectangle from one
        /// incoming sample.
        /// </summary>
        private void GazeDataReceived(object sender, GazeDataEventArgs e)
        {
            // The lock keeps the point and rectangle updates atomic with respect
            // to any reader that also locks on ScreenGaze.
            lock (ScreenGaze)
            {
                this.UpdateGazePoint(e);
                this.UpdateGazeRect();
            }
        }
        /// <summary>
        /// Notifies the eye detector of the sample's tracking status, then forwards
        /// the raw gaze data to <c>RaiseGazePoint</c>.
        /// </summary>
        private void OnGazeData(object sender, GazeDataEventArgs gazeDataEventArgs)
        {
            var data = gazeDataEventArgs.GazeData;
            detectorDeOjos.dataReceived(data.TrackingStatus);
            RaiseGazePoint(data);
        }
Exemplo n.º 20
0
 /// <summary>
 /// Records one gaze sample (timestamp plus left/right 2D gaze points) to the
 /// output writer and echoes the same line to the console.
 /// </summary>
 public void _tracker_GazeDataReceived(object sender, GazeDataEventArgs e)
 {
     var item = e.GazeDataItem;
     _leftGaze  = item.LeftGazePoint2D;
     _rightGaze = item.RightGazePoint2D;
     _time      = item.Timestamp;

     // Format once, emit twice, so file and console stay in sync.
     string line = string.Format("{0}, {1}, {2}, {3}, {4}", _time, _leftGaze.X, _leftGaze.Y, _rightGaze.X, _rightGaze.Y);
     swt.WriteLine(line);
     Console.WriteLine(line);
 }
        //'##################################################################################################



        /// <summary>
        /// Prints the left eye's gaze position on the display area (X, then Y, then
        /// a blank line) to the console.
        /// </summary>
        private void EyeTracker_GazeDataReceived(object sender, GazeDataEventArgs e)
        {
            var position = e.LeftEye.GazePoint.PositionOnDisplayArea;

            Console.WriteLine(position.X);
            Console.WriteLine(position.Y);
            Console.WriteLine();
        }
Exemplo n.º 22
0
        /// <summary>
        /// Updates the 3D eye-pair and gaze-vector visuals from one gaze sample.
        /// Positions and gaze points are scaled by 1/10 (millimeters to
        /// centimeters); an eye whose validity code is 2 or higher is hidden by
        /// parking its visuals at a far-away sentinel point.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e">Sample with per-eye 3D positions, gaze points and validities.</param>
        private void _tracker_GazeDataReceived(object sender, GazeDataEventArgs e)
        {
            // Convert to centimeters
            const double D = 10.0;

            // NOTE(review): these fields are written on the SDK callback thread and
            // read inside the dispatched delegate below; a sample arriving before
            // the delegate runs can overwrite them — confirm this tearing is acceptable.
            _leftPos.X = e.GazeDataItem.LeftEyePosition3D.X / D;
            _leftPos.Y = e.GazeDataItem.LeftEyePosition3D.Y / D;
            _leftPos.Z = e.GazeDataItem.LeftEyePosition3D.Z / D;

            _rightPos.X = e.GazeDataItem.RightEyePosition3D.X / D;
            _rightPos.Y = e.GazeDataItem.RightEyePosition3D.Y / D;
            _rightPos.Z = e.GazeDataItem.RightEyePosition3D.Z / D;

            _leftGaze.X = e.GazeDataItem.LeftGazePoint3D.X / D;
            _leftGaze.Y = e.GazeDataItem.LeftGazePoint3D.Y / D;
            _leftGaze.Z = e.GazeDataItem.LeftGazePoint3D.Z / D;

            _rightGaze.X = e.GazeDataItem.RightGazePoint3D.X / D;
            _rightGaze.Y = e.GazeDataItem.RightGazePoint3D.Y / D;
            _rightGaze.Z = e.GazeDataItem.RightGazePoint3D.Z / D;

            // Set which eyes to show (validity codes below 2 are treated as usable).
            _showLeft  = e.GazeDataItem.LeftValidity < 2;
            _showRight = e.GazeDataItem.RightValidity < 2;

            Action update = delegate()
            {
                // Single sentinel (previously duplicated in both hide branches)
                // used to move a hidden eye far outside the viewport.
                Point3D farAway = new Point3D(1000.0, 1000.0, 1000.0);

                if (_showLeft)
                {
                    _eyePair.LeftEyePosition = _leftPos;
                    _leftGazeVector.Point1   = _leftPos;
                    _leftGazeVector.Point2   = _leftGaze;
                }
                else
                {
                    _eyePair.LeftEyePosition = farAway;
                    _leftGazeVector.Point1   = farAway;
                    _leftGazeVector.Point2   = farAway;
                }

                if (_showRight)
                {
                    _eyePair.RightEyePosition = _rightPos;
                    _rightGazeVector.Point1   = _rightPos;
                    _rightGazeVector.Point2   = _rightGaze;
                }
                else
                {
                    _eyePair.RightEyePosition = farAway;
                    _rightGazeVector.Point1   = farAway;
                    _rightGazeVector.Point2   = farAway;
                }
            };

            Dispatcher.BeginInvoke(update);
        }
Exemplo n.º 23
0
 /// <summary>
 /// Serializes a raw gaze sample as an &lt;OriginalGaze&gt; element: both
 /// timestamps as attributes followed by one child element per eye.
 /// </summary>
 /// <param name="file">Target XML writer; an element must be open or the document must accept a new element.</param>
 /// <param name="originalGaze">The sample to serialize.</param>
 internal static void WriteRawGaze(this XmlWriter file, GazeDataEventArgs originalGaze)
 {
     file.WriteStartElement("OriginalGaze");
     file.WriteAttributeString("DeviceTimeStamp", originalGaze.DeviceTimeStamp.ToString());
     file.WriteAttributeString("SystemTimeStamp", originalGaze.SystemTimeStamp.ToString());
     file.WriteEyeData(originalGaze.LeftEye, "LeftEye");
     file.WriteEyeData(originalGaze.RightEye, "RightEye");
     file.WriteEndElement();
 }
Exemplo n.º 24
0
        /// <summary>
        /// Logs the left-eye gaze point to the console and appends a semicolon-
        /// separated record (wall-clock time, device timestamp, pixel coordinates,
        /// pupil diameter) to the in-memory gaze log.
        /// </summary>
        private void MyEyeTracker_GazeDataReceived(object sender, GazeDataEventArgs e)
        {
            // Promote to double once so the scaled values below use double math.
            double x = e.LeftEye.GazePoint.PositionOnDisplayArea.X;
            double y = e.LeftEye.GazePoint.PositionOnDisplayArea.Y;

            Console.WriteLine("Gaze point at ({0:0.0}, {1:0.0}) @{2:0}", x, y, e.SystemTimeStamp);

            string wallClock = DateTime.Now.ToString("MM/dd/yyyy HH:mm:ss.FFF");
            linesGaze.Add(wallClock + ";" + e.SystemTimeStamp + ";" + x * width + ";" + y * height + ";" + e.LeftEye.Pupil.PupilDiameter);
        }
Exemplo n.º 25
0
 /// <summary>
 /// Appends the incoming gaze sample to the event list, but only while data
 /// collection is enabled. The flag check and the append are guarded by the
 /// same lock so collection can be toggled safely from another thread.
 /// </summary>
 /// <param name="sender"></param>
 /// <param name="e">Contains gaze data item</param>
 protected override void GazeDataReceivedSynchronized(object sender, GazeDataEventArgs e)
 {
     // NOTE(review): lock (this) is fragile — any external code that locks on
     // this instance contends with (or deadlocks against) this handler. A
     // private lock object would be safer, but switching requires a new field
     // and auditing all other lockers, so it is left unchanged here.
     lock (this)
     {
         if (collectingData)
         {
             events.AddLast(new EyetrackerEvent(e.GazeDataItem));
         }
     }
 }
 // This method will be called on the main Unity thread
 // Currently a no-op placeholder: the commented example below shows how to
 // read the left-eye gaze origin from the sample.
 private void HandleGazeData(GazeDataEventArgs e)
 {
     // Do something with gaze data
     // Debug.Log(string.Format(
     //     "Got gaze data with {0} left eye origin at point ({1}, {2}, {3}) in the user coordinate system.",
     //     e.LeftEye.GazeOrigin.Validity,
     //     e.LeftEye.GazeOrigin.PositionInUserCoordinates.X,
     //     e.LeftEye.GazeOrigin.PositionInUserCoordinates.Y,
     //     e.LeftEye.GazeOrigin.PositionInUserCoordinates.Z));
 }
Exemplo n.º 27
0
        /// <summary>
        /// Prints the left eye's gaze point — scaled to primary-screen pixels —
        /// together with its validity.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e">Sample containing the left eye's gaze point.</param>
        private static void EyeTracker_GazeDataReceived(object sender, GazeDataEventArgs e)
        {
            int w = Screen.PrimaryScreen.Bounds.Width;
            int h = Screen.PrimaryScreen.Bounds.Height;

            // The original message claimed "eye origin ... in the user coordinate
            // system", but the values printed are the gaze point scaled to display
            // pixels — the message now says what is actually printed.
            Console.WriteLine(
                "Got gaze data with {0} left eye gaze point at ({1}, {2}) in screen pixels.",
                e.LeftEye.GazePoint.Validity,
                e.LeftEye.GazePoint.PositionOnDisplayArea.X * w,
                e.LeftEye.GazePoint.PositionOnDisplayArea.Y * h);
        }
Exemplo n.º 28
0
            /// <summary>
            /// Logs one gaze sample: builds a CSV record (left-eye origin, per-eye
            /// gaze points, pupil data, Unix and local timestamps), caches the
            /// left-eye gaze point for the UI, and appends the record to the
            /// capture file when recording is enabled.
            /// </summary>
            /// <param name="sender"></param>
            /// <param name="e">Sample with per-eye origins, gaze points and pupil data.</param>
            private static void EyeTracker_GazeDataReceived(object sender, GazeDataEventArgs e)
            {
                // NOTE(review): "hh" is a 12-hour clock with no AM/PM marker, which
                // makes the local timestamp ambiguous; kept as-is so existing log
                // parsers are not broken ("HH" would be unambiguous).
                // (Removed a redundant .ToString() call on the already-string result.)
                var local_timestamp = DateTimeOffset.Now.ToString("MM/dd/yyyy hh:mm:ss.fff");
                var UnixTimestamp   = new DateTimeOffset(DateTime.UtcNow).ToUnixTimeMilliseconds().ToString();

                var t_str = String.Format("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12}\r\n",
                                          e.LeftEye.GazeOrigin.Validity,
                                          e.LeftEye.GazeOrigin.PositionInUserCoordinates.X,
                                          e.LeftEye.GazeOrigin.PositionInUserCoordinates.Y,
                                          e.LeftEye.GazeOrigin.PositionInUserCoordinates.Z,
                                          e.LeftEye.GazePoint.PositionOnDisplayArea.X,
                                          e.LeftEye.GazePoint.PositionOnDisplayArea.Y,
                                          e.RightEye.GazePoint.PositionOnDisplayArea.X,
                                          e.RightEye.GazePoint.PositionOnDisplayArea.Y,
                                          e.LeftEye.Pupil.Validity,
                                          e.LeftEye.Pupil.PupilDiameter,
                                          e.RightEye.Pupil.PupilDiameter,
                                          UnixTimestamp,
                                          local_timestamp);

                // Cache the left-eye gaze point (display-area coordinates) and a
                // compact string form for the form/UI to consume.
                m_eyegazex = e.LeftEye.GazePoint.PositionOnDisplayArea.X;
                m_eyegazey = e.LeftEye.GazePoint.PositionOnDisplayArea.Y;

                m_eyegazestr = String.Format("{0},{1},{2}",
                                             m_eyegazex,
                                             m_eyegazey,
                                             UnixTimestamp);

                // Append (not overwrite) so the file accumulates the whole session.
                if (eyetrackingrecordenabled)
                {
                    System.IO.File.AppendAllText(gazedatasavingpath, t_str);
                }
            }
Exemplo n.º 29
0
 /// <summary>
 /// Queues gaze samples that carry at least one valid pupil measurement.
 /// Any failure is written to the console rather than being allowed to
 /// propagate back into the SDK callback thread.
 /// </summary>
 private void HandleGazeData(object sender, GazeDataEventArgs gazeEvent)
 {
     try
     {
         // Same short-circuit order as before: the right eye is only
         // inspected when the left pupil is not valid.
         if (gazeEvent.LeftEye.Pupil.Validity != Validity.Valid &&
             gazeEvent.RightEye.Pupil.Validity != Validity.Valid)
         {
             return; // neither pupil is usable — drop the sample
         }

         GazeEventQueue.Enqueue(gazeEvent);
     }
     catch (Exception exp)
     {
         Console.Write(exp);
     }
 }
Exemplo n.º 30
0
 /// <summary>
 /// Accumulates gaze samples (with wall-clock timestamps) for the first five
 /// seconds of the stopwatch; once the window has elapsed, stops tracking and
 /// closes the recorder.
 /// </summary>
 private void _connectedTracker_GazeDataReceived(object sender, GazeDataEventArgs e)
 {
     if (s.Elapsed >= TimeSpan.FromSeconds(5))
     {
         _connectedTracker.StopTracking();
         _isTracking = false;
         rc.Close();
         return;
     }

     currentDataItem.GazeData.Add(e.GazeDataItem);
     currentDataItem.Timestamp.Add(DateTime.Now);
 }
Exemplo n.º 31
0
    /// <summary>
    /// Caches the latest per-eye 2D gaze points and pupil diameters from the
    /// incoming sample into the tracker's fields.
    /// </summary>
    void tracker_GazeDataReceived(object sender, GazeDataEventArgs e)
    {
        var item = e.GazeDataItem;

        leftGaze_X = item.LeftGazePoint2D.X;
        leftGaze_Y = item.LeftGazePoint2D.Y;

        rightGaze_X = item.RightGazePoint2D.X;
        rightGaze_Y = item.RightGazePoint2D.Y;

        left_pupil_diameter  = item.LeftPupilDiameter;
        right_pupil_diameter = item.RightPupilDiameter;
    }
Exemplo n.º 32
0
 /// <summary>
 /// Waits for the left-eye gaze to land inside a small window around the
 /// screen center (0.49..0.51 on both axes); when it does, logs the hit,
 /// advances the calibration counter and detaches this handler so it fires
 /// at most once.
 /// </summary>
 private static void EyeTracker_GazeDataReceived(object sender, GazeDataEventArgs e)
 {
     var gaze = e.LeftEye.GazePoint.PositionOnDisplayArea;

     bool inCenterWindow = gaze.X >= 0.49f && gaze.X <= 0.51f &&
                           gaze.Y >= 0.49f && gaze.Y <= 0.51f;
     if (!inCenterWindow)
     {
         return;
     }

     Console.WriteLine(
         "Got gaze data with {0} left eye origin at point ({1}, {2}, ) in the user coordinate system.",
         e.LeftEye.GazePoint.Validity,
         gaze.X,
         gaze.Y
         );
     calibrationBegin2++;
     mEyeTracker.GazeDataReceived -= EyeTracker_GazeDataReceived;
 }
Exemplo n.º 33
0
        /// <summary>
        /// Converts both eyes' normalized gaze points to board-client pixel
        /// coordinates and publishes them as sensor data.
        /// (Removed an unused local and dead commented-out code.)
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e">Sample with per-eye normalized display gaze points.</param>
        private void EyeTrackerGazeData(object sender, GazeDataEventArgs e)
        {
            var point1 = e.GazeData.Left.GazePointOnDisplayNormalized;
            var point2 = e.GazeData.Right.GazePointOnDisplayNormalized;

            // Scale the normalized coordinates to working-area pixels.
            var p1 = new Point((int)(point1.X * Screen.PrimaryScreen.WorkingArea.Width), (int)(point1.Y * Screen.PrimaryScreen.WorkingArea.Height));
            var p2 = new Point((int)(point2.X * Screen.PrimaryScreen.WorkingArea.Width), (int)(point2.Y * Screen.PrimaryScreen.WorkingArea.Height));

            // PointToClient must run on the control's UI thread, hence the Invoke.
            p1 = (Point)this.boardControl.Invoke((Delegate)(Func<object>)(() => this.boardControl.PointToClient(p1)));
            p2 = (Point)this.boardControl.Invoke((Delegate)(Func<object>)(() => this.boardControl.PointToClient(p2)));

            this.OnDataAvailable(new EyeTrackerSensorData(p1, p2));
        }
Exemplo n.º 34
0
        /// <summary>
        /// Reports the sample's tracking status to the eye detector and then raises
        /// the gaze point from the raw data.
        /// </summary>
        private void OnGazeData(object sender, GazeDataEventArgs gazeDataEventArgs)
        {
            var sample = gazeDataEventArgs.GazeData;

            detectorDeOjos.dataReceived(sample.TrackingStatus);
            RaiseGazePoint(sample);
        }
Exemplo n.º 35
0
 /// <summary>
 /// Prints the received gaze data item. Judging by the "Synchronized" suffix
 /// and the sealed GazeDataReceived overrides elsewhere in this file, this is
 /// invoked only after CPU and eye-tracker clocks are synchronized — the
 /// synchronization check itself happens in the caller, not here.
 ///
 /// Synchronization details: Tobii SDK 3.0 Developer Guide,
 /// http://www.tobii.com/Global/Analysis/Downloads/User_Manuals_and_Guides/Tobii%20SDK%203.0%20Release%20Candidate%201%20Developers%20Guide.pdf
 /// </summary>
 /// <param name="sender"></param>
 /// <param name="e"></param>
 protected override void GazeDataReceivedSynchronized(object sender, GazeDataEventArgs e)
 {
     Print(e.GazeDataItem);
 }
Exemplo n.º 36
0
 /// <summary>
 /// Writes the left eye's 3D position (X, Y) to the console. The base class is
 /// responsible for only invoking this once the CPU and eye-tracker clocks are
 /// synchronized (see the Tobii SDK 3.0 Developer Guide,
 /// http://www.tobii.com/Global/Analysis/Downloads/User_Manuals_and_Guides/Tobii%20SDK%203.0%20Release%20Candidate%201%20Developers%20Guide.pdf).
 /// </summary>
 /// <param name="sender"></param>
 /// <param name="e"></param>
 protected override void GazeDataReceivedSynchronized(object sender, GazeDataEventArgs e)
 {
     var leftEye = e.GazeDataItem.LeftEyePosition3D;
     Console.WriteLine("GazeData - (" + leftEye.X + ", " + leftEye.Y + ")");
 }
Exemplo n.º 37
0
        /// <summary>
        /// Gaze-swipe navigation: maps the per-eye gaze to screen pixels and, when
        /// the gaze has moved and at least 750 ms have elapsed, flips the image
        /// carousel to the previous (gaze left) or next (gaze right) image with
        /// wrap-around, then animates the slide.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e">Sample containing the per-eye 2D gaze points.</param>
        private void _tracker_GazeDataReceived(object sender, GazeDataEventArgs e)
        {
            var gd = e.GazeDataItem;

            // NOTE(review): the screen size is hard-coded to 1920x1200; any other
            // resolution mis-scales the gaze coordinates — confirm this is the
            // only supported display.
            _leftGaze.X = gd.LeftGazePoint2D.X * 1920;
            _leftGaze.Y = gd.LeftGazePoint2D.Y * 1200;

            _rightGaze.X = gd.RightGazePoint2D.X * 1920;
            _rightGaze.Y = gd.RightGazePoint2D.Y * 1200;

            // Bail out when no usable combined point can be produced.
            if (!GazeHelper.SetCurrentPoint(ref _current, _leftGaze, _rightGaze))
                return;

            _current = PointFromScreen(_current);

            if (GazeHaveMoved(_current) && _sw.ElapsedMilliseconds > 750)
            {
                if (IsGazeLeftSide() && _isSwipeAllowed)
                {
                    // SWIPE RIGHT ~>>~>>~>> (PREV)
                    Debug.WriteLine("Prev");

                    _isSwipeAllowed = false;

                    // Wrap from the first image back to the last.
                    if (ImageContainer.SelectedIndex == 0)
                    {
                        ImageContainer.SelectedIndex = ImageContainer.Items.Count - 1;
                    }
                    else
                    {
                        ImageContainer.SelectedIndex--;
                    }
                    ImageContainer.RunSlideAnimation(ActualWidth);
                }
                else if (IsGazeRightSide() && _isSwipeAllowed)
                {
                    // SWIPE LEFT <<~<<~<<~ (NEXT)
                    Debug.WriteLine("Next");
                    _isSwipeAllowed = false;

                    // Wrap from the last image back to the first.
                    if (ImageContainer.SelectedIndex == ImageContainer.Items.Count - 1)
                    {
                        ImageContainer.SelectedIndex = 0;
                    }
                    else
                    {
                        ImageContainer.SelectedIndex++;
                    }
                    ImageContainer.RunSlideAnimation(-ActualWidth);
                }

                // NOTE(review): this unconditionally re-enables swiping immediately
                // after the branches above cleared it, so _isSwipeAllowed never
                // stays false across calls — confirm whether the flag is meant to
                // debounce anything at all.
                _isSwipeAllowed = true;
                _previous = _current;

                Debug.WriteLine(_sw.ElapsedMilliseconds);
                _sw.Restart();
            }
        }
Exemplo n.º 38
0
        /// <summary>
        /// Handles gaze data received from the tracker: substitutes a missing
        /// eye with the other one, averages both eyes, converts the result to
        /// smoothed client coordinates and raises the gaze event.
        /// </summary>
        /// <param name="sender">Source of the event.</param>
        /// <param name="e">Contains the gaze data item.</param>
        private void tracker_GazeDataReceived(object sender, GazeDataEventArgs e)
        {
            Point2D leftGaze = e.GazeDataItem.LeftGazePoint2D;
            Point2D rightGaze = e.GazeDataItem.RightGazePoint2D;

            // An X of -1 flags a missing eye; fall back to the other eye.
            if (leftGaze.X == -1)
                leftGaze = rightGaze;
            if (rightGaze.X == -1)
                rightGaze = leftGaze;

            // Average the two normalized [0,1] gaze points.
            double normX = (leftGaze.X + rightGaze.X) / 2;
            double normY = (leftGaze.Y + rightGaze.Y) / 2;

            // Normalized -> primary screen pixels -> window client coordinates.
            Point screenPoint = new Point(normX * SystemParameters.PrimaryScreenWidth,
                                          normY * SystemParameters.PrimaryScreenHeight);
            Point clientPoint = window.PointFromScreen(screenPoint);

            Point smoothedPoint = smoother.Smooth(clientPoint);
            GazePoint = new Vector2((float)smoothedPoint.X, (float)smoothedPoint.Y);

            OnGazeDataReceived(this, new GazeEventArgs(GazePoint, e.GazeDataItem.LeftValidity, e.GazeDataItem.RightValidity));
        }
Exemplo n.º 39
0
    /// <summary>
    /// OnGazeData event handler for the connected tracker. Fires whenever
    /// new gaze data arrives, forwards the sample to the track status
    /// controls, converts it into OGAMA's <see cref="GazeData"/> format
    /// according to the Tobii validity codes and raises the
    /// <see cref="Tracker.OnGazeDataChanged"/> event to the recorder.
    /// </summary>
    /// <param name="sender">Source of the event.</param>
    /// <param name="e">The <see cref="GazeDataEventArgs"/> with the new gaze data
    /// from the device.</param>
    private void ConnectedTrackerGazeDataReceived(object sender, GazeDataEventArgs e)
    {
      // Forward the raw sample to the track status control(s).
      GazeDataItem gd = e.GazeDataItem;
      this.tobiiTrackStatus.OnGazeData(gd);
      if (this.dlgTrackStatus != null && this.dlgTrackStatus.Visible)
      {
        this.dlgTrackStatus.Update(gd);
      }

      // Tobii timestamps are in microseconds; OGAMA uses milliseconds.
      var newGazeData = new GazeData { Time = gd.TimeStamp / 1000 };

      // Tobii per-eye validity codes range 0..4 (0 = certain, 4 = missing
      // or definitely the other eye). Only these combinations can occur:
      // 0-0 (both eyes), 0-4 / 4-0 and 1-3 / 3-1 (one eye, side known or
      // assumed), 2-2 (one eye, side unknown), 4-4 (no eye found).
      // Tobii recommends filtering out codes >= 2.
      int left = gd.LeftValidity;
      int right = gd.RightValidity;

      if (left == 0 && right == 0)
      {
        // Both eyes valid: average gaze position, keep both pupils.
        newGazeData.GazePosX = (float)((gd.LeftGazePoint2D.X + gd.RightGazePoint2D.X) / 2);
        newGazeData.GazePosY = (float)((gd.LeftGazePoint2D.Y + gd.RightGazePoint2D.Y) / 2);
        newGazeData.PupilDiaX = gd.LeftPupilDiameter;
        newGazeData.PupilDiaY = gd.RightPupilDiameter;
      }
      else if ((left == 4 && right == 4) || (left == 2 && right == 2))
      {
        // No trustworthy eye: report zeros.
        newGazeData.GazePosX = 0;
        newGazeData.GazePosY = 0;
        newGazeData.PupilDiaX = 0;
        newGazeData.PupilDiaY = 0;
      }
      else if ((left == 1 && right == 3) || (left == 0 && right == 4))
      {
        // Only the left eye is trustworthy.
        newGazeData.GazePosX = (float)gd.LeftGazePoint2D.X;
        newGazeData.GazePosY = (float)gd.LeftGazePoint2D.Y;
        newGazeData.PupilDiaX = gd.LeftPupilDiameter;
        newGazeData.PupilDiaY = null;
      }
      else if ((left == 3 && right == 1) || (left == 4 && right == 0))
      {
        // Only the right eye is trustworthy.
        newGazeData.GazePosX = (float)gd.RightGazePoint2D.X;
        newGazeData.GazePosY = (float)gd.RightGazePoint2D.Y;
        newGazeData.PupilDiaX = null;
        newGazeData.PupilDiaY = gd.RightPupilDiameter;
      }
      else
      {
        // Unexpected combination: mark the whole sample as invalid.
        newGazeData.GazePosX = null;
        newGazeData.GazePosY = null;
        newGazeData.PupilDiaX = null;
        newGazeData.PupilDiaY = null;
      }

      this.OnGazeDataChanged(new GazeDataChangedEventArgs(newGazeData));
    }
Exemplo n.º 40
0
        /// <summary>
        /// Logs any trigger-signal extension value, forwards the gaze sample
        /// to the track status control and, once the clocks are synchronized,
        /// converts the remote timestamp to local time.
        /// </summary>
        /// <param name="sender">Source of the event.</param>
        /// <param name="e">Contains the gaze data item.</param>
        private void _connectedTracker_GazeDataReceived(object sender, GazeDataEventArgs e)
        {
            var gd = e.GazeDataItem;

            int trigSignal;
            if (gd.TryGetExtensionValue(IntegerExtensionValue.TrigSignal, out trigSignal))
            {
                Console.WriteLine(string.Format("Trig signal: {0}", trigSignal));
            }

            // Send the gaze data to the track status control.
            _trackStatus.OnGazeData(gd.LeftGazePoint2D, gd.RightGazePoint2D);

            if (_syncManager.CurrentSyncState.Status != SyncStatus.Synchronized)
            {
                Console.WriteLine("Warning. Sync state is " + _syncManager.CurrentSyncState.Status);
                return;
            }

            Int64 convertedTime = _syncManager.RemoteToLocal(gd.Timestamp);
            Int64 localTime = _clock.Time;
        }
Exemplo n.º 41
0
        /// <summary>
        /// Forwards each gaze sample to the track status control and, once
        /// CPU and eyetracker clocks are synchronized, converts the remote
        /// timestamp to local time.
        /// </summary>
        /// <param name="sender">Source of the event.</param>
        /// <param name="e">Contains the gaze data item.</param>
        private void _connectedTracker_GazeDataReceived(object sender, GazeDataEventArgs e)
        {
            GazeDataItem gd = e.GazeDataItem;

            // Send the gaze data to the track status control.
            _trackStatus.OnGazeData(gd);

            if (_syncManager.SyncState.StateFlag != SyncStateFlag.Synchronized)
            {
                Console.WriteLine("Warning. Sync state is " + _syncManager.SyncState.StateFlag);
                return;
            }

            Int64 convertedTime = _syncManager.RemoteToLocal(gd.TimeStamp);
            Int64 localTime = _clock.GetTime();
        }
Exemplo n.º 42
0
 /// <summary>
 /// Method to implement that handles the received eyetracker gaze data.
 /// The sealed base handler invokes this only once CPU and eyetracker
 /// clocks are synchronized.
 /// </summary>
 /// <param name="sender">Source of the event.</param>
 /// <param name="e">Contains the gaze data item</param>
 protected abstract void GazeDataReceivedSynchronized(object sender, GazeDataEventArgs e);
Exemplo n.º 43
0
        /// <summary>
        /// Updates the 3D eye-pair visualization: converts eye positions and
        /// gaze points to centimeters, decides per eye whether it is valid
        /// enough to show, and pushes the visual update onto the UI thread.
        /// </summary>
        /// <param name="sender">Source of the event.</param>
        /// <param name="e">Contains the gaze data item.</param>
        private void _tracker_GazeDataReceived(object sender, GazeDataEventArgs e)
        {
            // The SDK reports millimeters; the scene uses centimeters.
            const double MmPerCm = 10.0;

            var gd = e.GazeDataItem;

            _leftPos.X = gd.LeftEyePosition3D.X / MmPerCm;
            _leftPos.Y = gd.LeftEyePosition3D.Y / MmPerCm;
            _leftPos.Z = gd.LeftEyePosition3D.Z / MmPerCm;

            _rightPos.X = gd.RightEyePosition3D.X / MmPerCm;
            _rightPos.Y = gd.RightEyePosition3D.Y / MmPerCm;
            _rightPos.Z = gd.RightEyePosition3D.Z / MmPerCm;

            _leftGaze.X = gd.LeftGazePoint3D.X / MmPerCm;
            _leftGaze.Y = gd.LeftGazePoint3D.Y / MmPerCm;
            _leftGaze.Z = gd.LeftGazePoint3D.Z / MmPerCm;

            _rightGaze.X = gd.RightGazePoint3D.X / MmPerCm;
            _rightGaze.Y = gd.RightGazePoint3D.Y / MmPerCm;
            _rightGaze.Z = gd.RightGazePoint3D.Z / MmPerCm;

            // Show an eye only when its validity code is below 2.
            _showLeft = gd.LeftValidity < 2;
            _showRight = gd.RightValidity < 2;

            Action update = () =>
            {
                // Hidden eyes are parked far outside the visible scene.
                Point3D farAway = new Point3D(1000.0, 1000.0, 1000.0);

                if (_showLeft)
                {
                    _eyePair.LeftEyePosition = _leftPos;
                    _leftGazeVector.Point1 = _leftPos;
                    _leftGazeVector.Point2 = _leftGaze;
                }
                else
                {
                    _eyePair.LeftEyePosition = farAway;
                    _leftGazeVector.Point1 = farAway;
                    _leftGazeVector.Point2 = farAway;
                }

                if (_showRight)
                {
                    _eyePair.RightEyePosition = _rightPos;
                    _rightGazeVector.Point1 = _rightPos;
                    _rightGazeVector.Point2 = _rightGaze;
                }
                else
                {
                    _eyePair.RightEyePosition = farAway;
                    _rightGazeVector.Point1 = farAway;
                    _rightGazeVector.Point2 = farAway;
                }
            };

            Dispatcher.BeginInvoke(update);
        }
Exemplo n.º 44
0
        /// <summary>
        /// Maps the latest gaze sample to window coordinates, moves the
        /// cursor when the gaze has shifted, and scrolls when the gaze is
        /// near the top or bottom band of the window.
        /// </summary>
        /// <param name="sender">Source of the event.</param>
        /// <param name="e">Contains the gaze data item.</param>
        private void _tracker_GazeDataReceived(object sender, GazeDataEventArgs e)
        {
            var item = e.GazeDataItem;

            // Scale normalized [0,1] gaze coordinates to screen pixels.
            _leftGaze.X = item.LeftGazePoint2D.X * 1920;
            _leftGaze.Y = item.LeftGazePoint2D.Y * 1200;
            _rightGaze.X = item.RightGazePoint2D.X * 1920;
            _rightGaze.Y = item.RightGazePoint2D.Y * 1200;

            // Bail out when no valid gaze point can be derived.
            if (!SetCurrentPoint(ref _current, _leftGaze, _rightGaze))
            {
                return;
            }

            _current = PointFromScreen(_current);

            if (GazeHaveMoved(_current))
            {
                MoveCursor();
            }

            // Scroll while looking at the top or bottom 28% of the window.
            bool nearEdge = _current.Y > Height * 0.72 || _current.Y < Height * 0.28;
            if (nearEdge)
            {
                ScrollAction(ScrollSpeed());
            }

            _previous = _current;
        }
Exemplo n.º 45
0
        /// <summary>
        /// Routes the gaze sample to the interaction handler matching the
        /// current window type (map, swipe or scroll).
        /// </summary>
        /// <param name="sender">Source of the event.</param>
        /// <param name="e">Contains the gaze data item.</param>
        private void _tracker_GazeDataReceived(object sender, GazeDataEventArgs e)
        {
            var item = e.GazeDataItem;

            // Scale normalized [0,1] gaze coordinates to screen pixels.
            _leftGaze.X = item.LeftGazePoint2D.X * 1920;
            _leftGaze.Y = item.LeftGazePoint2D.Y * 1200;
            _rightGaze.X = item.RightGazePoint2D.X * 1920;
            _rightGaze.Y = item.RightGazePoint2D.Y * 1200;

            // Bail out when no valid gaze point can be derived.
            if (!GazeHelper.SetCurrentPoint(ref _current, _leftGaze, _rightGaze))
            {
                return;
            }

            _current = PointFromScreen(_current);

            // Head distance in centimeters (the SDK reports millimeters).
            _headPos.Z = item.LeftEyePosition3D.Z / 10;

            switch (windowType)
            {
                case WindowType.Map:
                    // Zoom by how far the head moved from its initial distance.
                    MapControll.MapInteraction(_zoomActionButtonDown, _current, _headPos.Z - _initialHeadPos.Z);
                    break;
                case WindowType.Swipe:
                    SwipeControl.MapInteraction(_current);
                    break;
                case WindowType.Scroll:
                    ScrollControl.MapInteraction(_current);
                    break;
            }
        }
Exemplo n.º 46
0
    /// <summary>
    /// Stores the latest 2D gaze points and pupil diameters for both eyes
    /// from the received gaze sample.
    /// </summary>
    /// <param name="sender">Source of the event.</param>
    /// <param name="e">Contains the gaze data item.</param>
    void tracker_GazeDataReceived(object sender, GazeDataEventArgs e)
    {
        var item = e.GazeDataItem;

        leftGaze_X = item.LeftGazePoint2D.X;
        leftGaze_Y = item.LeftGazePoint2D.Y;

        rightGaze_X = item.RightGazePoint2D.X;
        rightGaze_Y = item.RightGazePoint2D.Y;

        left_pupil_diameter = item.LeftPupilDiameter;
        right_pupil_diameter = item.RightPupilDiameter;
    }
Exemplo n.º 47
0
 /// <summary>
 /// Handles a gaze data received from eyetracker event.
 /// </summary>
 /// <param name="sender">Source of the event.</param>
 /// <param name="e">Contains the GazeDataItem</param>
 public abstract void GazeDataReceived(object sender, GazeDataEventArgs e);
Exemplo n.º 48
0
        /// <summary>
        /// Forwards 2D gaze points to the fixation detector. The base class
        /// only calls this once CPU and eyetracker clocks are synchronized.
        /// </summary>
        /// <param name="sender">Source of the event.</param>
        /// <param name="e">Contains the gaze data item.</param>
        protected override void GazeDataReceivedSynchronized(object sender, GazeDataEventArgs e)
        {
            // Convert the remote (tracker) timestamp to local time.
            int time = (int)syncManager.RemoteToLocal(e.GazeDataItem.TimeStamp);

            // TODO Determine which of LeftGazePoint2D, LeftGazePoint3D,
            // RightGazePoint2D, RightGazePoint3D or a combination thereof
            // matches Tobii Studio's coordinates.
            int x = (int)e.GazeDataItem.LeftGazePoint3D.X;
            int y = (int)e.GazeDataItem.LeftGazePoint3D.Y;

            fixationDetector.addPoint(time, x, y);
        }
Exemplo n.º 49
0
        /// <summary>
        /// Converts gaze samples into points and forwards them to the
        /// registered eyetracker event handlers. Samples are dropped unless
        /// at least one eye has the best validity code.
        /// </summary>
        /// <param name="sender">Source of the event.</param>
        /// <param name="e">Contains the gaze data item.</param>
        private void et_GazeDataRecieved(object sender, GazeDataEventArgs e)
        {
            var item = e.GazeDataItem;

            bool leftOk = item.LeftValidity == BEST_VALIDITY;
            bool rightOk = item.RightValidity == BEST_VALIDITY;
            if (!leftOk && !rightOk)
            {
                return;
            }

            GetPoint point = Henshin(item);
            point.Timestamp = item.Timestamp;

            var handlers = EtEventHandlers;
            if (handlers != null)
            {
                handlers(point);
            }
        }
Exemplo n.º 50
0
        /// <summary>
        /// Tracks gaze and head position: triggers an interrupt when the
        /// gaze leaves the central region, and zooms while the action button
        /// is held and the head has moved from its initial distance.
        /// </summary>
        /// <param name="sender">Source of the event.</param>
        /// <param name="e">Contains the gaze data item.</param>
        private void _tracker_GazeDataReceived(object sender, GazeDataEventArgs e)
        {
            var item = e.GazeDataItem;

            // Scale normalized [0,1] gaze coordinates to window pixels.
            _leftGaze.X = item.LeftGazePoint2D.X * Width;
            _leftGaze.Y = item.LeftGazePoint2D.Y * Height;
            _rightGaze.X = item.RightGazePoint2D.X * Width;
            _rightGaze.Y = item.RightGazePoint2D.Y * Height;

            // Head distance in centimeters (the SDK reports millimeters).
            _headPos.Z = item.LeftEyePosition3D.Z / 10;

            // Negative coordinates/distance mean the sample is invalid.
            if ((_leftGaze.X < 0 && _rightGaze.X < 0) || _headPos.Z < 0)
            {
                return;
            }
            if (!SetCurrentPoint(ref _current, _leftGaze, _rightGaze))
            {
                return;
            }

            bool outsideCenter = _current.X > 1400 || _current.X < 500 || _current.Y > 700 || _current.Y < 500;
            if (outsideCenter)
            {
                EyeMoveDuringAction();
            }

            if (HeadHaveMoved(_initialHeadPos.Z) && actionButtonDown)
            {
                zoom_event(_headPos.Z - _initialHeadPos.Z);
            }
        }
Exemplo n.º 51
0
 /// <summary>
 /// Appends the received gaze data item to the collected points while
 /// data collection is active. Called on an SDK thread, hence the lock.
 /// </summary>
 /// <param name="sender">Source of the event.</param>
 /// <param name="e">Contains the gaze data item.</param>
 // NOTE(review): locking on 'this' is an anti-pattern - any external code
 // holding a reference to this object can lock it and deadlock us. Prefer
 // a private readonly lock object; left unchanged here because other
 // members of the class may also lock(this) - confirm before changing.
 protected override void GazeDataReceivedSynchronized(object sender, GazeDataEventArgs e)
 {
     lock (this)
     {
         if (collectingData)
         {
             gazePoints.AddLast(e.GazeDataItem);
         }
     }
 }
Exemplo n.º 52
0
 /// <summary>
 /// Marshals the received gaze data onto the UI thread and displays it.
 /// </summary>
 /// <param name="sender">Source of the event.</param>
 /// <param name="e">Contains the gaze data to display.</param>
 internal void EyeTrackerGazeData(object sender, GazeDataEventArgs e)
 {
     Action showGaze = () => DisplayGaze(e.GazeData);
     Dispatcher.Invoke(System.Windows.Threading.DispatcherPriority.Normal, showGaze);
 }
Exemplo n.º 53
0
        /// <summary>
        /// Maps the gaze sample to window pixels, averages or picks the
        /// valid eye, optionally persists the sample and repositions the
        /// rendered point when the gaze has moved.
        /// </summary>
        /// <param name="sender">Source of the event.</param>
        /// <param name="e">Contains the gaze data item.</param>
        private void _tracker_GazeDataReceived(object sender, GazeDataEventArgs e)
        {
            // Scale normalized [0,1] gaze coordinates to window pixels.
            var gd = e.GazeDataItem;

            _leftGaze.X = gd.LeftGazePoint2D.X*Width;
            _leftGaze.Y = gd.LeftGazePoint2D.Y*Height;

            _rightGaze.X = gd.RightGazePoint2D.X*Width;
            _rightGaze.Y = gd.RightGazePoint2D.Y*Height;

            // Both eyes negative: invalid sample, ignore.
            if (_leftGaze.X < 0 && _rightGaze.X < 0) return;
            // NOTE(review): when a coordinate is exactly 0 none of the
            // branches below fire and _current keeps its previous value -
            // confirm whether that is intended.
            if (_leftGaze.X > 0 && _rightGaze.X > 0)
            {
                _current = new Point2D((_leftGaze.X + _rightGaze.X) / 2, (_leftGaze.Y + _rightGaze.Y) / 2);
            }
            else if (_rightGaze.X > 0)
            {
                _current = new Point2D(_rightGaze.X, _rightGaze.Y);
            }
            else if (_leftGaze.X > 0)
            {
                _current = new Point2D(_leftGaze.X, _leftGaze.Y);
            }
            // Persist samples unless running in developer mode; Z is
            // converted from millimeters to centimeters.
            if (!StaticValues.developerMode)
            {
                SaveData(_current, gd.RightEyePosition3D.Z/10);
            }
            if (GazeHaveMoved(_current))
            {
                Point = new Point(_current.X, _current.Y);
                _previous = _current;
            }
            InvalidateVisual();
        }
Exemplo n.º 54
0
        /// <summary>
        /// Forwards 2D gaze points to the fixation detector. The base class
        /// only calls this once CPU and eyetracker clocks are synchronized.
        /// Samples are used only when at least one eye has validity &lt; 2;
        /// with both eyes valid the coordinates are averaged, otherwise the
        /// single valid eye's gaze point is used.
        /// </summary>
        /// <param name="sender">Source of the event.</param>
        /// <param name="e">GazeDataItem to process</param>
        protected override void GazeDataReceivedSynchronized(object sender, GazeDataEventArgs e)
        {
            bool leftOk = e.GazeDataItem.LeftValidity < 2;
            bool rightOk = e.GazeDataItem.RightValidity < 2;

            // Ignore samples where neither eye is valid.
            if (!leftOk && !rightOk)
            {
                return;
            }

            // Convert the microsecond timestamp to rounded, offset-corrected
            // local milliseconds.
            long micros = e.GazeDataItem.TimeStamp;
            int time = (int)(micros / 1000);
            time -= getTimestampOffset(time);
            if (((micros / 100) % 10) >= 5)
            {
                time++; // round the sub-millisecond remainder up
            }

            // Normalized [0,1] coordinates -> pixels; (0,0) is top-left.
            double leftX = e.GazeDataItem.LeftGazePoint2D.X * SCREEN_WIDTH;
            double leftY = e.GazeDataItem.LeftGazePoint2D.Y * SCREEN_HEIGHT;
            double rightX = e.GazeDataItem.RightGazePoint2D.X * SCREEN_WIDTH;
            double rightY = e.GazeDataItem.RightGazePoint2D.Y * SCREEN_HEIGHT;

            if (leftOk && rightOk)
            {
                // Average left and right eyes.
                fixationDetector.addPoint(time, (int)((leftX + rightX) / 2), (int)((leftY + rightY) / 2));
            }
            else if (leftOk)
            {
                // Use only the left eye.
                fixationDetector.addPoint(time, (int)leftX, (int)leftY);
            }
            else
            {
                // Use only the right eye.
                fixationDetector.addPoint(time, (int)rightX, (int)rightY);
            }
        }
Exemplo n.º 55
0
        /// <summary>
        /// Moves the crosshair on the canvas to the averaged gaze point once
        /// CPU and eyetracker clocks are synchronized; otherwise shows a
        /// sync warning in the status text.
        /// </summary>
        /// <param name="sender">Source of the event.</param>
        /// <param name="e">Contains the gaze data item.</param>
        private void tracker_GazeDataReceived(object sender, GazeDataEventArgs e)
        {
            // Send the gaze data to the track status control.
            GazeDataItem gd = e.GazeDataItem;
            Point2D leftGaze= e.GazeDataItem.LeftGazePoint2D;
            Point2D rightGaze = e.GazeDataItem.RightGazePoint2D;

            // Average both eyes (normalized [0,1] coordinates).
            // NOTE(review): no validity check here; confirm that invalid-eye
            // samples cannot skew the average.
            Point gazePoint = new Point((leftGaze.X + rightGaze.X) / 2, (leftGaze.Y + rightGaze.Y) / 2);
            // 'delta' feeds the commented-out movement filter below.
            Vector delta = Point.Subtract(gazePoint, previousGazePoint);

            //gazePoint = new Point((gazePoint.X + previousGazePoint.X) / 2, (gazePoint.Y + previousGazePoint.Y / 2));

            previousGazePoint = gazePoint;

            //if (delta.LengthSquared < 0.001)
            //    return;

            // Normalized coordinates -> primary screen pixels.
            Point screenPoint = new Point(gazePoint.X * SystemParameters.PrimaryScreenWidth, gazePoint.Y * SystemParameters.PrimaryScreenHeight);

            if (syncManager.SyncState.StateFlag == SyncStateFlag.Synchronized)
            {
                Int64 convertedTime = syncManager.RemoteToLocal(gd.TimeStamp);
                Int64 localTime = clock.GetTime();
                // Center the crosshair on the gaze point, in canvas coordinates.
                Point clientPoint = canvas.PointFromScreen(new Point(screenPoint.X - crosshair.Width/2, screenPoint.Y - crosshair.Height/2)) ;
                Canvas.SetLeft(crosshair, clientPoint.X);
                Canvas.SetTop(crosshair, clientPoint.Y);
            }
            else
            {
                StatusText.Text = string.Format("Warning. Sync state is {0}", syncManager.SyncState.StateFlag);
            }
        }