Example #1
 public void ApplyGesturePolicy(GazeData data, List <GazeData> eventsToBroadcast)
 {
     if (_LastGazeData.GazeTarget == null && data.GazeTarget == null)
     {
         PolicyDoesNothingWithSameTarget();
     }
     else if (_LastGazeData.GazeTarget == null && data.GazeTarget != null)
     {
         var enterData = NewTarget(data, eventsToBroadcast);
         ChangeFocusTo(enterData);
     }
     else if (_LastGazeData.GazeTarget != null && data.GazeTarget == null)
     {
         var emptyData = AbandonedTarget(data, eventsToBroadcast);
         ChangeFocusTo(emptyData);
     }
     else if (data.GazeTarget != _LastGazeData.GazeTarget)
     {
         var enterData = DifferentTarget(data, eventsToBroadcast);
         ChangeFocusTo(enterData);
     }
     else if (data.GazeTarget == _LastGazeData.GazeTarget)
     {
         PolicyDoesNothingWithSameTarget();
     }
 }
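The helpers this policy calls are split across this listing: AbandonedTarget is Example #2 and NewTarget is Example #28. The remaining members are not shown anywhere, so the following is only a minimal sketch of the shape the snippet appears to assume:

 // Assumed members, inferred from how Example #1 uses them; not taken from the original project.
 private GazeData _LastGazeData;

 private void ChangeFocusTo(GazeData data)
 {
     // Remember the sample we switched focus to so the next call can diff against it.
     _LastGazeData = data;
 }

 private void PolicyDoesNothingWithSameTarget()
 {
     // Gaze stayed on the same (possibly null) target, so nothing is broadcast.
 }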
Example #2
        private GazeData AbandonedTarget(GazeData data, List <GazeData> eventsToBroadcast)
        {
            var info = CreateExitData(_LastGazeData);

            eventsToBroadcast.Add(info);
            return(default(GazeData));
        }
Example #3
 private void DisplayGaze(GazeData data)
 {
     if (this.IsLoaded)
     {
         if (lastDrawedLeft != null)
             canvas.Children.Remove(lastDrawedLeft);
         if (lastDrawedRight != null)
             canvas.Children.Remove(lastDrawedRight);
         switch (data.TrackingStatus)
         {
             case TrackingStatus.BothEyesTracked:
                 DisplayEyeLeft(data.Left);
                 DisplayEyeRight(data.Right);
                 break;
             case TrackingStatus.OnlyLeftEyeTracked:
             case TrackingStatus.OneEyeTrackedProbablyLeft:
                 DisplayEyeLeft(data.Left);
                 break;
             case TrackingStatus.OnlyRightEyeTracked:
             case TrackingStatus.OneEyeTrackedProbablyRight:
                 DisplayEyeRight(data.Right);
                 break;
         }
     }
 }
Example #4
        public void OnGazeUpdate(GazeData gazeData)
        {
            bool Smooth = true;

            var x = this.Bounds.X;
            var y = this.Bounds.Y;

            var gX = Smooth ? gazeData.SmoothedCoordinates.X : gazeData.RawCoordinates.X;
            var gY = Smooth ? gazeData.SmoothedCoordinates.Y : gazeData.RawCoordinates.Y;

            screenX = (int)Math.Round(x + gX, 0);
            screenY = (int)Math.Round(y + gY, 0);

            m_RawX = gazeData.RawCoordinates.X;
            m_RawY = gazeData.RawCoordinates.Y;

            m_SmoothedX = gazeData.SmoothedCoordinates.X;
            m_SmoothedY = gazeData.SmoothedCoordinates.Y;

            // return in case of 0,0
            if (screenX == 0 && screenY == 0)
            {
                return;
            }

            // Write the Updated gaze Data
            writeGazeData();

            Win32.SetCursorPos((int)m_SmoothedX, (int)m_SmoothedY);
        }
Example #5
    public void OnGazeUpdate(GazeData gazeData)
    {
        if (CooldownTimer > 0f)
        {
            return;
        }

        if ((gazeData.State & (GazeData.STATE_TRACKING_FAIL | GazeData.STATE_TRACKING_LOST)) > 0)
        {
            Blinked = true;
        }

        Point2D point = UnityGazeUtils.getGazeCoordsToUnityWindowCoords(gazeData.SmoothedCoordinates);

        if ((gazeData.State & GazeData.STATE_TRACKING_GAZE) > 0)
        {
            if (Blinked)
            {
                CooldownTimer = BlinkCooldown;
                Blinked       = false;
            }
            else
            {
                gazeX = (float)point.X;
                gazeY = (float)point.Y;
            }
        }
    }
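For the cooldown in Example #5 to expire, something must count CooldownTimer down each frame; that code is not part of the snippet, but in a Unity MonoBehaviour it would plausibly look like this (assumed, not from the original script):

    void Update()
    {
        // Assumed companion to the snippet above: tick the blink cooldown down once per frame.
        if (CooldownTimer > 0f)
        {
            CooldownTimer -= Time.deltaTime;
        }
    }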
Example #6
        /// <summary>
        /// Extracts the tracker data.
        /// </summary>
        /// <param name="data">The data.</param>
        /// <returns></returns>
        public GazeData ExtractTrackerData(EyeTrackingController.SampleStruct data)
        {
            GazeData result = new GazeData();

            float gazePosXLeft  = Convert.ToSingle(data.leftEye.gazeX);
            float gazePosXRight = Convert.ToSingle(data.rightEye.gazeX);
            float gazePosYLeft  = Convert.ToSingle(data.leftEye.gazeY);
            float gazePosYRight = Convert.ToSingle(data.rightEye.gazeY);

            result.GazePosX = (gazePosXLeft + gazePosXRight) / 2;
            result.GazePosY = (gazePosYLeft + gazePosYRight) / 2;

            if (result.GazePosX > 1000 * 10)
            {
                result.GazePosX = result.GazePosX / 1000;
            }
            if (result.GazePosY > 1000 * 10)
            {
                result.GazePosY = result.GazePosY / 1000;
            }

            long MICROSECONDS = 1000;

            result.Time   = (data.timestamp / MICROSECONDS);
            this.lastTime = result.Time;

            result.PupilDiaX = Convert.ToSingle(data.leftEye.diam);
            result.PupilDiaY = Convert.ToSingle(data.rightEye.diam);

            return(result);
        }
Example #7
        private bool DetermineBlink(GazeData gazeData, int historyLenght, int minDuration)
        {
            // Add sample to log (lazy init and pop.)
            if (sampleLog == null)
            {
                sampleLog = new List <GazeData>(LOG_LENGTH);

                GazeData dummy = new GazeData();
                dummy.LeftEye.PupilSize  = 10;
                dummy.RightEye.PupilSize = 10;

                for (int i = 0; i < LOG_LENGTH; i++)
                {
                    sampleLog.Add(dummy);
                }
            }

            sampleLog.Add(gazeData);
            sampleLog.RemoveAt(0);

            bool leftBlinkFound  = FindBlinkEvent(historyLenght, true, minDuration);
            bool rightBlinkFound = FindBlinkEvent(historyLenght, false, minDuration);

            if (leftBlinkFound && rightBlinkFound)
            {
                return(true);
            }
            else
            {
                return(false);
            }
        }
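Example #8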
        private void DisplayGaze(GazeData data)
        {
            if (this.IsLoaded)
            {
                if (lastDrawedLeft != null)
                {
                    canvas.Children.Remove(lastDrawedLeft);
                }
                if (lastDrawedRight != null)
                {
                    canvas.Children.Remove(lastDrawedRight);
                }
                switch (data.TrackingStatus)
                {
                case TrackingStatus.BothEyesTracked:
                    DisplayEyeLeft(data.Left);
                    DisplayEyeRight(data.Right);
                    break;

                case TrackingStatus.OnlyLeftEyeTracked:
                case TrackingStatus.OneEyeTrackedProbablyLeft:
                    DisplayEyeLeft(data.Left);
                    break;

                case TrackingStatus.OnlyRightEyeTracked:
                case TrackingStatus.OneEyeTrackedProbablyRight:
                    DisplayEyeRight(data.Right);
                    break;
                }
            }
        }
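Example #9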
    public void Upload(GazeData data)
    {
        WWWForm form = new WWWForm();

        form.AddField("PlayerID", data.playerID);
        form.AddField("ObjectID", data.objectID);
        form.AddField("Time", data.time);
        form.AddField("TimeEnter", data.timeEnter);
        form.AddField("TimeExit", data.timeExit);
        form.AddField("UserPosition", data.userPosition);
        form.AddField("BoundaryPoint", data.userDirection);
        form.AddField("ObjectPoint", data.objectPoint);
        form.AddField("ObjectDistance", data.objectDistance);

        UnityWebRequest www = UnityWebRequest.Post("http://localhost:8084/VisualAnalytics/VisualAnalyticsServlet", form);

        www.Send();

        if (www.isError)
        {
            Debug.Log(www.error);
        }
        else
        {
            Debug.Log("Form upload complete!");
        }
    }
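The Upload example uses the WWW-era API (www.Send(), www.isError), which newer Unity versions mark obsolete. A hedged alternative sketch using the coroutine-based API; it assumes using System.Collections and UnityEngine.Networking, UnityWebRequest.Result requires Unity 2020.2 or later, and UploadCoroutine is a made-up name:

    // Sketch of the same upload for newer Unity versions; start it with StartCoroutine(UploadCoroutine(data)).
    private IEnumerator UploadCoroutine(GazeData data)
    {
        WWWForm form = new WWWForm();
        form.AddField("PlayerID", data.playerID);
        // ... remaining fields exactly as in the example above ...

        using (UnityWebRequest www = UnityWebRequest.Post("http://localhost:8084/VisualAnalytics/VisualAnalyticsServlet", form))
        {
            // Send the request and wait for it to finish.
            yield return www.SendWebRequest();

            if (www.result != UnityWebRequest.Result.Success)
            {
                Debug.Log(www.error);
            }
            else
            {
                Debug.Log("Form upload complete!");
            }
        }
    }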
Example #10
        public void OnGazeUpdate(GazeData gazeData)
        {
            //Get Gaze data
            double gX = gazeData.SmoothedCoordinates.X;
            double gY = gazeData.SmoothedCoordinates.Y;

            //Smoothen data for more meaningful results.
            double eX = roundNumber(gX, GAZE_ROUND) - OFF_X;
            double eY = roundNumber(gY, GAZE_ROUND) - OFF_Y;

            //prevent negative values
            if (eX < 0)
            {
                eX = 0;
            }
            if (eY < 0)
            {
                eY = 0;
            }

            eyePoint.X = eX;
            eyePoint.Y = eY;

            eyeOnLine = (int)(Math.Round(eY / view.LineHeight) + 1);
        }
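roundNumber, GAZE_ROUND, OFF_X/OFF_Y and view are members of the surrounding class and are not shown; judging by how it is used, roundNumber presumably snaps a coordinate to the nearest multiple of a step, roughly like this (an assumption, not the original helper):

        // Assumed shape of the helper used above: snap value to the nearest multiple of step.
        private static double roundNumber(double value, int step)
        {
            return Math.Round(value / (double)step) * step;
        }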
Example #11
        public void OnGazeUpdate(GazeData gazeData)
        {
            if (Dispatcher.CheckAccess() == false)
            {
                Dispatcher.BeginInvoke(new Action(() => OnGazeUpdate(gazeData)));
                return;
            }

            // Start or stop tracking lost animation
            if ((gazeData.State & GazeData.STATE_TRACKING_GAZE) == 0 &&
                (gazeData.State & GazeData.STATE_TRACKING_PRESENCE) == 0)
            {
                return;
            }
            //Tracking coordinates
            var d = Utility.Instance.ScaleDpi;
            var x = Utility.Instance.RecordingPosition.X;
            var y = Utility.Instance.RecordingPosition.Y;

            //var gX = gazeData.RawCoordinates.X;
            //var gY = gazeData.RawCoordinates.Y;

            var gX = gazeData.SmoothedCoordinates.X;
            var gY = gazeData.SmoothedCoordinates.Y;

            Left = d * x + d * gX - Width / 2;
            Top  = d * y + d * gY - Height / 2;
        }
Example #12
        /// <summary>
        /// Is called when new gaze data is available, updates track status and raises OnGazeDataChanged
        /// </summary>
        /// <param name="sender">The Sender.</param>
        /// <param name="e">Gaze data.</param>
        private void SmartEyeGazeDataAvailable(object sender, GazeDataReceivedEventArgs e)
        {
            // Send the gaze data to the track status control.
            SmartEyeGazeData gd = e.Gazedata;

            this.smartEyeTrackStatus.OnGazeData(gd);
            if (this.dlgTrackStatus != null && this.dlgTrackStatus.Visible)
            {
                this.dlgTrackStatus.Update(gd);
            }

            GazeData newGD = new GazeData();

            newGD.Time = gd.Time;

            if (gd.HeadQuality >= 1 && gd.GazeQuality > this.smartEyeSettings.QualityThreshold) // cut off bad quality data
            {
                newGD.GazePosX  = gd.GazePosX;
                newGD.GazePosY  = gd.GazePosY;
                newGD.PupilDiaX = gd.PupilDiaX;
                newGD.PupilDiaY = gd.PupilDiaY;
            }
            else
            {
                newGD.GazePosX  = null;
                newGD.GazePosY  = null;
                newGD.PupilDiaX = null;
                newGD.PupilDiaY = null;
            }

            this.OnGazeDataChanged(new GazeDataChangedEventArgs(newGD));
        }
Example #13
 public void LogGazeDataAsCsv(GazeData gazeData, long normalizedTimestamp)
 {
     lock (_fileLock)
     {
         var s = String.Format("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{13},{14},{15},{16},{17},{18},{19},{20},{21},{22},{23},{24}",
                               gazeData.Timestamp.ToString(),
                               normalizedTimestamp.ToString(),
                               gazeData.TrackingStatus.ToString(),
                               gazeData.Left.EyePositionFromEyeTrackerMM.X.ToString(),
                               gazeData.Left.EyePositionFromEyeTrackerMM.Y.ToString(),
                               gazeData.Left.EyePositionFromEyeTrackerMM.Z.ToString(),
                               gazeData.Left.EyePositionInTrackBoxNormalized.X.ToString(),
                               gazeData.Left.EyePositionInTrackBoxNormalized.Y.ToString(),
                               gazeData.Left.EyePositionInTrackBoxNormalized.Z.ToString(),
                               gazeData.Left.GazePointFromEyeTrackerMM.X.ToString(),
                               gazeData.Left.GazePointFromEyeTrackerMM.Y.ToString(),
                               gazeData.Left.GazePointFromEyeTrackerMM.Z.ToString(),
                               gazeData.Left.GazePointOnDisplayNormalized.X.ToString(),
                               gazeData.Left.GazePointOnDisplayNormalized.Y.ToString(),
                               gazeData.Right.EyePositionFromEyeTrackerMM.X.ToString(),
                               gazeData.Right.EyePositionFromEyeTrackerMM.Y.ToString(),
                               gazeData.Right.EyePositionFromEyeTrackerMM.Z.ToString(),
                               gazeData.Right.EyePositionInTrackBoxNormalized.X.ToString(),
                               gazeData.Right.EyePositionInTrackBoxNormalized.Y.ToString(),
                               gazeData.Right.EyePositionInTrackBoxNormalized.Z.ToString(),
                               gazeData.Right.GazePointFromEyeTrackerMM.X.ToString(),
                               gazeData.Right.GazePointFromEyeTrackerMM.Y.ToString(),
                               gazeData.Right.GazePointFromEyeTrackerMM.Z.ToString(),
                               gazeData.Right.GazePointOnDisplayNormalized.X.ToString(),
                               gazeData.Right.GazePointOnDisplayNormalized.Y.ToString());
         _writer.WriteLine(s);
     }
 }
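The format string above emits 25 comma-separated values per sample; if the log file is created fresh, a header row with matching column names could be written once, for example (column names are descriptive, not taken from the original code):

     private void WriteCsvHeader()
     {
         lock (_fileLock)
         {
             // 25 columns, in the same order as the values logged above.
             _writer.WriteLine(
                 "Timestamp,NormalizedTimestamp,TrackingStatus," +
                 "LeftEyePosMmX,LeftEyePosMmY,LeftEyePosMmZ," +
                 "LeftEyeTrackBoxX,LeftEyeTrackBoxY,LeftEyeTrackBoxZ," +
                 "LeftGazePointMmX,LeftGazePointMmY,LeftGazePointMmZ," +
                 "LeftGazeOnDisplayX,LeftGazeOnDisplayY," +
                 "RightEyePosMmX,RightEyePosMmY,RightEyePosMmZ," +
                 "RightEyeTrackBoxX,RightEyeTrackBoxY,RightEyeTrackBoxZ," +
                 "RightGazePointMmX,RightGazePointMmY,RightGazePointMmZ," +
                 "RightGazeOnDisplayX,RightGazeOnDisplayY");
         }
     }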
Example #14
 public void StopTimer()
 {
     CursorTriggered = false;
     //set the GazeObject
     GazeData.setGazeById(pid, visited, ttff.ToString(), timespent, fixation, revisitor);
     timespent = 0;
 }
Example #15
        public void OnGazeUpdate(GazeData gazeData)
        {
            // start or stop tracking lost animation
            if ((gazeData.State & GazeData.STATE_TRACKING_GAZE) == 0 &&
                (gazeData.State & GazeData.STATE_TRACKING_PRESENCE) == 0)
            {
                return;
            }
            var x = (int)Math.Round(gazeData.SmoothedCoordinates.X, 0);
            var y = (int)Math.Round(gazeData.SmoothedCoordinates.Y, 0);

            //var gX = Smooth ? gazeData.SmoothedCoordinates.X : gazeData.RawCoordinates.X;
            //var gY = Smooth ? gazeData.SmoothedCoordinates.Y : gazeData.RawCoordinates.Y;
            //var screenX = (int)Math.Round(x + gX, 0);
            //var screenY = (int)Math.Round(y + gY, 0);
            // Debug.WriteLine("OnGazeUpdate       " + x + "    " + y);

            // return in case of 0,0
            if (x == 0 && y == 0)
            {
                return;
            }

            determine_Button(x, y);
        }
Example #16
        public GazeDataProccessed(GazeData gazeData)
        {
            if (gazeData.TrackingStatus == TrackingStatus.BothEyesTracked)
            {
                Vector3D gazeVectorRight = new Vector3D(gazeData.Right.EyePositionFromEyeTrackerMM, gazeData.Right.GazePointFromEyeTrackerMM);
                Vector3D gazeVectorLeft  = new Vector3D(gazeData.Left.EyePositionFromEyeTrackerMM, gazeData.Left.GazePointFromEyeTrackerMM);

                angleAtDistance = Vector3D.Angle(gazeVectorLeft, gazeVectorRight);
                angleAtDistance = 180 * angleAtDistance / Math.PI;

                Vector3D eyeAxis = new Vector3D(gazeData.Left.EyePositionFromEyeTrackerMM, gazeData.Right.EyePositionFromEyeTrackerMM);

                if (Vector3D.Angle(eyeAxis, gazeVectorRight) > Vector3D.Angle(eyeAxis, gazeVectorLeft))
                {
                    typeAtDistance = "eso";
                }
                else if (Vector3D.Angle(eyeAxis, gazeVectorRight) < Vector3D.Angle(eyeAxis, gazeVectorLeft))
                {
                    typeAtDistance = "exo";
                }

                Vector3D gazeVectorLeftIdeal = new Vector3D(gazeData.Left.EyePositionFromEyeTrackerMM, gazeData.Right.GazePointFromEyeTrackerMM);
                angleAtScreen = Vector3D.Angle(gazeVectorLeft, gazeVectorLeftIdeal);
                angleAtScreen = 180 * angleAtScreen / Math.PI;

                if (Vector3D.Angle(eyeAxis, gazeVectorLeftIdeal) > Vector3D.Angle(eyeAxis, gazeVectorLeft))
                {
                    typeAtScreen = "eso";
                }
                else if (Vector3D.Angle(eyeAxis, gazeVectorLeftIdeal) < Vector3D.Angle(eyeAxis, gazeVectorLeft))
                {
                    typeAtScreen = "exo";
                }
            }
        }
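Vector3D here is a project type rather than System.Windows.Media.Media3D.Vector3D: the snippet assumes a constructor taking two points (from, to) and a static Angle that returns radians. A minimal sketch under those assumptions, where Point3D stands in for whatever 3D point type GazeData exposes:

        // Minimal sketch of the Vector3D shape the example assumes; not the original implementation.
        public class Vector3D
        {
            public double X, Y, Z;

            public Vector3D(Point3D from, Point3D to)
            {
                X = to.X - from.X;
                Y = to.Y - from.Y;
                Z = to.Z - from.Z;
            }

            // Angle between two vectors, in radians (the example converts to degrees itself).
            public static double Angle(Vector3D a, Vector3D b)
            {
                double dot  = a.X * b.X + a.Y * b.Y + a.Z * b.Z;
                double lenA = Math.Sqrt(a.X * a.X + a.Y * a.Y + a.Z * a.Z);
                double lenB = Math.Sqrt(b.X * b.X + b.Y * b.Y + b.Z * b.Z);
                return Math.Acos(dot / (lenA * lenB));
            }
        }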
Example #17
        static void Main(string[] args)
        {
            //load a file
            //string namefile = @"C:\Users\Olivier\Documents\GitHub\eyex\RecordingEyeGaze\RecordingEyeGaze\bin\Debug\readingNature_screenshots\readingNaturep1-913words";
            ////string namefile = @"C:\Users\Olivier\Documents\GitHub\eyex\RecordingEyeGaze\RecordingEyeGaze\bin\Debug\notreading_screenshots\notreading";

            //GazeData gd = new GazeData(namefile);
            //gd.gazes = GazeData.fixationBusher2008(gd.gazes);
            //List<bool> reading = scoreReading1(gd.gazes);

            string folder = @"C:\Users\Olivier\Documents\GitHub\eyex\RecordingEyeGaze\ReadingVSNotReading\bin\Debug\verticalJapanese";

            //string folder = @"C:\Users\Olivier\Documents\GitHub\eyex\RecordingEyeGaze\ReadingVSNotReading\bin\Debug\English";
            string[] files = Directory.GetFiles(folder);
            foreach (var file in files)
            {
                Console.WriteLine(file);
                GazeData gd = new GazeData(file);
                gd.gazes = GazeData.fixationBusher2008(gd.gazes);
                List <int> reading = scoreReading2(gd.gazes, 5, 20);
                double     average = reading.Average();
                Console.WriteLine(average);
                Console.WriteLine("\n");

                //If reading, apply the wordometer
                //if (average > 0.5)
                //{
                //    gd.lines = GazeData.lineBreakDetectionSimple(ref gd.gazes);
                //    int nbWords = (int)(gd.TotalLinesLength / 70);
                //    Console.WriteLine("Nb of words = " + nbWords);
                //}
            }
            Console.WriteLine("\n\nPress a key to exit...");
            Console.ReadLine();
        }
Example #18
        public void OnGazeUpdate(GazeData gazeData)
        {
            if (!Enabled)
            {
                return;
            }

            // start or stop tracking lost animation
            if ((gazeData.State & GazeData.STATE_TRACKING_GAZE) == 0 &&
                (gazeData.State & GazeData.STATE_TRACKING_PRESENCE) == 0)
            {
                return;
            }

            // tracking coordinates
            var x       = ActiveScreen.Bounds.X;
            var y       = ActiveScreen.Bounds.Y;
            var gX      = Smooth ? gazeData.SmoothedCoordinates.X : gazeData.RawCoordinates.X;
            var gY      = Smooth ? gazeData.SmoothedCoordinates.Y : gazeData.RawCoordinates.Y;
            var screenX = (int)Math.Round(x + gX, 0);
            var screenY = (int)Math.Round(y + gY, 0);

            // return in case of 0,0
            if (screenX == 0 && screenY == 0)
            {
                return;
            }

            NativeMethods.SetCursorPos(screenX, screenY);
        }
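Snippets like this one implement IGazeListener from the EyeTribe C# client, so OnGazeUpdate only fires once the listener is registered. A rough wiring sketch follows; the API names follow the TETCSharpClient SDK but are written from memory, and StartGazeTracking is a made-up name, so treat the whole block as an assumption:

        // Assumed wiring (e.g. called once at startup): activate the EyeTribe client
        // in push mode and register this object as a gaze listener.
        public void StartGazeTracking()
        {
            GazeManager.Instance.Activate(GazeManager.ApiVersion.VERSION_1_0, GazeManager.ClientMode.Push);
            GazeManager.Instance.AddGazeListener(this);
        }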
Example #19
        private void ProcessSample(GazeData gazeData)
        {
            var right = PointF.Empty;
            var left  = PointF.Empty;

            if ((gazeData.State & GazeData.STATE_TRACKING_EYES) != 0 || (gazeData.State & GazeData.STATE_TRACKING_PRESENCE) != 0)
            {
                if (gazeData.LeftEye.PupilCenterCoordinates.X != 0 && gazeData.LeftEye.PupilCenterCoordinates.Y != 0)
                {
                    left.X = (float)gazeData.LeftEye.PupilCenterCoordinates.X;
                    left.Y = (float)gazeData.LeftEye.PupilCenterCoordinates.Y;
                }

                if (gazeData.RightEye.PupilCenterCoordinates.X != 0 && gazeData.RightEye.PupilCenterCoordinates.Y != 0)
                {
                    right.X = (float)gazeData.RightEye.PupilCenterCoordinates.X;
                    right.Y = (float)gazeData.RightEye.PupilCenterCoordinates.Y;
                }
            }

            // create a new trackbox sample and enqueue it
            CurrentTrackboxObj = new TrackBoxObject
            {
                Left          = left,
                Right         = right,
                LeftValidity  = left != PointF.Empty ? EyeCount.One : EyeCount.Zero,
                RightValidity = right != PointF.Empty ? EyeCount.One : EyeCount.Zero
            };
            EnqueueTrackBoxObject(CurrentTrackboxObj);
        }
Example #20
    //IGazeListener onGazeUpdate used to receive tracking data
    //called each time new gaze data is sampled
    public void OnGazeUpdate(GazeData theData)
    {
        //update the latest gaze data
        gazeValidator.UpdateData(theData);

        //get data for eyes
        //for positioning based on eye/head position
        Eye eyeL = gazeValidator.getPrevEyeL();
        Eye eyeR = gazeValidator.getPrevEyeR();

        //get center point for eyes
        if (eyeL != null && eyeR != null)
        {
            double eyeX = Math.Min(eyeL.PupilCenterCoordinates.X, eyeR.PupilCenterCoordinates.X) + (Math.Abs((eyeR.PupilCenterCoordinates.X - eyeL.PupilCenterCoordinates.X)) / 2);
            double eyeY = Math.Min(eyeL.PupilCenterCoordinates.Y, eyeR.PupilCenterCoordinates.Y) + (Math.Abs((eyeR.PupilCenterCoordinates.Y - eyeL.PupilCenterCoordinates.Y)) / 2);
            eyeCoords = new Point2D(eyeX, eyeY);
        }

        //smoothed gaze data
        //for positioning based on gaze point
        Point2D prevSmoothCoords = gazeValidator.getPrevSmoothCoords();

        if (prevSmoothCoords != null)
        {
            gazeCoords = gazeValidator.getPrevSmoothCoords();
        }

        //print data
        //UnityEngine.Debug.Log("Avg Smooth Coordinates: (" + xAvgSmooth + ", " + yAvgSmooth + ")");
    } //end function
Example #21
        internal static GazeData ToGazeData(this Tobii.Research.GazeDataEventArgs gazeDataItem)
        {
            var validity = GetEyeValidity(gazeDataItem.LeftEye.GazePoint.Validity, gazeDataItem.RightEye.GazePoint.Validity);

            var gaze = new GazeData
                       (
                validity,
                new EyeGazeData
                (
                    validity.GetLeftEyeValidity(),
                    gazeDataItem.LeftEye.GazePoint.PositionOnDisplayArea.ToPoint2(),
                    gazeDataItem.LeftEye.GazePoint.PositionInUserCoordinates.ToPoint3(),
                    gazeDataItem.LeftEye.GazeOrigin.PositionInUserCoordinates.ToPoint3(),
                    gazeDataItem.LeftEye.GazeOrigin.PositionInTrackBoxCoordinates.ToPoint3(),
                    gazeDataItem.LeftEye.Pupil.PupilDiameter
                ),
                new EyeGazeData
                (
                    validity.GetRightEyeValidity(),
                    gazeDataItem.RightEye.GazePoint.PositionOnDisplayArea.ToPoint2(),
                    gazeDataItem.RightEye.GazePoint.PositionInUserCoordinates.ToPoint3(),
                    gazeDataItem.RightEye.GazeOrigin.PositionInUserCoordinates.ToPoint3(),
                    gazeDataItem.RightEye.GazeOrigin.PositionInTrackBoxCoordinates.ToPoint3(),
                    gazeDataItem.RightEye.Pupil.PupilDiameter
                ),
                gazeDataItem.DeviceTimeStamp,
                DateTime.Now // TODO derive timestamp from Tobii timestamp
                       );

            return(gaze);
        }
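A possible caller for the ToGazeData extension above, subscribing to a Tobii Pro SDK tracker. The surrounding code is illustrative only: it assumes the Tobii.Research package plus System.Linq, and SubscribeToGazeData is a made-up name:

        // Illustrative subscription: map each Tobii sample through ToGazeData.
        internal static void SubscribeToGazeData()
        {
            var eyeTracker = Tobii.Research.EyeTrackingOperations.FindAllEyeTrackers().FirstOrDefault();
            if (eyeTracker != null)
            {
                eyeTracker.GazeDataReceived += (sender, args) =>
                {
                    GazeData gaze = args.ToGazeData();
                    // hand the mapped sample on to the rest of the pipeline here
                };
            }
        }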
Example #22
        public void OnGazeUpdate(GazeData gazeData)
        {
            var isValid = ((gazeData.State & GazeData.STATE_TRACKING_GAZE) != 0) &&
                          ((gazeData.State & GazeData.STATE_TRACKING_PRESENCE) != 0) &&
                          ((gazeData.State & GazeData.STATE_TRACKING_EYES) != 0) &&
                          ((gazeData.State & GazeData.STATE_TRACKING_FAIL) == 0) &&
                          ((gazeData.State & GazeData.STATE_TRACKING_LOST) == 0) &&
                          gazeData.SmoothedCoordinates != null &&
                          gazeData.SmoothedCoordinates.X != 0 &&
                          gazeData.SmoothedCoordinates.Y != 0
            ;

            if (!isValid)
            {
                return;
            }

            var headPosition = gazeData.HeadPosition();

            if (headPosition == null)
            {
                return;
            }

            //Now that we can suppose the data is probably valid we can make use of it.
            var cursorPoint = GetCursorPosition();
            var gazePoint   = gazeData.SmoothedCoordinates;

            ZCurrent = headPosition.Z;
            YGaze    = gazePoint.Y;
            YMouse   = cursorPoint.Y;
        }
Example #23
        public static void scanMatch()
        {
            Console.WriteLine("Enter the path of the first csv file");
            string path1 = Console.ReadLine();
            //open csv and filter fixation/saccade
            GazeData gd1 = openAndFilter(path1);
            //create spatial binning
            //create temporal binning
            GazeData gd1bin = createTemporalBinning(gd1);


            Console.WriteLine("Enter the path of the second csv file");
            string path2 = Console.ReadLine();
            //open csv and filter fixation/saccade
            GazeData gd2 = openAndFilter(path2);
            //create spatial binning
            //create temporal binning
            GazeData gd2bin = createTemporalBinning(gd2);


            //compare two temporal binning
            //create substitution matrix
            int[,] subsMat = computeSubstitutionMatrix(gd1bin, gd2bin);
            //track back for computing the score
            int score = computeScore(subsMat);
        }
Example #24
 private void OnGazeData(GazeData data)
 {
     if (onStream)
     {
         SendMessage("GAZE_DATA", data.GazePositionX + " " + data.GazePositionY);
     }
 }
Example #25
        public static GazeData openAndFilter(string path)
        {
            GazeData gd = new GazeData(Path.ChangeExtension(Path.GetFullPath(path), null));

            gd.gazes = GazeData.fixationBusher2008(gd.gazes);
            return(gd);
        }
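Example #26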
        public GazeDelta Next(GazeData data)
        {
            if (data == null)
            {
                return(null);
            }
            var result =
                new GazeDelta
            {
                IsFixated    = data.IsFixated,
                TimeStamp    = data.TimeStamp,
                Presence     = (data.State & GazeData.STATE_TRACKING_PRESENCE) != 0,
                Gaze         = (data.State & GazeData.STATE_TRACKING_PRESENCE) != 0,
                Failed       = (data.State & GazeData.STATE_TRACKING_FAIL) != 0,
                LostTracking = (data.State & GazeData.STATE_TRACKING_LOST) != 0,
                Eyes         = (data.State & GazeData.STATE_TRACKING_EYES) != 0,

                LeftEye  = data.LeftEye,
                RightEye = data.RightEye,

                RawCoordinates      = data.RawCoordinates,
                SmoothedCoordinates = data.SmoothedCoordinates,

                Head = data.HeadPosition(),

                IsUsable = true,
                Last     = this
            };

            Last = null;
            return(result);
        }
Example #27
        public static GazeData createTemporalBinning(GazeData gd, int timeBin = 50)
        {
            GazeData tempBin = new GazeData();

            //process
            return(tempBin);
        }
Example #28
        private GazeData NewTarget(GazeData data, List <GazeData> eventsToBroadcast)
        {
            GazeData diffData = CreateEnterData(data);

            eventsToBroadcast.Add(diffData);
            return(diffData);
        }
Example #29
        private void Work()
        {
            try
            {
                //while thread not killed
                while (isRunning)
                {
                    GazeData gaze = blockingGazeQueue.Take();

                    if (null != gaze)
                    {
                        lock (gazeListeners)
                        {
                            foreach (IGazeListener listener in gazeListeners)
                            {
                                ThreadPool.QueueUserWorkItem(new WaitCallback(HandleOnGazeUpdate), new Object[] { listener, gaze });
                            }
                        }
                    }
                }
            }
            catch (Exception e)
            {
                Debug.WriteLine("Exception while running broadcaster: " + e.Message);
            }
            finally
            {
                Debug.WriteLine("Broadcaster closing down");
            }
        }
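HandleOnGazeUpdate is not part of this snippet; given the object[] payload queued above, it presumably unpacks the listener/sample pair roughly like this (a sketch, not the original callback):

        private static void HandleOnGazeUpdate(object state)
        {
            // Unpack the pair queued by Work() and invoke the listener,
            // isolating exceptions so one bad consumer cannot stop the broadcast.
            var pair     = (Object[])state;
            var listener = (IGazeListener)pair[0];
            var gaze     = (GazeData)pair[1];

            try
            {
                listener.OnGazeUpdate(gaze);
            }
            catch (Exception e)
            {
                Debug.WriteLine("Exception in gaze listener: " + e.Message);
            }
        }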
Example #30
        public void OnGazeUpdate(GazeData gazeData)
        {
            var x = (int)Math.Round(gazeData.SmoothedCoordinates.X, 0);
            var y = (int)Math.Round(gazeData.SmoothedCoordinates.Y, 0);

            if (x == 0 & y == 0)
            {
                return;
            }

            // Invoke thread

            //Dispatcher.BeginInvoke(new Action(() => UpdateUI(x, y)));
            //NativeMethods.SetCursorPos(x, y);
            CheckClick(x, y, buttonHeight, buttonWidth);

            // start or stop tracking lost animation
            if ((gazeData.State & GazeData.STATE_TRACKING_GAZE) == 0 &&
                (gazeData.State & GazeData.STATE_TRACKING_PRESENCE) == 0)
            {
                //return;
                Dispatcher.BeginInvoke(new Action(() => buttonSelect.Fill = new SolidColorBrush(System.Windows.Media.Colors.Red)));
            }

            //OnConnectionStateChanged(GazeManager.Instance.IsActivated);
            //if (listener.OnConnectionStateChanged == true)
            //if ((gazeData.State & GazeData.STATE_TRACKING_LOST) != 0)
            //Dispatcher.BeginInvoke(new Action(() => buttonSelect.Fill = new SolidColorBrush(System.Windows.Media.Colors.Red)));
        }
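Example #31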
        public void OnGazeUpdate(GazeData gazeData)
        {
            responseData1.Enqueue(new double[]
            {
                #region General gazeData
                gazeData.RawCoordinates.X, gazeData.RawCoordinates.Y,
                gazeData.SmoothedCoordinates.X, gazeData.SmoothedCoordinates.Y,
                Convert.ToDouble(gazeData.IsFixated), //Converting boolean to double here. Watch out
                gazeData.State,
                #endregion

                #region LeftEye gazeData
                gazeData.LeftEye.RawCoordinates.X, gazeData.LeftEye.RawCoordinates.Y,
                gazeData.LeftEye.SmoothedCoordinates.X, gazeData.LeftEye.SmoothedCoordinates.Y,
                gazeData.LeftEye.PupilCenterCoordinates.X, gazeData.LeftEye.PupilCenterCoordinates.Y,
                gazeData.LeftEye.PupilSize,
                #endregion

                #region RightEye gazeData
                gazeData.RightEye.RawCoordinates.X, gazeData.RightEye.RawCoordinates.Y,
                gazeData.RightEye.SmoothedCoordinates.X, gazeData.RightEye.SmoothedCoordinates.Y,
                gazeData.RightEye.PupilCenterCoordinates.X, gazeData.RightEye.PupilCenterCoordinates.Y,
                gazeData.RightEye.PupilSize,
                #endregion

                gazeData.TimeStamp
            });
            responseData2.Enqueue(gazeData.TimeStampString);
            Task.Run(() => saveToFile());
        }
Example #32
 private void DisplayGaze(GazeData data)
 {
     if (this.IsLoaded)
     {
         switch (data.TrackingStatus)
         {
             case TrackingStatus.BothEyesTracked:
                 DisplayGazeLeft(data.Left);
                 DisplayGazeRight(data.Right);
                 break;
             case TrackingStatus.OnlyLeftEyeTracked:
             case TrackingStatus.OneEyeTrackedProbablyLeft:
                 DisplayGazeLeft(data.Left);
                 break;
             case TrackingStatus.OnlyRightEyeTracked:
             case TrackingStatus.OneEyeTrackedProbablyRight:
                 DisplayGazeRight(data.Right);
                 break;
         }
     }
 }
Example #33
    /// <summary>
    /// Extracts the tracker data.
    /// </summary>
    /// <param name="data">The data.</param>
    /// <returns></returns>
    public GazeData ExtractTrackerData(EyeTrackingController.SampleStruct data)
    {
      GazeData result = new GazeData();

      float gazePosXLeft = Convert.ToSingle(data.leftEye.gazeX);
      float gazePosXRight = Convert.ToSingle(data.rightEye.gazeX);
      float gazePosYLeft = Convert.ToSingle(data.leftEye.gazeY);
      float gazePosYRight = Convert.ToSingle(data.rightEye.gazeY);

      result.GazePosX = (gazePosXLeft + gazePosXRight) / 2;
      result.GazePosY = (gazePosYLeft + gazePosYRight) / 2;

      if (result.GazePosX > 1000 * 10)
      {
          result.GazePosX = result.GazePosX / 1000;
      }
      if (result.GazePosY > 1000 * 10)
      {
          result.GazePosY = result.GazePosY / 1000;
      }

      long MICROSECONDS = 1000;
      result.Time = (data.timestamp / MICROSECONDS);
      this.lastTime = result.Time;

      result.PupilDiaX = Convert.ToSingle(data.leftEye.diam);
      result.PupilDiaY = Convert.ToSingle(data.rightEye.diam);

      return result;
    }
Example #34
    /// <summary>
    /// The <see cref="GTExtendedData"/> event handler
    /// which is called whenever there is a new frame arrived.
    /// Sends the GazeDataChanged event to the recording module.
    /// </summary>
    /// <param name="data">The tracking data with the mapped gaze 
    /// coordinates in pixel units.</param>
    private void OnGTExtendedDataChanged(GTExtendedData data)
    {
      var gazeData = new GazeData
        {
          Time = data.TimeStamp,
          PupilDiaX = (float)data.PupilDiameterLeft,
          PupilDiaY = (float)data.PupilDiameterRight,

          // Calculate values between 0..1
          GazePosX = (float)(data.GazePositionX / this.presentationScreenSize.Width),
          GazePosY = (float)(data.GazePositionY / this.presentationScreenSize.Height)
        };

      this.OnGazeDataChanged(new GazeDataChangedEventArgs(gazeData));
    }
Example #35
    /// <summary>
    /// Start eye tracker recording. 
    /// </summary>
    public override void Record()
    {
      //// add the aslRawDataReceived method to the Notify event
      this.aslPort.Notify +=
          new _IASLSerialOutPort2Events_NotifyEventHandler(this.aslRawDataReceived);

      this.LastTimeStamp = 0;
      try
      {
        // Create and/or start the timer
        if (this.stopwatch != null)
        {
          this.stopwatch.Reset();
          this.stopwatch.Start();
        }
        else
        {
          this.stopwatch = new Stopwatch();
          this.stopwatch.Start();
        }

        // Initiate callbacks from COM Server (used in streaming mode)
        this.aslPort.StartContinuousMode();

        // If not exist create the Gaze data structure with fields
        // that match the database columns
        if (this.newGazeData.Equals(null))
        {
          this.newGazeData = new GazeData();
        }
      }
      catch (Exception ex)
      {
        if (!this.GetLastError("start the recording failed "))
        {
          ExceptionMethods.ProcessErrorMessage("start the recording failed "
              + ex.Message);
        }

        // remove the aslRawDataReceived method from the Notify event
        this.aslPort.Notify -=
          new _IASLSerialOutPort2Events_NotifyEventHandler(this.aslRawDataReceived);
        if (this.stopwatch != null)
        {
          this.stopwatch.Reset();
        }

        this.CleanUp();
      }
    } // end of Record()
Example #36
 private void RaiseGazePoint(GazeData GazeData)
 {
     var handler = GazePoint;
     if (handler != null)
     {
         handler(this, new GazePointEventArgs(GazeData));
     }
 }
Example #37
 public GazePointEventArgs(GazeData GazedataTemp)
 {
     GazeDataReceived = GazedataTemp;
 }
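Examples #36 and #37 only make sense together with the event and argument class they assume; a minimal sketch of that shape, where everything beyond the names used in the two snippets is an inference:

 // Event raised by Example #36; GazePointEventArgs is the class whose constructor is Example #37.
 public event EventHandler<GazePointEventArgs> GazePoint;

 public class GazePointEventArgs : EventArgs
 {
     // The gaze sample carried to subscribers of the GazePoint event.
     public GazeData GazeDataReceived { get; set; }

     public GazePointEventArgs(GazeData GazedataTemp)
     {
         GazeDataReceived = GazedataTemp;
     }
 }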
Example #38
    /// <summary>
    /// Raw data event handler. Raises a GazeDataChanged event if isRecording is true.
    /// </summary>
    /// <param name="data">Tracker RawData</param>
    /// <param name="userData">Pointer to user Data</param>
    private void AleaInterface_APIRawDataReceived(ref RawData data, IntPtr userData)
    {
        if (this.RecordModule.InvokeRequired)
        {
            this.RecordModule.BeginInvoke(new RawDataDelegate(this.AleaInterface_APIRawDataReceived), new object[] { data, userData });
        }
        else
        {
            if (this.isRecording && this.resolutionX != 0 && this.resolutionY != 0)
            {
                // Save current timestamp
                lock (this.timeLock)
                {
                    this.lastTimeStamp = data.timeStamp;

                    // Reset Stopwatch
                    this.stopwatch.Reset();

                    // Start stopwatch, if at least one timestamp is saved.
                    this.stopwatch.Start();
                }

                GazeData newGazeData = new GazeData();

                // Get gazeTimestamp in milliseconds.
                newGazeData.Time = this.lastTimeStamp;

                // Calculate values between 0..1
                newGazeData.GazePosX = (float)(data.intelliGazeX / (float)this.resolutionX);
                newGazeData.GazePosY = (float)(data.intelliGazeY / (float)this.resolutionY);

                // Set pupil diameter
                newGazeData.PupilDiaX = (float)data.leftEye.pupilDiameter;
                newGazeData.PupilDiaY = (float)data.rightEye.pupilDiameter;

                // raise event
                this.OnGazeDataChanged(new GazeDataChangedEventArgs(newGazeData));
            }
        }
    }
Example #39
 public Stream(Client cli)
 {
     client = cli;
     gazeData = new GazeData();
 }
Example #40
    /// <summary>
    /// Process received message from the tracker
    /// </summary>
    /// <param name="messageEventArgs">Message to process</param>
    protected void ProcessReceivedMessage(StringEventArgs messageEventArgs)
    {
      // CANDO : Separate this method in submethods for each process depending on received message
      try
      {
        if (this.RecordModule.InvokeRequired)
        {
          this.RecordModule.BeginInvoke(new ProcessMessageReceivedDelegate(this.ProcessReceivedMessage), new object[] { messageEventArgs });
        }
        else
        {
          string grossMessage = messageEventArgs.Param;
          int endIdex = grossMessage.IndexOf("\r\n");
          if (endIdex != -1)
          {
            string availableMessage = grossMessage.Substring(0, endIdex);
            XmlDocument doc = new XmlDocument();
            doc.InnerXml = availableMessage;
            XmlElement root = doc.DocumentElement;
            string attribute;

            if (root != null)
            {
              if (root.Name == "ACK")
              {
                attribute = root.GetAttribute("ID");
                if (attribute == "CALIBRATE_RESULT_SUMMARY")
                {
                  this.memCalibrationResult.Text = root.GetAttribute("AVE_ERROR");
                  if (this.memSettings.HideCalibWindow)
                  {
                    this.memNetworkManager.SendMessage("<SET ID=\"CALIBRATE_SHOW\" STATE=\"0\" />\r\n");
                  }
                }
                else if (attribute == "SCREEN_SELECTED")
                {
                  if (root.GetAttribute("VALUE") == "0")
                  {
                    Properties.Settings.Default.PresentationScreenMonitor = "Primary";
                  }
                  else if (root.GetAttribute("VALUE") == "1")
                  {
                    Properties.Settings.Default.PresentationScreenMonitor = "Secondary";
                  }
                }
                else if (attribute == "SCREEN_SIZE")
                {
                  int screenWith;
                  if (int.TryParse(root.GetAttribute("WIDTH"), out screenWith))
                  {
                    int screenHeight;
                    if (int.TryParse(root.GetAttribute("HEIGHT"), out screenHeight))
                    {
                      if (screenHeight != Document.ActiveDocument.PresentationSize.Height
                        || screenWith != Document.ActiveDocument.PresentationSize.Width)
                      {
                        // Send correct size to tracker
                        string screenSizeString =
                          string.Format(
                            "<SET ID=\"SCREEN_SIZE\" WIDTH=\"{0}\" HEIGHT=\"{1}\"/>\r\n",
                            Document.ActiveDocument.PresentationSize.Width,
                            Document.ActiveDocument.PresentationSize.Height);
                        this.memNetworkManager.SendMessage(screenSizeString);
                      }
                    }
                  }
                }
                else if (attribute == "TIME_TICK_FREQUENCY")
                {
                  // REPLY: <ACK ID="TIME_TICK_FREQUENCY" FREQ="2405480000" />
                  ulong readedTickFrequency;
                  if (ulong.TryParse(root.GetAttribute("FREQ"), out readedTickFrequency))
                  {
                    this.tickFrequency = readedTickFrequency;
                  }
                }
              }
              else if (root.Name == "CAL")
              {
                attribute = root.GetAttribute("ID");
                if (attribute == "CALIB_RESULT")
                {
                  // TODO : Hide calibration result screen after calibration
                  this.memIsCalibrating = false;
                  this.memNetworkManager.SendMessage("<GET ID=\"CALIBRATE_RESULT_SUMMARY\" />\r\n");
                  this.ShowCalibPlot();
                }
              }
              else if (root.Name == "REC")
              {
                // TODO : Optimize data send to Ogama by verifying if they are valid
                var newGazeData = new GazeData();

                //// Get gazeTimestamp in milliseconds.
                ////newGazeData.Time = this.memTimeOfRecordingStart.ElapsedMilliseconds;

                // Get time stamp in ticks
                attribute = root.GetAttribute("TIME_TICK");
                var timeInTicks = double.Parse(attribute, CultureInfo.InvariantCulture);

                // Convert to milliseconds
                newGazeData.Time = (long)(timeInTicks / (this.tickFrequency / 1000.0));

                // Calculate values between 0..1
                attribute = root.GetAttribute("BPOGX");
                newGazeData.GazePosX = float.Parse(attribute, CultureInfo.InvariantCulture);
                attribute = root.GetAttribute("BPOGY");
                newGazeData.GazePosY = float.Parse(attribute, CultureInfo.InvariantCulture);

                // Set pupil diameter
                attribute = root.GetAttribute("LPD");
                newGazeData.PupilDiaX = float.Parse(attribute, CultureInfo.InvariantCulture);
                attribute = root.GetAttribute("RPD");
                newGazeData.PupilDiaY = float.Parse(attribute, CultureInfo.InvariantCulture);
                this.OnGazeDataChanged(new GazeDataChangedEventArgs(newGazeData));

                // Values needed by the trackstatus windows
                attribute = root.GetAttribute("LPS");
                float lED = float.Parse(attribute, CultureInfo.InvariantCulture);
                attribute = root.GetAttribute("RPS");
                float rED = float.Parse(attribute, CultureInfo.InvariantCulture);
                float averageRD = (lED + rED) / 2;
                if (this.memDlgTrackStatus != null)
                {
                  this.memDlgTrackStatus.UpdateStatus((float)newGazeData.GazePosX, (float)newGazeData.GazePosY, averageRD);
                }
              }
            }
          }
        }
      }
      catch (Exception e)
      {
        string message = "Exception catched in ProcessReceivedMessage(...) Method : " +
            Environment.NewLine + e.Message;
        ExceptionMethods.ProcessErrorMessage(message);
      }
    }
Example #41
    /// <summary>
    /// Processes the message.
    /// </summary>
    /// <param name="msg">The message from the haytham server.</param>
    private void ProcessMessage(string msg)
    {
      if (msg == "Commands|CalibrationFinished|")
      {
        this.OnCalibrationFinished(EventArgs.Empty);
        return;
      }

      string[] msgArray = this.ConvertMsgToArray(msg);

      if (msg.StartsWith("Eye|"))
      {
        var newData = new GazeData();

        // Get time
        var timeInTicks = long.Parse(msgArray[0]);

        // Convert to milliseconds
        newData.Time = (long)(timeInTicks / 10000.0);

        // Get Position in screen coordinates
        var absoluteX = float.Parse(msgArray[1]);
        var absoluteY = float.Parse(msgArray[2]);

        // Convert to relative coordinates, cause this is expected by Ogama
        newData.GazePosX = absoluteX / this.presentationScreenSize.Width;
        newData.GazePosY = absoluteY / this.presentationScreenSize.Height;

        newData.PupilDiaX = int.Parse(msgArray[3]);

        this.OnGazeDataReceived(new GazeDataChangedEventArgs(newData));

        var newTrackStatusData = new TrackStatusData();
        newTrackStatusData.TrackedEyes = Eye.Left;
        var isPupilFound = bool.Parse(msgArray[4]);
        newTrackStatusData.LeftEyeValidity = isPupilFound ? Validity.Good : Validity.Missing;
        var pupilPosX = float.Parse(msgArray[5]);
        var pupilPosY = float.Parse(msgArray[6]);
        newTrackStatusData.LeftEyePosition = new Vector3D(pupilPosX, pupilPosY, 0);
        this.OnTrackStatusDataChanged(new TrackStatusDataChangedEventArgs(newTrackStatusData));
      }
    }
Example #42
    /// <summary>
    /// Is called when new gaze data is available, updates track status and raises OnGazeDataChanged
    /// </summary>
    /// <param name="sender">The Sender.</param>
    /// <param name="e">Gaze data.</param>
    private void SmartEyeGazeDataAvailable(object sender, GazeDataReceivedEventArgs e)
    {
      // Send the gaze data to the track status control.
      SmartEyeGazeData gd = e.Gazedata;
      this.smartEyeTrackStatus.OnGazeData(gd);
      if (this.dlgTrackStatus != null && this.dlgTrackStatus.Visible)
      {
        this.dlgTrackStatus.Update(gd);
      }

      GazeData newGD = new GazeData();
      newGD.Time = gd.Time;

      if (gd.HeadQuality >= 1 && gd.GazeQuality > this.smartEyeSettings.QualityThreshold)   // cut off bad quality data
      {
        newGD.GazePosX = gd.GazePosX;
        newGD.GazePosY = gd.GazePosY;
        newGD.PupilDiaX = gd.PupilDiaX;
        newGD.PupilDiaY = gd.PupilDiaY;
      }
      else
      {
        newGD.GazePosX = null;
        newGD.GazePosY = null;
        newGD.PupilDiaX = null;
        newGD.PupilDiaY = null;
      }

      this.OnGazeDataChanged(new GazeDataChangedEventArgs(newGD));
    }
Example #43
    /// <summary>
    /// Processes an <see cref="ImageData"/> object and reports it to Ogama.
    /// </summary>
    /// <param name="data">The <see cref="ImageData"/> to process.</param>
    private void ProcessImageData(ImageData data)
    {
      var newGazeData = new GazeData();

      if (data.LeftEye.Found && data.RightEye.Found)
      {
        newGazeData.GazePosX = (float)((data.LeftEye.GazePoint.x + data.RightEye.GazePoint.x) / 2.0);
        newGazeData.GazePosY = (float)((data.LeftEye.GazePoint.y + data.RightEye.GazePoint.y) / 2.0);
        newGazeData.PupilDiaX = (float)data.LeftEye.PupilDiameter;
        newGazeData.PupilDiaY = (float)data.RightEye.PupilDiameter;

        newGazeData.GazePosX = newGazeData.GazePosX / (float)this.resolutionX;
        newGazeData.GazePosY = newGazeData.GazePosY / (float)this.resolutionY;
      }
      else if (data.LeftEye.Found)
      {
        newGazeData.GazePosX = (float)data.LeftEye.GazePoint.x;
        newGazeData.GazePosY = (float)data.LeftEye.GazePoint.y;
        newGazeData.PupilDiaX = (float)data.LeftEye.PupilDiameter;
        newGazeData.PupilDiaY = null;

        newGazeData.GazePosX = newGazeData.GazePosX / (float)this.resolutionX;
        newGazeData.GazePosY = newGazeData.GazePosY / (float)this.resolutionY;
      }
      else if (data.RightEye.Found)
      {
        newGazeData.GazePosX = (float)data.RightEye.GazePoint.x;
        newGazeData.GazePosY = (float)data.RightEye.GazePoint.y;
        newGazeData.PupilDiaX = null;
        newGazeData.PupilDiaY = (float)data.RightEye.PupilDiameter;

        newGazeData.GazePosX = newGazeData.GazePosX / (float)this.resolutionX;
        newGazeData.GazePosY = newGazeData.GazePosY / (float)this.resolutionY;
      }
      else
      {
        newGazeData.GazePosX = null;
        newGazeData.GazePosY = null;
        newGazeData.PupilDiaX = null;
        newGazeData.PupilDiaY = null;
      }

      newGazeData.Time = (long)data.Time;

      this.OnGazeDataChanged(new GazeDataChangedEventArgs(newGazeData));
    }
Example #44
    /// <summary>
    /// <see cref="Timer.Tick"/> event handler for
    /// the <see cref="trackingTimer"/> <see cref="Timer"/>
    /// This event fires whenever the timer interval has elapsed.
    /// It sends an empty gaze structure with the current timing in
    /// an OGAMA-readable format and fires the <see cref="Tracker.OnGazeDataChanged"/>
    /// event to the recorder.
    /// </summary>
    /// <param name="sender">Source of the event.</param>
    /// <param name="e">An empty <see cref="EventArgs"/>.</param>
    private void TrackingTimerTick(object sender, EventArgs e)
    {
      var newGazeData = new GazeData
        {
          Time = this.GetCurrentTime(),
          GazePosX = null,
          GazePosY = null,
          PupilDiaX = null,
          PupilDiaY = null
        };

      this.OnGazeDataChanged(new GazeDataChangedEventArgs(newGazeData));
      this.counter++;
    }
Example #45
    /// <summary>
    /// Extracts the received sample into a <see cref="GazeData"/>
    /// struct.
    /// A dataStr may contain the following data:
    /// ET_SPL 91555722812 b 420 420 564 564 17.219431 17.855097 17.219431 17.855097 -53.704  9.674 13.589 15.935 624.140 612.472 419.313368 680.213202 455.167761 443.716013 4.72 4.72 3
    /// </summary>
    /// <param name="dataStr">The <see cref="String"/> with the data line from the iViewX.</param>
    /// <returns>A filled <see cref="GazeData"/> that represents the line
    /// according to the <see cref="SMISetting"/></returns>
    public GazeData ExtractTrackerData(string dataStr)
    {

      //Console.WriteLine("ExtractTrackerData:" + dataStr);

      GazeData data = new GazeData();
      char[] seperator = { ' ' };
      string[] tmp = dataStr.Split(seperator);

      try
      {
        this.lastTime = Convert.ToInt64(tmp[1]);
      }
      catch (System.Exception)
      {

      }
      string availableEye = tmp[2];
      switch (availableEye)
      {
        case "b":
          data.PupilDiaX = Convert.ToSingle(tmp[3]);
          data.PupilDiaY = Convert.ToSingle(tmp[5]);
          float gazePosXLeft = Convert.ToSingle(tmp[7]) / this.presentationScreenSize.Height;
          float gazePosXRight = Convert.ToSingle(tmp[8]) / this.presentationScreenSize.Height;
          float gazePosYLeft = Convert.ToSingle(tmp[9]) / this.presentationScreenSize.Height;
          float gazePosYRight = Convert.ToSingle(tmp[10]) / this.presentationScreenSize.Height;

          switch (this.smiSettings.AvailableEye)
          {
            case AvailableEye.Left:
              data.GazePosX = gazePosXLeft;
              data.GazePosY = gazePosYLeft;
              break;
            case AvailableEye.Right:
              data.GazePosX = gazePosXRight;
              data.GazePosY = gazePosYRight;
              break;
            case AvailableEye.Both:
              // Use the mean
              data.GazePosX = (gazePosXLeft + gazePosXRight) / 2;
              data.GazePosY = (gazePosYLeft + gazePosYRight) / 2;
              break;
          }

          break;
        case "l":
        case "r":
          data.PupilDiaX = Convert.ToSingle(tmp[3]);
          data.PupilDiaY = Convert.ToSingle(tmp[4]);
          data.GazePosX = Convert.ToSingle(tmp[5]) / this.presentationScreenSize.Width;
          data.GazePosY = Convert.ToSingle(tmp[6]) / this.presentationScreenSize.Height;
          break;
      }


      data.Time = this.lastTime;
      return data;
    }
Example #46
    /// <summary>
    /// OnGazeData event handler for connected tracker.
    ///   This event fires whenever there is new gaze data
    ///   to receive.
    ///   It converts the interface-internal gaze structure into
    ///   an OGAMA-readable format and fires the <see cref="Tracker.OnGazeDataChanged"/>
    ///   event to the recorder.
    /// </summary>
    /// <param name="sender">
    /// Source of the event
    /// </param>
    /// <param name="e">
    /// The <see cref="GazeDataEventArgs"/> with the new gaze data
    ///   from the device.
    /// </param>
    private void ConnectedTrackerGazeDataReceived(object sender, GazeDataEventArgs e)
    {
      // Send the gaze data to the track status control.
      GazeDataItem gd = e.GazeDataItem;
      this.tobiiTrackStatus.OnGazeData(gd);
      if (this.dlgTrackStatus != null && this.dlgTrackStatus.Visible)
      {
        this.dlgTrackStatus.Update(gd);
      }

      // Convert Tobii gazestamp in milliseconds.
      var newGazeData = new GazeData { Time = gd.TimeStamp / 1000 };

      // The validity code takes one of five values for each eye ranging from 0 to 4, with the
      // following interpretation:
      // 0 - The eye tracker is certain that the data for this eye is right. There is no risk of
      // confusing data from the other eye.
      // 1 - The eye tracker has only recorded one eye, and has made some assumptions and
      // estimations regarding which is the left and which is the right eye. However, it is still
      // very likely that the assumption made is correct. The validity code for the other eye is
      // in this case always set to 3.
      // 2 - The eye tracker has only recorded one eye, and has no way of determining which
      // one is left eye and which one is right eye. The validity code for both eyes is set to 2.
      // 3 - The eye tracker is fairly confident that the actual gaze data belongs to the other
      // eye. The other eye will always have validity code 1.
      // 4 - The actual gaze data is missing or definitely belonging to the other eye.
      // Hence, there are a limited number of possible combinations of validity codes for the
      // two eyes:
      // Code Description
      // 0 - 0 Both eyes found. Data is valid for both eyes.
      // 0 - 4 or 4 - 0 One eye found. Gaze data is the same for both eyes.
      // 1 – 3 or 3 - 1 One eye found. Gaze data is the same for both eyes.
      // 2 – 2 One eye found. Gaze data is the same for both eyes.
      // 4 – 4 No eye found. Gaze data for both eyes are invalid.
      // Use data only if both left and right eye was found by the eye tracker
      // It is recommended that the validity codes are always used for data filtering, 
      // to remove data points which are obviously incorrect. 
      // Normally, we recommend removing all data points with a validity code of 2 or higher.
      if (gd.LeftValidity == 0 && gd.RightValidity == 0)
      {
        // Let the x, y and distance be the right and left eye average
        newGazeData.GazePosX = (float)((gd.LeftGazePoint2D.X + gd.RightGazePoint2D.X) / 2);
        newGazeData.GazePosY = (float)((gd.LeftGazePoint2D.Y + gd.RightGazePoint2D.Y) / 2);
        newGazeData.PupilDiaX = gd.LeftPupilDiameter;
        newGazeData.PupilDiaY = gd.RightPupilDiameter;
      }
      else if (gd.LeftValidity == 4 && gd.RightValidity == 4)
      {
        newGazeData.GazePosX = 0;
        newGazeData.GazePosY = 0;
        newGazeData.PupilDiaX = 0;
        newGazeData.PupilDiaY = 0;
      }
      else if (gd.LeftValidity == 2 && gd.RightValidity == 2)
      {
        newGazeData.GazePosX = 0;
        newGazeData.GazePosY = 0;
        newGazeData.PupilDiaX = 0;
        newGazeData.PupilDiaY = 0;
      }
      else if (gd.LeftValidity == 1 && gd.RightValidity == 3)
      {
        newGazeData.GazePosX = (float)gd.LeftGazePoint2D.X;
        newGazeData.GazePosY = (float)gd.LeftGazePoint2D.Y;
        newGazeData.PupilDiaX = gd.LeftPupilDiameter;
        newGazeData.PupilDiaY = null;
      }
      else if (gd.LeftValidity == 3 && gd.RightValidity == 1)
      {
        newGazeData.GazePosX = (float)gd.RightGazePoint2D.X;
        newGazeData.GazePosY = (float)gd.RightGazePoint2D.Y;
        newGazeData.PupilDiaX = null;
        newGazeData.PupilDiaY = gd.RightPupilDiameter;
      }
      else if (gd.LeftValidity == 0 && gd.RightValidity == 4)
      {
        newGazeData.GazePosX = (float)gd.LeftGazePoint2D.X;
        newGazeData.GazePosY = (float)gd.LeftGazePoint2D.Y;
        newGazeData.PupilDiaX = gd.LeftPupilDiameter;
        newGazeData.PupilDiaY = null;
      }
      else if (gd.LeftValidity == 4 && gd.RightValidity == 0)
      {
        newGazeData.GazePosX = (float)gd.RightGazePoint2D.X;
        newGazeData.GazePosY = (float)gd.RightGazePoint2D.Y;
        newGazeData.PupilDiaX = null;
        newGazeData.PupilDiaY = gd.RightPupilDiameter;
      }
      else
      {
        newGazeData.GazePosX = null;
        newGazeData.GazePosY = null;
        newGazeData.PupilDiaX = null;
        newGazeData.PupilDiaY = null;
      }

      this.OnGazeDataChanged(new GazeDataChangedEventArgs(newGazeData));
    }