/// <summary>
/// Subscribes the supplied callbacks to the EyeX gaze point, fixation and
/// eye position data streams. Each stream — and its forwarding/recording
/// handlers — is set up exactly once, the first time a callback for it is supplied.
/// </summary>
/// <param name="gazePointCallback">Receives gaze point samples; pass null to leave that stream untouched.</param>
/// <param name="fixationCallback">Receives fixation events; pass null to leave that stream untouched.</param>
/// <param name="eyePositionCallback">Receives eye position samples; pass null to leave that stream untouched.</param>
public void StartGazeDataStreams(GazePointCallback gazePointCallback, FixationCallback fixationCallback, EyePositionCallback eyePositionCallback)
{
    if (gazePointCallback != null)
    {
        if (gazePointDataStream == null)
        {
            gazePointDataStream = eyeXHost.CreateGazePointDataStream(Tobii.EyeX.Framework.GazePointDataMode.Unfiltered);
            // Attach the forwarding and recording handlers only when the stream
            // is first created. The original re-subscribed them on every call,
            // which delivered duplicate events to all listeners.
            gazePointDataStream.Next += InvokeGazePointEvent;
            GazePointEvent += RecordGazePoint;
        }
        GazePointEvent += gazePointCallback;
    }
    if (fixationCallback != null)
    {
        if (fixationDataStream == null)
        {
            fixationDataStream = eyeXHost.CreateFixationDataStream(FixationDataMode.Sensitive);
            fixationDataStream.Next += InvokeFixationEvent;
            FixationEvent += RecordFixation;
        }
        FixationEvent += fixationCallback;
    }
    if (eyePositionCallback != null)
    {
        if (eyePositionDataStream == null)
        {
            eyePositionDataStream = eyeXHost.CreateEyePositionDataStream();
            eyePositionDataStream.Next += InvokeEyePositionEvent;
            EyePositionEvent += RecordEyePosition;
        }
        EyePositionEvent += eyePositionCallback;
    }
}
/// <summary>
/// Initializes the main window: starts the EyeX host, hooks the fixation and
/// gaze point streams, shows the receiver/sender status banners, and starts
/// the update timer.
/// </summary>
private void setupMainWindow()
{
    DataContext = this;
    InitializeComponent();

    // Start the eye tracker and subscribe to both data streams.
    eyeXHost = new EyeXHost();
    eyeXHost.Start();
    var fixationData = eyeXHost.CreateFixationDataStream(FixationDataMode.Sensitive);
    var gazeData = eyeXHost.CreateGazePointDataStream(GazePointDataMode.LightlyFiltered);
    fixationData.Next += fixTrack;
    gazeData.Next += trackDot;

    if (ReceiverOn)
    {
        // Dns.GetHostByName is obsolete; Dns.GetHostEntry is the supported replacement.
        // NOTE(review): AddressList[0] may be an IPv6 address on modern hosts — confirm
        // the banner (and any socket setup) expects that.
        IPHostEntry ipHostInfo = Dns.GetHostEntry(Dns.GetHostName());
        IPAddress ipAddress = ipHostInfo.AddressList[0];
        Receive_Status_Text.Text = "Receiving Data at\nIP:" + ipAddress.ToString();
        Receive_Status_Text.Visibility = Visibility.Visible;
    }

    if (SenderOn)
    {
        SenderIP = defaultSenderIP;
        Share_Status_Text.Text = "Sharing Data to\nIP:" + SenderIP.ToString();
        Share_Status_Text.Visibility = Visibility.Visible;
        communication_started_Sender = false;
    }

    setupTimer();
    //setupTask();
}
/// <summary>
/// Detect EyeX engine presence, and starts it to get gaze positions
/// </summary>
/// <param name="fps">Number of gaze positions expected per second</param>
/// <exception cref="Exception">If EyeX engine is not installed or running</exception>
public Gaze(int fps)
{
    // Number of interpolation steps per sample (integer math). Clamped to at
    // least 1: the original could produce 0 for small duration*fps, which made
    // "duration / interpolated" below throw DivideByZeroException.
    this.interpolated = Math.Max(1, duration * fps / 1000);
    this.Position = Cursor.Position;
    positions = new Queue<Point>(interpolated);

    // Simple check of EyeX presence before creating the host.
    switch (EyeXHost.EyeXAvailability)
    {
        case EyeXAvailability.NotAvailable:
            throw new Exception("Please install the EyeX Engine");
        case EyeXAvailability.NotRunning:
            throw new Exception("Please make sure that the EyeX Engine is started");
    }

    Engine = new EyeXHost();

    // Track display size. (A stray empty statement ";;" was removed here.)
    Engine.ScreenBoundsChanged += (object s, EngineStateValue<Rect> e) => displaySize = e.Value;

    // Track gaze position to set cursor on it.
    gazeStream = Engine.CreateFixationDataStream(FixationDataMode.Sensitive);
    gazeStream.Next += OnGazeChange;

    // Start the EyeX engine.
    Engine.Start();
    displaySize = Engine.ScreenBounds.Value;

    // Start the interpolation timer.
    timer = new System.Timers.Timer(duration / interpolated);
    timer.Enabled = true;
    timer.Elapsed += OnTick;
}
/// <summary>
/// Hooks one gaze data stream (fixation or gaze point, per configuration),
/// starts the EyeX host, and waits up to one second for the device to report
/// a usable status.
/// </summary>
/// <returns>true if the device reported a ready/tracking status; otherwise false.</returns>
public bool Initialize()
{
    if (_useFixationStream)
    {
        _eyeXHost.CreateFixationDataStream(FixationDataMode.Slow).Next += OnFixationPointNext;
    }
    else
    {
        _eyeXHost.CreateGazePointDataStream(GazePointDataMode.LightlyFiltered).Next += OnGazePointNext;
    }

    _eyeXHost.Start();

    // TODO: Initialize needs to be replaced with an InitializeAsync to avoid
    // having to do this callback, waiting and semaphore dance.
    var deviceReady = false;
    var readySignal = new SemaphoreSlim(0);

    void OnStatusChanged(object sender, EngineStateValue<EyeTrackingDeviceStatus> args)
    {
        var status = args.Value;
        if (status == EyeTrackingDeviceStatus.Configuring
            || status == EyeTrackingDeviceStatus.Initializing
            || status == EyeTrackingDeviceStatus.Tracking
            || status == EyeTrackingDeviceStatus.TrackingPaused)
        {
            // Any of these states means the device is present and usable.
            deviceReady = true;
            readySignal.Release();
        }
        else if (status == EyeTrackingDeviceStatus.DeviceNotConnected)
        {
            // Known-bad state: stop waiting, but report failure.
            readySignal.Release();
        }
    }

    _eyeXHost.EyeTrackingDeviceStatusChanged += OnStatusChanged;
    // Wait a bit for the tracking engine to initialize the device status.
    readySignal.Wait(1000);
    _eyeXHost.EyeTrackingDeviceStatusChanged -= OnStatusChanged;
    return deviceReady;
}
/*************************EYE_X_COMPONENT.INIT********************/
/*
 * Receive method for the EyeX gaze data stream.
 *
 * Creates the gaze point, eye position and fixation streams; posts each
 * stream object stamped with the envelope's originating time; and attaches
 * Next handlers that copy the latest sample values into the component fields.
 *
 * NOTE(review): each stream is posted BEFORE its Next handler is attached,
 * and Console.In.Read() blocks this method (keeping the using scopes — and
 * hence the streams — alive) until console input arrives. Confirm both are
 * intentional.
 */
private void ReceiveGazeData(Envelope e)
{
    using (gazeDataStream = host.CreateGazePointDataStream(GazePointDataMode.LightlyFiltered))
    {
        Console.WriteLine("EyeX data stream initiated.");
        gazePoint.Post(gazeDataStream, e.OriginatingTime);
        using (eyePositionStream = host.CreateEyePositionDataStream())
        {
            Console.WriteLine("EyeX eye position stream initiated.");
            eyeData.Post(eyePositionStream, e.OriginatingTime);
            // Cache the most recent gaze point coordinates.
            gazeDataStream.Next += (s, q) => { frameDataX = q.X; frameDataY = q.Y; };
            // Cache the most recent 3D and normalized positions for both eyes.
            eyePositionStream.Next += (s, q) =>
            {
                leftEyeX = q.LeftEye.X;
                leftEyeY = q.LeftEye.Y;
                leftEyeZ = q.LeftEye.Z;
                leftEyeNormalizedX = q.LeftEyeNormalized.X;
                leftEyeNormalizedY = q.LeftEyeNormalized.Y;
                leftEyeNormalizedZ = q.LeftEyeNormalized.Z;
                rightEyeX = q.RightEye.X;
                rightEyeY = q.RightEye.Y;
                rightEyeZ = q.RightEye.Z;
                rightEyeNormalizedX = q.RightEyeNormalized.X;
                rightEyeNormalizedY = q.RightEyeNormalized.Y;
                rightEyeNormalizedZ = q.RightEyeNormalized.Z;
            };
            Console.WriteLine("Listening for gaze data...");
            using (FixationDataStream fixationStream = host.CreateFixationDataStream(FixationDataMode.Sensitive))
            {
                fixationData.Post(fixationStream, e.OriginatingTime);
                // Cache the most recent fixation coordinates.
                fixationStream.Next += (s, q) => { fixationX = q.X; fixationY = q.Y; };
                Console.WriteLine("Recording fixation points...");
                // Block here so the streams stay open until console input.
                Console.In.Read();
            }
        }
    }
}
/// <summary>
/// Builds the form: starts the EyeX host, opens a slow fixation stream,
/// loads the three picture boxes, and hooks the fixation handler.
/// </summary>
public Form1()
{
    eyeXHost.Start();
    fixation = eyeXHost.CreateFixationDataStream(FixationDataMode.Slow);
    InitializeComponent();

    // Load the static images shown by the form.
    pictureBox1.ImageLocation = @"..\a.png";
    pictureBox2.ImageLocation = @"..\b.png";
    pictureBox3.ImageLocation = @"..\c.png";

    fixation.Next += fixationEventHandler;
}
/// <summary>
/// Collects fixation statistics from the EyeX fixation stream, bucketing them
/// into Instances of instanceLength milliseconds each, until
/// shouldEndCollection is set.
/// </summary>
private void CollectData()
{
    double previousTime = GetUnixTimestampForNow();
    double elapsedTime = 0;
    // Fix: the original initialized startTime to 0, so the first Instance had a
    // bogus start timestamp; start the first bucket at the collection start.
    double startTime = previousTime;
    double endTime = 0;
    int numFixations = 0;
    double lastFixationStartTime = 0;
    double fixationLengthRunningTotal = 0;

    // Count fixations and accumulate their durations as events arrive.
    // NOTE(review): this handler runs on the EyeX callback thread while the
    // loop below reads the same locals — confirm that unsynchronized access
    // is acceptable here.
    fixationDataStream = eyeXHost.CreateFixationDataStream(FixationDataMode.Sensitive);
    System.EventHandler<FixationEventArgs> inc = delegate (object s, FixationEventArgs e)
    {
        if (e.EventType == FixationDataEventType.Begin)
        {
            numFixations++;
            lastFixationStartTime = e.Timestamp;
        }
        if (e.EventType == FixationDataEventType.End)
        {
            fixationLengthRunningTotal += e.Timestamp - lastFixationStartTime;
        }
    };
    fixationDataStream.Next += inc;

    // Keep collecting the data until we tell it not to.
    while (!shouldEndCollection)
    {
        if (elapsedTime < instanceLength)
        {
            elapsedTime += GetUnixTimestampForNow() - previousTime;
            previousTime = GetUnixTimestampForNow();
            // Yield briefly instead of busy-spinning at 100% CPU.
            System.Threading.Thread.Sleep(1);
        }
        else
        {
            endTime = GetUnixTimestampForNow();
            // 1000.0 forces floating-point division (the original could truncate
            // if instanceLength is integral); guard the mean against an empty bucket.
            double fixationsPerSecond = numFixations / (instanceLength / 1000.0);
            double meanLengthOfFixation = numFixations > 0 ? fixationLengthRunningTotal / numFixations : 0;
            Instance instance = new Instance(startTime, endTime, numFixations, fixationsPerSecond, meanLengthOfFixation, instanceClass);
            collectedInstances.Add(instance);

            // Reset the accumulators for the next bucket.
            startTime = GetUnixTimestampForNow();
            numFixations = 0;
            fixationLengthRunningTotal = 0;
            elapsedTime = 0;
            previousTime = GetUnixTimestampForNow();
        }
    }

    // Stop mutating the captured locals once collection has ended.
    fixationDataStream.Next -= inc;
}
/// <summary>
/// Builds the form: starts the EyeX host, opens a slow fixation stream,
/// selects the default combo box entry, and picks the speech voice.
/// </summary>
public Form1()
{
    eyeXHost.Start();
    fixation = eyeXHost.CreateFixationDataStream(FixationDataMode.Slow);
    InitializeComponent();

    // Default selections: first combo box entry, Zira desktop voice.
    comboBox1.SelectedItem = comboBox1.Items[0];
    s.SelectVoice("Microsoft Zira Desktop");
}
/// <summary>
/// Wires up the shared singletons, the input simulator, the control state
/// machine, speech recognition, and the EyeX fixation stream that drives
/// the Fixation handler.
/// </summary>
public MainEngine()
{
    // Shared singletons and input simulation.
    sharedData = SharedDataSingleton.Instance();
    _settingsList = SettingsSingleton.Instance();
    inputSimulator = sharedData.inputSimulator;

    // Control state machine starts in command mode.
    controlState = new ControlContext();
    controlState.changedState += StateChanged;
    controlState.ControlState = new CommandState(inputSimulator, controlState);

    SetupSpeechRecognition();

    // Instantiating and starting the eye tracker host; every fixation sample
    // is forwarded to Fixation with integer screen coordinates.
    eyex = new EyeXHost();
    eyex.CreateFixationDataStream(FixationDataMode.Sensitive).Next +=
        (sender, args) => Fixation(args.EventType, (int)args.X, (int)args.Y, args.Timestamp);
    eyex.Start();
}
/// <summary>
/// Sets up and starts the update/draw timers, registers the default fixation
/// handler, and starts the EyeX host that feeds the fixation event.
/// </summary>
protected EventSingleton()
{
    // Setting up and starting the timers.
    updateTimer.Interval = UPDATEINTERVAL;
    updateTimer.Start();
    drawTimer.Interval = DRAWINTERVAL;
    drawTimer.Start();

    fixationEvent += EventSingleton_fixationEvent;

    // Instantiating and starting the eye tracker host. Raise the event
    // null-safely: the original invoked fixationEvent directly, which throws
    // NullReferenceException if all handlers are ever unsubscribed.
    eyex = new EyeXHost();
    eyex.CreateFixationDataStream(FixationDataMode.Sensitive).Next +=
        (s, e) => fixationEvent?.Invoke(CreateFixation(e.EventType, (int)e.X, (int)e.Y));
    eyex.Start();
}
/// <summary>
/// Starts the eye tracker and a visible Chrome session, polls the browser URL
/// every 0.1 s, and blocks until a key is pressed. The browser and the polling
/// timer are disposed on exit or failure.
/// </summary>
public static void Capture()
{
    Timer urlTimer = null;
    try
    {
        // Initializes Eyetracker.
        EyeXHost eyeXHost = new EyeXHost();
        eyeXHost.Start();
        // Renamed from "lightlyFilteredGazeDataStream": this is actually a
        // Slow-mode fixation stream, not a filtered gaze point stream.
        FixationDataStream fixationStream = eyeXHost.CreateFixationDataStream(FixationDataMode.Slow);
        fixationStream.Next += lightlyFilteredGazeDataStream_Next;
        _eyeXhost = eyeXHost;

        // Timer which checks URL every 0.1s.
        urlTimer = new Timer();
        urlTimer.Interval = 100;
        urlTimer.Elapsed += CheckUrl;

        // Starting URL. CHANGE IT HERE IF NEEDED.
        string url = "http://panache.fr/";

        /* Creates new visible ChromeDriver, maximizes the browser window, navigates to the starting URL
         * and starts the "check url" timer. CHANGE THE PATH TO THE DRIVER HERE IF NEEDED. */
        driver = new ChromeDriver("D:\\IJS\\EyeTracker");
        driver.Manage().Window.Maximize();
        driver.Navigate().GoToUrl(url);
        urlTimer.Start();
        lastUrl = url;

        Console.ReadKey();
    }
    finally
    {
        // Fix: the original leaked the timer; dispose it along with the driver
        // whether the code above succeeded or threw.
        if (urlTimer != null)
        {
            urlTimer.Dispose();
        }
        if (driver != null)
        {
            driver.Dispose();
        }
    }
}
/// <summary>
/// Opens a slow fixation data stream and moves the mouse cursor to follow the
/// user's gaze, ignoring jitter smaller than a 35-pixel dead zone, until
/// cantStopDontStop is cleared. Blocks the calling thread.
/// </summary>
public void StartEyeTracking()
{
    using (var eyeXHost = new EyeXHost())
    // Other stream choices include EyePositionDataStream and GazePointDataStream.
    using (var fixationStream = eyeXHost.CreateFixationDataStream(FixationDataMode.Slow))
    {
        // Start the EyeX host and hook status notifications.
        eyeXHost.Start();
        eyeXHost.EyeTrackingDeviceStatusChanged += EyeXHost_EyeTrackingDeviceStatusChanged;
        eyeXHost.UserPresenceChanged += EyeXHost_UserPresenceChanged;

        // Last anchored cursor position and the jitter dead-zone radius (px).
        double anchorX = 0;
        double anchorY = 0;
        const double deadZone = 35;

        fixationStream.Next += (sender, args) =>
        {
            bool outsideDeadZone =
                args.X > anchorX + deadZone || args.X < anchorX - deadZone ||
                args.Y > anchorY + deadZone || args.Y < anchorY - deadZone;

            if (outsideDeadZone)
            {
                // Genuine movement: follow the gaze and re-anchor.
                Cursor.Position = new Point(Convert.ToInt32(args.X), Convert.ToInt32(args.Y));
                anchorX = args.X;
                anchorY = args.Y;
            }
            else
            {
                // Small jitter: keep the cursor pinned to the anchor.
                Cursor.Position = new Point(Convert.ToInt32(anchorX), Convert.ToInt32(anchorY));
            }
        };

        // Keep the streams alive until asked to stop.
        while (cantStopDontStop)
        {
            Thread.Sleep(1000);
        }
    }
}