/// <summary>
/// Sets up the eye tracker connection, the background status polling, the
/// experiment log persistence, and the initial layout of the form controls.
/// </summary>
void InitSettings()
{
    // Create the SMI controller and register the gaze sample callback.
    eyeTrackingController = new EyeTrackingController.EyeTrackingController();
    sampleCallback = new GetSampleCallback(GetSampleCallbackFunction);
    eyeTrackingController.iV_SetSampleCallback(sampleCallback);

    // Poll the tracker status on a worker thread; progress reports trigger StartEyeTracker.
    backgroundWorker = new BackgroundWorker { WorkerReportsProgress = true };
    backgroundWorker.DoWork += CheckEyeTrackerStatus;
    backgroundWorker.ProgressChanged += StartEyeTracker;
    backgroundWorker.RunWorkerAsync();

    // Grant the current user full control over the config directory so the
    // experiment log file can be written back later.
    Utils.AddDirectorySecurity(
        configPath,
        Environment.UserDomainName + "\\" + Environment.UserName,
        System.Security.AccessControl.FileSystemRights.FullControl,
        System.Security.AccessControl.AccessControlType.Allow);

    // Load a previously stored experiment log; fall back to a fresh one.
    experimentLogHandler = new ConfigDataXMLSerializer<ExperimentLog>(configPath + settingsFile);
    experimentLogHandler.GetConfigs(out experimentLogs);
    if (experimentLogs == null)
    {
        experimentLogs = new ExperimentLog(0);
    }

    LoadImages();

    // Position the controls relative to the current form size: the picture box
    // sits above the vertical center, the panels are anchored near the bottom,
    // and the (initially hidden) group box is centered.
    pictureBox.Location = new Point(
        this.Width / 2 - pictureBox.Width / 2,
        this.Height / 2 - (int)(pictureBox.Height * (float)(0.8)));
    flowLayoutPanel2.Location = new Point(
        this.Width / 2 - flowLayoutPanel2.Width / 2,
        this.Height - flowLayoutPanel2.Height - panel1.Height * 3);
    panel1.Location = new Point(
        this.Width / 2 - panel1.Width / 2,
        this.Height - panel1.Height * 4);
    groupBox1.Location = new Point(
        this.Width / 2 - groupBox1.Width / 2,
        (this.Height - groupBox1.Height) / 2);
    groupBox1.Visible = false;
}
/// <summary>
/// Form load handler: creates the eye tracking controller and the delegate
/// instances for every native SDK callback channel. The delegates are stored
/// in fields — presumably so they stay reachable while the native SDK holds
/// the corresponding function pointers (typical for P/Invoke callbacks).
/// </summary>
private void SmiSample_Load(object sender, EventArgs e)
{
    ETDevice = new EyeTrackingController.EyeTrackingController();

    // One delegate per callback channel exposed by the SDK.
    m_CalibrationCallback = new CalibrationCallback(CalibrationCallbackFunction);
    m_SampleCallback = new GetSampleCallback(GetSampleCallbackFunction);
    m_EventCallback = new GetEventCallback(GetEventCallbackFunction);
    m_EyeImageCallback = new GetEyeImageCallback(GetEyeImageCallbackFunction);
    m_SceneVideoCallback = new GetSceneVideoCallback(GetSceneVideoCallbackFunction);
    m_TrackingMonitorCallback = new GetTrackingMonitorCallback(GetTrackingMonitorCallbackFunction);
}
/// <summary>
/// Form load handler: instantiates the eye tracking controller, creates the
/// gaze sample delegate, and connects to a tracker running on this machine.
/// </summary>
private void Form1_Load(object sender, EventArgs e)
{
    // Step 06: create the controller wrapper around the native SDK.
    ETDevice = new EyeTrackingController.EyeTrackingController();

    // Step 07: create the delegate that will receive gaze samples.
    // NOTE(review): the callback is only created here, not registered with the
    // SDK — confirm iV_SetSampleCallback is called elsewhere.
    m_SampleCallback = new GetSampleCallback(GetSampleCallbackFunction);

    // Step 08: connect to the eye tracker on the local machine.
    ETDevice.iV_ConnectLocal();
}
/// <summary>
/// Creates the gaze controller: instantiates the SMI device wrapper, builds
/// the callback delegates, registers them, and enables the SDK's built-in
/// gaze data filter.
/// </summary>
public GazeController()
{
    ETDevice = new EyeTrackingController.EyeTrackingController();

    // Delegates for the native callback channels; handed to the SDK in setCallbacks().
    calibrationCallback = new CalibrationCallback(getCalibration);
    sampleDataCallback = new GetSampleCallback(getSampleData);
    eyeImageCallback = new GetEyeImagedata(getEyeImagedata);
    trackingMonitorCallback = new GetTrackingMonitor(getTrackingMonitordata);
    setCallbacks();

    // Smooth the gaze stream with the SDK's filter.
    ETDevice.iV_EnableGazeDataFilter();
}
/// <summary>
/// Window initialization handler: creates the eye tracking controller, the
/// calibration window, and the SDK callback delegates. Any failure is shown
/// in the logger text field instead of crashing the window.
/// </summary>
private void Window_Initialized(object sender, EventArgs e)
{
    try
    {
        ETDevice = new EyeTrackingController.EyeTrackingController();
        m_CalibrationWindow = new CalibrationSample();

        // Delegates for the native callback channels.
        m_CalibrationCallback = new CalibrationCallback(CalibrationCallbackFunction);
        m_SampleCallback = new GetSampleCallback(GetSampleCallbackFunction);
        m_EventCallback = new GetEventCallback(GetEventCallbackFunction);
    }
    catch (System.Exception exc)
    {
        // Surface initialization failures in the UI.
        logger4.Text = "Exception: " + exc.Message;
    }
}
/// <summary>
/// Native callback for the tracking monitor image stream. Copies the image out
/// of unmanaged memory and publishes it to the gaze model as a Color32 array.
/// </summary>
/// <param name="imageData">Image descriptor from the SMI SDK; the buffer is
/// treated as 3 bytes per pixel.</param>
void getTrackingMonitordata(EyeTrackingController.EyeTrackingController.ImageStruct imageData)
{
    if (!gazeModel.isRunning)
    {
        return;
    }

    gazeModel.widthTrackingImage = imageData.imageWidth;
    gazeModel.heightTrackingImage = imageData.imageHeight;

    // Copy the unmanaged buffer into managed memory before the SDK reuses it.
    byte[] imageBuffer = new byte[imageData.imageSize];
    Marshal.Copy(imageData.imageBuffer, imageBuffer, 0, imageData.imageSize);

    // One Color32 per 3-byte pixel. Only the first byte of each pixel is
    // replicated into all three channels, so the monitor renders as grayscale —
    // NOTE(review): confirm this stream is really mono-packed (the eye image
    // callback reads all three channel bytes instead).
    // BUG FIX: the loop bound now guards the pixel index so a buffer whose size
    // is not a multiple of 3 can no longer write past the end of colorArray.
    Color32[] colorArray = new Color32[imageData.imageSize / 3];
    for (int i = 0; i + 2 < imageBuffer.Length; i += 3)
    {
        colorArray[i / 3] = new Color32(imageBuffer[i], imageBuffer[i], imageBuffer[i], 255);
    }
    gazeModel.trackingMonitorArray = colorArray;

    // Removed the explicit System.GC.Collect(): forcing a full collection on
    // every incoming frame stalls the application; the GC schedules itself.
}
/// <summary>
/// Native callback for gaze samples. Writes the eye positions, the derived
/// head position, the per-eye gaze points, and the pupil diameters from the
/// current sample into the gaze model.
/// </summary>
void getSampleData(EyeTrackingController.EyeTrackingController.SampleStruct sampleData)
{
    if (!gazeModel.isRunning)
    {
        return;
    }

    // 3D eye positions as reported by the tracker.
    gazeModel.posRightEye = new Vector3(
        (float)sampleData.rightEye.eyePositionX,
        (float)sampleData.rightEye.eyePositionY,
        (float)sampleData.rightEye.eyePositionZ);
    gazeModel.posLeftEye = new Vector3(
        (float)sampleData.leftEye.eyePositionX,
        (float)sampleData.leftEye.eyePositionY,
        (float)sampleData.leftEye.eyePositionZ);

    // Head position: midpoint between the two eyes.
    // TEST: yPosition right?
    gazeModel.posHead = new Vector3(
        0.5f * (gazeModel.posLeftEye.x + gazeModel.posRightEye.x),
        0.5f * (gazeModel.posLeftEye.y + gazeModel.posRightEye.y),
        0.5f * (gazeModel.posLeftEye.z + gazeModel.posRightEye.z));

    // 2D gaze points for each eye.
    gazeModel.posGazeLeft = new Vector2((float)sampleData.leftEye.gazeX, (float)sampleData.leftEye.gazeY);
    gazeModel.posGazeRight = new Vector2((float)sampleData.rightEye.gazeX, (float)sampleData.rightEye.gazeY);

    // Pupil diameters.
    gazeModel.diameter_leftEye = (float)sampleData.leftEye.diam;
    gazeModel.diameter_rightEye = (float)sampleData.rightEye.diam;
}
/// <summary>
/// Native callback for the eye image stream. Copies the image out of unmanaged
/// memory, converts it to one Color32 per 3-byte pixel, and publishes it to
/// the gaze model.
/// </summary>
/// <param name="imageData">Image descriptor from the SMI SDK.</param>
void getEyeImagedata(EyeTrackingController.EyeTrackingController.ImageStruct imageData)
{
    if (!gazeModel.isRunning)
    {
        return;
    }

    //TODO: CONVERT MONO TO RGB
    gazeModel.widthEyeImage = imageData.imageWidth;
    gazeModel.heightEyeImage = imageData.imageHeight;

    int colorchannels = 3;

    // Copy the unmanaged buffer into managed memory before the SDK reuses it.
    byte[] imageBuffer = new byte[imageData.imageSize];
    Marshal.Copy(imageData.imageBuffer, imageBuffer, 0, imageData.imageSize);

    // BUG FIX: the array was previously sized imageData.imageSize (bytes, i.e.
    // three times the pixel count), so two thirds of it stayed as default
    // zero-alpha padding. It now holds one Color32 per pixel, matching the
    // tracking monitor callback. The loop bound also guards the i+1/i+2 reads
    // against buffers whose size is not a multiple of 3.
    Color32[] colorArray = new Color32[imageData.imageSize / colorchannels];
    for (int i = 0; i + colorchannels - 1 < imageBuffer.Length; i += colorchannels)
    {
        colorArray[i / colorchannels] = new Color32(
            imageBuffer[i], imageBuffer[i + 1], imageBuffer[i + 2], 255);
    }
    gazeModel.eyeImageColorArray = colorArray;

    // Removed the explicit System.GC.Collect(): forcing a full collection on
    // every incoming frame stalls the application; the GC schedules itself.
}
// Calibration callback required by the SMI SDK delegate signature.
// No-op: calibration point updates are not processed here — presumably the
// calibration visualization is handled elsewhere (confirm before relying on it).
void getCalibration(EyeTrackingController.EyeTrackingController.CalibrationPointStruct calibrationPoints) { }
/// <summary>
/// Window activated handler: ensures the eye tracking controller exists.
/// </summary>
/// <remarks>
/// BUG FIX: Window.Activated fires every time the window regains focus, and
/// this handler previously constructed a brand-new EyeTrackingController on
/// each activation, silently discarding whatever state (connection, callbacks)
/// the previous instance held. The controller is now created only once.
/// </remarks>
private void Window_Activated(object sender, EventArgs e)
{
    if (ETDevice == null)
    {
        ETDevice = new EyeTrackingController.EyeTrackingController();
    }
}