/// <summary>
/// Appends a tracking sample to the rolling sample database, evicting the
/// oldest entries so the queue never holds more than <c>logSize</c> samples.
/// </summary>
/// <param name="t">The sample to store (callers should pass a copy).</param>
public void AddSample(TrackData t)
{
    // BUG FIX: the original checked db.Count > logSize before enqueueing,
    // which let the queue settle at logSize + 1 entries. Dequeue while at
    // capacity so the size is capped at logSize after the Enqueue below.
    while (db.Count >= logSize)
    {
        db.Dequeue();
    }

    db.Enqueue(t);

    // Debug aid: both eye ROIs sitting on the exact same non-zero position
    // usually means left/right detection collapsed onto a single eye.
    if (t.LeftROI.Y == t.RightROI.Y && t.LeftROI.X == t.RightROI.X)
    {
        if (t.LeftROI.Y != 0 && t.RightROI.X != 0)
        {
            Console.Out.WriteLine("Same..");
        }
    }

    //Log.Performance.Now.Stamp("TrackDB Add");
}
/// <summary>
/// Stores a tracking sample in the rolling sample queue, dropping the
/// oldest entry once the queue has grown past logSize.
/// </summary>
/// <param name="t">The sample to store.</param>
public void AddSample(TrackData t)
{
    if (db.Count > logSize)
    {
        db.Dequeue();
    }

    db.Enqueue(t);

    // Flag samples where both eye ROIs occupy the identical non-zero spot —
    // a hint that left/right detection collapsed onto one eye.
    bool sameSpot = t.LeftROI.Y == t.RightROI.Y && t.LeftROI.X == t.RightROI.X;
    if (sameSpot && t.LeftROI.Y != 0 && t.RightROI.X != 0)
    {
        Console.Out.WriteLine("Same..");
    }

    //Log.Performance.Now.Stamp("TrackDB Add");
}
/// <summary>
/// Runs one auto-tuning pass over the most recent tracking sample,
/// adjusting parameters for each feature (eye, pupil, glint) that is both
/// tracked and configured for automatic tuning.
/// </summary>
public void Tune()
{
    var sample = TrackDB.Instance.GetLastSample();
    if (sample == null)
    {
        return;
    }

    if (GTSettings.Current.Processing.TrackingEye && GTSettings.Current.Processing.AutoEye)
    {
        TuneEye(sample);
    }

    if (GTSettings.Current.Processing.TrackingPupil && GTSettings.Current.Processing.AutoPupil)
    {
        TunePupil(sample);
    }

    if (GTSettings.Current.Processing.TrackingGlints && GTSettings.Current.Processing.AutoGlint)
    {
        TuneGlint(sample);
    }
}
/// <summary>
/// Creates a shallow copy of this TrackData instance: every field is copied
/// by assignment, so reference-typed members remain shared with the
/// original.
/// </summary>
/// <returns>A new TrackData holding the same field values.</returns>
public TrackData Copy()
{
    var copy = new TrackData();

    copy.calibrationDataLeft = calibrationDataLeft;
    copy.calibrationDataRight = calibrationDataRight;
    copy.eyeDetected = eyeDetected;
    copy.eyeMovement = eyeMovement;
    copy.eyesDetected = eyesDetected;
    copy.cameraROI = cameraROI;
    copy.eyesROI = eyesROI;
    copy.frameNumber = frameNumber;
    copy.glintDataLeft = glintDataLeft;
    copy.glintDataRight = glintDataRight;
    copy.glintsLeftDetected = glintsLeftDetected;
    copy.glintsRightDetected = glintsRightDetected;
    copy.leftROI = leftROI;
    copy.pupilDataLeft = pupilDataLeft;
    copy.pupilDataRight = pupilDataRight;
    copy.pupilLeftDetected = pupilLeftDetected;
    copy.pupilRightDetected = pupilRightDetected;
    copy.rightROI = rightROI;
    copy.timeStamp = timeStamp;
    copy.unfilteredBlobCountLeft = unfilteredBlobCountLeft;
    copy.unfilteredBlobCountRight = unfilteredBlobCountRight;
    copy.unfilteredTotalBlobAreaLeft = unfilteredTotalBlobAreaLeft;
    // "Blobl" spelling follows the field's declaration elsewhere in the class.
    copy.unfilteredTotalBloblAreaRight = unfilteredTotalBloblAreaRight;
    copy.gazeDataRaw = gazeDataRaw;
    copy.gazeDataSmoothed = gazeDataSmoothed;
    copy.processingOk = processingOk;

    return copy;
}
/// <summary>
/// Auto-tunes the left/right glint thresholds from the latest sample:
/// lowers a threshold by 3 when no glints were found, and raises it when
/// the total glint area grows out of proportion to the detected pupil area.
/// </summary>
/// <param name="lastSample">Most recent tracking sample.</param>
private void TuneGlint(TrackData lastSample)
{
    // Nothing to tune without glint data, or when either pupil was missed
    // (pupil area is the reference for "glint area too big").
    if (lastSample.GlintDataLeft == null && lastSample.GlintDataRight == null)
    {
        return;
    }

    if (lastSample.PupilLeftDetected == false || lastSample.PupilRightDetected == false)
    {
        return;
    }

    // The tuning below reads both sides, so require glint collections on
    // both. BUG FIX: the original dereferenced GlintDataRight.Glints without
    // a null check and could throw a NullReferenceException when only the
    // left eye had glint data.
    if (lastSample.GlintDataLeft == null || lastSample.GlintDataLeft.Glints == null)
    {
        return;
    }

    if (lastSample.GlintDataRight == null || lastSample.GlintDataRight.Glints == null)
    {
        return;
    }

    // (A commented-out "reset on too many misses" strategy was removed here;
    // see source history if it needs to be revived.)

    double lArea = lastSample.GlintDataLeft.Glints.UnfilteredTotalArea;
    double rArea = lastSample.GlintDataRight.Glints.UnfilteredTotalArea;

    // No glints found: lower the threshold so dimmer reflections pass.
    if (lastSample.GlintDataLeft.Glints.Count == 0)
    {
        GTSettings.Current.Processing.GlintThresholdLeft -= 3;
    }

    if (lastSample.GlintDataRight.Glints.Count == 0)
    {
        GTSettings.Current.Processing.GlintThresholdRight -= 3;
    }

    // Glint area disproportionate to the pupil: raise the threshold,
    // capped at 230 so it stays below pure white.

    // Left
    if (lastSample.PupilLeftDetected && lastSample.PupilDataLeft.Blob.Area > 10)
    {
        if (GTSettings.Current.Processing.GlintThresholdLeft < 230) // max
        {
            if (lArea > lastSample.PupilDataLeft.Blob.Area * 4)
            {
                GTSettings.Current.Processing.GlintThresholdLeft += 3;
            }
            else if (lArea > lastSample.PupilDataLeft.Blob.Area * 3)
            {
                GTSettings.Current.Processing.GlintThresholdLeft += 1;
            }
        }
    }

    // Right
    if (lastSample.PupilRightDetected && lastSample.PupilDataRight.Blob.Area > 10)
    {
        if (GTSettings.Current.Processing.GlintThresholdRight < 230)
        {
            if (rArea > lastSample.PupilDataRight.Blob.Area * 4)
            {
                GTSettings.Current.Processing.GlintThresholdRight += 3;
            }
            else if (rArea > lastSample.PupilDataRight.Blob.Area * 3)
            {
                GTSettings.Current.Processing.GlintThresholdRight += 1;
            }
        }
    }
}
/// <summary>
/// Returns a shallow copy of this TrackData: every field is copied by
/// assignment, so reference-typed members (blobs, glint configs, gaze data)
/// are shared between the copy and the original.
/// </summary>
/// <returns>A new TrackData with the same field values.</returns>
public TrackData Copy()
{
    // Field-by-field shallow copy; MemberwiseClone could replace this, but
    // the explicit list keeps the copied set visible and reviewable.
    var t = new TrackData
    {
        calibrationDataLeft = calibrationDataLeft,
        calibrationDataRight = calibrationDataRight,
        eyeDetected = eyeDetected,
        eyeMovement = eyeMovement,
        eyesDetected = eyesDetected,
        cameraROI = cameraROI,
        eyesROI = eyesROI,
        frameNumber = frameNumber,
        glintDataLeft = glintDataLeft,
        glintDataRight = glintDataRight,
        glintsLeftDetected = glintsLeftDetected,
        glintsRightDetected = glintsRightDetected,
        leftROI = leftROI,
        pupilDataLeft = pupilDataLeft,
        pupilDataRight = pupilDataRight,
        pupilLeftDetected = pupilLeftDetected,
        pupilRightDetected = pupilRightDetected,
        rightROI = rightROI,
        timeStamp = timeStamp,
        unfilteredBlobCountLeft = unfilteredBlobCountLeft,
        unfilteredBlobCountRight = unfilteredBlobCountRight,
        unfilteredTotalBlobAreaLeft = unfilteredTotalBlobAreaLeft,
        // "Blobl" spelling follows the field's declaration.
        unfilteredTotalBloblAreaRight = unfilteredTotalBloblAreaRight,
        gazeDataRaw = gazeDataRaw,
        gazeDataSmoothed = gazeDataSmoothed,
        processingOk = processingOk
    };
    return t;
}
/// <summary>
/// Appends the sample's pupil centers (and glint configurations, when glint
/// tracking is enabled) to the calibration target currently being sampled.
/// </summary>
/// <param name="td">Tracking sample captured while calibrating.</param>
private void SaveCalibInfo(TrackData td)
{
    var target = calibration.CalibMethod.GetTarget(calibration.InstanceTargetNumber);

    target.pupilCentersLeft.Add(td.PupilDataLeft.Center);
    target.pupilCentersRight.Add(td.PupilDataRight.Center);

    if (Settings.Instance.Processing.TrackingGlints)
    {
        target.glintsLeft.Add(td.GlintDataLeft.Glints);
        target.glintsRight.Add(td.GlintDataRight.Glints);
    }

    // Avoid printing per-sample debug here: this handler only sees ~1/10 of
    // the samples, so console output is misleading as well as slow.

    Performance.Now.Stamp("SaveCalibInfo");
}
/// <summary>
/// Placeholder for eye-region auto-tuning; intentionally a no-op until an
/// eye-tuning strategy is implemented.
/// </summary>
/// <param name="lastSample">Most recent tracking sample (currently unused).</param>
private static void TuneEye(TrackData lastSample)
{
    // To be implemented
}
/// <summary>
/// Sets the supplied frame as the current working gray image and then runs
/// the standard visualization pass over the tracking data.
/// </summary>
/// <param name="trackData">Tracking results to visualize.</param>
/// <param name="image">Gray frame to draw onto.</param>
public void Visualize(TrackData trackData, Image<Gray, byte> image)
{
    this.gray = image;
    Visualize(trackData);
}
/// <summary>
/// The main event handler that is called whenever the camera capture class
/// has a new eye video image available. Runs the feature-detection chain,
/// gaze estimation (when calibrated), auto-tuning, ROI recentering and
/// visualization bookkeeping for the frame.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Event args carrying the captured
/// <see cref="Emgu.CV.Image{Emgu.CV.Structure.Gray, byte}"/> frame.</param>
private void Device_OnImage(object sender, GTHardware.Cameras.ImageEventArgs e)
{
    imgCounter++;
    processingDone = false;
    bool processingOk; // NOTE(review): declared but never used in this handler — trackData.ProcessingOk carries the result instead.

    Performance.Now.IsEnabled = false;
    Performance.Now.Start(); // Stop output by setting IsEnabled = false or Stop()

    // TrackData object stores all information on pupil centers, glints etc.
    trackData = new TrackData();
    trackData.TimeStamp = DateTime.UtcNow.Ticks / TimeSpan.TicksPerMillisecond;
    trackData.FrameNumber = imgCounter;

    // Keep reference to image in local variable
    Image <Gray, byte> gray = e.Image;

    // Flip image here, directshow flipping is not supported by every device
    if (Settings.Instance.Camera.FlipImage)
    {
        gray = gray.Flip(FLIP.VERTICAL);
    }

    // Tracking disabled, if visible just set gray image in visualization and return
    if (Settings.Instance.Visualization.VideoMode == VideoModeEnum.RawNoTracking)
    {
        Performance.Now.Stop();

        if (Settings.Instance.Visualization.IsDrawing)
        {
            visualization.Gray = gray;
            visualization.TrackData = trackData; // not sure if there is anything to visualize here..
            CalculateFPS();
            RaiseFrameProcessingCompletedEvent(true);
        }

        return;
    }

    try
    {
        // Process image, find features, main entry point to processing chain
        trackData.ProcessingOk = detectionManager.ProcessImage(gray, trackData);

        if (trackData.ProcessingOk)
        {
            if (calibration.CalibMethod.IsCalibrated)
            {
                // Calibrated: map features to gaze coordinates, optionally log.
                CalculateGazeCoordinates(trackData);

                if (Settings.Instance.FileSettings.LoggingEnabled)
                {
                    logGaze.LogData(trackData);
                }
            }
            else
            {
                // Not calibrated yet: store samples while a calibration runs.
                if (isCalibrating)
                {
                    SaveCalibInfo(trackData);
                }

                // Really log uncalibrated data? For pupil size?
                //if (Settings.Instance.FileSettings.LoggingEnabled)
                //    logGaze.LogData(trackData);
            }
        }
        else
        {
            if (Settings.Instance.FileSettings.LoggingEnabled)
            {
                logGaze.LogData(trackData);
            }
        }
    }
    catch (Exception)
    {
        // NOTE(review): all processing exceptions are swallowed here; only
        // the ProcessingOk flag records the failure.
        trackData.ProcessingOk = false;
    }

    // ********************************************************************* MODIFICATO ***********************************************************************
    // Sends values via the UDP server directly
    if (server.SendSmoothedData)
    {
        // server.SendGazeData(gazeDataSmoothed.GazePositionX, gazeDataSmoothed.GazePositionY,
        //    trackData.PupilDataLeft.Diameter);
        //else
        // Send avg. value
        // NOTE(review): raw data is sent even though SendSmoothedData is true
        // — the smoothed branch above is commented out; confirm intent.
        server.SendGazeData(gazeDataRaw.GazePositionX, gazeDataRaw.GazePositionY, trackData.PupilDataLeft.Diameter);
    }

    //server.SendTrackData(trackData);

    Autotune.Instance.Tune();

    // Set data for visualization
    if (Settings.Instance.Visualization.IsDrawing && isCalibrating == false)
    {
        // Drawn on-demand by calling GetProcessed or GetOriginalImage
        visualization.Gray = gray.Copy();
        visualization.TrackData = trackData;
    }

    // Recenter camera ROI
    detectionManager.CameraCenterROI(trackData, gray.Size);

    // Store camera roi position
    trackData.CameraROI = GTHardware.Camera.Instance.ROI;

    // Add sample to database
    TrackDB.Instance.AddSample(trackData.Copy());

    // Calculate the frames per second we're tracking at
    CalculateFPS();

    // Stop performance timer
    Performance.Now.Stop();

    // Raise FrameCaptureComplete event (UI listens for updating video stream)
    RaiseFrameProcessingCompletedEvent(trackData.ProcessingOk);
}
/// <summary>
/// Maps the sample's detected pupil/glint features to on-screen gaze
/// coordinates, optionally averages both eyes and smooths during fixations,
/// then publishes the results via the gaze data containers and UDP server.
/// </summary>
/// <param name="td">Tracking sample whose features are mapped to gaze.</param>
private void CalculateGazeCoordinates(TrackData td)
{
    GTPoint gazedCoordinatesLeft;
    GTPoint gazedCoordinatesRight = new GTPoint(); // stays (0,0) in monocular mode
    GTPoint smoothedCoordinates;

    #region Monocular/Left eye

    // NOTE(review): this line reads the trackData field while the rest of
    // the region uses the td parameter — presumably the same sample; confirm
    // against the caller.
    calibration.PupilCenterLeft = trackData.PupilDataLeft.Center;

    if (GTSettings.Current.Processing.TrackingGlints)
    {
        calibration.GlintConfigLeft = td.GlintDataLeft.Glints;
    }

    gazedCoordinatesLeft = calibration.GetGazeCoordinates(td, EyeEnum.Left);

    #endregion

    #region Binocular/Right eye

    if (GTSettings.Current.Processing.TrackingMode == TrackingModeEnum.Binocular)
    {
        calibration.PupilCenterRight = td.PupilDataRight.Center;

        if (GTSettings.Current.Processing.TrackingGlints)
        {
            calibration.GlintConfigRight = td.GlintDataRight.Glints;
        }

        gazedCoordinatesRight = calibration.GetGazeCoordinates(td, EyeEnum.Right);
    }

    #endregion

    #region Smoothing/Eye movement state

    if (GTSettings.Current.Processing.EyeMouseSmooth)
    {
        var p = new GTPoint(gazedCoordinatesLeft.X, gazedCoordinatesLeft.Y);

        if (GTSettings.Current.Processing.TrackingMode == TrackingModeEnum.Binocular)
        {
            // Average left/right only when the right eye produced a non-zero
            // coordinate (i.e. was actually tracked).
            if (gazedCoordinatesRight.Y != 0 && gazedCoordinatesRight.X != 0)
            {
                p.X += gazedCoordinatesRight.X;
                p.Y += gazedCoordinatesRight.Y;
                p.X = p.X / 2;
                p.Y = p.Y / 2;
            }
        }

        this.eyeMovement.CalculateEyeMovement(p);

        // Smooth only during fixations; on other movement states pass the
        // raw point through and reset the smoother.
        if (this.eyeMovement.EyeMovementState == Classifier.EyeMovementStateEnum.Fixation)
        {
            smoothedCoordinates = exponentialSmoother.Smooth(p);
        }
        else
        {
            smoothedCoordinates = p;
            this.exponentialSmoother.Stop();
        }

        trackData.EyeMovement = this.eyeMovement.EyeMovementState;
        gazeDataSmoothed.Set(smoothedCoordinates.X, smoothedCoordinates.Y, smoothedCoordinates.X, smoothedCoordinates.Y);
    }

    #endregion

    #region Set values, raise events

    // trigger OnGazeData events
    gazeDataRaw.Set(gazedCoordinatesLeft.X, gazedCoordinatesLeft.Y, gazedCoordinatesRight.X, gazedCoordinatesRight.Y);

    // Sends values via the UDP server directly
    if (server.IsStreamingGazeData)
    {
        if (server.SendSmoothedData)
        {
            server.SendGazeData(gazeDataSmoothed.GazePositionX, gazeDataSmoothed.GazePositionY);
        }
        else
        {
            // Send avg. value
            server.SendGazeData(gazeDataRaw.GazePositionX, gazeDataRaw.GazePositionY);
        }
    }

    trackData.GazeDataRaw = gazeDataRaw;
    trackData.GazeDataSmoothed = gazeDataSmoothed;

    #endregion
}
/// <summary>
/// Frame handler invoked by the camera capture class whenever a new eye
/// video image is available; runs the feature-detection chain and, when
/// calibrated, gaze estimation on the frame.
/// </summary>
/// <param name="newVideoFrame">The <see cref="Emgu.CV.Image{Emgu.CV.Structure.Gray, byte}"/>
/// image with the new frame.</param>
private void Camera_FrameCaptureComplete(Image <Gray, byte> newVideoFrame)
{
    imgCounter++;
    processingDone = false;
    bool processingOk;

    Performance.Now.IsEnabled = false;
    Performance.Now.Start(); // Stop output by setting IsEnabled = false or Stop()

    // TrackData object stores all information on pupil centers, glints etc.
    trackData = new TrackData();
    trackData.TimeStamp = DateTime.UtcNow.Ticks / TimeSpan.TicksPerMillisecond;
    trackData.FrameNumber = imgCounter;

    // Flip image here, directshow flipping is not supported by every device
    if (GTSettings.Current.Camera.FlipImage)
    {
        newVideoFrame = newVideoFrame.Flip(FLIP.VERTICAL);
    }

    // Set the original gray frame for visualization
    if (isCalibrating == false)
    {
        visualization.Gray = newVideoFrame;
    }

    // Calculate the frames per second we're tracking at
    CalculateFPS();

    // Raw mode: skip the processing chain entirely.
    if (GTSettings.Current.Visualization.VideoMode == VideoModeEnum.RawNoTracking)
    {
        Performance.Now.Stop();
        RaiseFrameProcessingCompletedEvent(true);
        return;
    }

    try
    {
        // Process image, find features, main entry point to processing chain
        processingOk = detectionManager.ProcessImage(newVideoFrame, trackData);

        if (processingOk)
        {
            if (calibration.calibMethod.IsCalibrated)
            {
                CalculateGazeCoordinates(trackData);

                if (GTSettings.Current.FileSettings.LoggingEnabled)
                {
                    logGaze.LogData(trackData);
                }
            }
            else
            {
                // Collect calibration samples while a calibration is running.
                if (isCalibrating)
                {
                    SaveCalibInfo(trackData);
                }

                if (GTSettings.Current.FileSettings.LoggingEnabled)
                {
                    logGaze.LogData(trackData);
                }
            }
        }
        else
        {
            if (GTSettings.Current.FileSettings.LoggingEnabled)
            {
                logGaze.LogData(trackData);
            }
        }
    }
    catch (Exception)
    {
        // NOTE(review): processing exceptions are swallowed; the frame is
        // simply reported as failed via processingOk.
        processingOk = false;
    }

    // Add sample to database
    TrackDB.Instance.AddSample(trackData.Copy());

    Autotune.Instance.Tune();

    // Update visualization when features have been detected
    if (isCalibrating == false)
    {
        visualization.Visualize(trackData);
    }

    // Stop performance timer and calculate FPS
    Performance.Now.Stop();

    // Raise FrameCaptureComplete event (UI listens for updating video stream)
    RaiseFrameProcessingCompletedEvent(processingOk);
}
/// <summary>
/// Draws the current tracking results onto the working gray frame:
/// thresholded pupil/glint overlays in processed mode, then ROI rectangles
/// and pupil/glint markers on top.
/// </summary>
/// <param name="trackData">Tracking results for the frame being shown.</param>
public void Visualize(TrackData trackData)
{
    if (gray == null)
        return;

    if (Settings.Instance.Visualization.VideoMode == VideoModeEnum.RawNoTracking)
        return; // no further actions

    #region Paint processed

    if (Settings.Instance.Visualization.VideoMode == VideoModeEnum.Processed)
    {
        processed = gray.Convert<Bgr, byte>();
        width = processed.Width;
        height = processed.Height;

        #region Draw threshold pupil

        if (Settings.Instance.Visualization.DrawPupil)
        {
            // Left
            if (trackData.LeftROI.Y > 0) //roi
                ThresholdColorizePupil(
                    trackData.LeftROI,
                    Settings.Instance.Processing.PupilThresholdLeft,
                    Settings.Instance.Visualization.PupilThresholdColor);

            // Right
            if (trackData.RightROI.Y > 0) //roi
                ThresholdColorizePupil(
                    trackData.RightROI,
                    Settings.Instance.Processing.PupilThresholdRight,
                    Settings.Instance.Visualization.PupilThresholdColor);

            // No per-eye ROI yet (Y == 0): colorize the whole image instead.
            if (trackData.LeftROI.Y == 0 && trackData.RightROI.Y == 0) //full image
                ThresholdColorizePupilFullImage();
        }

        #endregion

        #region Draw glints glints

        if (Settings.Instance.Processing.TrackingGlints)
        {
            if (trackData.LeftROI.Y > 0) //roi
                ThresholdColorizeGlints(
                    trackData.LeftROI,
                    Settings.Instance.Processing.GlintThresholdLeft,
                    Settings.Instance.Visualization.GlintThresholdColor);

            if (trackData.RightROI.Y > 0) //roi
                ThresholdColorizeGlints(trackData.RightROI,
                                        Settings.Instance.Processing.GlintThresholdRight,
                                        Settings.Instance.Visualization.GlintThresholdColor);

            if (trackData.LeftROI.Y == 0 && trackData.RightROI.Y == 0) //full image
                ThresholdColorizeGlintsFullImage();
        }

        #endregion
    }

    #endregion

    #region Draw roi, pupil, glint crosses etc.

    // Eye ROI
    if (Settings.Instance.Visualization.DrawEyesROI && trackData.EyesROI.Width != 0)
        DrawEyesROI(trackData.EyesROI);

    if (Settings.Instance.Visualization.DrawEyeROI)
    {
        if (trackData.LeftROI.Width != 0)
            DrawEyeROI(trackData.LeftROI);

        if (trackData.RightROI.Width != 0)
            DrawEyeROI(trackData.RightROI);
    }

    // Pupil (a zero X center is treated as "not detected")
    if (Settings.Instance.Visualization.DrawPupil)
    {
        if (trackData.PupilDataLeft.Center.X != 0)
            DrawPupil(trackData.PupilDataLeft.Center.ToPoint(), Settings.Instance.Processing.PupilSizeMaximum*2);

        if (trackData.PupilDataRight.Center.X != 0)
            DrawPupil(trackData.PupilDataRight.Center.ToPoint(), Settings.Instance.Processing.PupilSizeMaximum*2);
    }

    // Glint
    if (Settings.Instance.Processing.TrackingGlints)
    {
        if (trackData.GlintDataLeft.Glints != null && trackData.GlintDataLeft.Glints.Count != 0 &&
            trackData.GlintDataLeft.Glints.Centers[0].X != 0)
            DrawGlints(trackData.GlintDataLeft.Glints, Settings.Instance.Processing.GlintSizeMaximum/2);

        if (trackData.GlintDataRight.Glints != null && trackData.GlintDataRight.Glints.Count != 0 &&
            trackData.GlintDataRight.Glints.Centers[0].X != 0)
            DrawGlints(trackData.GlintDataRight.Glints, Settings.Instance.Processing.GlintSizeMaximum/2);
    }

    #endregion

    Performance.Now.Stamp("Visualized");
}
/// <summary>
/// Visualizes tracking results on the supplied frame by storing it as the
/// working gray image and delegating to the single-argument overload.
/// </summary>
/// <param name="trackData">Tracking results to visualize.</param>
/// <param name="image">Gray frame to draw onto.</param>
public void Visualize(TrackData trackData, Image<Gray, byte> image)
{
    gray = image;
    this.Visualize(trackData);
}
/// <summary>
/// Auto-tunes the pupil threshold for each eye from the latest sample: when
/// a pupil was found the threshold is recomputed from blob/background gray
/// levels; when not, it is nudged up or down based on how many raw blobs
/// the detector produced and their total area.
/// </summary>
/// <param name="lastSample">Most recent tracking sample.</param>
private void TunePupil(TrackData lastSample)
{
    int cLeft = lastSample.UnfilteredBlobCountLeft;
    int cRight = lastSample.UnfilteredBlobCountRight;

    #region Left eye

    if (lastSample.PupilLeftDetected)
    {
        missCountPupilLeft = 0;

        double meanPupilGray = lastSample.PupilDataLeft.Blob.ColorMean.G; //Only the Green channel is needed
        // Darkest of the four ROI corner samples approximates the background.
        double minOuterGray = Math.Min(Math.Min(lastSample.PupilDataLeft.GrayCorners[0], lastSample.PupilDataLeft.GrayCorners[1]),
                                       Math.Min(lastSample.PupilDataLeft.GrayCorners[2], lastSample.PupilDataLeft.GrayCorners[3]));

        // Threshold between pupil mean and background, but at least one
        // std-dev above the pupil mean.
        Settings.Instance.Processing.PupilThresholdLeft =
            Math.Max((int) (meanPupilGray + minOuterGray)/2,
                     (int) (meanPupilGray + lastSample.PupilDataLeft.Blob.ColorStdDev.G));

        //Max size (tracking iris?)
        if (lastSample.LeftROI.Width != 0)
            if (lastSample.PupilDataLeft.Diameter > lastSample.LeftROI.Width*0.8)
                Settings.Instance.Processing.PupilThresholdLeft -= 2;

        if (lastSample.PupilDataLeft.Blob.Rectangle.Width != 0)
            Settings.Instance.Processing.PupilSizeMaximum =
                Convert.ToInt32(lastSample.PupilDataLeft.Blob.Rectangle.Width*1.2);

        if (cLeft > 10) // eyebrow?
            Settings.Instance.Processing.PupilThresholdLeft -= 5;
    }
    else
    {
        // No blobs detected, increase threshold
        if (lastSample.LeftROI.Y != 0 && cLeft == 0)
            Settings.Instance.Processing.PupilThresholdLeft += 2;
        else if (cLeft > 0 && cLeft < 15 &&
                 lastSample.UnfilteredTotalBlobAreaLeft < Math.PI * Math.Pow(Settings.Instance.Processing.PupilSizeMaximum, 2))
            Settings.Instance.Processing.PupilThresholdLeft += 5;
        else if (cLeft > 15 &&
                 lastSample.UnfilteredTotalBlobAreaLeft < Math.PI * Math.Pow(Settings.Instance.Processing.PupilSizeMaximum, 2))
            Settings.Instance.Processing.PupilThresholdLeft += 10;
        else if (lastSample.UnfilteredTotalBlobAreaLeft > Math.PI * Math.Pow(Settings.Instance.Processing.PupilSizeMinimum, 2))
            if (Settings.Instance.Processing.PupilThresholdLeft > 10)
                Settings.Instance.Processing.PupilThresholdLeft -= 10;
    }

    // Reset when gone too high..
    if (Settings.Instance.Processing.PupilThresholdLeft > 100)
        Settings.Instance.Processing.PupilThresholdLeft = 30;

    if (debug)
        Console.WriteLine("Pupil threshold: {0}", Settings.Instance.Processing.PupilThresholdLeft);

    #endregion

    #region Right eye

    if (Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Binocular)
    {
        if (lastSample.PupilRightDetected)
        {
            missCountPupilRight = 0;

            double meanPupilGray = lastSample.PupilDataRight.Blob.ColorMean.G; //Only the Green channel is needed
            double minOuterGray = Math.Min(
                Math.Min(lastSample.PupilDataRight.GrayCorners[0], lastSample.PupilDataRight.GrayCorners[1]),
                Math.Min(lastSample.PupilDataRight.GrayCorners[2], lastSample.PupilDataRight.GrayCorners[3]));

            Settings.Instance.Processing.PupilThresholdRight =
                Math.Max((int) (meanPupilGray + minOuterGray)/2,
                         (int) (meanPupilGray + lastSample.PupilDataRight.Blob.ColorStdDev.G));

            //Max size (tracking iris?)
            if (lastSample.RightROI.Width > 0)
                if (lastSample.PupilDataRight.Diameter > lastSample.RightROI.Width*0.8)
                    Settings.Instance.Processing.PupilThresholdRight -= 2;

            if (cRight > 10) // too many blobs, eyelid, eyebrow or noise?
                Settings.Instance.Processing.PupilThresholdRight -= 5;
        }
        else
        {
            // No blobs detected, increase threshold
            if (lastSample.RightROI.Y != 0 && cRight == 0)
                Settings.Instance.Processing.PupilThresholdRight += 2;
            else if (cRight > 0 && cRight < 15 &&
                     lastSample.UnfilteredTotalBlobAreaRight < Math.PI*Math.Pow(Settings.Instance.Processing.PupilSizeMaximum, 2))
                Settings.Instance.Processing.PupilThresholdRight += 5;
            else if (cRight > 15 &&
                     lastSample.UnfilteredTotalBlobAreaRight < Math.PI*Math.Pow(Settings.Instance.Processing.PupilSizeMaximum, 2))
                Settings.Instance.Processing.PupilThresholdRight += 10;
            else if (lastSample.UnfilteredTotalBlobAreaRight > Math.PI*Math.Pow(Settings.Instance.Processing.PupilSizeMinimum, 2))
                Settings.Instance.Processing.PupilThresholdRight -= 10;
        }

        // Reset when gone too high
        if (Settings.Instance.Processing.PupilThresholdRight > 100)
            Settings.Instance.Processing.PupilThresholdRight = 30;
    }

    //if (cRight < 1 && Settings.Instance.Processing.PupilThresholdLeft < 70)
    //    Settings.Instance.Processing.PupilThresholdRight += 1;

    //if (cRight == 1 && lastSample.PupilDataRight.Blob.Fullness < 0.65)
    //    Settings.Instance.Processing.PupilThresholdRight += 2;
    //else if(cRight >= 15 && Settings.Instance.Processing.PupilThresholdRight >= 11) // don't go too low
    //    Settings.Instance.Processing.PupilThresholdRight -= 8;
    //else if(cRight >= 8 && Settings.Instance.Processing.PupilThresholdRight >= 6) // don't go too low
    //    Settings.Instance.Processing.PupilThresholdRight -= 4;

    //   // check area
    //   if(cRight >=2 && lastSample.UnfilteredTotalBlobAreaRight < Math.PI * Math.Pow(Settings.Instance.Processing.PupilSizeMinimum, 2))
    //       Settings.Instance.Processing.PupilThresholdRight += 5;

    #endregion
}
/// <summary>
/// Auto-tunes the pupil threshold per eye from the latest sample. Detected
/// pupils recompute the threshold from blob/background gray levels (and
/// reset it when it runs away); misses nudge the threshold based on the raw
/// blob count and total blob area.
/// </summary>
/// <param name="lastSample">Most recent tracking sample.</param>
private void TunePupil(TrackData lastSample)
{
    int cLeft = lastSample.UnfilteredBlobCountLeft;
    int cRight = lastSample.UnfilteredBlobCountRight;

    #region Left eye

    if (lastSample.PupilLeftDetected)
    {
        missCountPupilLeft = 0;

        double meanPupilGray = lastSample.PupilDataLeft.Blob.ColorMean.G; //Only the Green channel is needed
        // Darkest corner sample of the ROI approximates the background level.
        double minOuterGray = Math.Min(Math.Min(lastSample.PupilDataLeft.GrayCorners[0], lastSample.PupilDataLeft.GrayCorners[1]),
                                       Math.Min(lastSample.PupilDataLeft.GrayCorners[2], lastSample.PupilDataLeft.GrayCorners[3]));

        GTSettings.Current.Processing.PupilThresholdLeft =
            Math.Max((int)(meanPupilGray + minOuterGray) / 2,
                     (int)(meanPupilGray + lastSample.PupilDataLeft.Blob.ColorStdDev.G));

        //Max size
        GTSettings.Current.Processing.PupilSizeMaximum =
            Convert.ToInt32(lastSample.PupilDataLeft.Blob.Rectangle.Width * 1.3);

        // Reset a runaway threshold back to a sane default.
        if (GTSettings.Current.Processing.PupilThresholdLeft > 100)
        {
            GTSettings.Current.Processing.PupilThresholdLeft = 30;
        }
    }
    else
    {
        // No blobs detected, increase threshold
        if (lastSample.LeftROI.Y != 0 && cLeft == 0)
        {
            GTSettings.Current.Processing.PupilThresholdLeft += 2;
        }
        else if (cLeft > 0 && cLeft < 15 &&
                 lastSample.UnfilteredTotalBlobAreaLeft < Math.PI * Math.Pow(GTSettings.Current.Processing.PupilSizeMaximum, 2))
        {
            GTSettings.Current.Processing.PupilThresholdLeft += 5;
        }
        else if (cLeft > 15 &&
                 lastSample.UnfilteredTotalBlobAreaLeft < Math.PI * Math.Pow(GTSettings.Current.Processing.PupilSizeMaximum, 2))
        {
            GTSettings.Current.Processing.PupilThresholdLeft += 10;
        }
        else if (lastSample.UnfilteredTotalBlobAreaLeft > Math.PI * Math.Pow(GTSettings.Current.Processing.PupilSizeMinimum, 2))
        {
            if (GTSettings.Current.Processing.PupilThresholdLeft > 10)
            {
                GTSettings.Current.Processing.PupilThresholdLeft -= 10;
            }
        }
    }

    if (debug)
    {
        Console.WriteLine("Pupil threshold: {0}", GTSettings.Current.Processing.PupilThresholdLeft);
    }

    #endregion

    #region Right eye

    if (GTSettings.Current.Processing.TrackingMode == TrackingModeEnum.Binocular)
    {
        if (lastSample.PupilRightDetected)
        {
            missCountPupilRight = 0;

            double meanPupilGray = lastSample.PupilDataRight.Blob.ColorMean.G; //Only the Green channel is needed
            double minOuterGray = Math.Min(
                Math.Min(lastSample.PupilDataRight.GrayCorners[0], lastSample.PupilDataRight.GrayCorners[1]),
                Math.Min(lastSample.PupilDataRight.GrayCorners[2], lastSample.PupilDataRight.GrayCorners[3]));

            GTSettings.Current.Processing.PupilThresholdRight =
                Math.Max((int)(meanPupilGray + minOuterGray) / 2,
                         (int)(meanPupilGray + lastSample.PupilDataRight.Blob.ColorStdDev.G));

            // NOTE(review): right eye resets to 40 while the left resets to
            // 30 — confirm the asymmetry is intentional.
            if (GTSettings.Current.Processing.PupilThresholdRight > 100)
            {
                GTSettings.Current.Processing.PupilThresholdRight = 40;
            }
        }
        else
        {
            // No blobs detected, increase threshold
            if (lastSample.RightROI.Y != 0 && cRight == 0)
            {
                GTSettings.Current.Processing.PupilThresholdRight += 2;
            }
            else if (cRight > 0 && cRight < 15 &&
                     lastSample.UnfilteredTotalBlobAreaRight < Math.PI * Math.Pow(GTSettings.Current.Processing.PupilSizeMaximum, 2))
            {
                GTSettings.Current.Processing.PupilThresholdRight += 5;
            }
            else if (cRight > 15 &&
                     lastSample.UnfilteredTotalBlobAreaRight < Math.PI * Math.Pow(GTSettings.Current.Processing.PupilSizeMaximum, 2))
            {
                GTSettings.Current.Processing.PupilThresholdRight += 10;
            }
            else if (lastSample.UnfilteredTotalBlobAreaRight > Math.PI * Math.Pow(GTSettings.Current.Processing.PupilSizeMinimum, 2))
            {
                GTSettings.Current.Processing.PupilThresholdRight -= 10;
            }
        }
    }

    //if (cRight < 1 && GTSettings.Current.Processing.PupilThresholdLeft < 70)
    //    GTSettings.Current.Processing.PupilThresholdRight += 1;

    //if (cRight == 1 && lastSample.PupilDataRight.Blob.Fullness < 0.65)
    //    GTSettings.Current.Processing.PupilThresholdRight += 2;
    //else if(cRight >= 15 && GTSettings.Current.Processing.PupilThresholdRight >= 11) // don't go too low
    //    GTSettings.Current.Processing.PupilThresholdRight -= 8;
    //else if(cRight >= 8 && GTSettings.Current.Processing.PupilThresholdRight >= 6) // don't go too low
    //    GTSettings.Current.Processing.PupilThresholdRight -= 4;

    //   // check area
    //   if(cRight >=2 && lastSample.UnfilteredTotalBlobAreaRight < Math.PI * Math.Pow(GTSettings.Current.Processing.PupilSizeMinimum, 2))
    //       GTSettings.Current.Processing.PupilThresholdRight += 5;

    #endregion
}
/// <summary>
/// Maps detected pupil/glint features to gaze coordinates, averages the two
/// eyes in binocular mode, smooths the result, and stores raw/smoothed gaze
/// plus extended gaze data on the current sample.
/// </summary>
/// <param name="td">Tracking sample whose features are mapped to gaze.</param>
private void CalculateGazeCoordinates(TrackData td)
{
    GTPoint gazedCoordinatesLeft;
    GTPoint gazedCoordinatesRight = new GTPoint(); // stays (0,0) in monocular mode
    GTPoint smoothedCoordinates;

    #region Monocular/Left eye

    // NOTE(review): reads the trackData field while the rest of the method
    // uses the td parameter — presumably the same sample; confirm at caller.
    calibration.PupilCenterLeft = trackData.PupilDataLeft.Center;

    if (Settings.Instance.Processing.TrackingGlints)
        calibration.GlintConfigLeft = td.GlintDataLeft.Glints;

    gazedCoordinatesLeft = calibration.GetGazeCoordinates(td, EyeEnum.Left);

    #endregion

    #region Binocular/Right eye

    if (Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Binocular)
    {
        calibration.PupilCenterRight = td.PupilDataRight.Center;

        if (Settings.Instance.Processing.TrackingGlints)
            calibration.GlintConfigRight = td.GlintDataRight.Glints;

        gazedCoordinatesRight = calibration.GetGazeCoordinates(td, EyeEnum.Right);
    }

    #endregion

    #region Smoothing/Eye movement state

    if (Settings.Instance.Processing.EyeMouseSmooth)
    {
        var p = new GTPoint(gazedCoordinatesLeft.X, gazedCoordinatesLeft.Y);

        if (Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Binocular)
        {
            // Average both eyes when the right eye yielded a non-zero point.
            if (gazedCoordinatesRight.Y != 0 && gazedCoordinatesRight.X != 0)
            {
                p.X += gazedCoordinatesRight.X;
                p.Y += gazedCoordinatesRight.Y;
                p.X = p.X / 2;
                p.Y = p.Y / 2;
            }
        }

        this.eyeMovement.CalculateEyeMovement(p);

        // Smoothing is applied unconditionally; the fixation-only variant
        // below was disabled.
        smoothedCoordinates = exponentialSmoother.Smooth(p);

        //if (this.eyeMovement.EyeMovementState == Classifier.EyeMovementStateEnum.Fixation)
        //    smoothedCoordinates = exponentialSmoother.Smooth(p);
        //else
        //{
        //    smoothedCoordinates = p;
        //    this.exponentialSmoother.Stop();
        //}

        trackData.EyeMovement = this.eyeMovement.EyeMovementState;
        gazeDataSmoothed.Set(smoothedCoordinates.X, smoothedCoordinates.Y, smoothedCoordinates.X, smoothedCoordinates.Y);
    }

    #endregion

    #region Set values, raise events

    // trigger OnGazeData events
    this.gazeDataRaw.Set(
        gazedCoordinatesLeft.X,
        gazedCoordinatesLeft.Y,
        gazedCoordinatesRight.X,
        gazedCoordinatesRight.Y);

    this.trackData.GazeDataRaw = this.gazeDataRaw;
    this.trackData.GazeDataSmoothed = this.gazeDataSmoothed;

    // Trigger OnExtendedGazeData events
    this.gazeDazaExtended.Set(
        this.trackData.TimeStamp,
        this.gazeDataRaw.GazePositionX,
        this.gazeDataRaw.GazePositionY,
        this.trackData.PupilDataLeft.Diameter,
        this.trackData.PupilDataRight.Diameter);

    #endregion
}
/// <summary>
/// Computes gaze coordinates from the sample's pupil/glint features,
/// averages both eyes in binocular mode, smooths the combined point, and
/// publishes raw, smoothed and extended gaze data.
/// </summary>
/// <param name="td">Tracking sample whose features are mapped to gaze.</param>
private void CalculateGazeCoordinates(TrackData td)
{
    GTPoint gazedCoordinatesLeft;
    GTPoint gazedCoordinatesRight = new GTPoint(); // stays (0,0) in monocular mode
    GTPoint smoothedCoordinates;

    #region Monocular/Left eye

    // NOTE(review): uses the trackData field rather than the td parameter —
    // presumably the same object; confirm against the caller.
    calibration.PupilCenterLeft = trackData.PupilDataLeft.Center;

    if (Settings.Instance.Processing.TrackingGlints)
    {
        calibration.GlintConfigLeft = td.GlintDataLeft.Glints;
    }

    gazedCoordinatesLeft = calibration.GetGazeCoordinates(td, EyeEnum.Left);

    #endregion

    #region Binocular/Right eye

    if (Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Binocular)
    {
        calibration.PupilCenterRight = td.PupilDataRight.Center;

        if (Settings.Instance.Processing.TrackingGlints)
        {
            calibration.GlintConfigRight = td.GlintDataRight.Glints;
        }

        gazedCoordinatesRight = calibration.GetGazeCoordinates(td, EyeEnum.Right);
    }

    #endregion

    #region Smoothing/Eye movement state

    if (Settings.Instance.Processing.EyeMouseSmooth)
    {
        var p = new GTPoint(gazedCoordinatesLeft.X, gazedCoordinatesLeft.Y);

        if (Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Binocular)
        {
            // Average both eyes when the right eye yielded a non-zero point.
            if (gazedCoordinatesRight.Y != 0 && gazedCoordinatesRight.X != 0)
            {
                p.X += gazedCoordinatesRight.X;
                p.Y += gazedCoordinatesRight.Y;
                p.X = p.X / 2;
                p.Y = p.Y / 2;
            }
        }

        this.eyeMovement.CalculateEyeMovement(p);

        // Smoothing applied unconditionally; fixation-only variant disabled.
        smoothedCoordinates = exponentialSmoother.Smooth(p);

        //if (this.eyeMovement.EyeMovementState == Classifier.EyeMovementStateEnum.Fixation)
        //    smoothedCoordinates = exponentialSmoother.Smooth(p);
        //else
        //{
        //    smoothedCoordinates = p;
        //    this.exponentialSmoother.Stop();
        //}

        trackData.EyeMovement = this.eyeMovement.EyeMovementState;
        gazeDataSmoothed.Set(smoothedCoordinates.X, smoothedCoordinates.Y, smoothedCoordinates.X, smoothedCoordinates.Y);
    }

    #endregion

    #region Set values, raise events

    // trigger OnGazeData events
    this.gazeDataRaw.Set(
        gazedCoordinatesLeft.X,
        gazedCoordinatesLeft.Y,
        gazedCoordinatesRight.X,
        gazedCoordinatesRight.Y);

    this.trackData.GazeDataRaw = this.gazeDataRaw;
    this.trackData.GazeDataSmoothed = this.gazeDataSmoothed;

    // Trigger OnExtendedGazeData events
    this.gazeDazaExtended.Set(
        this.trackData.TimeStamp,
        this.gazeDataRaw.GazePositionX,
        this.gazeDataRaw.GazePositionY,
        this.trackData.PupilDataLeft.Diameter,
        this.trackData.PupilDataRight.Diameter);

    #endregion
}
/// <summary>
/// Frame handler invoked whenever the camera device delivers a new eye
/// image: runs feature detection, gaze estimation when calibrated,
/// auto-tuning, ROI recentering and visualization bookkeeping.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Event args carrying the captured gray image.</param>
private void Device_OnImage(object sender, GTHardware.Cameras.ImageEventArgs e)
{
    imgCounter++;
    processingDone = false;
    bool processingOk; // NOTE(review): never used; trackData.ProcessingOk carries the result.

    Performance.Now.IsEnabled = false;
    Performance.Now.Start(); // Stop output by setting IsEnabled = false or Stop()

    // TrackData object stores all information on pupil centers, glints etc.
    trackData = new TrackData();
    trackData.TimeStamp = DateTime.UtcNow.Ticks / TimeSpan.TicksPerMillisecond;
    trackData.FrameNumber = imgCounter;

    // Keep reference to image in local variable
    Image<Gray, byte> gray = e.Image;

    // Flip image here, directshow flipping is not supported by every device
    if (Settings.Instance.Camera.FlipImage)
        gray = gray.Flip(FLIP.VERTICAL);

    // Tracking disabled, if visible just set gray image in visualization and return
    if (Settings.Instance.Visualization.VideoMode == VideoModeEnum.RawNoTracking)
    {
        Performance.Now.Stop();

        if (Settings.Instance.Visualization.IsDrawing)
        {
            visualization.Gray = gray;
            visualization.TrackData = trackData; // not sure if there is anything to visualize here..
            CalculateFPS();
            RaiseFrameProcessingCompletedEvent(true);
        }

        return;
    }

    try
    {
        // Process image, find features, main entry point to processing chain
        trackData.ProcessingOk = detectionManager.ProcessImage(gray, trackData);

        if (trackData.ProcessingOk)
        {
            if (calibration.CalibMethod.IsCalibrated)
            {
                CalculateGazeCoordinates(trackData);

                if (Settings.Instance.FileSettings.LoggingEnabled)
                    logGaze.LogData(trackData);
            }
            else
            {
                if (isCalibrating)
                    SaveCalibInfo(trackData);

                // Really log uncalibrated data? For pupil size?
                //if (Settings.Instance.FileSettings.LoggingEnabled)
                //    logGaze.LogData(trackData);
            }
        }
        else
        {
            if (Settings.Instance.FileSettings.LoggingEnabled)
                logGaze.LogData(trackData);
        }
    }
    catch (Exception)
    {
        // Processing exceptions are swallowed; only the flag records failure.
        trackData.ProcessingOk = false;
    }

    // ********************************************************************* MODIFICATO ***********************************************************************
    // Sends values via the UDP server directly
    // NOTE(review): this brace-less if sends RAW data when SendSmoothedData
    // is true — the SendGazeData call below is the if-body; the smoothed
    // variant is commented out. Confirm intent and add braces.
    if (server.SendSmoothedData)
        // server.SendGazeData(gazeDataSmoothed.GazePositionX, gazeDataSmoothed.GazePositionY,
        //    trackData.PupilDataLeft.Diameter);
        //else
        // Send avg. value
        server.SendGazeData(gazeDataRaw.GazePositionX, gazeDataRaw.GazePositionY, trackData.PupilDataLeft.Diameter);

    //server.SendTrackData(trackData);

    Autotune.Instance.Tune();

    // Set data for visualization
    if (Settings.Instance.Visualization.IsDrawing && isCalibrating == false)
    {
        // Drawn on-demand by calling GetProcessed or GetOriginalImage
        visualization.Gray = gray.Copy();
        visualization.TrackData = trackData;
    }

    // Recenter camera ROI
    detectionManager.CameraCenterROI(trackData, gray.Size);

    // Store camera roi position
    trackData.CameraROI = GTHardware.Camera.Instance.ROI;

    // Add sample to database
    TrackDB.Instance.AddSample(trackData.Copy());

    // Calculate the frames per second we're tracking at
    CalculateFPS();

    // Stop performance timer
    Performance.Now.Stop();

    // Raise FrameCaptureComplete event (UI listens for updating video stream)
    RaiseFrameProcessingCompletedEvent(trackData.ProcessingOk);
}
/// <summary>
/// Paints the tracking overlays for the given sample onto the current frame:
/// optional threshold colorization (processed mode), ROI rectangles, pupil
/// crosses and glint markers. Does nothing when no frame has been captured
/// or when the video mode is raw-without-tracking.
/// </summary>
/// <param name="trackData">The tracking sample to visualize.</param>
public void Visualize(TrackData trackData)
{
    // No frame captured yet - nothing to draw on.
    if (gray == null)
        return;

    var viz = Settings.Instance.Visualization;
    var proc = Settings.Instance.Processing;

    // Raw mode without tracking: no overlays at all.
    if (viz.VideoMode == VideoModeEnum.RawNoTracking)
        return;

    // A ROI with Y > 0 has been located; Y == 0 on both sides means whole-image mode.
    bool leftRoiSet = trackData.LeftROI.Y > 0;
    bool rightRoiSet = trackData.RightROI.Y > 0;
    bool fullImage = trackData.LeftROI.Y == 0 && trackData.RightROI.Y == 0;

    #region Paint processed

    if (viz.VideoMode == VideoModeEnum.Processed)
    {
        processed = gray.Convert<Bgr, byte>();
        width = processed.Width;
        height = processed.Height;

        #region Draw threshold pupil

        if (viz.DrawPupil)
        {
            if (leftRoiSet)
                ThresholdColorizePupil(trackData.LeftROI, proc.PupilThresholdLeft, viz.PupilThresholdColor);

            if (rightRoiSet)
                ThresholdColorizePupil(trackData.RightROI, proc.PupilThresholdRight, viz.PupilThresholdColor);

            if (fullImage)
                ThresholdColorizePupilFullImage();
        }

        #endregion

        #region Draw glints glints

        if (proc.TrackingGlints)
        {
            if (leftRoiSet)
                ThresholdColorizeGlints(trackData.LeftROI, proc.GlintThresholdLeft, viz.GlintThresholdColor);

            if (rightRoiSet)
                ThresholdColorizeGlints(trackData.RightROI, proc.GlintThresholdRight, viz.GlintThresholdColor);

            if (fullImage)
                ThresholdColorizeGlintsFullImage();
        }

        #endregion
    }

    #endregion

    #region Draw roi, pupil, glint crosses etc.

    // Combined eyes region
    if (viz.DrawEyesROI && trackData.EyesROI.Width != 0)
        DrawEyesROI(trackData.EyesROI);

    // Per-eye regions
    if (viz.DrawEyeROI)
    {
        if (trackData.LeftROI.Width != 0)
            DrawEyeROI(trackData.LeftROI);

        if (trackData.RightROI.Width != 0)
            DrawEyeROI(trackData.RightROI);
    }

    // Pupil crosses (X == 0 means the pupil center was not found)
    if (viz.DrawPupil)
    {
        int pupilMarkerSize = proc.PupilSizeMaximum * 2;

        if (trackData.PupilDataLeft.Center.X != 0)
            DrawPupil(trackData.PupilDataLeft.Center.ToPoint(), pupilMarkerSize);

        if (trackData.PupilDataRight.Center.X != 0)
            DrawPupil(trackData.PupilDataRight.Center.ToPoint(), pupilMarkerSize);
    }

    // Glint markers
    if (proc.TrackingGlints)
    {
        int glintMarkerSize = proc.GlintSizeMaximum / 2;

        var leftGlints = trackData.GlintDataLeft.Glints;
        if (leftGlints != null && leftGlints.Count != 0 && leftGlints.Centers[0].X != 0)
            DrawGlints(leftGlints, glintMarkerSize);

        var rightGlints = trackData.GlintDataRight.Glints;
        if (rightGlints != null && rightGlints.Count != 0 && rightGlints.Centers[0].X != 0)
            DrawGlints(rightGlints, glintMarkerSize);
    }

    #endregion

    Performance.Now.Stamp("Visualized");
}
/// <summary>
/// Auto-tunes the per-eye glint thresholds from the last tracking sample:
/// lowers a threshold (-3) when no glints were found at all, and raises it
/// (+3 when the unfiltered glint area exceeds 4x the pupil blob area, +1 when
/// it exceeds 3x) up to a ceiling of 230. The pupil blob area serves as the
/// size reference, so both pupils must have been detected.
/// </summary>
/// <param name="lastSample">The most recent tracking sample from TrackDB.</param>
private void TuneGlint(TrackData lastSample)
{
    // No glint information at all - nothing to tune.
    if (lastSample.GlintDataLeft == null && lastSample.GlintDataRight == null)
        return;

    // Pupil blob area is the reference size below; require both pupils.
    if (lastSample.PupilLeftDetected == false || lastSample.PupilRightDetected == false)
        return;

    // BUGFIX: the original only returned when (GlintDataLeft.Glints == null
    // && GlintDataRight == null) but then dereferenced
    // lastSample.GlintDataRight.Glints unconditionally, throwing a
    // NullReferenceException whenever the right-eye glint data (or its blob
    // collection) was missing while the left side was populated. Require both
    // sides to be fully populated before tuning; otherwise skip this frame.
    if (lastSample.GlintDataLeft == null || lastSample.GlintDataLeft.Glints == null ||
        lastSample.GlintDataRight == null || lastSample.GlintDataRight.Glints == null)
        return;

    double lArea = lastSample.GlintDataLeft.Glints.UnfilteredTotalArea;
    double rArea = lastSample.GlintDataRight.Glints.UnfilteredTotalArea;

    // No glints found: drop the threshold so blobs start to appear.
    if (lastSample.GlintDataLeft.Glints.Count == 0)
        Settings.Instance.Processing.GlintThresholdLeft -= 3;

    if (lastSample.GlintDataRight.Glints.Count == 0)
        Settings.Instance.Processing.GlintThresholdRight -= 3;

    // Glint area too big compared with the pupil blob - raise the threshold.
    // Left (blob area > 10 filters out noise-sized pupil blobs; 230 caps the threshold)
    if (lastSample.PupilLeftDetected && lastSample.PupilDataLeft.Blob.Area > 10)
    {
        if (Settings.Instance.Processing.GlintThresholdLeft < 230) // max
        {
            if (lArea > lastSample.PupilDataLeft.Blob.Area * 4)
                Settings.Instance.Processing.GlintThresholdLeft += 3;
            else if (lArea > lastSample.PupilDataLeft.Blob.Area * 3)
                Settings.Instance.Processing.GlintThresholdLeft += 1;
        }
    }

    // Right
    if (lastSample.PupilRightDetected && lastSample.PupilDataRight.Blob.Area > 10)
    {
        if (Settings.Instance.Processing.GlintThresholdRight < 230)
        {
            if (rArea > lastSample.PupilDataRight.Blob.Area * 4)
                Settings.Instance.Processing.GlintThresholdRight += 3;
            else if (rArea > lastSample.PupilDataRight.Blob.Area * 3)
                Settings.Instance.Processing.GlintThresholdRight += 1;
        }
    }
}