Example #1
        public void BinocularDetectionTest()
        {
            Settings.Instance.Processing.TrackingMode = TrackingModeEnum.Binocular;
            GazeTrackingLibrary.Detection.Eye.Eyetracker et = new GazeTrackingLibrary.Detection.Eye.Eyetracker();
            Assert.IsTrue(et.IsReady);
            List<double> rightEyeProportionalErrors = new List<double>();
            List<double> leftEyeProportionalErrors  = new List<double>();
            long         countEyesNotFound          = 0;
            long         countNoEyeCentreLandmarks  = 0;
            long         countEyesFoundCorrectly    = 0;
            long         countEyesFoundIncorrectly  = 0;
            long         imageCounter               = 0;

            // The labelled image sequences under test; judging by the assertions below this is
            // the combined POLY_U NIR FACE and PUT Face databases
            AppendedMediaSequences allMs = datasetsOfInterest();

            foreach (IMediaSequence ms in allMs.getMediaSequences())
            {
                foreach (IMediaFrame mf in ms.getMediaFrames())
                {
                    Image img = mf.getImage();
                    imageCounter++;
                    ILabelledFeature eyeR = mf.getLabelledFeature(DataSetEnums.FeatureName.RIGHT_EYE_CENTRE);
                    ILabelledFeature eyeL = mf.getLabelledFeature(DataSetEnums.FeatureName.LEFT_EYE_CENTRE);
                    if ((eyeR != null) && (eyeL != null))
                    {
                        Point  rightEyeCentre = eyeR.getPath().getPoints()[0];
                        Point  leftEyeCentre  = eyeL.getPath().getPoints()[0];
                        Bitmap bmp            = new Bitmap(img);

                        Image<Gray, byte> gray      = new Image<Gray, byte>(bmp);
                        TrackData          trackData = new TrackData();
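                        // DetectEyes runs the cascade eye detector; on success it fills
                        // trackData.LeftROI / trackData.RightROI (see Example #2)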
                        bool eyesFound = et.DetectEyes(gray, trackData);
                        if (eyesFound)
                        {
                            // Record the detection result so that we can later compare the distance from each
                            // labelled eye centre to the centre of its detected region, normalised by the
                            // inter-eye distance. The tracker's Left/Right ROIs appear to be mirrored with
                            // respect to the labelled landmarks (the subject's right eye sits on the image's
                            // left), hence the swapped Contains checks below.
                            if (trackData.RightROI.Contains(leftEyeCentre) && trackData.LeftROI.Contains(rightEyeCentre))
                            {
                                countEyesFoundCorrectly++;
                                renderEyeDetection(imageCounter, bmp, gray, trackData, SearchResult.FOUND_OK);
                            }
                            else
                            {
                                countEyesFoundIncorrectly++;
                                renderEyeDetection(imageCounter, bmp, gray, trackData, SearchResult.FOUND_WRONG);
                            }

                            // Create a measure for the error: the offset of each detected ROI centre from its
                            // labelled eye centre, normalised by the labelled inter-eye distance
                            Point roiCentreLeftEye  = new Point((trackData.RightROI.Right + trackData.RightROI.Left) / 2, (trackData.RightROI.Top + trackData.RightROI.Bottom) / 2);
                            Point roiCentreRightEye = new Point((trackData.LeftROI.Right + trackData.LeftROI.Left) / 2, (trackData.LeftROI.Top + trackData.LeftROI.Bottom) / 2);
                            roiCentreRightEye.Offset(-rightEyeCentre.X, -rightEyeCentre.Y);
                            roiCentreLeftEye.Offset(-leftEyeCentre.X, -leftEyeCentre.Y);
                            rightEyeCentre.Offset(-leftEyeCentre.X, -leftEyeCentre.Y);
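                            // rightEyeCentre now holds the vector between the two labelled eye centres,
                            // so radialDistance(rightEyeCentre) below is the labelled inter-eye distance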
                            double radialProportionRight = radialDistance(roiCentreRightEye) / radialDistance(rightEyeCentre);
                            double radialProportionLeft  = radialDistance(roiCentreLeftEye) / radialDistance(rightEyeCentre);
                            rightEyeProportionalErrors.Add(radialProportionRight);
                            leftEyeProportionalErrors.Add(radialProportionLeft);
                        }
                        else
                        {
                            countEyesNotFound++;
                            renderEyeDetection(imageCounter, bmp, gray, trackData, SearchResult.NOT_FOUND);
                        }
                        bmp.Dispose();
                        gray.Dispose();
                    }
                    else
                    {
                        countNoEyeCentreLandmarks++;
                    }
                    img.Dispose();
                }
            }

            double reMean   = calculateMean(rightEyeProportionalErrors);
            double leMean   = calculateMean(leftEyeProportionalErrors);
            double reStdDev = calculateStdDev(rightEyeProportionalErrors);
            double leStdDev = calculateStdDev(leftEyeProportionalErrors);

            // For the combined POLY_U NIR FACE Database & PUT Face Database
            Assert.True(reMean < 0.15D);
            Assert.True(leMean < 0.15D);
            Assert.True((double)countEyesFoundCorrectly / (countEyesFoundCorrectly + countEyesFoundIncorrectly + countEyesNotFound) > 0.6);
            Assert.True((double)countEyesFoundCorrectly / (countEyesFoundCorrectly + countEyesFoundIncorrectly) > 0.74);
            Assert.AreEqual(615, countNoEyeCentreLandmarks);

            // For the POLY_U NIR FACE Database alone the expectations differ (only 24.3% of
            // frames currently succeed, and every frame carries eye-centre landmarks). These
            // assertions contradict the block above, so they are kept for reference but disabled:
            //Assert.True(reMean < 0.15D);
            //Assert.True(leMean < 0.15D);
            //Assert.True((double)countEyesFoundCorrectly / (countEyesFoundCorrectly + countEyesFoundIncorrectly + countEyesNotFound) > 0.2);
            //Assert.True((double)countEyesFoundCorrectly / (countEyesFoundCorrectly + countEyesFoundIncorrectly) > 0.85);
            //Assert.AreEqual(0, countNoEyeCentreLandmarks);

            // For the PUT Face Database alone (disabled for the same reason):
            //Assert.True(reMean < 0.15D);                     // actual is only slightly under this
            //Assert.True(leMean < 0.14D);                     // actual is only slightly under this
            //Assert.True(countEyesFoundCorrectly >= 6014);    // 6014 is the current baseline
            //Assert.True(countEyesFoundIncorrectly <= 2048);  // 2048 is the current baseline
            //Assert.True(countEyesNotFound <= 1199);          // 1199 is the current baseline
            //Assert.True(countNoEyeCentreLandmarks <= 615);   // 615 is the current baseline
            //Assert.AreEqual(615, countNoEyeCentreLandmarks); // Should not suddenly be getting more landmarks!
        }
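
The helpers referenced above (radialDistance, calculateMean, calculateStdDev) are not shown in the listing. Below is a minimal sketch of plausible implementations, inferred from how they are called; the bodies are assumptions, not the original code (requires using System.Linq):

        // Assumed: Euclidean length of a point treated as an offset vector from the origin
        private static double radialDistance(Point p)
        {
            return Math.Sqrt((double)p.X * p.X + (double)p.Y * p.Y);
        }

        // Assumed: arithmetic mean of the collected proportional errors
        private static double calculateMean(List<double> values)
        {
            return values.Count == 0 ? 0.0 : values.Average();
        }

        // Assumed: sample standard deviation of the collected proportional errors
        private static double calculateStdDev(List<double> values)
        {
            if (values.Count < 2)
                return 0.0;

            double mean  = values.Average();
            double sumSq = values.Sum(v => (v - mean) * (v - mean));
            return Math.Sqrt(sumSq / (values.Count - 1));
        }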
Example #2
        // This is the main image feature detection chain

        public bool ProcessImage(Image <Gray, byte> input, TrackData trackData)
        {
            counter++;
            //Log.Performance.Now.IsEnabled = false;

            featuresLeftFound  = false;
            featuresRightFound = false;

            #region Face detection

            // (left empty: no face detection is performed in this build)

            #endregion

            #region Eyes region tracking (binocular)

            //// If binocular -> Track (head), (eye region), pupil, (glints)
            //if (Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Binocular)
            //{
            //    if (Settings.Instance.Processing.TrackingEyes && eyestracker.IsReady)
            //    {
            //        if (doEyes && CameraControl.Instance.UsingUC480 == true && CameraControl.Instance.IsROISet == false)
            //        {
            //            if (eyestracker.DetectEyes(input, trackData))
            //            {
            //               doEyes = false; // found both eyes
            //               CameraControl.Instance.ROI = trackData.EyesROI;
            //               TrackDB.Instance.Data.Clear();
            //               doEyes = false;
            //               doEye = true;
            //               return false;
            //            }
            //        }
            //    }
            //}

            #endregion

            #region Eye region tracking

            if (Settings.Instance.Processing.TrackingEye && doEye)
            {
                // The eye feature detector is ready once the Haar cascade XML file has loaded
                if (eyetracker.IsReady)
                {
                    if (eyetracker.DetectEyes(input, trackData)) // will set left/right ROI
                    {
                        missCounter = 0;
                        doEye       = false;
                    }
                    else
                    {
                        // No eye/eyes found
                        doEye = true;
                        missCounter++;

                        // After roughly a third of a second of consecutive misses, drop the camera's
                        // hardware ROI so the detector can search the full frame again
                        if (GTHardware.Camera.Instance.Device.IsSupportingROI && missCounter > GTHardware.Camera.Instance.Device.FPS / 3)
                        {
                            GTHardware.Camera.Instance.Device.ClearROI();
                        }

                        return false;
                    }
                }
            }

            #endregion

            #region Left eye

            // Set the sub-ROI: if eye feature detection was performed this frame this does nothing;
            // otherwise it falls back to the estimate from the previous frame
            ApplyEstimatedEyeROI(EyeEnum.Left, trackData, input.Size);
            inputLeftEye = input.Copy(trackData.LeftROI);

            // Detect pupil
            if (pupilDetectionLeft.DetectPupil(inputLeftEye, trackData))
            {
                trackData.PupilDataLeft = pupilDetectionLeft.PupilData;

                // Detect glint(s)
                if (Settings.Instance.Processing.TrackingGlints)
                {
                    if (glintDetectionLeft.DetectGlints(inputLeftEye, pupilDetectionLeft.PupilData.Center))
                    {
                        trackData.GlintDataLeft = ConvertGlintsToAbsolute(glintDetectionLeft.GlintData, trackData.LeftROI);
                        featuresLeftFound       = true;
                    }
                }
                else
                {
                    featuresLeftFound = true;
                }

                // Convert values from the sub-ROI to absolute image coordinates (e.g. from a 70x70 sub-ROI to the full 1280x1024 frame)
                trackData.PupilDataLeft = ConvertPupilToAbsolute(EyeEnum.Left, pupilDetectionLeft.PupilData, trackData);
            }

            #endregion

            #region Right eye

            if (Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Binocular)
            {
                ApplyEstimatedEyeROI(EyeEnum.Right, trackData, input.Size);
                inputRightEye = input.Copy(trackData.RightROI);

                // Detect pupil
                if (pupilDetectionRight.DetectPupil(inputRightEye, trackData))
                {
                    trackData.PupilDataRight = pupilDetectionRight.PupilData;

                    // Detect glint(s)
                    if (Settings.Instance.Processing.TrackingGlints)
                    {
                        if (glintDetectionRight.DetectGlints(inputRightEye, pupilDetectionRight.PupilData.Center))
                        {
                            trackData.GlintDataRight = ConvertGlintsToAbsolute(glintDetectionRight.GlintData, trackData.RightROI);
                            featuresRightFound       = true;
                        }
                    }
                    else
                    {
                        featuresRightFound = true;
                    }

                    trackData.PupilDataRight = ConvertPupilToAbsolute(EyeEnum.Right, pupilDetectionRight.PupilData, trackData);
                }
            }

            #endregion

            #region ROI mode / state / update

            #region Monocular

            if (Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Monocular)
            {
                if (!featuresLeftFound)
                {
                    if (Settings.Instance.Processing.TrackingEye)
                    {
                        doEye = true;
                        if (GTHardware.Camera.Instance.Device.IsSettingROI && GTHardware.Camera.Instance.Device.IsROISet == false)
                        {
                            GTHardware.Camera.Instance.Device.ClearROI();
                        }
                    }
                    else
                    {
                        trackData.LeftROI = new Rectangle(new Point(0, 0), new Size(0, 0));
                    }
                }
                else
                {
                    trackData.LeftROI = SetROI(input.Size, trackData.PupilDataLeft.Center, trackData.PupilDataLeft.Diameter);
                    doEye             = false;

                    // If using special camera, set roi and adjust EyeROIs
                    if (GTHardware.Camera.Instance.Device.IsSupportingROI)
                    {
                        if (GTHardware.Camera.Instance.Device.IsROISet == false)
                        {
                            CameraSetROI(trackData);
                        }
                        else
                        {
                            CenterROIOnPupil(trackData, EyeEnum.Left, input.Size);

                            // Re-center sub-ROIs, ensuring the eye stays within the margins
                            if (GTHardware.Camera.Instance.Device.IsSupportingROI && GTHardware.Camera.Instance.Device.IsROISet)
                            {
                                CameraCenterROI(trackData, input.Size);
                            }
                        }
                    }
                }
            }

            #endregion

            #region Binocular

            if (Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Binocular)
            {
                // Nothing found, run eye classifier on next frame
                if (!featuresLeftFound || !featuresRightFound)
                {
                    if (Settings.Instance.Processing.TrackingEye)
                    {
                        doEye = true;
                    }
                    else
                    {
                        trackData.LeftROI  = new Rectangle(new Point(0, 0), new Size(0, 0));
                        trackData.RightROI = new Rectangle(new Point(0, 0), new Size(0, 0));
                    }
                }
                else
                {
                    trackData.LeftROI  = SetROI(input.Size, trackData.PupilDataLeft.Center, trackData.PupilDataLeft.Diameter);
                    trackData.RightROI = SetROI(input.Size, trackData.PupilDataRight.Center, trackData.PupilDataRight.Diameter);

                    doEye = false;

                    // If using special camera, set roi and adjust EyeROIs
                    if (GTHardware.Camera.Instance.Device.IsSupportingROI)
                    {
                        if (GTHardware.Camera.Instance.Device.IsROISet == false)
                        {
                            CameraSetROI(trackData);
                        }
                        else
                        {
                            CenterROIOnPupil(trackData, EyeEnum.Left, input.Size);
                            CenterROIOnPupil(trackData, EyeEnum.Right, input.Size);

                            // Re-center sub-ROIs, ensuring the eyes stay within the margins
                            if (GTHardware.Camera.Instance.Device.IsSupportingROI && GTHardware.Camera.Instance.Device.IsROISet)
                            {
                                CameraCenterROI(trackData, input.Size);
                            }
                        }
                    }
                }
            }

            #endregion

            #endregion

            //Performance.Now.Stamp("Processing all done");

            // Note: in binocular mode the return value reflects only the right-eye result;
            // featuresLeftFound is tracked but not required here
            if (Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Binocular)
            {
                return featuresRightFound;
            }
            else
            {
                return featuresLeftFound;
            }
        }
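
ProcessImage finds the pupil inside a small per-eye sub-ROI and then maps the result back to full-image coordinates (see the comment above ConvertPupilToAbsolute), while SetROI rebuilds the search region around the last pupil centre. The sketch below illustrates both ideas with hypothetical helpers; the names, the margin factor, and the exact types are assumptions, not the library's actual code:

        // Hypothetical: translate a point found inside a sub-ROI back into the
        // coordinate space of the full input image (e.g. 70x70 -> 1280x1024)
        private static Point ToAbsolute(Point pointInRoi, Rectangle roi)
        {
            return new Point(pointInRoi.X + roi.Left, pointInRoi.Y + roi.Top);
        }

        // Hypothetical: build a square search ROI around the last pupil centre,
        // sized as a multiple of the pupil diameter and clamped to the image bounds
        private static Rectangle MakeEyeROI(Size imageSize, Point pupilCenter, double pupilDiameter)
        {
            int half = (int)(pupilDiameter * 2.5); // margin factor is an assumption
            Rectangle roi = new Rectangle(pupilCenter.X - half, pupilCenter.Y - half, 2 * half, 2 * half);
            roi.Intersect(new Rectangle(Point.Empty, imageSize)); // clamp to the image
            return roi;
        }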