コード例 #1
0
        /// <summary>
        /// Main image feature detection chain (debug-instrumented variant).
        /// Runs the haar-cascade eye-region detector when needed, then pupil and
        /// (optionally) glint detection on the left eye and, in binocular mode,
        /// the right eye, writing ROIs and feature data into <paramref name="trackData"/>.
        /// A copy of the frame (RLGray) is annotated with ROIs and detected
        /// features for debugging; all Save() calls are commented out, so the
        /// repeated strPath assignments below are currently dead stores kept so
        /// the debug dumps can be re-enabled quickly.
        /// </summary>
        /// <param name="input">Grayscale camera frame to process.</param>
        /// <param name="trackData">Per-frame tracking record that receives ROIs, pupil and glint data.</param>
        /// <returns>
        /// True when the pupil and the configured number of glints were detected
        /// for the tracked eye(s) (both eyes in binocular mode); otherwise false.
        /// </returns>
        public bool ProcessImage(Image <Gray, byte> input, TrackData trackData)
        {
            counter++;
            //Log.Performance.Now.IsEnabled = false;

            featuresLeftFound  = false;
            featuresRightFound = false;

            #region Face detection

            #endregion

            #region Eyes region tracking (binocular)

            //// If binocular -> Track (head), (eye region), pupil, (glints)
            //if (aSee.Settings.Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Binocular)
            //{
            //    if (aSee.Settings.Settings.Instance.Processing.TrackingEyes && eyestracker.IsReady)
            //    {
            //        if (doEyes && CameraControl.Instance.UsingUC480 == true && CameraControl.Instance.IsROISet == false)
            //        {
            //            if (eyestracker.DetectEyes(input, trackData))
            //            {
            //               doEyes = false; // found both eyes
            //               CameraControl.Instance.ROI = trackData.EyesROI;
            //               TrackDB.Instance.Data.Clear();
            //               doEyes = false;
            //               doEye = true;
            //               return false;
            //            }
            //        }
            //    }
            //}

            #endregion

            #region Eye region tracking

            if (aSee.Settings.Settings.Instance.Processing.TrackingEye && doEye)
            {
                // Eye feature detector ready when haar cascade xml file loaded
                if (eyetracker.IsReady)
                {
                    if (eyetracker.DetectEyes(input, trackData))// will set left/right roi
                    {
                        missCounter = 0;
                        doEye       = false;
                    }
                    else
                    {
                        // No eye(s) found; retry the cascade detector next frame
                        doEye = true;
                        missCounter++;

                        // After roughly a third of a second of consecutive misses,
                        // drop the hardware ROI so the full frame is searched again.
                        if (aSee.Devices.Camera.Instance.Device.IsSupportingROI && missCounter > aSee.Devices.Camera.Instance.Device.FPS / 3)
                        {
                            aSee.Devices.Camera.Instance.Device.ClearROI();
                        }

                        return(false);
                    }
                }
            }

            #endregion

            #region Left eye

            string strPath;
            strPath = String.Format("..\\pLROI{0:d}.jpg", trackData.FrameNumber);

            // Set sub-roi, if eye feature detection was performed do nothing otherwise use values from previous frame
            ApplyEstimatedEyeROI(EyeEnum.Left, trackData, input.Size);

            inputLeftEye = input.Copy(trackData.LeftROI);

            // Debug overlay: full-frame copy annotated with ROIs and detections below
            Image <Gray, byte> RLGray = input.Copy();
            RLGray.Draw(trackData.LeftROI, new Gray(255), 1);

            strPath = String.Format("..\\pLEye{0:d}.jpg", trackData.FrameNumber);
            // inputLeftEye.Save(strPath);


            // Prepare threshold statistics for the left-eye crop (presumably a
            // histogram — confirm in Threshold()), then pick the pupil threshold
            // depending on whether the subject is wearing glasses.
            Threshold(inputLeftEye);
            if (!aSee.Settings.Settings.Instance.Processing.IsWearGlasses)
            {
                // No glasses: minimum-threshold estimate ("ostu" = Otsu, presumably)
                // plus a configured offset becomes the left pupil threshold.
                int ostuImage             = GetMinimumThreshold();
                int newPupilThresholdLeft = ostuImage + aSee.Settings.Settings.Instance.Processing.AutoAddPupilThresholdLeft;
                aSee.Settings.Settings.Instance.Processing.PupilThresholdLeft = newPupilThresholdLeft;

                strPath = String.Format("..\\pLThreshold{0:d}.jpg", trackData.FrameNumber);

                //  inputLeftEye.ThresholdToZeroInv(new Gray(aSee.Settings.Settings.Instance.Processing.PupilThresholdLeft)).Save(strPath);
            }
            else
            {
                // Thresholding method used when glasses are worn: 1-D max-entropy
                // threshold copes better with lens reflections.
                strPath = String.Format("..\\{0:d}Get1DMaxEntropyThreshold.jpg", trackData.FrameNumber);
                int ostuImage = Get1DMaxEntropyThreshold();
                //  inputLeftEye.ThresholdToZeroInv(new Gray(ostuImage)).Save(strPath);
                int newPupilThresholdLeft = ostuImage + aSee.Settings.Settings.Instance.Processing.AutoAddPupilThresholdLeft;

                aSee.Settings.Settings.Instance.Processing.PupilThresholdLeft = newPupilThresholdLeft;
            }



            // Detect pupil in the left-eye crop
            if (pupilDetectionLeft.DetectPupil(inputLeftEye, trackData))
            {
                trackData.PupilDataLeft = pupilDetectionLeft.PupilData;

                // Detect glint(s)
                if (aSee.Settings.Settings.Instance.Processing.TrackingGlints)
                {
                    if (glintDetectionLeft.DetectGlints(inputLeftEye, pupilDetectionLeft.PupilData.Center))
                    {
                        trackData.GlintDataLeft = ConvertGlintsToAbsolute(glintDetectionLeft.GlintData, trackData.LeftROI);
                        featuresLeftFound       = true;

                        // NOTE(review): indexing Centers[0]/Centers[1] assumes at
                        // least two glints were detected — confirm DetectGlints
                        // guarantees that when it returns true.
                        PointF gf = new PointF();
                        gf.X = (float)trackData.GlintDataLeft.Glints.Centers[0].X;
                        gf.Y = (float)trackData.GlintDataLeft.Glints.Centers[0].Y;
                        RLGray.Draw(new CircleF(gf, 40), new Gray(255), 1);


                        gf.X = (float)trackData.GlintDataLeft.Glints.Centers[1].X;
                        gf.Y = (float)trackData.GlintDataLeft.Glints.Centers[1].Y;
                        RLGray.Draw(new CircleF(gf, 40), new Gray(255), 2);
                    }
                    else
                    {
                        // Glints not found: still store (partial) glint data, but
                        // featuresLeftFound stays false.
                        trackData.GlintDataLeft = ConvertGlintsToAbsolute(glintDetectionLeft.GlintData, trackData.LeftROI);
                    }
                }
                else
                {
                    // Glint tracking disabled: pupil alone counts as success
                    featuresLeftFound = true;
                }

                //    aSeeLogger.Logger.Instance.Debug(String.Format("{0:d}", DateTime.Now.Millisecond - start.Millisecond));

                // Convert values from subROI to whole absolute image space (ex. from 70x70 to 1280x1024)
                trackData.PupilDataLeft = ConvertPupilToAbsolute(EyeEnum.Left, pupilDetectionLeft.PupilData, trackData);

                PointF pf = new PointF();
                pf.X = (float)trackData.PupilDataLeft.Center.X;
                pf.Y = (float)trackData.PupilDataLeft.Center.Y;

                RLGray.Draw(new CircleF(pf, 20), new Gray(255), 2);

                strPath = String.Format("..\\pLR{0:d}.jpg", trackData.FrameNumber);

                // RLGray.Save(strPath);
            }

            #endregion

            #region Right eye

            if (aSee.Settings.Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Binocular)
            {
                ApplyEstimatedEyeROI(EyeEnum.Right, trackData, input.Size);

                inputRightEye = input.Copy(trackData.RightROI);

                //  Image<Gray, byte> rGray = input.Copy();

                RLGray.Draw(trackData.RightROI, new Gray(255), 1);

                strPath = String.Format("..\\pREye{0:d}.jpg", trackData.FrameNumber);

                // inputRightEye.Save(strPath);

                // Detect pupil: first adjust the right-eye pupil threshold
                // (same scheme as the left eye above)
                Threshold(inputRightEye);
                if (!aSee.Settings.Settings.Instance.Processing.IsWearGlasses)
                {
                    int ostuImage = GetMinimumThreshold();
                    int newPupilThresholdRight = ostuImage + aSee.Settings.Settings.Instance.Processing.AutoAddPupilThresholdRight;

                    aSee.Settings.Settings.Instance.Processing.PupilThresholdRight = newPupilThresholdRight;

                    strPath = String.Format("..\\pRThreshold{0:d}.jpg", trackData.FrameNumber);
                    //   inputRightEye.ThresholdToZeroInv(new Gray(aSee.Settings.Settings.Instance.Processing.PupilThresholdRight)).Save(strPath);
                }
                else
                {
                    // Thresholding method used when glasses are worn (1-D max-entropy)
                    strPath = String.Format("..\\{0:d}Get1DMaxEntropyThreshold.jpg", trackData.FrameNumber);
                    int ostuImage = Get1DMaxEntropyThreshold();
                    //inputRightEye.ThresholdToZeroInv(new Gray(ostuImage)).Save(strPath);
                    int newPupilThresholdRight = ostuImage + aSee.Settings.Settings.Instance.Processing.AutoAddPupilThresholdRight;

                    aSee.Settings.Settings.Instance.Processing.PupilThresholdRight = newPupilThresholdRight;
                }



                if (pupilDetectionRight.DetectPupil(inputRightEye, trackData))
                {
                    trackData.PupilDataRight = pupilDetectionRight.PupilData;


                    // Detect glint(s)
                    if (aSee.Settings.Settings.Instance.Processing.TrackingGlints)
                    {
                        if (glintDetectionRight.DetectGlints(inputRightEye, pupilDetectionRight.PupilData.Center))
                        {
                            trackData.GlintDataRight = ConvertGlintsToAbsolute(glintDetectionRight.GlintData, trackData.RightROI);


                            // NOTE(review): same two-glint assumption as the left eye.
                            PointF gf = new PointF();
                            gf.X = (float)trackData.GlintDataRight.Glints.Centers[0].X;
                            gf.Y = (float)trackData.GlintDataRight.Glints.Centers[0].Y;
                            RLGray.Draw(new CircleF(gf, 40), new Gray(255), 1);


                            gf.X = (float)trackData.GlintDataRight.Glints.Centers[1].X;
                            gf.Y = (float)trackData.GlintDataRight.Glints.Centers[1].Y;
                            RLGray.Draw(new CircleF(gf, 40), new Gray(255), 2);

                            featuresRightFound = true;
                        }
                        else
                        {
                            trackData.GlintDataRight = ConvertGlintsToAbsolute(glintDetectionRight.GlintData, trackData.RightROI);
                        }
                    }
                    else
                    {
                        featuresRightFound = true;
                    }

                    // Convert from sub-ROI coordinates to absolute image space
                    trackData.PupilDataRight = ConvertPupilToAbsolute(EyeEnum.Right, pupilDetectionRight.PupilData, trackData);

                    PointF pf = new PointF();
                    pf.X = (float)trackData.PupilDataRight.Center.X;
                    pf.Y = (float)trackData.PupilDataRight.Center.Y;
                    RLGray.Draw(new CircleF(pf, 20), new Gray(255), 4);

                    strPath = String.Format("..\\pLR{0:d}.jpg", trackData.FrameNumber);

                    //  RLGray.Save(strPath);
                }
            }

            //  if (!doEye)
            //       RLGray.Save(strPath);

            #endregion

            #region ROI mode / state / update

            #region Monocular

            if (aSee.Settings.Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Monocular)
            {
                if (!featuresLeftFound)
                {
                    if (aSee.Settings.Settings.Instance.Processing.TrackingEye)
                    {
                        // Lost the eye: re-run the eye-region detector next frame
                        doEye = true;
                        if (aSee.Devices.Camera.Instance.Device.IsSettingROI && aSee.Devices.Camera.Instance.Device.IsROISet == false)
                        {
                            aSee.Devices.Camera.Instance.Device.ClearROI();
                        }
                    }
                    else
                    {
                        trackData.LeftROI = new Rectangle(new Point(0, 0), new Size(0, 0));
                    }
                }
                else
                {
                    trackData.LeftROI = SetROI(input.Size, trackData.PupilDataLeft.Center, trackData.PupilDataLeft.Diameter);
                    doEye             = false;

                    // If using special camera, set roi and adjust EyeROIs
                    if (aSee.Devices.Camera.Instance.Device.IsSupportingROI)
                    {
                        if (aSee.Devices.Camera.Instance.Device.IsROISet == false)
                        {
                            CameraSetROI(trackData);
                        }
                        else
                        {
                            CenterROIOnPupil(trackData, EyeEnum.Left, input.Size);

                            // Re-center sub-ROIs, ensure that the eyes stay within the margins
                            if (aSee.Devices.Camera.Instance.Device.IsSupportingROI && aSee.Devices.Camera.Instance.Device.IsROISet)
                            {
                                CameraCenterROI(trackData, input.Size);
                            }
                        }
                    }
                }
            }

            #endregion

            #region Binocular

            if (aSee.Settings.Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Binocular)
            {
                // Nothing found (or the two ROIs overlap) -> run eye classifier on next frame
                if (!featuresLeftFound || !featuresRightFound || trackData.LeftROI.IntersectsWith(trackData.RightROI))
                {
                    if (aSee.Settings.Settings.Instance.Processing.TrackingEye)
                    {
                        doEye = true;
                    }
                    else
                    {
                        trackData.LeftROI  = new Rectangle(new Point(0, 0), new Size(0, 0));
                        trackData.RightROI = new Rectangle(new Point(0, 0), new Size(0, 0));
                    }
                }
                else
                {
                    trackData.LeftROI  = SetROI(input.Size, trackData.PupilDataLeft.Center, trackData.PupilDataLeft.Diameter);
                    trackData.RightROI = SetROI(input.Size, trackData.PupilDataRight.Center, trackData.PupilDataRight.Diameter);

                    Image <Gray, byte> trackROIGray = input.Copy();
                    trackROIGray.Draw(trackData.LeftROI, new Gray(255), 1);
                    trackROIGray.Draw(trackData.RightROI, new Gray(255), 1);

                    strPath = String.Format("..\\trackROIGray{0:d}.jpg", trackData.FrameNumber);

                    //  trackROIGray.Save(strPath);



                    doEye = false;

                    // If using special camera, set roi and adjust EyeROIs
                    if (aSee.Devices.Camera.Instance.Device.IsSupportingROI)
                    {
                        if (aSee.Devices.Camera.Instance.Device.IsROISet == false)
                        {
                            CameraSetROI(trackData);
                        }
                        else
                        {
                            CenterROIOnPupil(trackData, EyeEnum.Left, input.Size);
                            CenterROIOnPupil(trackData, EyeEnum.Right, input.Size);

                            // Re-center sub-ROIs, ensure that the eyes stay within the margins
                            if (aSee.Devices.Camera.Instance.Device.IsSupportingROI && aSee.Devices.Camera.Instance.Device.IsROISet)
                            {
                                CameraCenterROI(trackData, input.Size);
                            }
                        }
                    }
                }
            }

            #endregion

            #endregion

            //Performance.Now.Stamp("Processing all done");

            // Final verdict: success requires the pupil plus the configured number
            // of glints for every tracked eye (both eyes in binocular mode).
            if (aSee.Settings.Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Binocular)
            {
                if (featuresRightFound)
                {
                    if (trackData.PupilRightDetected && trackData.GlintsRightDetected &&
                        trackData.GlintDataRight.Glints.Count ==
                        aSee.Settings.Settings.Instance.Processing.NumberOfGlints

                        && trackData.PupilLeftDetected && trackData.GlintsLeftDetected &&
                        trackData.GlintDataLeft.Glints.Count ==
                        aSee.Settings.Settings.Instance.Processing.NumberOfGlints)
                    {
                        return(true);
                    }
                    else
                    {
                        return(false);
                    }
                }
                else
                {
                    return(false);
                }
            }
            else
            {
                if (featuresLeftFound)
                {
                    if (trackData.PupilLeftDetected && trackData.GlintsLeftDetected &&
                        trackData.GlintDataLeft.Glints.Count ==
                        aSee.Settings.Settings.Instance.Processing.NumberOfGlints)
                    {
                        return(true);
                    }
                    else
                    {
                        return(false);
                    }
                }
                else
                {
                    return(false);
                }
            }
        }
コード例 #2
0
ファイル: DetectionManager.cs プロジェクト: HE-Arc/ErgoOgama
        /// <summary>
        /// Main image feature detection chain: runs the haar-cascade eye-region
        /// detector when needed, then pupil and (optionally) glint detection on
        /// the left eye and, in binocular mode, the right eye, writing ROIs and
        /// feature data into <paramref name="trackData"/>, and finally updates
        /// the software/hardware ROI state for the next frame.
        /// </summary>
        /// <param name="input">Grayscale camera frame to process.</param>
        /// <param name="trackData">Per-frame tracking record that receives ROIs, pupil and glint data.</param>
        /// <returns>
        /// In binocular mode: featuresRightFound; otherwise featuresLeftFound.
        /// NOTE(review): the binocular return ignores featuresLeftFound (the ROI
        /// update above does consider it) — verify this is intentional.
        /// </returns>
        public bool ProcessImage(Image <Gray, byte> input, TrackData trackData)
        {
            counter++;
            //Log.Performance.Now.IsEnabled = false;

            featuresLeftFound  = false;
            featuresRightFound = false;

            #region Face detection

            #endregion

            #region Eyes region tracking (binocular)

            //// If binocular -> Track (head), (eye region), pupil, (glints)
            //if (Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Binocular)
            //{
            //    if (Settings.Instance.Processing.TrackingEyes && eyestracker.IsReady)
            //    {
            //        if (doEyes && CameraControl.Instance.UsingUC480 == true && CameraControl.Instance.IsROISet == false)
            //        {
            //            if (eyestracker.DetectEyes(input, trackData))
            //            {
            //               doEyes = false; // found both eyes
            //               CameraControl.Instance.ROI = trackData.EyesROI;
            //               TrackDB.Instance.Data.Clear();
            //               doEyes = false;
            //               doEye = true;
            //               return false;
            //            }
            //        }
            //    }
            //}

            #endregion

            #region Eye region tracking

            if (Settings.Instance.Processing.TrackingEye && doEye)
            {
                // Eye feature detector ready when haar cascade xml file loaded
                if (eyetracker.IsReady)
                {
                    if (eyetracker.DetectEyes(input, trackData))// will set left/right roi
                    {
                        missCounter = 0;
                        doEye       = false;
                    }
                    else
                    {
                        // No eye(s) found; retry the cascade detector next frame
                        doEye = true;
                        missCounter++;

                        // After roughly a third of a second of consecutive misses,
                        // drop the hardware ROI so the full frame is searched again.
                        if (GTHardware.Camera.Instance.Device.IsSupportingROI && missCounter > GTHardware.Camera.Instance.Device.FPS / 3)
                        {
                            GTHardware.Camera.Instance.Device.ClearROI();
                        }

                        return(false);
                    }
                }
            }

            #endregion

            #region Left eye

            // Set sub-roi, if eye feature detection was performed do nothing otherwise use values from previous frame
            ApplyEstimatedEyeROI(EyeEnum.Left, trackData, input.Size);
            inputLeftEye = input.Copy(trackData.LeftROI);

            // Detect pupil
            if (pupilDetectionLeft.DetectPupil(inputLeftEye, trackData))
            {
                trackData.PupilDataLeft = pupilDetectionLeft.PupilData;

                // Detect glint(s)
                if (Settings.Instance.Processing.TrackingGlints)
                {
                    if (glintDetectionLeft.DetectGlints(inputLeftEye, pupilDetectionLeft.PupilData.Center))
                    {
                        trackData.GlintDataLeft = ConvertGlintsToAbsolute(glintDetectionLeft.GlintData, trackData.LeftROI);
                        featuresLeftFound       = true;
                    }
                    // NOTE(review): on glint failure GlintDataLeft is never written
                    // and featuresLeftFound stays false — confirm downstream readers
                    // tolerate a stale/unset GlintDataLeft.
                }
                else
                {
                    // Glint tracking disabled: pupil alone counts as success
                    featuresLeftFound = true;
                }

                // Convert values from subROI to whole absolute image space (ex. from 70x70 to 1280x1024)
                trackData.PupilDataLeft = ConvertPupilToAbsolute(EyeEnum.Left, pupilDetectionLeft.PupilData, trackData);
            }

            #endregion

            #region Right eye

            if (Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Binocular)
            {
                ApplyEstimatedEyeROI(EyeEnum.Right, trackData, input.Size);
                inputRightEye = input.Copy(trackData.RightROI);

                // Detect pupil
                if (pupilDetectionRight.DetectPupil(inputRightEye, trackData))
                {
                    trackData.PupilDataRight = pupilDetectionRight.PupilData;

                    // Detect glint(s)
                    if (Settings.Instance.Processing.TrackingGlints)
                    {
                        if (glintDetectionRight.DetectGlints(inputRightEye, pupilDetectionRight.PupilData.Center))
                        {
                            trackData.GlintDataRight = ConvertGlintsToAbsolute(glintDetectionRight.GlintData, trackData.RightROI);
                            featuresRightFound       = true;
                        }
                    }
                    else
                    {
                        featuresRightFound = true;
                    }

                    // Convert from sub-ROI coordinates to absolute image space
                    trackData.PupilDataRight = ConvertPupilToAbsolute(EyeEnum.Right, pupilDetectionRight.PupilData, trackData);
                }
            }

            #endregion

            #region ROI mode / state / update

            #region Monocular

            if (Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Monocular)
            {
                if (!featuresLeftFound)
                {
                    if (Settings.Instance.Processing.TrackingEye)
                    {
                        // Lost the eye: re-run the eye-region detector next frame
                        doEye = true;
                        if (GTHardware.Camera.Instance.Device.IsSettingROI && GTHardware.Camera.Instance.Device.IsROISet == false)
                        {
                            GTHardware.Camera.Instance.Device.ClearROI();
                        }
                    }
                    else
                    {
                        trackData.LeftROI = new Rectangle(new Point(0, 0), new Size(0, 0));
                    }
                }
                else
                {
                    trackData.LeftROI = SetROI(input.Size, trackData.PupilDataLeft.Center, trackData.PupilDataLeft.Diameter);
                    doEye             = false;

                    // If using special camera, set roi and adjust EyeROIs
                    if (GTHardware.Camera.Instance.Device.IsSupportingROI)
                    {
                        if (GTHardware.Camera.Instance.Device.IsROISet == false)
                        {
                            CameraSetROI(trackData);
                        }
                        else
                        {
                            CenterROIOnPupil(trackData, EyeEnum.Left, input.Size);

                            // Re-center sub-ROIs, ensure that the eyes stay within the margins
                            if (GTHardware.Camera.Instance.Device.IsSupportingROI && GTHardware.Camera.Instance.Device.IsROISet)
                            {
                                CameraCenterROI(trackData, input.Size);
                            }
                        }
                    }
                }
            }

            #endregion

            #region Binocular

            if (Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Binocular)
            {
                // Nothing found, run eye classifier on next frame
                if (!featuresLeftFound || !featuresRightFound)
                {
                    if (Settings.Instance.Processing.TrackingEye)
                    {
                        doEye = true;
                    }
                    else
                    {
                        trackData.LeftROI  = new Rectangle(new Point(0, 0), new Size(0, 0));
                        trackData.RightROI = new Rectangle(new Point(0, 0), new Size(0, 0));
                    }
                }
                else
                {
                    trackData.LeftROI  = SetROI(input.Size, trackData.PupilDataLeft.Center, trackData.PupilDataLeft.Diameter);
                    trackData.RightROI = SetROI(input.Size, trackData.PupilDataRight.Center, trackData.PupilDataRight.Diameter);

                    doEye = false;

                    // If using special camera, set roi and adjust EyeROIs
                    if (GTHardware.Camera.Instance.Device.IsSupportingROI)
                    {
                        if (GTHardware.Camera.Instance.Device.IsROISet == false)
                        {
                            CameraSetROI(trackData);
                        }
                        else
                        {
                            CenterROIOnPupil(trackData, EyeEnum.Left, input.Size);
                            CenterROIOnPupil(trackData, EyeEnum.Right, input.Size);

                            // Re-center sub-ROIs, ensure that the eyes stay within the margins
                            if (GTHardware.Camera.Instance.Device.IsSupportingROI && GTHardware.Camera.Instance.Device.IsROISet)
                            {
                                CameraCenterROI(trackData, input.Size);
                            }
                        }
                    }
                }
            }

            #endregion

            #endregion

            //Performance.Now.Stamp("Processing all done");

            if (Settings.Instance.Processing.TrackingMode == TrackingModeEnum.Binocular)
            {
                return(featuresRightFound);
            }
            else
            {
                return(featuresLeftFound);
            }
        }
コード例 #3
0
        /// <summary>
        /// Main image feature detection chain (UC480-camera variant): runs the
        /// haar-cascade eye-region detector when needed, then pupil and
        /// (optionally) glint detection on the left eye and, in binocular mode,
        /// the right eye, writing results into <paramref name="trackData"/>, and
        /// updates the UC480 camera ROI state for the next frame.
        /// </summary>
        /// <param name="input">Grayscale camera frame to process.</param>
        /// <param name="trackData">Per-frame tracking record that receives ROIs, pupil and glint data.</param>
        /// <returns>
        /// In binocular mode: featuresRightFound; otherwise featuresLeftFound.
        /// </returns>
        public bool ProcessImage(Image <Gray, byte> input, TrackData trackData)
        {
            counter++;
            //Log.Performance.Now.IsEnabled = false;

            featuresLeftFound  = false;
            featuresRightFound = false;

            #region Eyes region tracking (binocular)

            //// If binocular -> Track (head), (eye region), pupil, (glints)
            //if (GTSettings.Current.Processing.TrackingMode == TrackingModeEnum.Binocular)
            //{
            //    if (GTSettings.Current.Processing.TrackingEyes && eyestracker.IsReady)
            //    {
            //        if (doEyes && CameraControl.Instance.UsingUC480 == true && CameraControl.Instance.IsROISet == false)
            //        {
            //            if (eyestracker.DetectEyes(input, trackData))
            //            {
            //               doEyes = false; // found both eyes
            //               CameraControl.Instance.ROI = trackData.EyesROI;
            //               TrackDB.Instance.Data.Clear();
            //               doEyes = false;
            //               doEye = true;
            //               return false;
            //            }
            //        }
            //    }
            //}

            #endregion

            #region Eye region tracking

            if (GTSettings.Current.Processing.TrackingEye && doEye)
            {
                // Eye feature detector ready when haar cascade xml file loaded
                if (eyetracker.IsReady)
                {
                    if (eyetracker.DetectEyes(input, trackData)) // will set left/right roi
                    {
                        doEye = false;
                    }
                    else
                    {
                        // No eye(s) found; retry the cascade detector next frame
                        doEye = true;
                        return(false);
                    }
                }
            }

            #endregion

            #region Left eye

            // Set sub-roi, if eye feature detection was performed do nothing otherwise use values from previous frame
            ApplyEstimatedEyeROI(EyeEnum.Left, trackData, input.Size);
            inputLeftEye = input.Copy(trackData.LeftROI);

            // Detect pupil
            if (pupilDetectionLeft.DetectPupil(inputLeftEye, trackData))
            {
                trackData.PupilDataLeft = pupilDetectionLeft.PupilData;

                // Detect glint(s)
                if (GTSettings.Current.Processing.TrackingGlints)
                {
                    if (glintDetectionLeft.DetectGlints(inputLeftEye, pupilDetectionLeft.PupilData.Center))
                    {
                        trackData.GlintDataLeft = ConvertGlintsToAbsolute(glintDetectionLeft.GlintData, trackData.LeftROI);
                        featuresLeftFound       = true;
                    }
                }
                else
                {
                    // Glint tracking disabled: pupil alone counts as success
                    featuresLeftFound = true;
                }

                // Convert values from subROI to whole absolute image space (ex. from 70x70 to 1280x1024)
                trackData.PupilDataLeft = ConvertPupilToAbsolute(EyeEnum.Left, pupilDetectionLeft.PupilData, trackData);
            }


            #endregion

            #region Right eye

            if (GTSettings.Current.Processing.TrackingMode == TrackingModeEnum.Binocular)
            {
                ApplyEstimatedEyeROI(EyeEnum.Right, trackData, input.Size);
                inputRightEye = input.Copy(trackData.RightROI);

                // Detect pupil
                if (pupilDetectionRight.DetectPupil(inputRightEye, trackData))
                {
                    trackData.PupilDataRight = pupilDetectionRight.PupilData;

                    // Detect glint(s)
                    if (GTSettings.Current.Processing.TrackingGlints)
                    {
                        if (glintDetectionRight.DetectGlints(inputRightEye, pupilDetectionRight.PupilData.Center))
                        {
                            trackData.GlintDataRight = ConvertGlintsToAbsolute(glintDetectionRight.GlintData, trackData.RightROI);
                            featuresRightFound       = true;
                        }
                    }
                    else
                    {
                        featuresRightFound = true;
                    }

                    // Convert from sub-ROI coordinates to absolute image space
                    trackData.PupilDataRight = ConvertPupilToAbsolute(EyeEnum.Right, pupilDetectionRight.PupilData, trackData);
                }
            }

            #endregion

            #region ROI mode / state / update

            if (GTSettings.Current.Processing.TrackingMode == TrackingModeEnum.Binocular)
            {
                if (!featuresLeftFound || !featuresRightFound)
                {
                    if (GTSettings.Current.Processing.TrackingEye)
                    {
                        doEye = true;
                        //if(CameraControl.Instance.UsingUC480 && CameraControl.Instance.IsROISet == false)
                        //   CameraControl.Instance.ClearROI();
                    }
                    else
                    {
                        // NOTE(review): only LeftROI is reset here; RightROI keeps
                        // its previous value, unlike the monocular branch below
                        // which mirrors this shape — verify this is intentional.
                        trackData.LeftROI = new Rectangle(new Point(0, 0), new Size(0, 0));
                    }
                }
                else
                {
                    // Disable eyes and eye feature detection
                    doEye = false;

                    // If using UC480 set roi and adjust EyeROIs
                    if (CameraControl.Instance.UsingUC480)
                    {
                        if (CameraControl.Instance.IsROISet)
                        {
                            // Set ROIs if we didn't detect them through features (only searching for eye features on first frame (or when lost)
                            CenterROIOnPupil(trackData, EyeEnum.Left, input.Size);
                            CenterROIOnPupil(trackData, EyeEnum.Right, input.Size);

                            // Re-center sub-ROIs, ensure that the eyes stay within the margins
                            this.CenterEyesROI(trackData, input.Size);
                        }
                        else
                        {
                            SetROICamera(trackData);
                        }
                    }
                }
            }

            if (GTSettings.Current.Processing.TrackingMode == TrackingModeEnum.Monocular)
            {
                if (!featuresLeftFound)
                {
                    if (GTSettings.Current.Processing.TrackingEye)
                    {
                        // Lost the eye: re-run the eye-region detector next frame
                        doEye = true;
                        if (CameraControl.Instance.UsingUC480 && CameraControl.Instance.IsROISet == false)
                        {
                            CameraControl.Instance.ClearROI();
                        }
                    }
                    else
                    {
                        trackData.LeftROI = new Rectangle(new Point(0, 0), new Size(0, 0));
                    }
                }
                else
                {
                    // ROI radius derived from the pupil blob area (circle-equivalent radius)
                    trackData.LeftROI = SetROI(input.Size, trackData.PupilDataLeft.Center, Math.Sqrt(trackData.PupilDataLeft.Blob.Area) / 2);
                    doEye             = false;

                    // If using UC480 set roi and adjust EyeROIs
                    if (CameraControl.Instance.UsingUC480)
                    {
                        if (CameraControl.Instance.IsROISet == false)
                        {
                            SetROICamera(trackData);
                        }
                        else
                        {
                            CenterROIOnPupil(trackData, EyeEnum.Left, input.Size);

                            // Re-center sub-ROIs, ensure that the eyes stay within the margins
                            if (CameraControl.Instance.UsingUC480 && CameraControl.Instance.IsROISet)
                            {
                                this.CenterEyesROI(trackData, input.Size);
                            }
                        }
                    }
                }
            }

            #endregion

            Performance.Now.Stamp("Processing all done");

            if (GTSettings.Current.Processing.TrackingMode == TrackingModeEnum.Binocular)
            {
                return(featuresRightFound);
            }
            else
            {
                return(featuresLeftFound);
            }
        }