示例#1
0
        /// <summary>
        /// Runs the current gaze model on the given face/frame and returns the estimated
        /// gaze point in screen coordinates, after optional smoothing and calibration.
        /// </summary>
        /// <param name="face">Detected face whose eye regions are cropped as model input.</param>
        /// <param name="frame">Source camera frame; must be non-null and non-empty.</param>
        /// <returns>
        /// The calibrated screen-space gaze point, or <c>null</c> when an eye region
        /// required by the model is missing from <paramref name="face"/>.
        /// </returns>
        /// <exception cref="ArgumentNullException">
        /// <paramref name="face"/> or <paramref name="frame"/> is null/empty, or
        /// <see cref="ScreenProperties"/> has not been set.
        /// </exception>
        public Point Detect(FaceRect face, Mat frame)
        {
            var model      = CurrentModel;
            var properties = ScreenProperties;

            if (face == null)
            {
                throw new ArgumentNullException(nameof(face));
            }
            if (frame == null || frame.IsEmpty)
            {
                throw new ArgumentNullException(nameof(frame));
            }
            if (properties == null)
            {
                throw new ArgumentNullException(nameof(properties));
            }

            // NOTE(review): this branch also rejects a missing RIGHT eye even though it is
            // guarded by LeftRequired — looks like a copy of the both-eyes condition.
            // Behavior kept as-is; confirm whether `face.RightEye == null` belongs here.
            if (model.LeftRequired && (face.LeftEye == null || face.RightEye == null))
            {
                return(null);
            }
            if (model.RightRequired && face.RightEye == null)
            {
                return(null);
            }

            Profiler.Start("GazeDetect");

            // Lazily load the model on first use; log the load time for diagnostics.
            if (!model.IsLoaded)
            {
                var timer = new System.Diagnostics.Stopwatch();
                timer.Start();
                model.Load();
                timer.Stop();
                Logger.Log($"Model[{model.Name}] load time: {timer.ElapsedMilliseconds} ms");
            }

            Point vecPt  = null;
            Point result = new Point(0, 0);
            Point pt     = new Point(0, 0);

            Profiler.Start("Gaze.Face.Cvt");
            Mat    leftRoi = null, rightRoi = null, faceRoi = null;
            Tensor leftTensor = null, rightTensor = null, faceTensor = null;

            // try/finally guarantees the ROI Mats and input Tensors are disposed even if
            // tensor conversion or Session.Run throws (the original code leaked them on error).
            try
            {
                if (model.LeftRequired)
                {
                    leftRoi = face.LeftEye.RoiCropByPercent(frame, model.EyeCropPercent);
                    leftRoi.Resize(new Size(model.EyeSize));
                    imgBufferLeft = EnsureBuffer(imgBufferLeft, model.EyeSize * model.EyeSize * 3);
                    leftTensor = Tools.MatBgr2Tensor(leftRoi, model.ImageNormMode, -1, -1, new long[] { 1, model.EyeSize, model.EyeSize, 3 }, imgBufferLeft);
                }
                if (model.RightRequired)
                {
                    rightRoi = face.RightEye.RoiCropByPercent(frame, model.EyeCropPercent);
                    rightRoi.Resize(new Size(model.EyeSize));
                    imgBufferRight = EnsureBuffer(imgBufferRight, model.EyeSize * model.EyeSize * 3);
                    rightTensor = Tools.MatBgr2Tensor(rightRoi, model.ImageNormMode, -1, -1, new long[] { 1, model.EyeSize, model.EyeSize, 3 }, imgBufferRight);
                }
                if (model.FaceRequired)
                {
                    faceRoi = face.ROI(frame);
                    faceRoi.Resize(new Size(model.FaceSize));
                    imgBufferFace = EnsureBuffer(imgBufferFace, model.FaceSize * model.FaceSize * 3);
                    faceTensor = Tools.MatBgr2Tensor(faceRoi, model.ImageNormMode, -1, -1, new long[] { 1, model.FaceSize, model.FaceSize, 3 }, imgBufferFace);
                }
                Profiler.End("Gaze.Face.Cvt");

                Profiler.Start("Gaze.Face.Sess");
                Dictionary <string, Tensor> feedDict = new Dictionary <string, Tensor>();

                if (model.LeftRequired)
                {
                    feedDict.Add(model.LeftOpName, leftTensor);
                }
                if (model.RightRequired)
                {
                    feedDict.Add(model.RightOpName, rightTensor);
                }
                if (model.FaceRequired)
                {
                    feedDict.Add(model.FaceOpName, faceTensor);
                }
                // Optional graph inputs: inference-mode flag and dropout keep-probability.
                if (!string.IsNullOrEmpty(model.PhaseTrainOpName))
                {
                    feedDict.Add(model.PhaseTrainOpName, new Tensor(false));
                }
                if (!string.IsNullOrEmpty(model.KeepProbOpName))
                {
                    feedDict.Add(model.KeepProbOpName, new Tensor(model.KeepProb));
                }

                var fetch = model.Session.Run(new[] { model.OutputOpName }, feedDict);

                Profiler.End("Gaze.Face.Sess");

                var resultTensor = fetch[0];

                float[,] output = (float[, ])resultTensor.GetValue();

                result = new Point(output[0, 0], output[0, 1]);
            }
            finally
            {
                Profiler.Start("Gaze.Face.Dispose");
                leftTensor?.Dispose();
                rightTensor?.Dispose();
                faceTensor?.Dispose();
                leftRoi?.Dispose();
                rightRoi?.Dispose();
                faceRoi?.Dispose();
                Profiler.End("Gaze.Face.Dispose");
            }

            // Model output is sign-flipped into the screen-facing gaze vector, then
            // optionally adjusted by user offset/sensitivity settings.
            var x = result.X * -1;
            var y = result.Y * -1;

            if (UseModification)
            {
                x = (x + OffsetX) * SensitiveX;
                y = (y + OffsetY) * SensitiveY;
            }

            vecPt = new Point(x, y);
            // Smoothing is skipped while calibrating so calibration sees raw samples.
            if (UseSmoothing && !Calibrator.IsCalibrating)
            {
                vecPt = Smoother.Smooth(vecPt);
            }

            // Cast the gaze ray (z = -1, toward the screen) and intersect with the display plane.
            Vector <double> vec = CreateVector.Dense(new double[] { vecPt.X, vecPt.Y, -1 });

            pt = face.SolveRayScreenVector(new Point3D(vec.ToArray()), properties);

            if (ClipToBound)
            {
                pt.X = Util.Clamp(pt.X, 0, ScreenProperties.PixelSize.Width);
                pt.Y = Util.Clamp(pt.Y, 0, ScreenProperties.PixelSize.Height);
            }

            face.GazeInfo = new EyeGazeInfo()
            {
                ScreenPoint = pt,
                Vector      = new Point3D(vecPt.X, vecPt.Y, -1),
                ClipToBound = ClipToBound,
            };

            Calibrator.Push(new CalibratingPushData(face));
            if (UseCalibrator)
            {
                Calibrator.Apply(face, ScreenProperties);
            }

            Profiler.End("GazeDetect");
            return(face.GazeInfo.ScreenPoint);
        }

        // Returns the existing buffer when it already has the requested length,
        // otherwise allocates a new one (avoids re-allocating the BGR float buffer per frame).
        private static float[] EnsureBuffer(float[] buffer, int length)
        {
            return (buffer != null && buffer.Length == length) ? buffer : new float[length];
        }
示例#2
0
        /// <summary>
        /// Estimates the on-screen gaze point for the given face using the configured
        /// <see cref="DetectMode"/>, applying optional smoothing and calibration.
        /// </summary>
        /// <param name="face">Detected face whose eye regions are cropped as model input.</param>
        /// <param name="frame">Source camera frame; must be non-null and non-empty.</param>
        /// <returns>
        /// The calibrated screen-space gaze point, or <c>null</c> when the eyes required
        /// by the active mode are missing from <paramref name="face"/>.
        /// </returns>
        /// <exception cref="ArgumentNullException">
        /// <paramref name="face"/> or <paramref name="frame"/> is null/empty, or
        /// <see cref="ScreenProperties"/> has not been set.
        /// </exception>
        /// <exception cref="NotImplementedException">The active mode is not supported.</exception>
        public Point Detect(FaceRect face, Mat frame)
        {
            // Eye-ROI crop margins per mode (fraction of the eye box added around it).
            const double EyeCropPercent  = .33;
            const double FaceCropPercent = .25;

            mode = DetectMode;
            var properties = ScreenProperties;

            if (face == null)
            {
                throw new ArgumentNullException(nameof(face));
            }
            if (frame == null || frame.IsEmpty)
            {
                throw new ArgumentNullException(nameof(frame));
            }
            if (properties == null)
            {
                throw new ArgumentNullException(nameof(properties));
            }

            // Bail out (null result) when the eyes required by this mode were not detected.
            switch (mode)
            {
            case EyeGazeDetectMode.LeftOnly:
                if (face.LeftEye == null)
                {
                    return(null);
                }
                break;

            case EyeGazeDetectMode.FaceV2Mobile:
            case EyeGazeDetectMode.FaceV2:
            case EyeGazeDetectMode.FaceMobile:
            case EyeGazeDetectMode.Face:
            case EyeGazeDetectMode.Both:
                if (face.LeftEye == null || face.RightEye == null)
                {
                    return(null);
                }
                break;

            default:
                throw new NotImplementedException();
            }

            Profiler.Start("GazeDetect");

            Point vecPt  = null;
            Point result = new Point(0, 0);
            Point pt     = new Point(0, 0);

            // Crop the ROIs required by the mode and run the matching detector;
            // `using` blocks dispose each Mat as soon as inference finishes.
            switch (mode)
            {
            case EyeGazeDetectMode.LeftOnly:
                using (Mat left = face.LeftEye.RoiCropByPercent(frame, EyeCropPercent))
                    result = DetectLeftEyes(left);
                break;

            case EyeGazeDetectMode.Both:
                using (Mat left = face.LeftEye.RoiCropByPercent(frame, EyeCropPercent))
                    using (Mat right = face.RightEye.RoiCropByPercent(frame, EyeCropPercent))
                        result = DetectBothEyes(left, right);
                break;

            case EyeGazeDetectMode.FaceV2Mobile:
            case EyeGazeDetectMode.FaceV2:
            case EyeGazeDetectMode.FaceMobile:
            case EyeGazeDetectMode.Face:
                using (Mat left = face.LeftEye.RoiCropByPercent(frame, FaceCropPercent))
                    using (Mat right = face.RightEye.RoiCropByPercent(frame, FaceCropPercent))
                        using (Mat faceRoi = face.ROI(frame))
                            result = DetectFace(faceRoi, left, right);
                break;

            default:
                throw new NotImplementedException();
            }

            // Model output is sign-flipped into the screen-facing gaze vector, then
            // optionally adjusted by user offset/sensitivity settings.
            var x = result.X * -1;
            var y = result.Y * -1;

            if (UseModification)
            {
                x = (x + OffsetX) * SensitiveX;
                y = (y + OffsetY) * SensitiveY;
            }

            vecPt = new Point(x, y);
            // Smoothing is skipped while calibrating so calibration sees raw samples.
            if (UseSmoothing && !Calibrator.IsCalibrating)
            {
                vecPt = Smoother.Smooth(vecPt);
            }

            // Cast the gaze ray (z = -1, toward the screen) and intersect with the display plane.
            Vector <double> vec = CreateVector.Dense(new double[] { vecPt.X, vecPt.Y, -1 });

            pt = face.SolveRayScreenVector(new Point3D(vec.ToArray()), properties);

            if (ClipToBound)
            {
                pt.X = Util.Clamp(pt.X, 0, ScreenProperties.PixelSize.Width);
                pt.Y = Util.Clamp(pt.Y, 0, ScreenProperties.PixelSize.Height);
            }

            face.GazeInfo = new EyeGazeInfo()
            {
                ScreenPoint = pt,
                Vector      = new Point3D(vecPt.X, vecPt.Y, -1),
                ClipToBound = ClipToBound,
            };

            Calibrator.Push(new CalibratingPushData(face));
            if (UseCalibrator)
            {
                Calibrator.Apply(face, ScreenProperties);
            }

            Profiler.End("GazeDetect");
            return(face.GazeInfo.ScreenPoint);
        }