Exemplo n.º 1
0
        /// <summary>
        /// Loads the LBF facemark model, fits landmark points to the selected face
        /// in both the current and the next image, and appends corner anchor points.
        /// </summary>
        private void FindFacialFeaturePoints()
        {
            // NOTE(review): the original computed Path.GetFileName(@"data/haarcascade_...")
            // into an unused local — dead code, removed. It also caught every exception
            // and rethrew `new Exception(ex.Message)`, destroying the original exception
            // type and stack trace; failures now propagate unchanged.
            facemarkParam = new FacemarkLBFParams();
            facemark      = new FacemarkLBF(facemarkParam);
            facemark.LoadModel(@"data/lbfmodel.yaml");

            // initialize image mats for the two morph endpoints
            currImageMat = CurrImageI.Mat;
            nextImageMat = NextImageI.Mat;

            // --- current face: detect rectangles, fit landmarks, pick selected face ---
            FacesListCurr = facesArrCurr.OfType <Rectangle>().ToList();

            VectorOfRect vrLeft = new VectorOfRect(facesArrCurr);
            landmarksCurr = new VectorOfVectorOfPointF();
            facemark.Fit(currImageMat, vrLeft, landmarksCurr);
            ffpCurr = landmarksCurr[curr.SelectedFace];

            // --- next face: same procedure on the second image ---
            FacesListNext = facesArrNext.OfType <Rectangle>().ToList();

            VectorOfRect vrRight = new VectorOfRect(facesArrNext);
            landmarksNext = new VectorOfVectorOfPointF();
            facemark.Fit(nextImageMat, vrRight, landmarksNext);
            ffpNext = landmarksNext[next.SelectedFace];

            // append image-corner points (presumably so triangulation covers the
            // whole frame — confirm in AddCornerPoints)
            ffpCurr = AddCornerPoints(ffpCurr, this.curr.ResizedImage.Mat);
            ffpNext = AddCornerPoints(ffpNext, this.next.ResizedImage.Mat);
        }
        /// <summary>
        /// Detects the first face in the image and returns its landmark points
        /// grouped by facial component. Thread-safe via <c>syncLock</c>.
        /// </summary>
        /// <param name="image">Image to analyze.</param>
        /// <returns>
        /// Six point arrays: eyebrows, eyes, nose, mouth, the full landmark set,
        /// and the face bounding box as two corner points.
        /// </returns>
        /// <exception cref="ArgumentException">No landmarks could be fitted.</exception>
        public PointF[][] Fit(IImage image)
        {
            lock (syncLock)
            {
                var detectedFaces = new VectorOfRect(faceDetector.DetectBoxFaces(image));
                var allLandmarks  = new VectorOfVectorOfPointF();

                if (!facemark.Fit(image, detectedFaces, allLandmarks))
                {
                    throw new ArgumentException("No landamarks point detected for input image");
                }

                var firstFace  = detectedFaces.ToArray().First();
                var facePoints = allLandmarks.ToArrayOfArray().First();

                // slots 0-3: individual components, 4: full landmark set,
                // 5: bounding box corners
                var componentLandmarkPoints = new PointF[][]
                {
                    GetComponentPoints(facePoints, EYEBROWS_POINT_RANGE),
                    GetComponentPoints(facePoints, EYES_POINT_RANGE),
                    GetComponentPoints(facePoints, NOSE_POINT_RANGE),
                    GetComponentPoints(facePoints, MOUTH_POINT_RANGE),
                    facePoints,
                    new PointF[]
                    {
                        new PointF(firstFace.Left, firstFace.Top),
                        new PointF(firstFace.Right, firstFace.Bottom)
                    }
                };

                return componentLandmarkPoints;
            }
        }
Exemplo n.º 3
0
        /// <summary>
        /// Detects faces in <c>imgInput</c>, fits facial landmarks, and draws both
        /// the face rectangles and the landmark points onto the image.
        /// </summary>
        /// <returns>The annotated input image, or null when no landmarks were fitted.</returns>
        public Image <Bgr, Byte> GetFacePoints()
        {
            // NOTE(review): the original also loaded "test.png" into a local that was
            // never used (the method works on imgInput throughout) and materialized
            // landmarks into an unused array; both dead statements were removed.
            Image <Gray, byte> grayImage = imgInput.Convert <Gray, byte>();

            grayImage._EqualizeHist(); // improve detection contrast

            VectorOfRect           faces     = new VectorOfRect(faceDetector.DetectMultiScale(grayImage));
            VectorOfVectorOfPointF landmarks = new VectorOfVectorOfPointF();

            bool success = facemark.Fit(grayImage, faces, landmarks);

            if (success)
            {
                Rectangle[] facesRect = faces.ToArray();
                for (int i = 0; i < facesRect.Length; i++)
                {
                    // BUGFIX: draw rectangle i — the original always drew facesRect[0],
                    // so only the first face ever got a box.
                    imgInput.Draw(facesRect[i], new Bgr(Color.Blue), 2);
                    FaceInvoke.DrawFacemarks(imgInput, landmarks[i], new Bgr(Color.Blue).MCvScalar);
                }
                return(imgInput);
            }
            return(null);
        }
Exemplo n.º 4
0
 /// <summary>
 /// Detect the facial landmarks within the supplied face regions.
 /// </summary>
 /// <param name="image">The image to detect facial landmarks from</param>
 /// <param name="fullFaceRegions">The face regions to detect landmarks from</param>
 /// <returns>Vector of facial landmarks, one entry per face region</returns>
 public VectorOfVectorOfPointF Detect(IInputArray image, Rectangle[] fullFaceRegions)
 {
     VectorOfVectorOfPointF detectedLandmarks = new VectorOfVectorOfPointF();

     // the region vector is only needed for the Fit call itself
     using (VectorOfRect regionVector = new VectorOfRect(fullFaceRegions))
     {
         _facemark.Fit(image, regionVector, detectedLandmarks);
     }

     return detectedLandmarks;
 }
        // Fits the LBF facemark model to a single face rectangle.
        // Returns the landmark points for that face, or an empty vector when
        // fitting fails (isSuccess reports which case occurred).
        static private VectorOfPointF MarkFacialPoints(FacemarkLBF facemark, Image <Gray, byte> image, Rectangle faceRect, out bool isSuccess)
        {
            var faceVector   = new VectorOfRect(new[] { faceRect });
            var fittedPoints = new VectorOfVectorOfPointF();

            isSuccess = facemark.Fit(image, faceVector, fittedPoints);

            return isSuccess
                ? fittedPoints[0]        // landmarks of the single supplied rectangle
                : new VectorOfPointF();  // empty vector on failure
        }
Exemplo n.º 6
0
        /// <summary>
        /// Detects the first face in the grayscale image and fits facial landmarks to it.
        /// </summary>
        /// <param name="image">Original color image (currently unused by the method body).</param>
        /// <param name="grayImage">Grayscale image used for detection; histogram-equalized in place.</param>
        /// <returns>A FaceModel for the first detected face, or null when fitting fails or no face is found.</returns>
        private FaceModel GetFaceModel(Image <Bgr, Byte> image, Image <Gray, byte> grayImage)
        {
            grayImage._EqualizeHist();
            VectorOfRect faces = new VectorOfRect(faceDetector.DetectMultiScale(grayImage));

            Rectangle[]            rects     = faces.ToArray();
            VectorOfVectorOfPointF landmarks = new VectorOfVectorOfPointF();
            bool success = facemark.Fit(grayImage, faces, landmarks);

            // BUGFIX: bail out BEFORE indexing into the landmark array — the original
            // called landmarks.ToArrayOfArray()[0] first, which throws when Fit fails
            // (or when no face was detected) instead of returning null.
            if (!success || rects.Length == 0)
            {
                return(null);
            }

            PointF[] points = landmarks.ToArrayOfArray()[0];
            return(new FaceModel(points, rects[0]));
        }
Exemplo n.º 7
0
        /// <summary>
        /// Loads a Haar face detector and an LBF landmark model, runs them over
        /// "test.png", and draws the detected face boxes and landmark points.
        /// </summary>
        /// <returns>The annotated image, or null when no landmarks were fitted.</returns>
        public Image <Bgr, Byte> GetFacePoints()
        {
            String facePath = Path.GetFullPath(@"../../data/haarcascade_frontalface_default.xml");

            // FIX(review): CascadeClassifier, FacemarkLBFParams and FacemarkLBF wrap
            // native resources; the original leaked all three on every call.
            using (CascadeClassifier faceDetector = new CascadeClassifier(facePath))
            using (FacemarkLBFParams fParams = new FacemarkLBFParams())
            {
                fParams.ModelFile  = @"lbfmodel.yaml";
                fParams.NLandmarks = 68; // number of landmark points
                fParams.InitShapeN = 10; // multiplier for data augmentation
                fParams.StagesN    = 5;  // number of refinement stages
                fParams.TreeN      = 6;  // trees in the model per landmark point
                fParams.TreeDepth  = 5;  // depth of each decision tree

                using (FacemarkLBF facemark = new FacemarkLBF(fParams))
                {
                    Image <Bgr, Byte>  image     = new Image <Bgr, byte>("test.png");
                    Image <Gray, byte> grayImage = image.Convert <Gray, byte>();

                    grayImage._EqualizeHist(); // improve detection contrast

                    VectorOfRect           faces     = new VectorOfRect(faceDetector.DetectMultiScale(grayImage));
                    VectorOfVectorOfPointF landmarks = new VectorOfVectorOfPointF();

                    facemark.LoadModel(fParams.ModelFile);

                    bool success = facemark.Fit(grayImage, faces, landmarks);

                    if (success)
                    {
                        Rectangle[] facesRect = faces.ToArray();
                        for (int i = 0; i < facesRect.Length; i++)
                        {
                            image.Draw(facesRect[i], new Bgr(Color.Blue), 2);
                            FaceInvoke.DrawFacemarks(image, landmarks[i], new Bgr(Color.Blue).MCvScalar);
                        }
                        return(image);
                    }
                    return(null);
                }
            }
        }
Exemplo n.º 8
0
        /// <summary>
        /// Frame-grab handler: detects faces in the current camera frame, fits
        /// facial landmarks, outlines both eyes, and pushes the measurements to
        /// the UI controls.
        /// </summary>
        private void captureImageGrabberEvent(object sender, EventArgs e)
        {
            // NOTE(review): the large blocks of commented-out experimental code in the
            // original were removed; the repetitive per-point eye extraction was
            // collapsed into a loop. Active behavior is unchanged.
            try
            {
                Mat m = new Mat();
                videoCapture.Retrieve(m);

                imgInput = m.ToImage <Bgr, Byte>();
                var imgGray = imgInput.Convert <Gray, byte>().Clone(); // grayscale working copy

                imgGray._EqualizeHist();    // equalize brightness (grayscale only)
                imgGray._SmoothGaussian(1); // Gaussian de-noise

                // detect all frontal faces within the 100x100..300x300 size window
                VectorOfRect faces = new VectorOfRect(faceFrontalDetector.DetectMultiScale(imgGray, 1.1, 3, new Size(100, 100), new Size(300, 300)));
                if (faces.Size > 0)
                {
                    Rectangle[] facesRect = faces.ToArray();

                    Array.Sort(facesRect, new Comparison <Rectangle>(CompareShapes)); // sort by height

                    VectorOfVectorOfPointF landmarks = new VectorOfVectorOfPointF();
                    facemark.Fit(imgGray, faces, landmarks); // fit facial feature points

                    // distance estimate from face width relative to the frame width
                    double k = (double)faces[0].Width / imgGray.Width;
                    double s = (matrixCam * avarageFaceSize) / k;

                    PointF[] h = landmarks[0].ToArray(); // landmark point set for the first face

                    // indices 36-41: right-eye contour, 42-47: left-eye contour
                    Point[] rightEye = new Point[6];
                    Point[] leftEye  = new Point[6];
                    for (int i = 0; i < 6; i++)
                    {
                        rightEye[i] = new Point(Convert.ToInt32(Math.Round(h[36 + i].X)), Convert.ToInt32(Math.Round(h[36 + i].Y)));
                        leftEye[i]  = new Point(Convert.ToInt32(Math.Round(h[42 + i].X)), Convert.ToInt32(Math.Round(h[42 + i].Y)));
                    }

                    // lowest/highest contour points of the right eye (screen Y grows downward)
                    var rightEyeMinimalPoint = rightEye.First(x => x.Y == rightEye.Max(y => y.Y));
                    var rightEyeMaximalPoint = rightEye.First(x => x.Y == rightEye.Min(y => y.Y));

                    imgInput.DrawPolyline(leftEye, true, new Bgr(Color.Blue), 1);
                    imgInput.DrawPolyline(rightEye, true, new Bgr(Color.Blue), 1);

                    Invoke(new Action(() =>
                    {
                        if (faces.Size > 0)
                        {
                            faceSizePixelEdit.Text   = faces[0].Size.Height.ToString();
                            camLengthEdit.Text       = s.ToString();
                            arcScaleComponent1.Value = faces[0].Size.Height;

                            if (rightEyeMaximalPoint.Y > 0 && rightEyeMinimalPoint.Y > 0)
                            {
                                rightEyeEdit.Text = Convert.ToString(rightEyeMinimalPoint.Y - rightEyeMaximalPoint.Y);
                            }
                        }
                    }));
                    generalBox.Image = imgInput;
                }
                else
                {
                    generalBox.Image = imgInput;
                }

                Thread.Sleep(1); // yield briefly between frames
            }
            catch (Exception ex)
            {
                // BUGFIX: InnerException can be null (it usually is for exceptions thrown
                // directly inside this handler), which made the original code throw a
                // NullReferenceException from inside the catch block.
                MessageBox.Show(ex.InnerException?.Message ?? ex.Message);
            }
        }
Exemplo n.º 9
0
        /// <summary>
        /// Wires the page button and the image-loaded handler: runs the DNN face
        /// detector, fits facial landmarks in each detected region, draws boxes and
        /// landmarks, and reports the elapsed time.
        /// </summary>
        public FaceLandmarkDetectionPage()
            : base()
        {
            var button = this.GetButton();

            button.Text     = "Perform Face Landmark Detection";
            button.Clicked += OnButtonClicked;

            OnImagesLoaded += async(sender, image) =>
            {
                if (image == null || image[0] == null)
                {
                    return;
                }
                SetMessage("Please wait...");
                SetImage(null);
                Task <Tuple <IInputArray, long> > t = new Task <Tuple <IInputArray, long> >(
                    () =>
                {
                    InitFaceDetector();
                    InitFacemark();

                    int imgDim        = 300;                          // network input size
                    MCvScalar meanVal = new MCvScalar(104, 177, 123); // per-channel mean subtraction
                    Stopwatch watch   = Stopwatch.StartNew();
                    Size imageSize    = image[0].Size;
                    using (Mat inputBlob = DnnInvoke.BlobFromImage(
                               image[0],
                               1.0,
                               new Size(imgDim, imgDim),
                               meanVal,
                               false,
                               false))
                        _faceDetector.SetInput(inputBlob, "data");
                    using (Mat detection = _faceDetector.Forward("detection_out"))
                    {
                        float confidenceThreshold = 0.5f;

                        List <Rectangle> faceRegions = new List <Rectangle>();

                        // output indexed as [0,0,i,col]: col 2 is used as confidence,
                        // cols 3-6 as normalized box corners
                        float[,,,] values = detection.GetData(true) as float[, , , ];
                        if (values != null) // FIX: guard the 'as' cast instead of risking an NRE
                        {
                            for (int i = 0; i < values.GetLength(2); i++)
                            {
                                float confident = values[0, 0, i, 2];

                                if (confident > confidenceThreshold)
                                {
                                    float xLeftBottom       = values[0, 0, i, 3] * imageSize.Width;
                                    float yLeftBottom       = values[0, 0, i, 4] * imageSize.Height;
                                    float xRightTop         = values[0, 0, i, 5] * imageSize.Width;
                                    float yRightTop         = values[0, 0, i, 6] * imageSize.Height;
                                    RectangleF objectRegion = new RectangleF(
                                        xLeftBottom,
                                        yLeftBottom,
                                        xRightTop - xLeftBottom,
                                        yRightTop - yLeftBottom);
                                    faceRegions.Add(Rectangle.Round(objectRegion));
                                }
                            }
                        }

                        using (VectorOfRect vr = new VectorOfRect(faceRegions.ToArray()))
                            using (VectorOfVectorOfPointF landmarks = new VectorOfVectorOfPointF())
                            {
                                _facemark.Fit(image[0], vr, landmarks);

                                foreach (Rectangle face in faceRegions)
                                {
                                    CvInvoke.Rectangle(image[0], face, new MCvScalar(0, 255, 0));
                                }

                                for (int i = 0; i < landmarks.Size; i++)
                                {
                                    using (VectorOfPointF vpf = landmarks[i])
                                        FaceInvoke.DrawFacemarks(image[0], vpf, new MCvScalar(255, 0, 0));
                                }
                            }
                        watch.Stop();
                        return(new Tuple <IInputArray, long>(image[0], watch.ElapsedMilliseconds));
                    }
                });
                t.Start();

                // BUGFIX: use the awaited result rather than re-reading t.Result
                // (also removed the unused `len` and `computeDevice` locals).
                var result = await t;
                SetImage(result.Item1);

                SetMessage(String.Format("Detected in {0} milliseconds.", result.Item2));
            };
        }
Exemplo n.º 10
0
        /// <summary>
        /// Runs the DNN face detector over the image, fits facial landmarks within
        /// each detected region, and draws the boxes and landmarks onto the image
        /// in place.
        /// </summary>
        /// <param name="image">Image to detect on; modified in place with the drawings.</param>
        private void DetectAndRender(Mat image)
        {
            int       imgDim  = 300;                          // network input size
            MCvScalar meanVal = new MCvScalar(104, 177, 123); // per-channel mean subtraction

            Size imageSize = image.Size;

            using (Mat inputBlob = DnnInvoke.BlobFromImage(
                       image,
                       1.0,
                       new Size(imgDim, imgDim),
                       meanVal,
                       false,
                       false))
                _faceDetector.SetInput(inputBlob, "data");
            using (Mat detection = _faceDetector.Forward("detection_out"))
            {
                float confidenceThreshold = 0.5f;

                List <Rectangle> faceRegions = new List <Rectangle>();

                // output indexed as [0,0,i,col]: col 2 is used as confidence,
                // cols 3-6 as normalized box corners
                float[,,,] values = detection.GetData(true) as float[, , , ];
                if (values != null) // FIX: guard the 'as' cast instead of risking an NRE
                {
                    for (int i = 0; i < values.GetLength(2); i++)
                    {
                        float confident = values[0, 0, i, 2];

                        if (confident > confidenceThreshold)
                        {
                            float      xLeftBottom  = values[0, 0, i, 3] * imageSize.Width;
                            float      yLeftBottom  = values[0, 0, i, 4] * imageSize.Height;
                            float      xRightTop    = values[0, 0, i, 5] * imageSize.Width;
                            float      yRightTop    = values[0, 0, i, 6] * imageSize.Height;
                            RectangleF objectRegion = new RectangleF(
                                xLeftBottom,
                                yLeftBottom,
                                xRightTop - xLeftBottom,
                                yRightTop - yLeftBottom);
                            faceRegions.Add(Rectangle.Round(objectRegion));
                        }
                    }
                }

                using (VectorOfRect vr = new VectorOfRect(faceRegions.ToArray()))
                    using (VectorOfVectorOfPointF landmarks = new VectorOfVectorOfPointF())
                    {
                        _facemark.Fit(image, vr, landmarks);

                        foreach (Rectangle face in faceRegions)
                        {
                            CvInvoke.Rectangle(image, face, new MCvScalar(0, 255, 0));
                        }

                        // (removed the unused `len` local)
                        for (int i = 0; i < landmarks.Size; i++)
                        {
                            using (VectorOfPointF vpf = landmarks[i])
                                FaceInvoke.DrawFacemarks(image, vpf, new MCvScalar(255, 0, 0));
                        }
                    }
            }
        }
Exemplo n.º 11
0
        /// <summary>
        /// Frame-grab handler: detects faces, estimates the user's distance and the
        /// frame brightness, measures eye opening from the facial landmarks, and
        /// pushes everything to the UI gauges.
        /// </summary>
        private void imageGraberCapture(object sender, EventArgs e)
        {
            try
            {
                #region Frame grab and face detection

                Mat m = new Mat();

                videoCapture.Retrieve(m);
                Image <Bgr, Byte> inputImage = m.ToImage <Bgr, Byte>();
                var imgGray = inputImage.Convert <Gray, Byte>().Clone(); // grayscale working copy
                imgGray._EqualizeHist();                                 // equalize brightness
                imgGray._SmoothGaussian(1);                              // remove noise

                VectorOfRect faces = new VectorOfRect(faceFrontalDetector.DetectMultiScale(imgGray, 1.05, 5, new Size(120, 120))); // detect all faces

                #endregion

                if (faces.Size > 0)
                {
                    Rectangle[] facesRect = faces.ToArray();

                    // BUGFIX: the original called facesRect.OrderBy(...) and discarded
                    // the result — LINQ OrderBy returns a new sequence and does NOT
                    // sort in place, so the array was never actually ordered by height.
                    Array.Sort(facesRect, (a, b) => a.Height.CompareTo(b.Height));

                    VectorOfVectorOfPointF landmarks = new VectorOfVectorOfPointF();
                    facemark.Fit(imgGray, faces, landmarks); // fit facial feature points

                    #region Distance-to-user analysis

                    double k = ((double)faces[0].Height) / imgGray.Height;
                    currentDistance = (2.5 * avarageFaceSize) / k;

                    #endregion

                    #region Brightness analysis

                    imageBrightList = FrameBright(imgGray.Clone(), facesRect[0]);

                    // hoisted: the delta was recomputed in every branch below
                    var brightDelta = imageBrightList[1] - imageBrightList[0];

                    if (brightDelta > 55)
                    {
                        currentBright = 10;
                    }
                    else if (brightDelta > 45 && brightDelta < 55)
                    {
                        currentBright = 20;
                    }
                    else if (brightDelta > 35 && brightDelta < 45)
                    {
                        currentBright = 35;
                    }
                    else if (brightDelta > 25 && brightDelta < 35)
                    {
                        currentBright = 50;
                    }
                    else if (brightDelta > 15 && brightDelta < 25)
                    {
                        currentBright = 70;
                    }
                    else if (brightDelta > 10 && brightDelta < 15)
                    {
                        currentBright = 85;
                    }
                    else if (brightDelta > 5 && brightDelta < 10)
                    {
                        currentBright = 95;
                    }

                    #endregion

                    #region Eye analysis

                    PointF[] h = landmarks[0].ToArray(); // landmark point set for the first face

                    // indices 36-41: right-eye contour, 42-47: left-eye contour
                    Point[] rightEye = new Point[6];
                    Point[] leftEye  = new Point[6];
                    for (int i = 0; i < 6; i++)
                    {
                        rightEye[i] = new Point(Convert.ToInt32(Math.Round(h[36 + i].X)), Convert.ToInt32(Math.Round(h[36 + i].Y)));
                        leftEye[i]  = new Point(Convert.ToInt32(Math.Round(h[42 + i].X)), Convert.ToInt32(Math.Round(h[42 + i].Y)));
                    }

                    // lowest/highest contour points per eye (screen Y grows downward)
                    var rightEyeMinimalPoint = rightEye.First(x => x.Y == rightEye.Max(y => y.Y));
                    var rightEyeMaximalPoint = rightEye.First(x => x.Y == rightEye.Min(y => y.Y));
                    var leftEyeMinimalPoint  = leftEye.First(x => x.Y == leftEye.Max(y => y.Y));
                    var leftEyeMaximalPoint  = leftEye.First(x => x.Y == leftEye.Min(y => y.Y));

                    if (rightEyeMaximalPoint.Y > 0 && rightEyeMinimalPoint.Y > 0 && leftEyeMaximalPoint.Y > 0 && leftEyeMinimalPoint.Y > 0)
                    {
                        inputImage.DrawPolyline(leftEye, true, new Bgr(Color.Red), 2);
                        inputImage.DrawPolyline(rightEye, true, new Bgr(Color.Red), 2);

                        double bufer = rightEyeMinimalPoint.Y - rightEyeMaximalPoint.Y; // eye opening in pixels

                        double kk = ((double)bufer) / imgGray.Height;

                        curentRightEye = (currentDistance * kk) / 2.5; // in centimeters

                        if (curentRightEye > heightOpenRightEye)
                        {
                            curentRightEye = heightOpenRightEye;
                        }
                        else if (curentRightEye < heightCloseRightEye * chit)
                        {
                            curentRightEye = heightCloseRightEye;
                            blink++;
                            clipCounter++;
                        }

                        // assume both eyes open and close together
                        curentLeftEye = curentRightEye;
                    }
                    else
                    {
                        curentLeftEye  = heightCloseRightEye;
                        curentRightEye = heightCloseRightEye;
                    }

                    double?buffer = ((100 / (heightOpenLeftEye - heightCloseLeftEye)) * (curentRightEye - heightCloseLeftEye));

                    #endregion

                    Invoke(new Action(() =>
                    {
                        if (faces.Size > 0)
                        {
                            distanceGaugComponent.Value = (float)currentDistance;

                            // brightness gauges
                            linearScaleMarkerComponent3.Value   = (float)currentBright;
                            linearScaleRangeBarComponent2.Value = (float)currentBright;

                            // eye-opening gauges
                            linearScaleRangeBarComponent3.Value = (float)buffer;
                            linearScaleMarkerComponent4.Value   = (float)buffer;
                        }
                    }));

                    inputImage.Draw(faces[0], new Bgr(Color.Green), 1);

                    generalTabBox.Image = inputImage;
                }
                else
                {
                    generalTabBox.Image = inputImage;
                }

                Thread.Sleep(1); // yield briefly between frames
            }
            catch (Exception ex)
            {
                // BUGFIX: InnerException may be null, which previously caused a
                // NullReferenceException inside the catch block itself.
                MessageBox.Show(ex.InnerException?.Message ?? ex.Message);
            }
        }
Exemplo n.º 12
0
        /// <summary>
        /// Builds an "average face" image: detects 68 facial landmarks in every
        /// prepared image, averages the landmark positions, then warps and
        /// alpha-blends each face onto the average geometry. Result is saved
        /// as "result.png".
        /// </summary>
        static void Main(string[] args)
        {
            //prepare images for averaging
            PrepareImages();

            //load facemark model
            _facemark = FacemarkLBF.Create();
            _facemark.LoadModel("lbfmodel.yaml");

            // Enumerate the prepared files ONCE. Directory.GetFiles makes no
            // ordering guarantee, so calling it twice (once for landmark
            // collection, once for warping) could pair a landmark set with the
            // wrong image. One snapshot keeps index i consistent in both loops.
            var files = Directory.GetFiles(TempDirName);

            //collection for all found facemarks (one landmark list per image)
            var allFaceMarks = new List <List <Point2f> >();

            //facemark search and save
            foreach (var image in files)
            {
                using (var mat = new Mat(image))
                {
                    var facesRects = _cascade.DetectMultiScale(mat);
                    using (var facesRectsArray = InputArray.Create(facesRects))
                    {
                        _facemark.Fit(mat, facesRectsArray, out Point2f[][] landmarks);
                        // only one face should be
                        allFaceMarks.Add(landmarks[0].ToList());
                    }
                }
            }

            // Guard the empty case: averaging below divides by
            // allFaceMarks.Count, which would throw/NaN on zero images.
            if (allFaceMarks.Count == 0)
            {
                Console.WriteLine("No faces found; nothing to average.");
                return;
            }

            //add static border/corner anchor points so the Delaunay warp
            //covers the whole output frame, not just the face region
            foreach (var facemarks in allFaceMarks)
            {
                facemarks.Add(new Point2f(1, 1));
                facemarks.Add(new Point2f(1, _outputSize.Height / 2));
                facemarks.Add(new Point2f(1, _outputSize.Height - 1));
                facemarks.Add(new Point2f(_outputSize.Width - 1, 1));
                facemarks.Add(new Point2f(_outputSize.Width / 2, _outputSize.Height - 1));
                facemarks.Add(new Point2f(_outputSize.Width - 1, _outputSize.Height / 2));
                facemarks.Add(new Point2f(_outputSize.Width - 1, _outputSize.Height - 1));
            }

            //average facemarks — point count derived from the data instead of
            //the former hard-coded 75 (68 landmarks + 7 anchors), so a model
            //with a different landmark count still works
            int pointCount = allFaceMarks[0].Count;
            var averagePoints = new List <Point2f>(pointCount);

            for (int i = 0; i < pointCount; i++)
            {
                float xSum = 0;
                float ySum = 0;
                for (int j = 0; j < allFaceMarks.Count; j++)
                {
                    var point = allFaceMarks[j][i];
                    xSum += point.X;
                    ySum += point.Y;
                }
                averagePoints.Add(new Point2f(xSum / allFaceMarks.Count, ySum / allFaceMarks.Count));
            }

            //calculate delaunay triangles over the average geometry
            var destinationTriangles = averagePoints.GetDelaunayTriangles();

            //create result mat, zero-initialized so blending starts from black
            var outputMat = new Mat(_outputSize, _matTypeDefault);

            outputMat.SetTo(0);

            // blending coeff: each image contributes equally
            var delta = 1.0 / allFaceMarks.Count;

            // warping and blending — reuses the same `files` snapshot taken
            // above, so files[i] is guaranteed to match allFaceMarks[i]
            for (int i = 0; i < files.Length; i++)
            {
                using (var mat = new Mat(files[i]))
                {
                    var landmarks = allFaceMarks[i];
                    var warps     = destinationTriangles.GetWarps(landmarks, averagePoints);
                    var warpedImg = mat.ApplyWarps(mat.Width, mat.Height, warps);
                    Cv2.AddWeighted(outputMat, 1, warpedImg, delta, 0, outputMat);
                }
            }

            //save
            outputMat.SaveImage("result.png");
            Console.WriteLine("Done.");
        }
Exemplo n.º 13
0
    // Captures one webcam frame, runs Haar face detection plus LBF landmark
    // fitting, and updates tracking state: noseOffset (capture center),
    // per-landmark calibration, the `recording` flag, and an optional debug
    // preview sprite.
    void Track()
    {
        // NOTE(review): this guard only archives the previous landmarks once
        // lastPositions is already non-null — if it starts out null it is
        // never seeded here. Possibly `landmarks != null` was intended;
        // confirm against the initialization site before changing.
        if (lastPositions != null)
        {
            lastPositions = landmarks;
        }

        // We fetch webcam texture data
        convertedTexture.SetPixels(webcamTexture.GetPixels());
        convertedTexture.Apply();

        // We convert the webcam texture2D into the OpenCV image format.
        // NOTE(review): `img` is a UMat (IDisposable) but is never disposed
        // in this method — consider a using block to avoid leaking per frame.
        UMat img = new UMat();

        TextureConvert.Texture2dToOutputArray(convertedTexture, img);
        CvInvoke.Flip(img, img, FlipType.Vertical);

        // A new CascadeClassifier is loaded from filePath on every call;
        // disposed via `using`, but reloading each frame may be costly.
        using (CascadeClassifier classifier = new CascadeClassifier(filePath)) {
            using (UMat gray = new UMat()) {
                // We convert the OpenCV image format to the facial detection API parsable monochrome image type and detect the faces
                CvInvoke.CvtColor(img, gray, ColorConversion.Bgr2Gray);
                facesVV   = new VectorOfRect(classifier.DetectMultiScale(gray));
                landmarks = new VectorOfVectorOfPointF();

                // we fit facial landmarks onto the face data; Fit returns
                // false when no face/landmarks were found
                if (facemark.Fit(gray, facesVV, landmarks))
                {
                    // Only the first detected face (landmarks[0]) is used.
                    FaceInvoke.DrawFacemarks(img, landmarks[0], new MCvScalar(255, 255, 0, 255));

                    // We calculate the nose position to use as a capture center.
                    // NOTE(review): assumes a 68-point model; index 67 is the
                    // last point (inner mouth in the standard 68-point scheme,
                    // nose tip is usually 30/33) — confirm the intended anchor.
                    // Y is negated to map image coords into Unity space.
                    noseOffset = new Vector3(landmarks[0][67].X, landmarks[0][67].Y * -1f, 0f);

                    // We draw markers and compute positions for all 68 points
                    for (int j = 0; j < 68; j++)
                    {
                        Vector3 markerPos = new Vector3(landmarks[0][j].X, landmarks[0][j].Y * -1f, 0f);

                        if (displayOffsetMarkers)
                        {
                            // Short forward line per landmark, visible for one tracking interval
                            Debug.DrawLine(markerPos, markerPos + (Vector3.forward * 3f), UnityEngine.Color.green, trackingInterval);
                        }

                        AdjustCalibration(j, markerPos);
                    }
                    recording = true;
                }
                else
                {
                    // No face fitted this frame — pause recording.
                    recording = false;
                }

                if (displayCalibrationMarkers)
                {
                    DisplayCalibration();
                }
            }
        }

        // We render out the calculation result into the debug image
        if (debugImage)
        {
            Texture2D texture = TextureConvert.InputArrayToTexture2D(img, FlipType.Vertical);
            debugImage.sprite = Sprite.Create(texture, new Rect(0, 0, texture.width, texture.height), new Vector2(0.5f, 0.5f));
        }
    }