DetectFace() public method

public DetectFace ( ) : TFacePosition
Returns: TFacePosition. In the examples below, a face width (w) of 0 indicates that no face was detected.
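Before any of the code examples below will run, the FaceSDK library has to be activated and initialized. The following is a minimal, self-contained sketch of that call pattern, assuming the standard Luxand .NET wrapper (namespace Luxand, with FSDK.ActivateLibrary, FSDK.InitializeLibrary and FSDK.FinalizeLibrary); the license key and image path are placeholders.

using System;
using Luxand;

class DetectFaceSketch
{
    static void Main()
    {
        // Assumption: a valid FaceSDK license key must be supplied here.
        if (FSDK.FSDKE_OK != FSDK.ActivateLibrary("<your license key>"))
        {
            Console.WriteLine("Could not activate FaceSDK");
            return;
        }
        FSDK.InitializeLibrary();

        // Load an image from disk and detect the most prominent face,
        // mirroring the pattern used in the examples below.
        FSDK.CImage image = new FSDK.CImage("photo.jpg");
        FSDK.TFacePosition facePosition = image.DetectFace();

        if (0 == facePosition.w)
            Console.WriteLine("No faces detected");
        else
            Console.WriteLine("Face center: (" + facePosition.xc + ", " + facePosition.yc + "), width: " + facePosition.w);

        FSDK.FinalizeLibrary();
    }
}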
Code example #1
        public static List<ImagePoint> GetFeaturePoints(Image faceImage)
        {
            List<ImagePoint> faceFeatureList = new List<ImagePoint>();
            try
            {
                FSDK.CImage image = new FSDK.CImage(faceImage);
                FSDK.TFacePosition facePosition = image.DetectFace();
                if (0 == facePosition.w)
                    return null;
                else
                {
                    // Detect facial feature points inside the detected face region
                    // and collect them as ImagePoint instances.
                    FSDK.TPoint[] facialFeatures = image.DetectFacialFeaturesInRegion(ref facePosition);
                    foreach (FSDK.TPoint point in facialFeatures)
                    {
                        faceFeatureList.Add(new ImagePoint(point.x, point.y));
                    }

                    return faceFeatureList;
                }

            }
            catch (Exception ex)
            {
                // Re-throw with the original exception preserved as InnerException
                // so its stack trace is not lost.
                throw new Exception(ex.Message, ex);
            }
        }
Code example #2
File: Form1.cs Project: ScoreSolutions/RFIDHERO2014
        private void btnOpenPhoto_Click(object sender, EventArgs e)
        {
            if (openFileDialog1.ShowDialog() == DialogResult.OK)
            {
                try
                {
                    FSDK.CImage image = new FSDK.CImage(openFileDialog1.FileName);

                    // resize image to fit the window width
                    double ratio = System.Math.Min((pictureBox1.Width + 0.4) / image.Width,
                        (pictureBox1.Height + 0.4) / image.Height);
                    image = image.Resize(ratio);
                    
                    Image frameImage = image.ToCLRImage();
                    Graphics gr = Graphics.FromImage(frameImage);

                    FSDK.TFacePosition facePosition = image.DetectFace();
                    if (0 == facePosition.w)
                        MessageBox.Show("No faces detected", "Face Detection");
                    else
                    {
                        int left = facePosition.xc - (int)(facePosition.w*0.6f);
                        int top = facePosition.yc - (int)(facePosition.w*0.5f);
                        gr.DrawRectangle(Pens.LightGreen, left, top, (int)(facePosition.w*1.2), (int)(facePosition.w*1.2));

                        FSDK.TPoint[] facialFeatures = image.DetectFacialFeaturesInRegion(ref facePosition);
                        int i = 0;
                        foreach (FSDK.TPoint point in facialFeatures)
                            gr.DrawEllipse((++i > 2) ? Pens.LightGreen : Pens.Blue, point.x, point.y, 3, 3);

                        gr.Flush();
                    }

                    // display image
                    pictureBox1.Image = frameImage;
                    pictureBox1.Refresh();
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message, "Exception");
                }
            }
        }
Code example #3
File: Luxand.cs Project: ngcbassman/smiles
 public void DetectFace(Bitmap bitmap)
 {
     Left = 0;
     Top = 0;
     FaceDetected = false;
     _facialFeatures = null;
     FSDK.CImage image = new FSDK.CImage(bitmap);
     FSDK.TFacePosition facePosition = image.DetectFace();
     FaceDetected = facePosition.w > 0;
     if (FaceDetected)
     {
         Left = facePosition.xc - facePosition.w / 2;
         Top = facePosition.yc - facePosition.w / 2;
         _facialFeatures = image.DetectFacialFeaturesInRegion(ref facePosition);
         _eyes = image.DetectEyesInRegion(ref facePosition);
     }
 }
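The wrapper above also keeps the two eye centers returned by DetectEyesInRegion. As a small, hedged follow-up, the helper below shows how those two points could be turned into a head-tilt angle, similar to what code example #6 does with the eye coordinates; the helper name EyeTiltDegrees and the assumption that the array holds the two eye centers in left/right order are not part of the original example.

using System;
using Luxand;

static class EyeGeometry
{
    // Assumption: eyes[0] and eyes[1] are the two eye centers returned by
    // CImage.DetectEyesInRegion, as stored in the wrapper above.
    public static double EyeTiltDegrees(FSDK.TPoint[] eyes)
    {
        if (eyes == null || eyes.Length < 2)
            return 0.0;

        double dx = eyes[1].x - eyes[0].x;
        double dy = eyes[1].y - eyes[0].y;

        // Angle of the line through both eyes relative to the horizontal axis,
        // in degrees; a non-zero value indicates a tilted head.
        return Math.Atan2(dy, dx) * 180.0 / Math.PI;
    }
}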
Code example #4
        private void detect_button_Click(object sender, EventArgs e)
        {
            var samples = Directory.GetFiles("C:\\Projects\\misoi\\bsuir-misoi\\Presentation.WindowsForms\\Samples");

            foreach (var sample in samples)
            {
                try
                {
                    FSDK.CImage image = new FSDK.CImage(sample);

                    // resize image to fit the window width
                    double ratio = System.Math.Min((resultPictureBox.Width + 0.4) / image.Width,
                        (resultPictureBox.Height + 0.4) / image.Height);
                    image = image.Resize(ratio);

                    FSDK.TFacePosition facePosition = image.DetectFace();
                    if (0 == facePosition.w)
                        MessageBox.Show("No faces detected", "Face Detection");
                    else
                    {
                        FSDK.TPoint[] facialFeatures = image.DetectFacialFeaturesInRegion(ref facePosition);
                        using (var writer = File.AppendText("sasi.txt"))
                        {
                            foreach (FSDK.TPoint point in facialFeatures)
                            {
                                writer.Write((double)(point.x - facePosition.xc)/facePosition.w + "," + (double)(point.y - facePosition.yc)/facePosition.w + ",");
                            }
                            writer.Write("Happy");
                            writer.WriteLine();
                        }
                    }
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message, "Exception");
                }
            }
        }
Code example #5
        private void openBtn_Click(object sender, EventArgs e)
        {
            using (var dlg = new OpenFileDialog())
            {
                dlg.Title = "Open Image";
                dlg.Filter = "Image Files(*.BMP;*.JPG;*.GIF;*.PNG)|*.BMP;*.JPG;*.GIF;*.PNG";

                if (dlg.ShowDialog() == DialogResult.OK)
                {
                    sourcePictureBox.Image = new Bitmap(dlg.FileName);
                    resultPictureBox.Image = null;

                    try
                    {
                        FSDK.CImage image = new FSDK.CImage(dlg.FileName);

                        // resize image to fit the window width
                        double ratio = System.Math.Min((resultPictureBox.Width + 0.4) / image.Width,
                            (resultPictureBox.Height + 0.4) / image.Height);
                        image = image.Resize(ratio);

                        Image frameImage = image.ToCLRImage();
                        Graphics gr = Graphics.FromImage(frameImage);

                        FSDK.TFacePosition facePosition = image.DetectFace();
                        if (0 == facePosition.w)
                            MessageBox.Show("No faces detected", "Face Detection");
                        else
                        {
                            int left = facePosition.xc - facePosition.w / 2;
                            int top = facePosition.yc - facePosition.w / 2;
                            gr.DrawRectangle(Pens.LightGreen, left, top, facePosition.w, facePosition.w);

                            FSDK.TPoint[] facialFeatures = image.DetectFacialFeaturesInRegion(ref facePosition);
                            int i = 0;
                            using (var writer = File.AppendText("sasi.txt"))
                            {
                                foreach (FSDK.TPoint point in facialFeatures)
                                {
                                    gr.DrawEllipse((++i > 2) ? Pens.LightGreen : Pens.Blue, point.x, point.y, 3, 3);
                                    writer.Write((point.x - facePosition.xc) + "," + (point.y - facePosition.yc) + ",");
                                }
                                writer.Write("Happy");
                                writer.WriteLine();
                            }

                            gr.Flush();
                        }

                        // display image
                        resultPictureBox.Image = frameImage;
                        resultPictureBox.Refresh();
                    }
                    catch (Exception ex)
                    {
                        MessageBox.Show(ex.Message, "Exception");
                    }
                }
            }
        }
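Code examples #4 and #5 above append one comma-separated row per image to sasi.txt: the feature values first, then the label "Happy" as the last token. The sketch below is only a guess at a matching reader for that format; it assumes the file is parsed under the same culture that wrote it, since the writers above rely on default ToString formatting.

using System.Linq;

static class FeatureFileReader
{
    // Parses one line of the "sasi.txt" format produced above:
    // v1,v2,...,vN,Label (the label is the last comma-separated token).
    public static double[] ParseLine(string line, out string label)
    {
        string[] tokens = line.Split(',');
        label = tokens[tokens.Length - 1];
        return tokens.Take(tokens.Length - 1)
                     .Select(double.Parse)
                     .ToArray();
    }
}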
Code example #6
        public bool Recognize(ref string path, bool needCrop)
        {
            FaceRectRelative = RectangleF.Empty;
            LeftEyeCenter = RightEyeCenter = LeftMouth = LeftNose = RightNose = RightMouth = Vector2.Zero;

            var executablePath = Path.GetDirectoryName(Application.ExecutablePath);
            FSDK.TPoint[] pointFeature;
            FSDK.CImage image = new FSDK.CImage(path);

            var faceRectangle = Rectangle.Empty;
            var mouthRectangle = Rectangle.Empty;

            #region Determine the face color

            if (needCrop)
            {
                var openCvImage = new Image<Bgr, byte>(path);
                var detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);

                using (var skin = new Image<Gray, Byte>(image.Width, image.Height))
                {
                    var color = new Bgr(0, 0, 0);
                    var count = 0;
                    detector.Process(openCvImage, skin);
                    for (int y = 0; y < skin.Height; y++)
                    {
                        for (int x = 0; x < skin.Width; x++)
                        {
                            byte value = skin.Data[y, x, 0];
                            if (value != 0)
                            {
                                var c = openCvImage[y, x];
                                color.Red += c.Red;
                                color.Green += c.Green;
                                color.Blue += c.Blue;
                                ++count;
                            }
                        }
                    }
                    if (count > 0)
                    {
                        color.Red /= count;
                        color.Green /= count;
                        color.Blue /= count;
                        FaceColor = new Vector4((float)color.Red / 255f, (float)color.Green / 255f, (float)color.Blue / 255f, 1.0f);
                    }
                    else
                    {
                        FaceColor = new Vector4(0.72f, 0.72f, 0.72f, 1.0f);
                    }
                }
            }

            #endregion

            FSDK.TFacePosition facePosition = image.DetectFace();
            if (0 == facePosition.w)
            {
                faceRectangle = new Rectangle(0, 0, image.Width, image.Height);
                MessageBox.Show("No faces detected", "Face Detection");
                return false;
            }
            else
            {
                pointFeature = image.DetectFacialFeaturesInRegion(ref facePosition);

                String AttributeValues;         // gender detection
                FSDK.DetectFacialAttributeUsingFeatures(image.ImageHandle, ref pointFeature, "Gender", out AttributeValues, 1024);
                float ConfidenceMale = 0.0f;
                float ConfidenceFemale = 0.0f;
                FSDK.GetValueConfidence(AttributeValues, "Male", ref ConfidenceMale);
                FSDK.GetValueConfidence(AttributeValues, "Female", ref ConfidenceFemale);
                IsMale = ConfidenceMale > ConfidenceFemale;

                int left = facePosition.xc - (int)(facePosition.w * 0.6f);
                left = left < 0 ? 0 : left;
                //   int top = facePosition.yc - (int)(facePosition.w * 0.5f);             // the top is detected incorrectly, so we simply don't crop it :)
                BottomFace = new Vector2(pointFeature[11].x, pointFeature[11].y);

                var distance = pointFeature[2].y - pointFeature[11].y;
                var top = pointFeature[16].y + distance - 15;          // height determined using the old-timer's algorithm
                top = top < 0 ? 0 : top;

                var newWidth = (int)(facePosition.w * 1.2);
                newWidth = newWidth > image.Width ? image.Width : newWidth;

                faceRectangle = new Rectangle(left, top, newWidth, BottomFace.Y + 15 < image.Height ? (int)(BottomFace.Y + 15) - top : image.Height - top - 1);
                if (needCrop)       // when creating a project, crop the photo so that only the head remains
                {
                    using (var croppedImage = ImageEx.Crop(path, faceRectangle))
                    {
                        path = UserConfig.AppDataDir;
                        FolderEx.CreateDirectory(path);
                        path = Path.Combine(path, "tempHaarImage.jpg");
                        croppedImage.Save(path, ImageFormat.Jpeg);

                        return Recognize(ref path, false);

                    }
                }

                LeftEyeCenter = new Vector2(pointFeature[0].x, pointFeature[0].y);
                RightEyeCenter = new Vector2(pointFeature[1].x, pointFeature[1].y);

                LeftMouth = new Vector2(pointFeature[3].x, pointFeature[3].y);
                RightMouth = new Vector2(pointFeature[4].x, pointFeature[4].y);

                LeftNose = new Vector2(pointFeature[45].x, pointFeature[45].y);
                RightNose = new Vector2(pointFeature[46].x, pointFeature[46].y);

                TopFace = new Vector2(pointFeature[66].x, pointFeature[66].y);
                MiddleFace1 = new Vector2(pointFeature[66].x, pointFeature[66].y);
                MiddleFace2 = new Vector2(pointFeature[5].x, pointFeature[5].y);

                RightMiddleFace1 = new Vector2(pointFeature[67].x, pointFeature[67].y);
                RightMiddleFace2 = new Vector2(pointFeature[6].x, pointFeature[6].y);

                #region Rotate the photo based on the eyes

                var v = new Vector2(LeftEyeCenter.X - RightEyeCenter.X, LeftEyeCenter.Y - RightEyeCenter.Y);
                v.Normalize();      // normalize to a unit vector
                var xVector = new Vector2(1, 0);

                var xDiff = xVector.X - v.X;
                var yDiff = xVector.Y - v.Y;
                var angle = Math.Atan2(yDiff, xDiff) * 180.0 / Math.PI;

                if (Math.Abs(angle) > 1 && angleCount <= 5)                // rotate tilted heads (bounded number of retries)
                {
                    ++angleCount;

                    using (var ms = new MemoryStream(File.ReadAllBytes(path))) // Don't use using!!
                    {
                        var originalImg = (Bitmap)Bitmap.FromStream(ms);

                        path = UserConfig.AppDataDir;
                        FolderEx.CreateDirectory(path);
                        path = Path.Combine(path, "tempHaarImage.jpg");

                        using (var ii = ImageEx.RotateImage(new Bitmap(originalImg), (float)-angle))
                            ii.Save(path, ImageFormat.Jpeg);
                    }

                    return Recognize(ref path, false);
                }

                #endregion

                #region Convert to relative coordinates

                LeftMouth = new Vector2(LeftMouth.X / (image.Width * 1f), LeftMouth.Y / (image.Height * 1f));
                RightMouth = new Vector2(RightMouth.X / (image.Width * 1f), RightMouth.Y / (image.Height * 1f));

                LeftEyeCenter = new Vector2(LeftEyeCenter.X / (image.Width * 1f), LeftEyeCenter.Y / (image.Height * 1f));
                RightEyeCenter = new Vector2(RightEyeCenter.X / (image.Width * 1f), RightEyeCenter.Y / (image.Height * 1f));

                LeftNose = new Vector2(LeftNose.X / (image.Width * 1f), LeftNose.Y / (image.Height * 1f));
                RightNose = new Vector2(RightNose.X / (image.Width * 1f), RightNose.Y / (image.Height * 1f));

                TopFace = new Vector2(TopFace.X / (image.Width * 1f), TopFace.Y / (image.Height * 1f));
                MiddleFace1 = new Vector2(MiddleFace1.X / (image.Width * 1f), MiddleFace1.Y / (image.Height * 1f));
                MiddleFace2 = new Vector2(MiddleFace2.X / (image.Width * 1f), MiddleFace2.Y / (image.Height * 1f));
                BottomFace = new Vector2(BottomFace.X / (image.Width * 1f), BottomFace.Y / (image.Height * 1f));

                RightMiddleFace1 = new Vector2(RightMiddleFace1.X / (image.Width * 1f), RightMiddleFace1.Y / (image.Height * 1f));
                RightMiddleFace2 = new Vector2(RightMiddleFace2.X / (image.Width * 1f), RightMiddleFace2.Y / (image.Height * 1f));

                FacialFeatures = new List<Vector2>();
                foreach (var point in pointFeature)
                    FacialFeatures.Add(new Vector2(point.x / (image.Width * 1f), point.y / (image.Height * 1f)));

                #endregion

                return true;
            }
        }