Example #1
        /// <summary>
        /// Get how much the eyes are open and how much the person is smiling
        /// </summary>
        /// <returns>A number between 0 and 1 for each element of the tuple</returns>
        /// <remarks>The face's features must already have been detected using DetectFeatures; otherwise (null, null) is returned</remarks>
        public (float? eyesOpen, float? smile) GetExpression()
        {
            if (Features == null)
            {
                return (null, null);
            }

            if (FSDK.FSDKE_OK !=
                FSDK.DetectFacialAttributeUsingFeatures(ImageHandle, ref Features, "Expression", out string response, 128))
            {
                return (null, null);
            }

            // response has the form "Smile=<value>;EyesOpen=<value>", so after the
            // split the smile value sits at index 1 and the eyes-open value at index 3
            var splitResponse = response.Split('=', ';');

            float? eyes  = null;
            float? smile = null;

            string eyesString = splitResponse[3];

            if (float.TryParse(eyesString, NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out float eyesTemp))
            {
                eyes = eyesTemp;
            }

            string smileString = splitResponse[1];

            if (float.TryParse(smileString, NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out float smileTemp))
            {
                smile = smileTemp;
            }

            return (eyes, smile);
        }
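
A minimal usage sketch (hypothetical: the host class name "Face", its constructor, and the DetectFeatures() call are assumed from the remarks above, not taken from the full source):

        // Hypothetical usage; Face and DetectFeatures() are assumptions based on the remarks.
        var face = new Face("portrait.jpg");
        face.DetectFeatures();      // must run first, otherwise GetExpression() returns (null, null)

        var (eyesOpen, smile) = face.GetExpression();
        if (eyesOpen.HasValue && smile.HasValue)
        {
            Console.WriteLine($"Eyes open: {eyesOpen:P0}, smiling: {smile:P0}");
        }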
Example #2
        /// <summary>
        /// Get how much the face's eyes are open
        /// </summary>
        /// <returns>A number between 0 and 1 indicating how much the face's eyes are open, or null on failure</returns>
        /// <remarks>
        /// The face's Features need to have been detected beforehand using DetectFeatures(), otherwise null will be returned.
        /// Use <see cref="GetExpression"/> if detecting multiple attributes.
        /// </remarks>
        public float? GetEyesOpen()
        {
            if (Features == null)
            {
                return null;
            }

            if (FSDK.FSDKE_OK !=
                FSDK.DetectFacialAttributeUsingFeatures(ImageHandle, ref Features, "Expression", out string response, 128))
            {
                return null;
            }

            // index 3 = the EyesOpen value in the "Smile=<value>;EyesOpen=<value>" response
            string eyesOpen = response.Split('=', ';')[3];

            if (float.TryParse(eyesOpen, NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out float ret))
            {
                return ret;
            }

            return null;
        }
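
Both getters rely on the response's pair order: splitting on '=' and ';' only works if Smile comes first and EyesOpen second. A sketch of a name-based alternative using FSDK.GetValueConfidence (the parser examples #3 and #4 use), which does not depend on that order:

        // Sketch: look the attribute values up by name instead of by split index.
        float smileConfidence = 0f, eyesOpenConfidence = 0f;
        if (FSDK.FSDKE_OK == FSDK.DetectFacialAttributeUsingFeatures(
                ImageHandle, ref Features, "Expression", out string response, 128))
        {
            FSDK.GetValueConfidence(response, "Smile", ref smileConfidence);
            FSDK.GetValueConfidence(response, "EyesOpen", ref eyesOpenConfidence);
        }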
Example #3
        private void matchesFace()
        {
            if (dataGridView1.Rows.Count > 0)
            {
                // one Invoke is enough to clear and refresh the grid on the UI thread
                this.dataGridView1.Invoke(new MethodInvoker(() =>
                {
                    this.dataGridView1.Rows.Clear();
                    this.dataGridView1.Refresh();
                }));
            }

            for (int i = 0; i < SubjectList.Count; i++)
            {
                if (SubjectList.Count >= 1)
                {
                    FSDK.GetMatchingThresholdAtFAR(FARValue / 100, ref FaceDetectionThreshold);

                    TFaceRecord DbSubject    = SubjectList[i];
                    int         MatchedCount = 0;
                    int         FaceCount    = FaceSearchList.Count;
                    float[]     Similarities = new float[FaceCount];
                    float[]     Smile        = new float[FaceCount];
                    float[]     EyesOpen     = new float[FaceCount];
                    float[]     Male         = new float[FaceCount];
                    float[]     Female       = new float[FaceCount];

                    int[] Numbers = new int[FaceCount];

                    for (int k = 0; k < FaceSearchList.Count; k++)
                    {
                        float Similarity         = 0.0f;
                        float ConfidenceSmile    = 0.0f;
                        float ConfidenceEyesOpen = 0.0f;
                        float ConfidenceMale     = 0.0f;
                        float ConfidenceFemale   = 0.0f;

                        TFaceRecord SearchFace = FaceSearchList[k];
                        FSDK.MatchFaces(ref DbSubject.Template, ref SearchFace.Template, ref Similarity);

                        long          MaxSizeInBytes   = 100000;
                        string        ExpressionValues = "";
                        string        GenderValues     = "";
                        FSDK.CImage   CurrentImage     = SearchFace.image;
                        FSDK.TPoint[] Facefeatures     = null;

                        FSDK.DetectFacialFeatures(SearchFace.faceImage.ImageHandle, out Facefeatures);

                        if (Facefeatures != null)
                        {
                            FSDK.DetectFacialAttributeUsingFeatures(SearchFace.faceImage.ImageHandle, ref Facefeatures, "Expression", out ExpressionValues, MaxSizeInBytes);
                            FSDK.GetValueConfidence(ExpressionValues, "Smile", ref ConfidenceSmile);
                            FSDK.GetValueConfidence(ExpressionValues, "EyesOpen", ref ConfidenceEyesOpen);

                            FSDK.DetectFacialAttributeUsingFeatures(SearchFace.faceImage.ImageHandle, ref Facefeatures, "Gender", out GenderValues, MaxSizeInBytes);
                            FSDK.GetValueConfidence(GenderValues, "Male", ref ConfidenceMale);
                            FSDK.GetValueConfidence(GenderValues, "Female", ref ConfidenceFemale);
                        }

                        // Record the face whether or not feature detection succeeded;
                        // the confidences simply stay at 0 when no features were found.
                        if (Similarity >= FaceDetectionThreshold)
                        {
                            Similarities[MatchedCount] = Similarity;
                            Smile[MatchedCount]        = ConfidenceSmile;
                            EyesOpen[MatchedCount]     = ConfidenceEyesOpen;
                            Male[MatchedCount]         = ConfidenceMale;
                            Female[MatchedCount]       = ConfidenceFemale;

                            Numbers[MatchedCount] = k;
                            ++MatchedCount;
                        }
                    }

                    if (MatchedCount == 0)
                    {
                        MessageBox.Show("No matches found. You can try to increase the FAR parameter in the Options dialog box.", "No matches");
                    }
                    else
                    {
                        for (int j = 0; j < MatchedCount; j++)
                        {
                            if ((Similarities[j] * 100.0f) >= 30.0f)
                            {
                                // j indexes the matched arrays; Numbers[j] maps back to
                                // the matching FaceSearchList entry
                                resultImagelist.Images.Add(FaceSearchList[Numbers[j]].faceImage.ToCLRImage());

                                Image img1 = FaceSearchList[Numbers[j]].faceImage.ToCLRImage();
                                img1 = new Bitmap(img1, new Size(100, 100));

                                Image img2 = Image.FromFile(SubjectList[i].ImageFileName);
                                img2 = new Bitmap(img2, new Size(100, 100));

                                string feature = DbSubject.suspectName + " \r\n\nSimilarity = " + (Similarities[j] * 100) +
                                                 " Smile: " + (Smile[j] * 100) + " Eyes Open: " + (EyesOpen[j] * 100) +
                                                 " Male: " + (Male[j] * 100) + " Female: " + (Female[j] * 100);
                                Object[] row = new Object[] { img1, img2, feature };

                                this.dataGridView1.Invoke(new MethodInvoker(() =>
                                                                            this.dataGridView1.Rows.Add(row)));
                            }
                        }
                    }
                }
            }
        }
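
matchesFace() pushes every grid update through dataGridView1.Invoke, so it is meant to run off the UI thread. A hypothetical call site (not part of the original source; requires System.Threading):

        // Run the matching loop on a worker thread; Invoke() then marshals the
        // DataGridView updates back onto the UI thread.
        var worker = new Thread(matchesFace) { IsBackground = true };
        worker.Start();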
Example #4
        public bool Recognize(ref string path, bool needCrop, bool needRotation = true)
        {
            FaceRectRelative = RectangleF.Empty;
            LeftEyeCenter    = RightEyeCenter = LeftMouth = LeftNose = RightNose = RightMouth = Vector2.Zero;

            var executablePath = Path.GetDirectoryName(Application.ExecutablePath);

            FSDK.TPoint[] pointFeature;
            var           image = new FSDK.CImage(path);

            var faceRectangle  = Rectangle.Empty;
            var mouthRectangle = Rectangle.Empty;

            FSDK.SetFaceDetectionThreshold(5);
            FSDK.SetFaceDetectionParameters(true, true, 512);
            var facePosition = image.DetectFace();

            if (0 == facePosition.w)
            {
                MessageBox.Show("No faces detected", "Face Detection");
                return false;
            }

            if (needCrop)
            {
                RotatedAngle = facePosition.angle;      // the angle by which the head is rotated
            }
            pointFeature = image.DetectFacialFeaturesInRegion(ref facePosition);

            string AttributeValues;         // gender detection

            FSDK.DetectFacialAttributeUsingFeatures(image.ImageHandle, ref pointFeature, "Gender", out AttributeValues, 1024);
            var ConfidenceMale   = 0.0f;
            var ConfidenceFemale = 0.0f;
            var Age = 0.0f;             // age detection does not work in this version of the recognizer

            FSDK.GetValueConfidence(AttributeValues, "Male", ref ConfidenceMale);
            FSDK.GetValueConfidence(AttributeValues, "Female", ref ConfidenceFemale);
            IsMale = ConfidenceMale > ConfidenceFemale;

            FSDK.DetectFacialAttributeUsingFeatures(image.ImageHandle, ref pointFeature, "Age", out AttributeValues, 1024);

            var left = facePosition.xc - (int)(facePosition.w * 0.6f);

            left = left < 0 ? 0 : left;
            //   int top = facePosition.yc - (int)(facePosition.w * 0.5f);             // the top is detected incorrectly, so we simply don't crop it :)
            BottomFace = new Vector2(pointFeature[11].x, pointFeature[11].y);

            var distance = pointFeature[2].y - pointFeature[11].y;
            var top      = pointFeature[16].y + distance - 15;     // estimate the top of the head using the old-timer's algorithm

            top = top < 0 ? 0 : top;

            var newWidth = (int)(facePosition.w * 1.2);

            newWidth = newWidth > image.Width || newWidth == 0 ? image.Width : newWidth;

            faceRectangle = new Rectangle(left, top, newWidth, BottomFace.Y + 15 < image.Height ? (int)(BottomFace.Y + 15) - top : image.Height - top - 1);
            if (needCrop)       // when creating a project, crop the photo so only the head remains
            {
                var bmpImage = new Bitmap(path);
                FaceColor = GetFaceColor(bmpImage, pointFeature);

                var croppedImage = ImageEx.Crop(bmpImage, faceRectangle);
                path = UserConfig.AppDataDir;
                FolderEx.CreateDirectory(path);
                path = Path.Combine(path, "tempHaarImage.jpg");
                croppedImage.Save(path, ImageFormat.Jpeg);
                croppedImage.Dispose();

                return Recognize(ref path, false);
            }

            LeftEyeCenter  = new Vector2(pointFeature[0].x, pointFeature[0].y);
            RightEyeCenter = new Vector2(pointFeature[1].x, pointFeature[1].y);

            LeftMouth  = new Vector2(pointFeature[3].x, pointFeature[3].y);
            RightMouth = new Vector2(pointFeature[4].x, pointFeature[4].y);

            LeftNose  = new Vector2(pointFeature[45].x, pointFeature[45].y);
            RightNose = new Vector2(pointFeature[46].x, pointFeature[46].y);

            TopFace     = new Vector2(pointFeature[66].x, pointFeature[66].y);
            MiddleFace1 = new Vector2(pointFeature[66].x, pointFeature[66].y);
            MiddleFace2 = new Vector2(pointFeature[5].x, pointFeature[5].y);


            RightMiddleFace1 = new Vector2(pointFeature[67].x, pointFeature[67].y);
            RightMiddleFace2 = new Vector2(pointFeature[6].x, pointFeature[6].y);

            #region Rotate the photo based on the eyes!

            if (needRotation)
            {
                var v = new Vector2(LeftEyeCenter.X - RightEyeCenter.X, LeftEyeCenter.Y - RightEyeCenter.Y);
                v.Normalize();      // normalize the eye-direction vector
                var xVector = new Vector2(1, 0);

                var xDiff = xVector.X - v.X;
                var yDiff = xVector.Y - v.Y;
                var angle = Math.Atan2(yDiff, xDiff) * 180.0 / Math.PI;

                if (Math.Abs(angle) > 1 && angleCount <= 5)                // straighten tilted heads
                {
                    ++angleCount;

                    // Image.FromStream requires the stream to stay open while the image is
                    // in use; the Bitmap copies below make it safe to dispose the stream.
                    using (var ms = new MemoryStream(File.ReadAllBytes(path)))
                    {
                        var originalImg = (Bitmap)Image.FromStream(ms);

                        path = UserConfig.AppDataDir;
                        FolderEx.CreateDirectory(path);
                        path = Path.Combine(path, "tempHaarImage.jpg");

                        using (var ii = ImageEx.RotateImage(new Bitmap(originalImg), (float)-angle))
                            ii.Save(path, ImageFormat.Jpeg);
                    }

                    return Recognize(ref path, false);
                }
            }

            #endregion

            var upperUpperLip = pointFeature[54];       // top point of the upper lip
            var lowerUpperLip = pointFeature[61];       // bottom point of the upper lip
            var lowerLip      = pointFeature[64];       // top point of the lower lip

            var lipThickness = Math.Abs(lowerUpperLip.y - upperUpperLip.y);
            var mouthGap     = Math.Abs(lowerLip.y - lowerUpperLip.y);

            // the mouth is considered open when the lip gap exceeds the upper lip's thickness
            IsOpenSmile = mouthGap > lipThickness;

            #region Convert to relative coordinates

            LeftMouth  = new Vector2(LeftMouth.X / (image.Width * 1f), LeftMouth.Y / (image.Height * 1f));
            RightMouth = new Vector2(RightMouth.X / (image.Width * 1f), RightMouth.Y / (image.Height * 1f));

            LeftEyeCenter  = new Vector2(LeftEyeCenter.X / (image.Width * 1f), LeftEyeCenter.Y / (image.Height * 1f));
            RightEyeCenter = new Vector2(RightEyeCenter.X / (image.Width * 1f), RightEyeCenter.Y / (image.Height * 1f));

            LeftNose  = new Vector2(LeftNose.X / (image.Width * 1f), LeftNose.Y / (image.Height * 1f));
            RightNose = new Vector2(RightNose.X / (image.Width * 1f), RightNose.Y / (image.Height * 1f));

            TopFace     = new Vector2(TopFace.X / (image.Width * 1f), TopFace.Y / (image.Height * 1f));
            MiddleFace1 = new Vector2(MiddleFace1.X / (image.Width * 1f), MiddleFace1.Y / (image.Height * 1f));
            MiddleFace2 = new Vector2(MiddleFace2.X / (image.Width * 1f), MiddleFace2.Y / (image.Height * 1f));
            BottomFace  = new Vector2(BottomFace.X / (image.Width * 1f), BottomFace.Y / (image.Height * 1f));

            RightMiddleFace1 = new Vector2(RightMiddleFace1.X / (image.Width * 1f), RightMiddleFace1.Y / (image.Height * 1f));
            RightMiddleFace2 = new Vector2(RightMiddleFace2.X / (image.Width * 1f), RightMiddleFace2.Y / (image.Height * 1f));

            FacialFeatures = new List<Vector3>();
            RealPoints     = new List<Vector2>();
            int index       = 0;
            var pointDepths = GetPointDepths();
            foreach (var point in pointFeature)
            {
                FacialFeatures.Add(new Vector3(point.x / (image.Width * 1f), point.y / (image.Height * 1f), pointDepths[index++]));
                RealPoints.Add(new Vector2(point.x, point.y));
            }

            #endregion

            return true;
        }
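
A hypothetical call site for Recognize: the path is passed by ref because the crop and rotation branches overwrite it with the tempHaarImage.jpg working copy before recursing, so on return it points at the image that was actually analyzed.

        // Hypothetical caller; "recognizer" stands for whatever class hosts Recognize().
        string photoPath = @"C:\photos\portrait.jpg";
        if (recognizer.Recognize(ref photoPath, needCrop: true))
        {
            // photoPath may now reference the cropped/rotated temp image, not the original file.
            Console.WriteLine($"Face detected; male: {recognizer.IsMale}");
        }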