Example #1
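        // Reads the Gender, Age and Expression attributes of the tracked face IDs[i]
        // and shows them in label7/label8/label9, depending on which checkboxes are ticked.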
        public void GuesGenderAndAge(long[] IDs, int i, int tracker)
        {
            String AttributeValues;
            String AttributeValuesAge;
            String AtributValuesExpression;
            string str = "";

            if (0 == FSDK.GetTrackerFacialAttribute(tracker, 0, IDs[i], "Gender", out AttributeValues, 1024) &&
                0 == FSDK.GetTrackerFacialAttribute(tracker, 0, IDs[i], "Age", out AttributeValuesAge, 1024) &&
                0 == FSDK.GetTrackerFacialAttribute(tracker, 0, IDs[i], "Expression", out AtributValuesExpression, 1024))
            {
                if (checkBox2.Checked)
                {
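                    // For the "Age" attribute, GetValueConfidence returns the estimated age in years.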
                    float ConfidenceAge = 0.0f;
                    FSDK.GetValueConfidence(AttributeValuesAge, "Age", ref ConfidenceAge);
                    label7.Text = ((int)ConfidenceAge).ToString();
                }
                else
                {
                    label7.Text = "";
                }
                if (checkBox3.Checked)
                {
                    float ConfidenceMale   = 0.0f;
                    float ConfidenceFemale = 0.0f;
                    FSDK.GetValueConfidence(AttributeValues, "Male", ref ConfidenceMale);
                    FSDK.GetValueConfidence(AttributeValues, "Female", ref ConfidenceFemale);
                    // "Barbat" / "Femeie" are Romanian for "Male" / "Female".
                    label8.Text = (ConfidenceMale > ConfidenceFemale ? "Barbat" : "Femeie") + " " +
                                  (ConfidenceMale > ConfidenceFemale ? (int)(ConfidenceMale * 100) : (int)(ConfidenceFemale * 100)).ToString() + "%";
                }
                else
                {
                    label8.Text = "";
                }
                if (checkBox4.Checked)
                {
                    // The "Expression" attribute is returned as a string such as
                    // "Smile=0.98;EyesOpen=0.99"; display its first name=value pair.
                    string[] tokens = AtributValuesExpression.Split(';');
                    label9.Text = tokens[0];
                }
                else
                {
                    label9.Text = "";
                }
            }
        }
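
        // Opens the camera selected in cameraName, creates a tracker configured for gender
        // detection and, until needClose is set, grabs frames, feeds them to the tracker and
        // draws a rectangle plus a gender/confidence caption over every tracked face.
        // (The FSDK library and FSDKCam capturing are assumed to have been initialized earlier,
        // e.g. in the form's Load handler.)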
        private void button1_Click(object sender, EventArgs e)
        {
            this.button1.Enabled = false;
            int cameraHandle = 0;

            int r = FSDKCam.OpenVideoCamera(ref cameraName, ref cameraHandle);

            if (r != FSDK.FSDKE_OK)
            {
                MessageBox.Show("Error opening the first camera", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                Application.Exit();
            }

            int tracker = 0;

            FSDK.CreateTracker(ref tracker);

            int err = 0; // set realtime face detection parameters

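            // Face recognition is disabled and gender detection enabled for the tracker; note that
            // the Age and Expression attributes queried in GuesGenderAndAge would also require
            // DetectAge/DetectExpression to be enabled here.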
            FSDK.SetTrackerMultipleParameters(tracker, "RecognizeFaces=false; DetectGender=true; HandleArbitraryRotations=false; DetermineFaceRotationAngle=false; InternalResizeWidth=100; FaceDetectionThreshold=5;", ref err);

            while (!needClose)
            {
                Int32 imageHandle = 0;
                if (FSDK.FSDKE_OK != FSDKCam.GrabFrame(cameraHandle, ref imageHandle)) // grab the current frame from the camera
                {
                    Application.DoEvents();
                    continue;
                }
                FSDK.CImage image = new FSDK.CImage(imageHandle);

                long [] IDs;
                long    faceCount = 0;
                FSDK.FeedFrame(tracker, 0, image.ImageHandle, ref faceCount, out IDs, sizeof(long) * 256); // maximum 256 faces detected
                Array.Resize(ref IDs, (int)faceCount);

                Image    frameImage = image.ToCLRImage();
                Graphics gr         = Graphics.FromImage(frameImage);

                for (int i = 0; i < IDs.Length; ++i)
                {
                    FSDK.TFacePosition facePosition = new FSDK.TFacePosition();
                    FSDK.GetTrackerFacePosition(tracker, 0, IDs[i], ref facePosition);

                    int left = facePosition.xc - (int)(facePosition.w * 0.6);
                    int top  = facePosition.yc - (int)(facePosition.w * 0.5);
                    int w    = (int)(facePosition.w * 1.2);
                    gr.DrawRectangle(Pens.LightGreen, left, top, w, w);

                    String AttributeValues;
                    if (0 == FSDK.GetTrackerFacialAttribute(tracker, 0, IDs[i], "Gender", out AttributeValues, 1024))
                    {
                        float ConfidenceMale   = 0.0f;
                        float ConfidenceFemale = 0.0f;
                        FSDK.GetValueConfidence(AttributeValues, "Male", ref ConfidenceMale);
                        FSDK.GetValueConfidence(AttributeValues, "Female", ref ConfidenceFemale);

                        String str = (ConfidenceMale > ConfidenceFemale ? "Male" : "Female") + ", " +
                                     (ConfidenceMale > ConfidenceFemale ? (int)(ConfidenceMale * 100) : (int)(ConfidenceFemale * 100)).ToString() + "%";

                        StringFormat format = new StringFormat();
                        format.Alignment = StringAlignment.Center;

                        gr.DrawString(str, new System.Drawing.Font("Arial", 16),
                                      new System.Drawing.SolidBrush(System.Drawing.Color.LightGreen),
                                      facePosition.xc, top + w + 5, format);
                    }
                }

                // display current frame
                pictureBox1.Image = frameImage;

                GC.Collect(); // collect the garbage

                // make UI controls accessible
                Application.DoEvents();
            }
            FSDK.FreeTracker(tracker);

            FSDKCam.CloseVideoCamera(cameraHandle);
            FSDKCam.FinalizeCapturing();
        }
Example #3
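        // Matches every enrolled subject in SubjectList against every face in FaceSearchList using
        // FSDK.MatchFaces, collects smile/eyes-open/gender confidences for matches above the
        // FAR-derived threshold, and adds the resulting image pairs to dataGridView1.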
        private void matchesFace()
        {
            if (dataGridView1.Rows.Count > 0)
            {
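                // Clear the previous results; Invoke marshals the grid updates onto the UI thread,
                // since this method apparently runs on a worker thread.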
                this.dataGridView1.Invoke(new MethodInvoker(() =>
                                                            this.dataGridView1.Rows.Clear()));
                this.dataGridView1.Invoke(new MethodInvoker(() =>
                                                            this.dataGridView1.Refresh()));
            }

            for (int i = 0; i < SubjectList.Count; i++)
            {
                if (SubjectList.Count >= 1)
                {
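                    // Convert the desired false acceptance rate (FARValue is in percent) into a matching
                    // threshold; the result is stored in FaceDetectionThreshold and used below.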
                    FSDK.GetMatchingThresholdAtFAR(FARValue / 100, ref FaceDetectionThreshold);

                    TFaceRecord DbSubject    = SubjectList[i];
                    int         MatchedCount = 0;
                    int         FaceCount    = FaceSearchList.Count;
                    float[]     Similarities = new float[FaceCount];
                    float[]     Smile        = new float[FaceCount];
                    float[]     EyesOpen     = new float[FaceCount];
                    float[]     Male         = new float[FaceCount];
                    float[]     Female       = new float[FaceCount];

                    int[] Numbers = new int[FaceCount];

                    for (int k = 0; k < FaceSearchList.Count; k++)
                    {
                        float Similarity         = 0.0f;
                        float ConfidenceSmile    = 0.0f;
                        float ConfidenceEyesOpen = 0.0f;
                        float ConfidenceMale     = 0.0f;
                        float ConfidenceFemale   = 0.0f;

                        TFaceRecord SearchFace = FaceSearchList[k];
                        FSDK.MatchFaces(ref DbSubject.Template, ref SearchFace.Template, ref Similarity);

                        long          MaxSizeInBytes   = 100000;
                        string        ExpressionValues = "";
                        string        GenderValues     = "";
                        FSDK.CImage   CurrentImage     = SearchFace.image;
                        FSDK.TPoint[] Facefeatures     = null;

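                        // Detect the facial feature points on the face thumbnail; the null check below
                        // covers the case where feature detection fails.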
                        FSDK.DetectFacialFeatures(SearchFace.faceImage.ImageHandle, out Facefeatures);

                        if (Facefeatures != null)
                        {
                            FSDK.DetectFacialAttributeUsingFeatures(SearchFace.faceImage.ImageHandle, ref Facefeatures, "Expression", out ExpressionValues, MaxSizeInBytes);
                            FSDK.GetValueConfidence(ExpressionValues, "Smile", ref ConfidenceSmile);
                            FSDK.GetValueConfidence(ExpressionValues, "EyesOpen", ref ConfidenceEyesOpen);

                            FSDK.DetectFacialAttributeUsingFeatures(SearchFace.faceImage.ImageHandle, ref Facefeatures, "Gender", out GenderValues, MaxSizeInBytes);
                            FSDK.GetValueConfidence(GenderValues, "Male", ref ConfidenceMale);
                            FSDK.GetValueConfidence(GenderValues, "Female", ref ConfidenceFemale);

                            if (Similarity >= FaceDetectionThreshold)
                            {
                                Similarities[MatchedCount] = Similarity;
                                Smile[MatchedCount]        = ConfidenceSmile;
                                EyesOpen[MatchedCount]     = ConfidenceEyesOpen;
                                Male[MatchedCount]         = ConfidenceMale;
                                Female[MatchedCount]       = ConfidenceFemale;

                                Numbers[MatchedCount] = k;
                                ++MatchedCount;
                            }
                        }
                        else
                        {
                            if (Similarity >= FaceDetectionThreshold)
                            {
                                Similarities[MatchedCount] = Similarity;
                                Smile[MatchedCount]        = 0;
                                EyesOpen[MatchedCount]     = 0;
                                Male[MatchedCount]         = 0;
                                Female[MatchedCount]       = 0;

                                Numbers[MatchedCount] = k;
                                ++MatchedCount;
                            }
                        }
                    }

                    if (MatchedCount == 0)
                    {
                        MessageBox.Show("No matches found. You can try to increase the FAR parameter in the Options dialog box.", "No matches");
                    }
                    else
                    {
                        for (int j = 0; j < MatchedCount; j++)
                        {
                            if ((Similarities[j] * 100.0f) >= 30.0f) // only display matches with at least 30% similarity
                            {
                                // Numbers[j] holds the original FaceSearchList index of the j-th match.
                                resultImagelist.Images.Add(FaceSearchList[Numbers[j]].faceImage.ToCLRImage());

                                Image img1 = FaceSearchList[Numbers[j]].faceImage.ToCLRImage();
                                img1 = (Image)(new Bitmap(img1, new Size(100, 100)));

                                Image img2 = Image.FromFile(SubjectList[i].ImageFileName);
                                img2 = (Image)(new Bitmap(img2, new Size(100, 100)));

                                string feature = DbSubject.suspectName + " \r\n\nSimilarity = " + (Similarities[j] * 100).ToString() + " Smile:" + Smile[j] * 100 + " Eyes Open: " + EyesOpen[j] * 100
                                                 + " Male:" + Male[j] * 100 + " Female: " + Female[j] * 100;
                                Object[] row = new Object[] { img1, img2, feature };

                                this.dataGridView1.Invoke(new MethodInvoker(() =>
                                                                            this.dataGridView1.Rows.Add(row)));
                            }
                        }
                    }
                }
            }
        }
Example #4
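        // Detects a face and its feature points in the image at 'path', estimates gender, optionally
        // crops the photo to the face region (needCrop) and levels it along the eye line (needRotation),
        // recursing on the corrected image, and finally stores the feature points as coordinates
        // relative to the image size.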
        public bool Recognize(ref string path, bool needCrop, bool needRotation = true)
        {
            FaceRectRelative = RectangleF.Empty;
            LeftEyeCenter    = RightEyeCenter = LeftMouth = LeftNose = RightNose = RightMouth = Vector2.Zero;

            var executablePath = Path.GetDirectoryName(Application.ExecutablePath);

            FSDK.TPoint[] pointFeature;
            var           image = new FSDK.CImage(path);

            var faceRectangle  = Rectangle.Empty;
            var mouthRectangle = Rectangle.Empty;

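            // Detection setup: threshold 5, handle arbitrary rotations, determine the in-plane
            // rotation angle, internal resize width 512 px.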
            FSDK.SetFaceDetectionThreshold(5);
            FSDK.SetFaceDetectionParameters(true, true, 512);
            var facePosition = image.DetectFace();

            if (0 == facePosition.w)
            {
                MessageBox.Show("No faces detected", "Face Detection");
                return(false);
            }

            if (needCrop)
            {
                RotatedAngle = facePosition.angle;      // the angle by which the head is rotated.
            }
            pointFeature = image.DetectFacialFeaturesInRegion(ref facePosition);
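            // pointFeature now holds the detected facial feature points; the numeric indices used
            // below correspond to Luxand FaceSDK's FSDKP_* feature constants (0 and 1 are the eye centers).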

            String AttributeValues;         // gender detection

            FSDK.DetectFacialAttributeUsingFeatures(image.ImageHandle, ref pointFeature, "Gender", out AttributeValues, 1024);
            var ConfidenceMale   = 0.0f;
            var ConfidenceFemale = 0.0f;
            var Age = 0.0f;             // age estimation does not work in this version of the recognizer (the variable is left unused).

            FSDK.GetValueConfidence(AttributeValues, "Male", ref ConfidenceMale);
            FSDK.GetValueConfidence(AttributeValues, "Female", ref ConfidenceFemale);
            IsMale = ConfidenceMale > ConfidenceFemale;

            FSDK.DetectFacialAttributeUsingFeatures(image.ImageHandle, ref pointFeature, "Age", out AttributeValues, 1024);

            var left = facePosition.xc - (int)(facePosition.w * 0.6f);

            left = left < 0 ? 0 : left;
            //   int top = facePosition.yc - (int)(facePosition.w * 0.5f);             // the top of the head is detected incorrectly, so we simply don't crop it :)
            BottomFace = new Vector2(pointFeature[11].x, pointFeature[11].y);

            var distance = pointFeature[2].y - pointFeature[11].y;
            var top      = pointFeature[16].y + distance - 15;     // height estimated using the old guy's algorithm

            top = top < 0 ? 0 : top;

            var newWidth = (int)(facePosition.w * 1.2);

            newWidth = newWidth > image.Width || newWidth == 0 ? image.Width : newWidth;

            faceRectangle = new Rectangle(left, top, newWidth, BottomFace.Y + 15 < image.Height ? (int)(BottomFace.Y + 15) - top : image.Height - top - 1);
            if (needCrop)       // if this is project creation, crop the photo and keep only the head
            {
                var bmpImage = new Bitmap(path);
                FaceColor = GetFaceColor(bmpImage, pointFeature);

                var croppedImage = ImageEx.Crop(bmpImage, faceRectangle);
                path = UserConfig.AppDataDir;
                FolderEx.CreateDirectory(path);
                path = Path.Combine(path, "tempHaarImage.jpg");
                croppedImage.Save(path, ImageFormat.Jpeg);
                croppedImage.Dispose();

                return(Recognize(ref path, false));
            }

            LeftEyeCenter  = new Vector2(pointFeature[0].x, pointFeature[0].y);
            RightEyeCenter = new Vector2(pointFeature[1].x, pointFeature[1].y);

            LeftMouth  = new Vector2(pointFeature[3].x, pointFeature[3].y);
            RightMouth = new Vector2(pointFeature[4].x, pointFeature[4].y);

            LeftNose  = new Vector2(pointFeature[45].x, pointFeature[45].y);
            RightNose = new Vector2(pointFeature[46].x, pointFeature[46].y);

            TopFace     = new Vector2(pointFeature[66].x, pointFeature[66].y);
            MiddleFace1 = new Vector2(pointFeature[66].x, pointFeature[66].y);
            MiddleFace2 = new Vector2(pointFeature[5].x, pointFeature[5].y);


            RightMiddleFace1 = new Vector2(pointFeature[67].x, pointFeature[67].y);
            RightMiddleFace2 = new Vector2(pointFeature[6].x, pointFeature[6].y);

            #region Rotate the photo so the eyes are level

            if (needRotation)
            {
                var v = new Vector2(LeftEyeCenter.X - RightEyeCenter.X, LeftEyeCenter.Y - RightEyeCenter.Y);
                v.Normalize();      // normalize the eye-to-eye direction vector
                var xVector = new Vector2(1, 0);

                var xDiff = xVector.X - v.X;
                var yDiff = xVector.Y - v.Y;
                var angle = Math.Atan2(yDiff, xDiff) * 180.0 / Math.PI;

                if (Math.Abs(angle) > 1 && angleCount <= 5)                // rotate tilted heads (angleCount limits the number of correction passes)
                {
                    ++angleCount;

                    using (var ms = new MemoryStream(File.ReadAllBytes(path))) // Don't use using!!
                    {
                        var originalImg = (Bitmap)Image.FromStream(ms);

                        path = UserConfig.AppDataDir;
                        FolderEx.CreateDirectory(path);
                        path = Path.Combine(path, "tempHaarImage.jpg");

                        using (var ii = ImageEx.RotateImage(new Bitmap(originalImg), (float)-angle))
                            ii.Save(path, ImageFormat.Jpeg);
                    }

                    return(Recognize(ref path, false));
                }
            }

            #endregion

            var upperUpperLip = pointFeature[54];       // upper point of the upper lip
            var lowerUpperLip = pointFeature[61];       // lower point of the upper lip
            var lowerLip      = pointFeature[64];       // upper point of the lower lip

            var diff2 = Math.Abs(lowerUpperLip.y - upperUpperLip.y);
            var diffX = Math.Abs(lowerLip.y - lowerUpperLip.y);

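            // The mouth is considered open when the gap between the lips exceeds the upper-lip thickness.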
            IsOpenSmile = diffX > diff2;

            #region Convert to relative coordinates

            LeftMouth  = new Vector2(LeftMouth.X / (image.Width * 1f), LeftMouth.Y / (image.Height * 1f));
            RightMouth = new Vector2(RightMouth.X / (image.Width * 1f), RightMouth.Y / (image.Height * 1f));

            LeftEyeCenter  = new Vector2(LeftEyeCenter.X / (image.Width * 1f), LeftEyeCenter.Y / (image.Height * 1f));
            RightEyeCenter = new Vector2(RightEyeCenter.X / (image.Width * 1f), RightEyeCenter.Y / (image.Height * 1f));

            LeftNose  = new Vector2(LeftNose.X / (image.Width * 1f), LeftNose.Y / (image.Height * 1f));
            RightNose = new Vector2(RightNose.X / (image.Width * 1f), RightNose.Y / (image.Height * 1f));

            TopFace     = new Vector2(TopFace.X / (image.Width * 1f), TopFace.Y / (image.Height * 1f));
            MiddleFace1 = new Vector2(MiddleFace1.X / (image.Width * 1f), MiddleFace1.Y / (image.Height * 1f));
            MiddleFace2 = new Vector2(MiddleFace2.X / (image.Width * 1f), MiddleFace2.Y / (image.Height * 1f));
            BottomFace  = new Vector2(BottomFace.X / (image.Width * 1f), BottomFace.Y / (image.Height * 1f));

            RightMiddleFace1 = new Vector2(RightMiddleFace1.X / (image.Width * 1f), RightMiddleFace1.Y / (image.Height * 1f));
            RightMiddleFace2 = new Vector2(RightMiddleFace2.X / (image.Width * 1f), RightMiddleFace2.Y / (image.Height * 1f));

            FacialFeatures = new List <Vector3>();
            RealPoints     = new List <Vector2>();
            int index       = 0;
            var pointDepths = GetPointDepths();
            foreach (var point in pointFeature)
            {
                FacialFeatures.Add(new Vector3(point.x / (image.Width * 1f), point.y / (image.Height * 1f), pointDepths[index++]));
                RealPoints.Add(new Vector2(point.x, point.y));
            }

            #endregion

            return(true);
        }