/// <summary>
/// Runs frontal (Haar) and profile (LBP) face detection on <paramref name="image"/>,
/// then estimates gender and age class for every detected face.
/// </summary>
/// <param name="image">Source image; it is only copied, never mutated.</param>
/// <returns>
/// A <see cref="FaceRosponse"/> with one <c>RecognizerResponse</c> per successfully
/// recognized face plus every detected rectangle in <c>personsFaces</c>,
/// or <c>null</c> when no faces were detected at all (original contract, kept).
/// </returns>
public static FaceRosponse AnalyzePhoto(Image <Bgr, byte> image)
        {
            FaceRosponse outData = new FaceRosponse();

            // Cascade data: Haar for frontal faces, LBP for profile faces.
            string frontFaceFilePath = Application.StartupPath + @"\Face\Data\haarcascade_frontalface_default.xml";
            string sideFaceFilePath  = Application.StartupPath + @"\Face\Data\lbpcascade_profileface.xml";

            List<Rectangle> faceRects = new List<Rectangle>();
            faceRects.AddRange(DetectFace(image, frontFaceFilePath));
            faceRects.AddRange(DetectFace(image, sideFaceFilePath));

            foreach (Rectangle faceRect in faceRects)
            {
                // Image<TColor,TDepth> wraps native memory and is IDisposable;
                // the original nulled the copy and relied on GC.Collect(), which
                // leaked native buffers until a collection happened. Dispose deterministically.
                using (Image<Bgr, byte> croppedImage = image.Copy())
                {
                    // Restrict all further processing to the detected face region.
                    croppedImage.ROI = faceRect;

                    // Recognizers return [class, probability]; anything shorter means
                    // recognition failed for this face, so skip it (best-effort, as before).
                    int[] genderFaces = GetGender(croppedImage);
                    if (genderFaces.Length < 2)
                    {
                        continue;
                    }

                    RecognizerResponse faceResponce = new RecognizerResponse();
                    faceResponce.gender     = genderFaces[0] == 0 ? "male" : "female";
                    faceResponce.genderProb = genderFaces[1];

                    int[] ageFaces = GetAge(croppedImage);
                    if (ageFaces.Length < 2)
                    {
                        continue;
                    }

                    faceResponce.ageClass    = ageFaces[0];
                    faceResponce.ageProb     = ageFaces[1];
                    faceResponce.ageDiapason = GetAgeDiapasonByClass(faceResponce.ageClass);

                    outData.personsData.Add(faceResponce);
                }
            }

            // Every detected rectangle is reported, including faces the recognizers skipped.
            outData.personsFaces.AddRange(faceRects);

            // Preserve the original contract: callers receive null, not an empty
            // response, when nothing was detected.
            if (faceRects.Count == 0)
            {
                outData = null;
            }

            return outData;
        }
        /// <summary>
        /// Same analysis as <c>AnalyzePhoto</c>, but each detected face is processed
        /// on its own thread. Despite the "Async" suffix this method BLOCKS the caller
        /// until every worker thread has joined (name kept for caller compatibility).
        /// </summary>
        /// <param name="image">Source image; each worker operates on its own copy.</param>
        /// <returns>
        /// A <see cref="FaceRosponse"/> with the recognized faces, or <c>null</c>
        /// when no faces were detected (original contract, kept).
        /// </returns>
        public static FaceRosponse AnalyzePhotoAsync(Image <Bgr, byte> image)
        {
            // Cascade data: Haar for frontal faces, LBP for profile faces.
            string frontFaceFilePath = Application.StartupPath + @"\Face\Data\haarcascade_frontalface_default.xml";
            string sideFaceFilePath  = Application.StartupPath + @"\Face\Data\lbpcascade_profileface.xml";

            List<Rectangle> faceRects = new List<Rectangle>();
            faceRects.AddRange(DetectFace(image, frontFaceFilePath));
            faceRects.AddRange(DetectFace(image, sideFaceFilePath));

            FaceRosponse outData = new FaceRosponse();

            // Private gate: the original locked outData itself, but that object is
            // returned to callers, who could then contend on (or deadlock against)
            // our internal synchronization. Never lock publicly reachable objects.
            object gate = new object();

            List<Thread> workers = new List<Thread>();

            foreach (Rectangle faceRect in faceRects)
            {
                // C# 5+ scopes the foreach variable per iteration, so capturing
                // faceRect in the lambda below is safe.
                workers.Add(new Thread(() =>
                {
                    // Dispose the per-face copy deterministically; the original
                    // nulled it and leaked native memory until GC.
                    using (Image<Bgr, byte> croppedImage = image.Copy())
                    {
                        croppedImage.ROI = faceRect;

                        // Recognizers return [class, probability]; anything shorter
                        // means recognition failed, so this worker just gives up.
                        int[] genderFaces = GetGender(croppedImage);
                        if (genderFaces.Length < 2)
                        {
                            return;
                        }

                        RecognizerResponse faceResponce = new RecognizerResponse();
                        faceResponce.gender     = genderFaces[0] == 0 ? "male" : "female";
                        faceResponce.genderProb = genderFaces[1];

                        int[] ageFaces = GetAge(croppedImage);
                        if (ageFaces.Length < 2)
                        {
                            return;
                        }

                        faceResponce.ageClass    = ageFaces[0];
                        faceResponce.ageProb     = ageFaces[1];
                        faceResponce.ageDiapason = GetAgeDiapasonByClass(faceResponce.ageClass);

                        lock (gate)
                        {
                            outData.personsData.Add(faceResponce);
                            // NOTE(review): unlike AnalyzePhoto (which reports every
                            // detected rectangle), only successfully recognized faces
                            // reach personsFaces here — confirm this asymmetry is intended.
                            outData.personsFaces.Add(faceRect);
                        }
                    }
                }));
            }

            foreach (Thread worker in workers)
            {
                worker.Start();
            }

            // Block until all per-face workers are done.
            foreach (Thread worker in workers)
            {
                worker.Join();
            }

            // Preserve the original contract: null, not an empty response,
            // when nothing was detected.
            if (faceRects.Count == 0)
            {
                outData = null;
            }

            return outData;
        }