Ejemplo n.º 1
0
        /// <summary>
        /// Sends the cropped faces to the emotion API and packages the response,
        /// copying the capture metadata through so the result can be correlated
        /// with the originating frame.
        /// </summary>
        /// <param name="faces">Cropped faces plus their capture time and tag.</param>
        /// <returns>
        /// A <see cref="RecognitionResult"/> with the metadata filled in; on API
        /// failure <c>Faces</c> is left unset (the exception is only logged).
        /// </returns>
        private RecognitionResult RecognizeEmotions(FaceCropResult faces)
        {
            var result = new RecognitionResult
            {
                CaptureTime = faces.CaptureTime,
                Tag         = faces.Tag
            };

            try
            {
                // NOTE(review): blocking on an async call via GetAwaiter().GetResult()
                // risks deadlock on sync-context hosts — confirm the call chain
                // tolerates this, or make the pipeline async all the way.
                result.Faces = _emotionApi
                    .DetectEmotions(faces.Faces)
                    .GetAwaiter()
                    .GetResult();
            }
            catch (Exception ex)
            {
                // Best effort: log and fall through rather than crash the pipeline.
                Debug.WriteLine(ex);
            }

            return result;
        }
Ejemplo n.º 2
0
        /// <summary>
        /// Detects faces in the captured frame using the Haar cascade classifier
        /// and returns each face as a base64-encoded JPEG crop.
        /// </summary>
        /// <param name="capture">The frame to scan plus its capture timestamp.</param>
        /// <returns>One <c>Face</c> entry per detected rectangle (empty when none).</returns>
        private FaceCropResult DetectFaces(ImageCapture capture)
        {
            Rect[] rects = _classifier.DetectMultiScale(capture.image, 1.08, 2, HaarDetectionType.ScaleImage, new Size(30, 30));

            FaceCropResult faces = new FaceCropResult
            {
                CaptureTime = capture.captureTime,
                Faces       = new FaceCropResult.Face[rects.Length]
            };

            // Crops must stay inside the frame: inflating a rectangle detected near
            // the border would otherwise push it out of bounds and make SubMat throw.
            Rect imageBounds = new Rect(0, 0, capture.image.Width, capture.image.Height);

            for (int i = 0; i < rects.Length; i++)
            {
                // Grow the detection box by 25% per side to include some context
                // around the face, then clamp it back to the image bounds.
                rects[i].Inflate((int)(rects[i].Width * 0.25), (int)(rects[i].Height * 0.25));
                Rect roi = rects[i] & imageBounds;

                FaceCropResult.Face face = new FaceCropResult.Face();
                face.Id = i.ToString();

                // SubMat returns a Mat backed by native memory — dispose it rather
                // than leaking one handle per detected face until GC finalization.
                using (Mat crop = capture.image.SubMat(roi))
                {
                    face.ImageBase64 = Convert.ToBase64String(crop.ToBytes(".jpg"));
                }

                faces.Faces[i] = face;
            }

            return faces;
        }