/// <summary>
/// Checks whether any detected face in <paramref name="frame"/> matches one of the
/// user's stored reference images (front, right and left head rotations).
/// </summary>
/// <param name="frame">Camera frame the faces were detected in.</param>
/// <param name="user">User whose reference images are compared against.</param>
/// <param name="rects">Bounding boxes of faces already detected in the frame.</param>
/// <returns>
/// <c>faceDetected</c> — true when <paramref name="rects"/> is non-empty;
/// <c>faceRecognized</c> — true when any candidate face matched any reference image.
/// </returns>
private (bool faceDetected, bool faceRecognized) CompareFaces(Mat frame, User user, Rect[] rects)
{
    bool faceDetected = false, faceRecognized = false;

    if (rects.Length > 0)
    {
        faceDetected = true;
        var ph = _testImageRepository.GetReferenceImages(user);

        #region DEV_MODE
#if DEV_MODE
        Application.Current.Dispatcher.Invoke(() =>
        {
            Cv2.ImShow("1", ph.First().Img);
            Cv2.ImShow("2", frame);
        });
#endif
        #endregion

        // Hoist the reference-image lookups out of the per-face loop — they are
        // loop-invariant. First() still throws if a rotation is missing, matching
        // the original behavior.
        var referenceImages = new[]
        {
            ph.First(i => i.HorizontalHeadRotation == HeadRotation.Front).Img,
            ph.First(i => i.HorizontalHeadRotation == HeadRotation.Right).Img,
            ph.First(i => i.HorizontalHeadRotation == HeadRotation.Left).Img,
        };

        foreach (var mat in ExcludeFaces(frame, rects))
        {
            // BUG FIX: the original loop had no break after the Left comparison,
            // so a successful match on one candidate face could be overwritten
            // with false by the next iteration. Any() short-circuits per reference,
            // and we break out of the loop as soon as any face is recognized.
            faceRecognized = referenceImages.Any(
                img => _faceRecognition.CompareFaces(img, null, mat, null));
            if (faceRecognized)
            {
                break;
            }
        }
    }

    Log.Logger.Debug($"Face detected: {faceDetected} Face recognized: {faceRecognized}");
    return (faceDetected, faceRecognized);
}
/// <summary>
/// Creates a task that waits for both face-encoding tasks, validates that the two
/// faces match, and on success advances the initialization progress counter.
/// </summary>
/// <param name="face1">First face image.</param>
/// <param name="face2">Second face image.</param>
/// <param name="faceEncoding1">Pending encoding computation for <paramref name="face1"/>.</param>
/// <param name="faceEncoding2">Pending encoding computation for <paramref name="face2"/>.</param>
/// <param name="ct">Cancels the work before it starts (same semantics as the token passed to the original StartNew).</param>
/// <returns>
/// A task that faults with <see cref="ArgumentException"/> when an encoding is
/// unavailable or the faces do not match.
/// </returns>
private Task CreateFaceValidationTask(Mat face1, Mat face2, Task<FaceEncodingData?> faceEncoding1, Task<FaceEncodingData?> faceEncoding2, CancellationToken ct)
{
    // Task.Run instead of Task.Factory.StartNew (CA2008: StartNew without explicit
    // TaskCreationOptions/scheduler is error-prone), and await instead of blocking a
    // thread-pool thread with GetAwaiter().GetResult() on possibly-incomplete tasks.
    return Task.Run(async () =>
    {
        var fe1 = await faceEncoding1.ConfigureAwait(false);
        var fe2 = await faceEncoding2.ConfigureAwait(false);

        if (fe1 == null || fe2 == null)
        {
            throw new ArgumentException("Cannot get face encodings");
        }

        if (!_dnFaceRecognition.CompareFaces(face1, fe1, face2, fe2))
        {
            throw new ArgumentException("Invalid faces");
        }

        _progress += 6;
        ReportInitFaceProgress(null);
    }, ct);
}