public string saveEmployee(Image newImage, string name, string middleName, string lastName, string email, FaceRecognizerMethode faceRecognizerMethode)
{
    unitOfWork = new GenericUnitOfWork();
    GenericRepository<Employee> employeeRepo = unitOfWork.GetRepoInstance<Employee>();

    // The email is unique, so use it to look up an existing employee.
    // FirstOrDefault returns null for a new user instead of throwing.
    Employee employee = employeeRepo.GetAllRecords().FirstOrDefault(e => e.email == email);

    // Add the employee if it does not exist yet.
    if (employee == null)
    {
        employee = new Employee { name = name, middleName = middleName, lastName = lastName, email = email };
        employeeRepo.Add(employee);
        unitOfWork.SaveChanges();
    }

    GenericRepository<DistanceResult> distanceResultRepo = unitOfWork.GetRepoInstance<DistanceResult>();

    // The face image is saved with a GUID as its file name.
    Guid guid = Guid.NewGuid();
    var inputImage = new Image<Bgr, Byte>(new Bitmap(newImage));

    // detection(..) can extract N faces; enrollment only accepts exactly one.
    Rectangle[] rectangleFace = detection(inputImage, pathXMLHaarcascade);
    if (rectangleFace.Length <= 0)
    {
        return Result.NoDetected.ToString();
    }
    if (rectangleFace.Length > 1)
    {
        return Result.MultipleFacesDetected.ToString();
    }

    Image<Gray, byte> grayFrame = toGrayEqualizeFrame(inputImage);
    Image<Gray, Byte> faceEMGUCV = formatRectangleFaces(grayFrame.ToBitmap(), rectangleFace[0]);
    faceEMGUCV._EqualizeHist();
    faceEMGUCV.Save(pathImg + @"\" + guid.ToString() + ".Jpeg");

    FaceRecognizer faceRecognition;
    switch (faceRecognizerMethode)
    {
        case FaceRecognizerMethode.EigenFaceRecognizerMethode:
            faceRecognition = new EigenFaceRecognizer(numComponentsEigen, thresholdEigen); // TODO: add try/catch here
            break;
        case FaceRecognizerMethode.FisherFaceRecognizerMethode:
            faceRecognition = new FisherFaceRecognizer(numComponentsFisher, thresholdFisher);
            break;
        case FaceRecognizerMethode.LBPHFaceRecognizerMethode:
            faceRecognition = new LBPHFaceRecognizer(radiusLBPH, neighborsLBPH, gridXLBPH, gridYLBPH, thresholdLBPH);
            break;
        default:
            return null;
    }

    double distance = 2; // Placeholder distance stored with the enrollment record.

    // Save the enrollment record. Store the method that was actually used,
    // instead of the hard-coded FisherFaceRecognizerMethode from the original code.
    DistanceResult dist = new DistanceResult();
    dist.algorithm = faceRecognizerMethode.ToString();
    dist.employeeId = employee.employeeId;
    dist.photoName = guid.ToString();
    dist.distance = distance;
    distanceResultRepo.Add(dist);
    unitOfWork.SaveChanges();

    // Rebuild the training set from every stored photo. imagesDB and labels do not
    // really need to be fields; local arrays would be enough here.
    int lengthArrays = distanceResultRepo.GetAllRecords().Count();
    imagesDB = new Image<Gray, Byte>[lengthArrays];
    labels = new int[lengthArrays];
    int i = 0;
    foreach (DistanceResult di in distanceResultRepo.GetAllRecords())
    {
        imagesDB[i] = new Image<Gray, Byte>(pathImg + @"\" + di.photoName + ".Jpeg");
        labels[i] = di.employeeId; // the training label is the employeeId
        i++;
    }

    // Training requires more than one employee (FisherFaceRecognizer in particular
    // needs at least two distinct classes).
    if (employeeRepo.GetAllRecords().Count() > 1)
    {
        faceRecognition.Train(imagesDB, labels);
        faceRecognition.Save(pathImg + @"\" + "TrainingSet");
    }

    return Result.Saved.ToString();
}
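For reference, a minimal calling sketch for the enrollment path, written as an extra method on the same class; the file path, the example employee data, and the method name enrollEmployeeExample are assumptions, not part of the original code.

public void enrollEmployeeExample()
{
    // Hypothetical image path; saveEmployee accepts any System.Drawing.Image.
    using (Image photo = Image.FromFile(@"C:\temp\new-employee.jpg"))
    {
        string result = saveEmployee(photo, "John", "A.", "Doe", "john.doe@example.com",
            FaceRecognizerMethode.EigenFaceRecognizerMethode);

        // saveEmployee reports the outcome as a Result name:
        // Saved, NoDetected or MultipleFacesDetected.
        Debug.WriteLine(result == Result.Saved.ToString()
            ? "Employee enrolled and training set updated."
            : "Enrollment failed: " + result);
    }
}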
public EmployeeStructure[] recognizeMultipleFaces(Image newImage, FaceRecognizerMethode faceRecognizerMethode)
{
    var inputImage = new Image<Bgr, Byte>(new Bitmap(newImage));
    Rectangle[] rectangleFace = detection(inputImage, this.pathXMLHaarcascade);
    EmployeeStructure[] employeeStructure;

    if (rectangleFace.Length <= 0)
    {
        // Return a single entry reporting the failure; the original code allocated a
        // zero-length array and then indexed position 0, which always throws.
        employeeStructure = new EmployeeStructure[1];
        employeeStructure[0] = new EmployeeStructure(Result.NoDetected.ToString(), null, null, null, null, 0, 0, 0, 0);
        return employeeStructure;
    }

    Image<Gray, byte> grayFrame = toGrayEqualizeFrame(inputImage);
    employeeStructure = new EmployeeStructure[rectangleFace.Length];

    FaceRecognizer faceRecognition;
    switch (faceRecognizerMethode)
    {
        case FaceRecognizerMethode.EigenFaceRecognizerMethode:
            faceRecognition = new EigenFaceRecognizer(numComponentsEigen, thresholdEigen); // TODO: add try/catch here
            break;
        case FaceRecognizerMethode.FisherFaceRecognizerMethode:
            faceRecognition = new FisherFaceRecognizer(numComponentsFisher, thresholdFisher);
            break;
        case FaceRecognizerMethode.LBPHFaceRecognizerMethode:
            faceRecognition = new LBPHFaceRecognizer(radiusLBPH, neighborsLBPH, gridXLBPH, gridYLBPH, thresholdLBPH);
            break;
        default:
            faceRecognition = new EigenFaceRecognizer(numComponentsEigen, thresholdEigen);
            break;
    }

    faceRecognition.Load(pathImg + @"\" + "TrainingSet");

    // Note: unitOfWork is shared across the parallel iterations; an EF context is not
    // thread-safe, so consider creating a repository per iteration or dropping Parallel.For.
    Parallel.For(0, rectangleFace.Length, i =>
    {
        Image<Gray, byte> faceEMGUCV = formatRectangleFaces(grayFrame.ToBitmap(), rectangleFace[i]);
        FaceRecognizer.PredictionResult predictionResult = faceRecognition.Predict(faceEMGUCV);

        if (predictionResult.Label != -1 /*&& predictionResult.Distance > thresholdEigen*/)
        {
            // The label stored during training is the employeeId.
            int label = predictionResult.Label;
            GenericRepository<Employee> employeeRepo = unitOfWork.GetRepoInstance<Employee>();
            Employee em = employeeRepo.GetFirstOrDefault(label);

            // Use the i-th rectangle, not rectangleFace[0], so each face reports its own position.
            employeeStructure[i] = new EmployeeStructure(Result.Recognized.ToString(), em.name, em.middleName, em.lastName, em.email,
                rectangleFace[i].X, rectangleFace[i].Y, rectangleFace[i].Width, rectangleFace[i].Height);
        }
        else
        {
            // Only mark the face as Unknown when it was not recognized; the original code
            // overwrote every result with Unknown unconditionally.
            employeeStructure[i] = new EmployeeStructure(Result.Unknown.ToString(), null, null, null, null,
                rectangleFace[i].X, rectangleFace[i].Y, rectangleFace[i].Width, rectangleFace[i].Height);
        }
    });

    return employeeStructure;
}
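And a matching sketch for the recognition path; the frame path and the method name recognizeFrameExample are assumptions, and only the result field (which the code above sets on every entry) is read from each EmployeeStructure.

public void recognizeFrameExample()
{
    // Hypothetical image path for a frame that may contain several faces.
    using (Image frame = Image.FromFile(@"C:\temp\frame.jpg"))
    {
        EmployeeStructure[] faces = recognizeMultipleFaces(frame, FaceRecognizerMethode.LBPHFaceRecognizerMethode);

        foreach (EmployeeStructure face in faces)
        {
            // Each entry reports Recognized, Unknown or NoDetected; recognized entries
            // also carry the employee's name, email and face rectangle via the constructor.
            Debug.WriteLine(face.result);
        }
    }
}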