        // Walks a directory of per-person subfolders ("FirstName LastName") and feeds every image found into the face recognition training set.
        public Dictionary<string, FaceRecognitionTrainingStatus> ReadImagesWithPersonsFromDirectory(string directoryWithImages)
        {
            var faceRecognitionTrainingStatuses = new Dictionary<string, FaceRecognitionTrainingStatus>();

            if (!Directory.Exists(directoryWithImages))
            {
                faceRecognitionTrainingStatuses.Add("failure", FaceRecognitionTrainingStatus.TrainingFailure);

                return faceRecognitionTrainingStatuses;
            }

            var directoryWithImagesInfo = new DirectoryInfo(directoryWithImages);

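            // Each subdirectory is expected to be named "FirstName LastName" and to contain that person's training images.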
            foreach (var personDirectoryInfo in directoryWithImagesInfo.GetDirectories())
            {
                var personDirectoryInfoSplitted = personDirectoryInfo.Name.Split(' ');

                if (personDirectoryInfoSplitted.Length != 2)
                {
                    faceRecognitionTrainingStatuses.Add(personDirectoryInfo.Name, FaceRecognitionTrainingStatus.TrainingFailure);

                    return faceRecognitionTrainingStatuses;
                }

                var simplePersonFromDirectoryInfo = new SimplePerson
                {
                    FirstName = personDirectoryInfoSplitted.First(),
                    LastName = personDirectoryInfoSplitted.Last()
                };

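                // Try to add every image in the person's folder to the training set, recording a status per file.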
                foreach (var imageFileInfo in personDirectoryInfo.GetFiles())
                {
                    try
                    {
                        using (var imageWithFace = new Bitmap(imageFileInfo.FullName))
                        {
                            var faceRecognitionTrainingStatus = _faceRecognitionAlgorithm.AddImageToTrainingSet(imageWithFace, simplePersonFromDirectoryInfo);

                            // Use the indexer so a file name that repeats in another person's folder does not make Add throw.
                            faceRecognitionTrainingStatuses[imageFileInfo.Name] = faceRecognitionTrainingStatus;
                        }

                    }
                    catch (Exception)
                    {
                        // Record the failure for this file and continue with the remaining images instead of aborting the whole run.
                        faceRecognitionTrainingStatuses[imageFileInfo.Name] = FaceRecognitionTrainingStatus.TrainingFailure;
                    }
                }
            }

            return faceRecognitionTrainingStatuses;
        }
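
        // Adds a single, already loaded image to the training set for the given person.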
        public FaceRecognitionTrainingStatus TrainFaceRecognition(Bitmap image, SimplePerson personOnImage)
        {
            return _faceRecognitionAlgorithm.AddImageToTrainingSet(image, personOnImage);
        }
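
        // Detects the single face on the image, stores the original picture plus a normalized grayscale thumbnail,
        // and persists both together with the tagged person.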
        public FaceRecognitionTrainingStatus AddImageToTrainingSet(Bitmap faceImage, SimplePerson personOnImage)
        {
            using (var originalImage = new Image<Bgr, byte>(faceImage))
            {
                try
                {
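                    // Run face detection first: training requires exactly one face on the image.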
                    var faces = _faceDetectionAlgorithm.FindFaces(faceImage);

                    if (faces.Count == 1)
                    {
                        var face = faces.First();

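                        // Remember where the face was found so the stored pictures can be tagged with its bounding box.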
                        var tag = new PictureTag
                        {
                            X = face.Bounds.X,
                            Y = face.Bounds.Y,
                            Height = face.Bounds.Height,
                            Width = face.Bounds.Width,
                        };
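                        // Reuse an existing person record when one matches by name, otherwise create a new one.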
                        var existingPerson = _unitOfWork.Repository<Person>()
                            .Query(q => q.FirstName == personOnImage.FirstName && q.LastName == personOnImage.LastName)
                            .Select()
                            .FirstOrDefault();

                        if (existingPerson != null)
                        {
                            tag.PersonOnImageTag = existingPerson;
                        }
                        else
                        {
                            tag.PersonOnImageTag = new Person
                            {
                                FirstName = personOnImage.FirstName,
                                LastName = personOnImage.LastName
                            };
                        }

                        // Make sure the target folder exists; CreateDirectory is a no-op when the directory is already there.
                        var facesDirectory = Path.Combine(_basePicturesPath, "Files", "Faces");
                        Directory.CreateDirectory(facesDirectory);

                        // Crop the detected face and normalize it to the configured grayscale thumbnail size.
                        var thumbnailGrayscaleIplImage = CropAndNormalizeFace(originalImage, face);
                        string originalImageGuid = Guid.NewGuid().ToString();
                        string thumbnailGrayscaleGuid = Guid.NewGuid().ToString();
                        // Take the extension from the source bitmap; the bitmap generated by the Emgu image reports an in-memory format rather than the original file format.
                        string imageExtension = FileHelper.ImageExtensionFromImageFormat(faceImage.RawFormat);
                        string localOriginalImagePath = Path.Combine(facesDirectory, originalImageGuid + imageExtension);
                        string localThumbnailGrayscaleImagePath = Path.Combine(facesDirectory, thumbnailGrayscaleGuid + imageExtension);

                        originalImage.Save(localOriginalImagePath);
                        thumbnailGrayscaleIplImage.Save(localThumbnailGrayscaleImagePath);
                        //FileHelper.Save(originalImage, orignalImagePath, 90);
                        //FileHelper.Save(thumbnailGrayscaleIplImage, thumbnailGrayscaleImagePath, 90);

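                        // Store both pictures (original and grayscale thumbnail) on one image entity, each tagged with the same face region and person.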
                        var image = new FaceDetection.Library.Models.Image
                        {
                            Pictures = new List<Picture>()
                            {
                                new Picture
                                {
                                    PicturePath = "/Files/Faces/"+originalImageGuid+imageExtension,
                                    NumberOfChannels = originalImage.NumberOfChannels,
                                    Height = faceImage.Height,
                                    Width = faceImage.Width,
                                    Type = PictureType.Original,
                                    Tags = new List<PictureTag>
                                    {
                                        tag
                                    }
                                },
                                new Picture
                                {
                                    PicturePath = "/Files/Faces/"+thumbnailGrayscaleGuid+imageExtension,
                                    NumberOfChannels = thumbnailGrayscaleIplImage.NumberOfChannels,
                                    Height = _faceSize.Height,
                                    Width = _faceSize.Width,
                                    Type = PictureType.GrayscaleThumbnail,
                                    Tags = new List<PictureTag>
                                    {
                                        tag
                                    }
                                }
                            }
                        };

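                        // Persist the image and its pictures in a single transaction; roll back if the insert fails.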
                        _unitOfWork.BeginTransaction();

                        try
                        {
                            _unitOfWork.Repository<FaceDetection.Library.Models.Image>().Insert(image);
                            _unitOfWork.Commit();

                            return FaceRecognitionTrainingStatus.TrainingSuccessful;
                        }
                        catch (Exception)
                        {
                            // Roll back the transaction and rethrow without resetting the stack trace.
                            _unitOfWork.Rollback();
                            throw;
                        }
                    }
                    else if (faces.Count == 0)
                    {
                        return FaceRecognitionTrainingStatus.NoFacesFound;
                    }
                    else
                    {
                        return FaceRecognitionTrainingStatus.FoundMoreThenOneFace;
                    }
                }
                catch (Exception)
                {
                    // Any unexpected error (detection, image I/O, persistence) is reported as a training failure for this image.
                    return FaceRecognitionTrainingStatus.TrainingFailure;
                }
            }
        }