Example #1
        private void ThreadWork()
        {
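            // Worker loop: drain the shared image queue and build the training set, computing
            // a face encoding for each image or reusing one already cached in the database.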
            string imagePath;

            FaceRecognition faceRecognition = null;

            while (true)
            {
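                // Take the next image path from the shared queue; stop when it is empty.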
                lock (_searchStackLocker)
                {
                    if (_searchQueue.Count > 0)
                    {
                        imagePath = _searchQueue.Dequeue();
                    }
                    else
                    {
                        break;
                    }
                }

                _progress.Report(new ProgressPartialResult()
                {
                    Current = _current, Total = _progressMaximum, Text = imagePath
                });
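                // Advance the shared counter used for the next progress report.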
                lock (_progressLocker)
                {
                    _current++;
                }

                FaceEncodingInfo founded;
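                // Skip the expensive detection/encoding step if this image was already processed.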
                lock (_dbLocker)
                {
                    founded = _db.GetFromDB(imagePath);
                }
                if (founded == null)
                {
                    if (faceRecognition == null)
                    {
                        faceRecognition = FaceRecognition.Create(_configuration.ModelsDirectory);
                    }

                    FaceRecognitionDotNet.Image image;
                    try
                    {
                        image = FaceRecognition.LoadImageFile(imagePath);
                    }
                    catch (Exception ex)
                    {
                        MessageBox.Show($"{ex.Message} \n {ex.StackTrace} \n {ex.InnerException?.Message}",
                                        "Exception on LoadImageFile",
                                        MessageBoxButton.OK, MessageBoxImage.Error);
                        continue;
                    }
                    using (image)
                    {
                        Debug.WriteLine($"Train on {imagePath}");
                        // Find face locations (materialize once so the result is not re-enumerated).
                        var faceBoundingBoxes = faceRecognition.FaceLocations(image, 1, Model.Hog).ToArray();

                        var countOfFace = faceBoundingBoxes.Length;
                        if (countOfFace == 0)
                        {
                            Application.Current.Dispatcher.Invoke(() =>
                            {
                                FaceViewModel vm = new FaceViewModel(imagePath);
                                _windowService.ShowDialogWindow<FaceWindow>(vm);
                            });
                            continue;
                        }

                        if (countOfFace > 1)
                        {
                            Application.Current.Dispatcher.Invoke(() =>
                            {
                                FaceViewModel vm = new FaceViewModel(faceBoundingBoxes, imagePath);
                                _windowService.ShowDialogWindow<FaceWindow>(vm);
                            });

                            // If there are no people (or too many people) in a training image, skip the image.
                            continue;
                        }
                        else
                        {
                            // Add face encoding for current image to the training set
                            var encodings = faceRecognition.FaceEncodings(image, faceBoundingBoxes);
                            if (encodings == null)
                            {
                                continue;
                            }

                            foreach (var encoding in encodings)
                            {
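                                // Extract the raw double[] from the FaceEncoding through its
                                // ISerializable data (stored under the "_Encoding" key).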
                                var info = new SerializationInfo(typeof(double), _formatterConverter);
                                encoding.GetObjectData(info, _context);

                                double[] doubleInfo = (double[])info.GetValue("_Encoding", typeof(double[]));
                                encoding.Dispose();
                                var dir = Path.GetDirectoryName(imagePath);
                                string directory = new DirectoryInfo(dir).Name;
                                lock (_trainedInfoLocker)
                                {
                                    _trainedInfo.Add(new ClassInfo(directory, doubleInfo, imagePath));
                                }

                                lock (_dbLocker)
                                {
                                    var faceBox = faceBoundingBoxes.Single();
                                    _db.AddFaceInfo(imagePath, doubleInfo, faceBox.Left, faceBox.Right,
                                                    faceBox.Top, faceBox.Bottom);
                                }
                            }
                        }
                    }
                }
                else
                {
                    Debug.WriteLine($"File {imagePath} is already in the database");
                    var dir = Path.GetDirectoryName(imagePath);
                    string directory = new DirectoryInfo(dir).Name;
                    lock (_trainedInfoLocker)
                    {
                        var fingerAndLocation = founded.FingerAndLocations.Single();
                        _trainedInfo.Add(new ClassInfo(directory,
                                                       fingerAndLocation.FingerPrint, imagePath));
                    }
                }
            }

            faceRecognition?.Dispose();
        }

Example #2

        protected override void ThreadWork()
        {
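            // Worker loop: drain the shared image queue, detect and encode faces in each image
            // (or reuse cached encodings from the database) and classify every face encoding
            // with MyKnn against the trained set.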
            string imagePath;

            using (var faceRecognition = FaceRecognition.Create(_configuration.ModelsDirectory))
            {
                while (true)
                {
                    lock (_searchQueueLocker)
                    {
                        if (_searchQueue.Count > 0)
                        {
                            imagePath = _searchQueue.Dequeue();
                        }
                        else
                        {
                            return;
                        }
                    }

                    _progress.Report(new ProgressPartialResult()
                    {
                        Current = _current, Total = _progressMaximum, Text = imagePath
                    });
                    lock (_progressLocker)
                    {
                        _current++;
                    }

                    // Check the database first; only load and scan the image if it is not already there.
                    FaceEncodingInfo founded;
                    lock (_dbLocker)
                    {
                        founded = _db.GetFromDB(imagePath);
                    }
                    if (founded == null)
                    {
                        FaceRecognitionDotNet.Image unknownImage;
                        try
                        {
                            unknownImage = FaceRecognition.LoadImageFile(imagePath);
                        }
                        catch (Exception ex)
                        {
                            MessageBox.Show($"{ex.Message} \n {ex.StackTrace} \n {ex.InnerException?.Message}", "Uncaught Thread Exception",
                                            MessageBoxButton.OK, MessageBoxImage.Error);
                            continue;
                        }
                        using (unknownImage)
                        {
                            Debug.WriteLine($"Read {imagePath}");
                            // Find face locations.
                            var locations = faceRecognition.FaceLocations(unknownImage).ToArray();
                            // If no faces are found, record the file and skip it.
                            if (!locations.Any())
                            {
                                Debug.WriteLine($"No faces found in {imagePath}");
                                lock (_dbLocker)
                                {
                                    _db.AddFileWithoutFace(imagePath);
                                }
                                continue;
                            }

                            foreach (var location in locations)
                            {
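                                // Encode this face, cache the encoding in the database,
                                // then classify it against the trained set.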
                                var encodings = faceRecognition.FaceEncodings(unknownImage, new Location[] { location });
                                if (encodings == null)
                                {
                                    continue;
                                }

                                var encoding = encodings.Single();

                                var info = new SerializationInfo(typeof(double), _formatterConverter);
                                encoding.GetObjectData(info, _context);
                                encoding.Dispose();

                                double[] unknown = (double[])info.GetValue("_Encoding", typeof(double[]));
                                lock (_dbLocker)
                                {
                                    _db.AddFaceInfo(imagePath, unknown, location.Left, location.Right,
                                                    location.Top, location.Bottom);
                                }

                                VoteAndDistance predict = MyKnn.Classify(unknown, _trainedInfo, _classes.ToArray(), 1);
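                                // With no target class set, report only matches within the distance
                                // threshold; otherwise flag faces whose nearest match is too far away
                                // or belongs to a class other than the one being checked.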

                                if (String.IsNullOrEmpty(_checkClass))
                                {
                                    if (predict.Distance < _configuration.DistanceThreshold)
                                    {
                                        Debug.WriteLine($"Found {predict.Name} in {imagePath} with {predict.Distance} distance");
                                        _addToViewImageAction(imagePath, _directoryWithFaces, predict, location.Left, location.Top,
                                                              location.Right - location.Left, location.Bottom - location.Top,
                                                              locations.Count());
                                    }
                                }
                                else
                                {
                                    if (predict.Distance > _configuration.DistanceThreshold ||
                                        predict.Name != _checkClass)
                                    {
                                        _addToViewImageAction(imagePath, _directoryWithFaces, predict, location.Left, location.Top,
                                                              location.Right - location.Left, location.Bottom - location.Top,
                                                              locations.Count());
                                    }
                                }
                            }
                        }
                    }
                    else
                    {
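                        // Cached path: classify the encodings already stored in the database
                        // instead of re-running face detection on the image.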
                        foreach (var fingerAndLocations in founded.FingerAndLocations)
                        {
                            VoteAndDistance predict = MyKnn.Classify(fingerAndLocations.FingerPrint,
                                                                     _trainedInfo, _classes.ToArray(), 1);

                            if (String.IsNullOrEmpty(_checkClass))
                            {
                                if (predict.Distance < _configuration.DistanceThreshold)
                                {
                                    _addToViewImageAction(imagePath, _directoryWithFaces, predict,
                                                          fingerAndLocations.Left, fingerAndLocations.Top,
                                                          fingerAndLocations.Right - fingerAndLocations.Left,
                                                          fingerAndLocations.Bottom - fingerAndLocations.Top,
                                                          founded.FingerAndLocations.Count);
                                }
                            }
                            else
                            {
                                if (predict.Distance > _configuration.DistanceThreshold ||
                                    predict.Name != _checkClass)
                                {
                                    _addToViewImageAction(imagePath, _directoryWithFaces, predict,
                                                          fingerAndLocations.Left, fingerAndLocations.Top,
                                                          fingerAndLocations.Right - fingerAndLocations.Left,
                                                          fingerAndLocations.Bottom - fingerAndLocations.Top,
                                                          founded.FingerAndLocations.Count);
                                }
                            }
                        }
                    }
                }
            }
        }
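
Both examples rely on MyKnn.Classify together with the project's ClassInfo and VoteAndDistance types, none of which are shown above. The sketch below is only a rough illustration of what a 1-nearest-neighbour classifier over the stored encodings could look like; the property names, the Euclidean metric and the k handling are assumptions, not the project's actual implementation.

using System;
using System.Collections.Generic;

// Hypothetical stand-ins for the project's types (constructor/property shapes inferred
// from the calls above; the real definitions may differ).
public class ClassInfo
{
    public string Name { get; }        // class label (the parent directory name above)
    public double[] Features { get; }  // face encoding
    public string ImagePath { get; }

    public ClassInfo(string name, double[] features, string imagePath)
    {
        Name = name;
        Features = features;
        ImagePath = imagePath;
    }
}

public class VoteAndDistance
{
    public string Name { get; set; }      // predicted class
    public double Distance { get; set; }  // distance to the nearest trained encoding
}

public static class MyKnn
{
    // Sketch of a nearest-neighbour classifier; only k = 1 is handled here.
    // 'classes' is accepted to mirror the call site above but is not needed for 1-NN.
    public static VoteAndDistance Classify(double[] unknown, IReadOnlyList<ClassInfo> trained,
                                           string[] classes, int k)
    {
        var best = new VoteAndDistance { Name = null, Distance = double.MaxValue };

        foreach (var sample in trained)
        {
            // Euclidean distance between the unknown encoding and a trained encoding.
            double sum = 0;
            for (int i = 0; i < unknown.Length; i++)
            {
                double d = unknown[i] - sample.Features[i];
                sum += d * d;
            }

            double distance = Math.Sqrt(sum);
            if (distance < best.Distance)
            {
                best.Distance = distance;
                best.Name = sample.Name;
            }
        }

        return best;
    }
}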