/// <summary>
/// Worker loop: drains <c>_searchQueue</c> of image paths and, for each image,
/// obtains face encodings — from the <c>_db</c> cache when present, otherwise by
/// running detection + encoding — classifies every face with k-NN, and forwards
/// matches to <c>_addToViewImageAction</c>. Returns when the queue is empty.
/// Safe to run on several threads concurrently: all shared state is guarded by
/// <c>_searchQueueLocker</c>, <c>_progressLocker</c> and <c>_dbLocker</c>.
/// </summary>
protected override void ThreadWork()
{
    using (var faceRecognition = FaceRecognition.Create(_configuration.ModelsDirectory))
    {
        while (true)
        {
            string imagePath;
            lock (_searchQueueLocker)
            {
                if (_searchQueue.Count == 0)
                {
                    return; // queue drained — this worker is done
                }
                imagePath = _searchQueue.Dequeue();
            }

            // Read and advance the shared counter atomically. (The original read
            // _current outside the lock while incrementing inside it, racing with
            // the other worker threads and reporting duplicate/stale values.)
            int current;
            lock (_progressLocker)
            {
                current = _current;
                _current++;
            }
            _progress.Report(new ProgressPartialResult() { Current = current, Total = _progressMaximum, Text = imagePath });

            // Cached encodings for this file, if it was processed in a previous run.
            FaceEncodingInfo founded;
            lock (_dbLocker)
            {
                founded = _db.GetFromDB(imagePath);
            }

            if (founded == null)
            {
                DetectAndClassify(faceRecognition, imagePath);
            }
            else
            {
                ClassifyCachedFaces(imagePath, founded);
            }
        }
    }
}

// Runs detection + encoding on an image with no cached DB entry, persists the
// results, and forwards k-NN matches to the view.
private void DetectAndClassify(FaceRecognition faceRecognition, string imagePath)
{
    FaceRecognitionDotNet.Image unknownImage;
    try
    {
        unknownImage = FaceRecognition.LoadImageFile(imagePath);
    }
    catch (Exception ex)
    {
        // NOTE(review): showing a MessageBox from a worker thread is dubious —
        // consider marshalling to the UI thread or logging instead.
        MessageBox.Show($"{ex.Message} \n {ex.StackTrace} \n {ex.InnerException?.Message}", "Uncaught Thread Exception", MessageBoxButton.OK, MessageBoxImage.Error);
        return;
    }

    using (unknownImage)
    {
        Debug.WriteLine($"Read {imagePath}");

        // Materialize once: the original called locations.Count() inside the
        // per-face loop, re-enumerating (and potentially re-running) detection
        // for every face found.
        var locations = faceRecognition.FaceLocations(unknownImage).ToArray();

        // No faces: remember that so the file is skipped next run.
        if (locations.Length == 0)
        {
            Debug.WriteLine($"In {imagePath} not found faces");
            lock (_dbLocker)
            {
                _db.AddFileWithoutFace(imagePath);
            }
            return;
        }

        // Loop-invariant: snapshot the class labels once per image.
        var classes = _classes.ToArray();

        foreach (var location in locations)
        {
            var encodings = faceRecognition.FaceEncodings(unknownImage, new Location[] { location });
            // Guard against both a null and an EMPTY sequence — the original's
            // encodings.Single() threw InvalidOperationException on empty.
            var encoding = encodings?.FirstOrDefault();
            if (encoding == null)
            {
                continue;
            }

            // FaceEncoding exposes its raw vector only via ISerializable, so
            // round-trip it through a SerializationInfo to extract the double[].
            // `using` guarantees disposal even if extraction throws.
            double[] unknown;
            using (encoding)
            {
                var info = new SerializationInfo(typeof(double), _formatterConverter);
                encoding.GetObjectData(info, _context);
                unknown = (double[])info.GetValue("_Encoding", typeof(double[]));
            }

            lock (_dbLocker)
            {
                _db.AddFaceInfo(imagePath, unknown, location.Left, location.Right, location.Top, location.Bottom);
            }

            VoteAndDistance predict = MyKnn.Classify(unknown, _trainedInfo, classes, 1);
            if (IsMatchToShow(predict))
            {
                if (String.IsNullOrEmpty(_checkClass))
                {
                    Debug.WriteLine($"Found {predict.Name} in {imagePath} with {predict.Distance} distance");
                }
                _addToViewImageAction(imagePath, _directoryWithFaces, predict,
                    location.Left, location.Top,
                    location.Right - location.Left, location.Bottom - location.Top,
                    locations.Length);
            }
        }
    }
}

// Classifies fingerprints already stored in the DB for this image and forwards
// k-NN matches to the view — no image loading or detection needed.
private void ClassifyCachedFaces(string imagePath, FaceEncodingInfo founded)
{
    var classes = _classes.ToArray(); // loop-invariant snapshot
    foreach (var fingerAndLocations in founded.FingerAndLocations)
    {
        VoteAndDistance predict = MyKnn.Classify(fingerAndLocations.FingerPrint, _trainedInfo, classes, 1);
        if (IsMatchToShow(predict))
        {
            _addToViewImageAction(imagePath, _directoryWithFaces, predict,
                fingerAndLocations.Left, fingerAndLocations.Top,
                fingerAndLocations.Right - fingerAndLocations.Left,
                fingerAndLocations.Bottom - fingerAndLocations.Top,
                founded.FingerAndLocations.Count);
        }
    }
}

// Single definition of the show/hide rule that was previously duplicated in
// four branches:
//  - no filter class set: show faces CLOSER than the distance threshold;
//  - filter class set: show faces that FAIL the filter (too far, or wrong class).
private bool IsMatchToShow(VoteAndDistance predict)
{
    if (String.IsNullOrEmpty(_checkClass))
    {
        return predict.Distance < _configuration.DistanceThreshold;
    }
    return predict.Distance > _configuration.DistanceThreshold || predict.Name != _checkClass;
}