/// <summary>
/// Refreshes the FaceGroups collection from the service's list of large person groups.
/// </summary>
/// <returns>A task that completes once the group list has been reloaded.</returns>
private async Task LoadGroups()
{
    FaceGroups.Clear();

    // Fetch all large person groups, retrying transient Face API failures.
    var groupList = await RetryHelper.OperationWithBasicRetryAsync(
        async () => await _faceServiceClient.ListLargePersonGroupsAsync(),
        new[] { typeof(FaceAPIException) },
        traceWriter: _mainWindowLogTraceWriter);

    // Wrap each service-side group in its UI extension model.
    foreach (var group in groupList)
    {
        FaceGroups.Add(new LargePersonGroupExtended { Group = group });
    }

    MainWindow.Log("Found {0} groups.", groupList.Length);
}
/// <summary>
/// Refreshes the FaceGroups collection from the service's list of large person groups
/// and notifies binding listeners that the "HasNoGroups" state may have changed.
/// </summary>
/// <returns>A task that completes once the group list has been reloaded.</returns>
private async Task LoadGroups()
{
    FaceGroups.Clear();

    // Fetch all large person groups, retrying on transient rate-limit errors.
    var groups = await RetryHelper.OperationWithBasicRetryAsync(
        async () => await _faceServiceClient.ListLargePersonGroupsAsync(),
        new[] { "RateLimitExceeded" },
        traceWriter: _mainWindowLogTraceWriter);

    foreach (var grp in groups)
    {
        FaceGroups.Add(new LargePersonGroupExtended { Group = grp });
    }

    // FIX: raise the event null-conditionally — the original unconditional
    // PropertyChanged(this, ...) call throws NullReferenceException when no
    // handler is subscribed.
    PropertyChanged?.Invoke(this, new PropertyChangedEventArgs("HasNoGroups"));

    MainWindow.Log("Found {0} groups.", groups.Length);
}
/// <summary>
/// Identifies every detected face against the selected large person group and updates
/// each face's display model with the matched person's details, or "Unknown".
/// </summary>
/// <returns>A task that completes when all identification results have been applied.</returns>
private async Task GoGetMatches()
{
    // Call the Identify REST API; the result contains candidate persons per face.
    // Transient rate-limit errors are retried.
    var identifyResult = await RetryHelper.OperationWithBasicRetryAsync(
        async () => await _faceServiceClient.IdentifyAsync(
            _detectedFaces.Select(ff => new Guid(ff.FaceId)).ToArray(),
            largePersonGroupId: this._scanGroup.Group.LargePersonGroupId),
        new[] { "RateLimitExceeded" },
        traceWriter: _mainWindowLogTraceWriter);

    for (int idx = 0; idx < _detectedFaces.Count; idx++)
    {
        // Update identification result for rendering.
        var face = DetectedFaces[idx];
        var idResult = identifyResult[idx];
        face.Identifications = idResult;

        // FIX: use FirstOrDefault so a candidate whose person is missing from the
        // local GroupPersons cache falls back to "Unknown" instead of throwing
        // InvalidOperationException (matches the guard used in Identify_Click).
        var match = idResult.Candidates.Length > 0
            ? _scanGroup.GroupPersons.FirstOrDefault(p => p.Person.PersonId == idResult.Candidates[0].PersonId)
            : null;
        if (match != null)
        {
            face.PersonName = match.Person.Name;
            face.PersonId = match.Person.PersonId;
            face.PersonSourcePath = match.Person.UserData;
        }
        else
        {
            face.PersonName = "Unknown";
        }
    }

    // Build a one-line summary of all identifications for the log window.
    var outString = new StringBuilder();
    foreach (var face in DetectedFaces)
    {
        outString.AppendFormat("Face {0} is identified as {1}. ", face.FaceId, face.PersonName);
    }
    btnSave.IsEnabled = DetectedFaces.Count > 0; // hack
    MainWindow.Log("Response: Success. {0}", outString);
}
/// <summary>
/// Detects faces (requesting the full attribute set) in the given image file, builds UI
/// models for each face, trains the selected large person group, waits for training to
/// finish, and then identifies the detected faces via GoGetMatches.
/// </summary>
/// <param name="filePath">The file path of the image to process.</param>
private async void ProcessFile(string filePath)
{
    // NOTE(review): async void — exceptions escaping this method are unobservable by
    // callers; presumably invoked fire-and-forget from UI code. TODO confirm callers.
    _selectedFilePath = filePath;
    using (var fStream = File.OpenRead(filePath))
    {
        try
        {
            // Detect faces with every available attribute; transient Face API
            // failures are retried by RetryHelper.
            var faces = await RetryHelper.OperationWithBasicRetryAsync(async() => await _faceServiceClient.DetectAsync(fStream, false, true, new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Glasses, FaceAttributeType.HeadPose, FaceAttributeType.FacialHair, FaceAttributeType.Emotion, FaceAttributeType.Hair, FaceAttributeType.Makeup, FaceAttributeType.Occlusion, FaceAttributeType.Accessories, FaceAttributeType.Noise, FaceAttributeType.Exposure, FaceAttributeType.Blur }), new[] { typeof(FaceAPIException) }, traceWriter : _mainWindowLogTraceWriter);
            MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Length, filePath);
            if (faces.Length == 0)
            {
                // Nothing to identify; let the user move on to the next file.
                btnNext.IsEnabled = true;
                return;
            }
            // Load the image (orientation-corrected) for display and rectangle scaling.
            var renderingImage = UIHelper.LoadImageAppliedOrientation(filePath);
            var imageInfo = UIHelper.GetImageInfoForRendering(renderingImage);
            SelectedFile = renderingImage;
            // Build a UI model per face, flattening the attributes into display strings.
            foreach (var face in faces)
            {
                DetectedFaces.Add(new Models.Face()
                {
                    ImageFile = renderingImage,
                    Left = face.FaceRectangle.Left,
                    Top = face.FaceRectangle.Top,
                    Width = face.FaceRectangle.Width,
                    Height = face.FaceRectangle.Height,
                    FaceRectangle = new FaceRectangle { Height = face.FaceRectangle.Height, Width = face.FaceRectangle.Width, Left = face.FaceRectangle.Left, Top = face.FaceRectangle.Top },
                    FaceId = face.FaceId.ToString(),
                    Age = string.Format("{0:#} years old", face.FaceAttributes.Age),
                    Gender = face.FaceAttributes.Gender,
                    HeadPose = string.Format("Pitch: {0}, Roll: {1}, Yaw: {2}", Math.Round(face.FaceAttributes.HeadPose.Pitch, 2), Math.Round(face.FaceAttributes.HeadPose.Roll, 2), Math.Round(face.FaceAttributes.HeadPose.Yaw, 2)),
                    // '+' binds tighter than '>', so this tests (Moustache+Beard+Sideburns) > 0.
                    FacialHair = string.Format("FacialHair: {0}", face.FaceAttributes.FacialHair.Moustache + face.FaceAttributes.FacialHair.Beard + face.FaceAttributes.FacialHair.Sideburns > 0 ? "Yes" : "No"),
                    Glasses = string.Format("GlassesType: {0}", face.FaceAttributes.Glasses.ToString()),
                    Emotion = $"{GetEmotion(face.FaceAttributes.Emotion)}",
                    Hair = string.Format("Hair: {0}", GetHair(face.FaceAttributes.Hair)),
                    Makeup = string.Format("Makeup: {0}", ((face.FaceAttributes.Makeup.EyeMakeup || face.FaceAttributes.Makeup.LipMakeup) ? "Yes" : "No")),
                    EyeOcclusion = string.Format("EyeOccluded: {0}", ((face.FaceAttributes.Occlusion.EyeOccluded) ? "Yes" : "No")),
                    ForeheadOcclusion = string.Format("ForeheadOccluded: {0}", (face.FaceAttributes.Occlusion.ForeheadOccluded ? "Yes" : "No")),
                    MouthOcclusion = string.Format("MouthOccluded: {0}", (face.FaceAttributes.Occlusion.MouthOccluded ? "Yes" : "No")),
                    Accessories = $"{GetAccessories(face.FaceAttributes.Accessories)}",
                    Blur = string.Format("Blur: {0}", face.FaceAttributes.Blur.BlurLevel.ToString()),
                    Exposure = string.Format("{0}", face.FaceAttributes.Exposure.ExposureLevel.ToString()),
                    Noise = string.Format("Noise: {0}", face.FaceAttributes.Noise.NoiseLevel.ToString()),
                });
            }
            // Convert detection result into UI binding object for rendering
            foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
            {
                ResultCollection.Add(face);
            }
            // Start train large person group
            MainWindow.Log("Request: Training group \"{0}\"", _scanGroup.Group.LargePersonGroupId);
            await RetryHelper.VoidOperationWithBasicRetryAsync(() => _faceServiceClient.TrainLargePersonGroupAsync(_scanGroup.Group.LargePersonGroupId), new[] { typeof(FaceAPIException) }, traceWriter : _mainWindowLogTraceWriter);
            // Poll training status once a second until the group leaves the Running state.
            while (true)
            {
                await Task.Delay(1000);
                try // Temporary
                {
                    var status = await _faceServiceClient.GetLargePersonGroupTrainingStatusAsync(_scanGroup.Group.LargePersonGroupId);
                    MainWindow.Log("Response: {0}. Group \"{1}\" training process is {2}", "Success", _scanGroup.Group.LargePersonGroupId, status.Status);
                    if (status.Status != Microsoft.ProjectOxford.Face.Contract.Status.Running)
                    {
                        break;
                    }
                }
                catch (Exception ex)
                {
                    // Status-poll failures are logged and the loop simply polls again.
                    MainWindow.Log($"Error: {ex.Message}");
                    // retry
                }
            }
            // Identify the detected faces against the (now trained) group.
            await GoGetMatches();
        }
        catch (FaceAPIException ex)
        {
            MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
            GC.Collect();
            return;
        }
        GC.Collect();
    }
    btnNext.IsEnabled = true;
}
/// <summary>
/// Pick an image, detect all faces in it, and identify each detected face against the
/// current large person group.
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event arguments</param>
private async void Identify_Click(object sender, RoutedEventArgs e)
{
    // Show file picker
    Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
    dlg.DefaultExt = ".jpg";
    dlg.Filter = "Image files(*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif";
    var result = dlg.ShowDialog();
    if (result.HasValue && result.Value)
    {
        // User picked one image.
        // Clear previous detection and identification results.
        TargetFaces.Clear();
        var pickedImagePath = dlg.FileName;
        var renderingImage = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
        var imageInfo = UIHelper.GetImageInfoForRendering(renderingImage);
        SelectedFile = renderingImage;

        // Build a client from the key/endpoint configured on the main window.
        MainWindow mainWindow = Window.GetWindow(this) as MainWindow;
        string subscriptionKey = mainWindow._scenariosControl.SubscriptionKey;
        string subscriptionEndpoint = mainWindow._scenariosControl.SubscriptionEndpoint;
        var faceServiceClient = new FaceServiceClient(subscriptionKey, subscriptionEndpoint);

        // Call detection REST API
        using (var fStream = File.OpenRead(pickedImagePath))
        {
            try
            {
                var faces = await RetryHelper.OperationWithBasicRetryAsync(
                    async () => await faceServiceClient.DetectAsync(fStream),
                    new[] { "RateLimitExceeded" },
                    traceWriter: _mainWindowLogTraceWriter);

                // Convert detection result into UI binding object for rendering
                foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
                {
                    TargetFaces.Add(face);
                }
                MainWindow.Log("Request: Identifying {0} face(s) in group \"{1}\"", faces.Length, this.GroupId);

                // Identify each face; the result contains candidate persons per face.
                var identifyResult = await RetryHelper.OperationWithBasicRetryAsync(
                    async () => await faceServiceClient.IdentifyAsync(faces.Select(ff => ff.FaceId).ToArray(), largePersonGroupId: this.GroupId),
                    new[] { "RateLimitExceeded" },
                    traceWriter: _mainWindowLogTraceWriter);

                for (int idx = 0; idx < faces.Length; idx++)
                {
                    // Update identification result for rendering.
                    var face = TargetFaces[idx];
                    var res = identifyResult[idx];
                    // FIX: single FirstOrDefault pass instead of Any(...) followed by
                    // Where(...).First() — same result, one enumeration of Persons.
                    var matchedPerson = res.Candidates.Length > 0
                        ? Persons.FirstOrDefault(p => p.PersonId == res.Candidates[0].PersonId.ToString())
                        : null;
                    face.PersonName = matchedPerson != null ? matchedPerson.PersonName : "Unknown";
                }

                // Build a one-line summary of all identifications for the log window.
                var outString = new StringBuilder();
                foreach (var face in TargetFaces)
                {
                    outString.AppendFormat("Face {0} is identified as {1}. ", face.FaceId, face.PersonName);
                }
                MainWindow.Log("Response: Success. {0}", outString);
            }
            catch (FaceAPIException ex)
            {
                MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
            }
        }
    }
    // NOTE(review): explicit GC.Collect() kept from the original sample; normally
    // unnecessary in production code.
    GC.Collect();
}
/// <summary>
/// Pick the root person database folder and build a large person group from it.
/// To keep the data preparation simple, the folder must follow this layout:
/// each person's images live in one folder named after the person, and all person
/// folders sit directly under the chosen root folder.
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event argument</param>
private async void FolderPicker_Click(object sender, RoutedEventArgs e)
{
    bool groupExists = false;
    MainWindow mainWindow = Window.GetWindow(this) as MainWindow;
    string subscriptionKey = mainWindow._scenariosControl.SubscriptionKey;
    string endpoint = mainWindow._scenariosControl.SubscriptionEndpoint;
    var faceServiceClient = new FaceServiceClient(subscriptionKey, endpoint);

    // Test whether the group already exists.
    try
    {
        MainWindow.Log("Request: Group {0} will be used to build a person database. Checking whether the group exists.", this.GroupId);
        await faceServiceClient.GetLargePersonGroupAsync(this.GroupId);
        groupExists = true;
        MainWindow.Log("Response: Group {0} exists.", this.GroupId);
    }
    catch (FaceAPIException ex)
    {
        // "Not found" is the expected outcome for a fresh group; anything else aborts.
        if (ex.ErrorCode != "LargePersonGroupNotFound")
        {
            MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
            return;
        }
        else
        {
            MainWindow.Log("Response: Group {0} did not exist previously.", this.GroupId);
        }
    }

    if (groupExists)
    {
        // Ask the user before wiping the existing group; a new GroupId is generated afterwards.
        var cleanGroup = System.Windows.MessageBox.Show(string.Format("Requires a clean up for group \"{0}\" before setting up a new person database. Click OK to proceed, group \"{0}\" will be cleared.", this.GroupId), "Warning", MessageBoxButton.OKCancel);
        if (cleanGroup == MessageBoxResult.OK)
        {
            await faceServiceClient.DeleteLargePersonGroupAsync(this.GroupId);
            this.GroupId = Guid.NewGuid().ToString();
        }
        else
        {
            return;
        }
    }

    // Show folder picker.
    System.Windows.Forms.FolderBrowserDialog dlg = new System.Windows.Forms.FolderBrowserDialog();
    var result = dlg.ShowDialog();

    // The suggestion count only minimizes the data preparation step;
    // it does not correspond to any service-side constraint.
    const int SuggestionCount = 15;

    if (result == System.Windows.Forms.DialogResult.OK)
    {
        // User picked a root person database folder: clear previous state.
        Persons.Clear();
        TargetFaces.Clear();
        SelectedFile = null;
        IdentifyButton.IsEnabled = false;

        // Create the large person group (fails if one with the same name already exists).
        MainWindow.Log("Request: Creating group \"{0}\"", this.GroupId);
        try
        {
            await faceServiceClient.CreateLargePersonGroupAsync(this.GroupId, this.GroupId, dlg.SelectedPath);
            MainWindow.Log("Response: Success. Group \"{0}\" created", this.GroupId);
        }
        catch (FaceAPIException ex)
        {
            MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
            return;
        }

        int processCount = 0;
        bool forceContinue = false;

        MainWindow.Log("Request: Preparing faces for identification, detecting faces in chosen folder.");

        // Enumerate top level directories; each directory contains one person's images.
        int invalidImageCount = 0;
        foreach (var dir in System.IO.Directory.EnumerateDirectories(dlg.SelectedPath))
        {
            var tasks = new List<Task>();
            var tag = System.IO.Path.GetFileName(dir);
            Person p = new Person();
            p.PersonName = tag;
            var faces = new ObservableCollection<Models.Face>();
            p.Faces = faces;

            // Call create person REST API; the newly created person id is returned.
            MainWindow.Log("Request: Creating person \"{0}\"", p.PersonName);
            p.PersonId = (await RetryHelper.OperationWithBasicRetryAsync(
                async () => await faceServiceClient.CreatePersonInLargePersonGroupAsync(this.GroupId, p.PersonName, dir),
                new[] { "RateLimitExceeded" },
                traceWriter: _mainWindowLogTraceWriter)).PersonId.ToString();
            MainWindow.Log("Response: Success. Person \"{0}\" (PersonID:{1}) created", p.PersonName, p.PersonId);

            string img;
            // Enumerate images under the person folder; a ConcurrentBag lets failed
            // uploads be re-queued from inside the worker tasks.
            var imageList = new ConcurrentBag<string>(
                Directory.EnumerateFiles(dir, "*.*", SearchOption.AllDirectories)
                    .Where(s => s.ToLower().EndsWith(".jpg") || s.ToLower().EndsWith(".png") || s.ToLower().EndsWith(".bmp") || s.ToLower().EndsWith(".gif")));

            while (imageList.TryTake(out img))
            {
                // FIX: count each queued image — the original never incremented
                // processCount, so the SuggestionCount warning below could never fire.
                processCount++;

                tasks.Add(Task.Factory.StartNew(
                    async (obj) =>
                    {
                        var imgPath = obj as string;
                        using (var fStream = File.OpenRead(imgPath))
                        {
                            try
                            {
                                // Add the face to the person on the server side.
                                var persistFace = await faceServiceClient.AddPersonFaceInLargePersonGroupAsync(this.GroupId, Guid.Parse(p.PersonId), fStream, imgPath);
                                return new Tuple<string, ClientContract.AddPersistedFaceResult>(imgPath, persistFace);
                            }
                            catch (FaceAPIException ex)
                            {
                                // On operation conflict, re-queue the image for retry.
                                if (ex.ErrorCode.Equals("ConcurrentOperationConflict"))
                                {
                                    MainWindow.Log("Concurrent operation conflict. Retrying.");
                                    imageList.Add(imgPath);
                                    return null;
                                }
                                // On rate limit exceeded, re-queue the image for retry.
                                else if (ex.ErrorCode.Equals("RateLimitExceeded"))
                                {
                                    MainWindow.Log("Rate limit exceeded. Retrying.");
                                    imageList.Add(imgPath);
                                    return null;
                                }
                                else if (ex.ErrorMessage.Contains("more than 1 face in the image."))
                                {
                                    Interlocked.Increment(ref invalidImageCount);
                                }
                                // All other detection failures are simply ignored in this sample;
                                // inspect the exception's error code/message to handle them properly.
                                return new Tuple<string, ClientContract.AddPersistedFaceResult>(imgPath, null);
                            }
                        }
                    }, img).Unwrap().ContinueWith((detectTask) =>
                    {
                        // Update detected faces for rendering (marshalled to the UI thread).
                        var detectionResult = detectTask?.Result;
                        if (detectionResult == null || detectionResult.Item2 == null)
                        {
                            return;
                        }
                        this.Dispatcher.Invoke(
                            new Action<ObservableCollection<Models.Face>, string, ClientContract.AddPersistedFaceResult>(UIHelper.UpdateFace),
                            faces, detectionResult.Item1, detectionResult.Item2);
                    }));

                // Warn once when the recommended image count is exceeded.
                if (processCount >= SuggestionCount && !forceContinue)
                {
                    var continueProcess = System.Windows.Forms.MessageBox.Show("The images loaded have reached the recommended count, may take long time if proceed. Would you like to continue to load images?", "Warning", System.Windows.Forms.MessageBoxButtons.YesNo);
                    if (continueProcess == System.Windows.Forms.DialogResult.Yes)
                    {
                        forceContinue = true;
                    }
                    else
                    {
                        break;
                    }
                }

                // Throttle: drain the in-flight tasks at the concurrency cap or when done.
                if (tasks.Count >= _maxConcurrentProcesses || imageList.IsEmpty)
                {
                    await Task.WhenAll(tasks);
                    tasks.Clear();
                }
            }
            Persons.Add(p);
        }

        if (invalidImageCount > 0)
        {
            MainWindow.Log("Warning: more or less than one face is detected in {0} images, can not add to face list.", invalidImageCount);
        }
        MainWindow.Log("Response: Success. Total {0} faces are detected.", Persons.Sum(p => p.Faces.Count));

        try
        {
            // Start training the large person group.
            MainWindow.Log("Request: Training group \"{0}\"", this.GroupId);
            await RetryHelper.VoidOperationWithBasicRetryAsync(
                () => faceServiceClient.TrainLargePersonGroupAsync(this.GroupId),
                new[] { "RateLimitExceeded" },
                traceWriter: _mainWindowLogTraceWriter);

            // Poll training status once a second until the group leaves the Running state.
            while (true)
            {
                await Task.Delay(1000);
                var status = await faceServiceClient.GetLargePersonGroupTrainingStatusAsync(this.GroupId);
                MainWindow.Log("Response: {0}. Group \"{1}\" training process is {2}", "Success", this.GroupId, status.Status);
                if (status.Status != Status.Running)
                {
                    break;
                }
            }
            IdentifyButton.IsEnabled = true;
        }
        catch (FaceAPIException ex)
        {
            MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
        }
    }
    // NOTE(review): explicit GC.Collect() kept from the original sample; normally
    // unnecessary in production code.
    GC.Collect();
}
/// <summary>
/// Detects faces (requesting the full attribute set) in the given image file, builds
/// UI models for each face, renders the face rectangles, then identifies the detected
/// faces via GoGetMatches.
/// </summary>
/// <param name="filePath">The file path of the image to process.</param>
private async Task ProcessFile(string filePath)
{
    using (var fStream = File.OpenRead(filePath))
    {
        try
        {
            // Show the image to be analysed (orientation-corrected for display).
            var renderingImage = UIHelper.LoadImageAppliedOrientation(filePath);
            var imageInfo = UIHelper.GetImageInfoForRendering(renderingImage);
            SelectedFile = renderingImage;
            // Detect faces with every available attribute; transient rate-limit
            // failures are retried by RetryHelper.
            var faces = await RetryHelper.OperationWithBasicRetryAsync(async() => await _faceServiceClient.DetectAsync(fStream, false, true, new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Glasses, FaceAttributeType.HeadPose, FaceAttributeType.FacialHair, FaceAttributeType.Emotion, FaceAttributeType.Hair, FaceAttributeType.Makeup, FaceAttributeType.Occlusion, FaceAttributeType.Accessories, FaceAttributeType.Noise, FaceAttributeType.Exposure, FaceAttributeType.Blur }), new[] { "RateLimitExceeded" }, traceWriter : _mainWindowLogTraceWriter);
            MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Length, filePath);
            if (faces.Length == 0)
            {
                // Nothing to identify; let the user move on to the next file.
                btnNext.IsEnabled = true;
                return;
            }
            // Build a UI model per face, flattening the attributes into display strings.
            foreach (var face in faces)
            {
                DetectedFaces.Add(new Models.Face()
                {
                    ImageFile = renderingImage,
                    Left = face.FaceRectangle.Left,
                    Top = face.FaceRectangle.Top,
                    Width = face.FaceRectangle.Width,
                    Height = face.FaceRectangle.Height,
                    FaceRectangle = new FaceRectangle { Height = face.FaceRectangle.Height, Width = face.FaceRectangle.Width, Left = face.FaceRectangle.Left, Top = face.FaceRectangle.Top },
                    FaceId = face.FaceId.ToString(),
                    Age = string.Format("{0:#} years old", face.FaceAttributes.Age),
                    Gender = face.FaceAttributes.Gender,
                    HeadPose = string.Format("Pitch: {0}, Roll: {1}, Yaw: {2}", Math.Round(face.FaceAttributes.HeadPose.Pitch, 2), Math.Round(face.FaceAttributes.HeadPose.Roll, 2), Math.Round(face.FaceAttributes.HeadPose.Yaw, 2)),
                    // '+' binds tighter than '>', so this tests (Moustache+Beard+Sideburns) > 0.
                    FacialHair = string.Format("FacialHair: {0}", face.FaceAttributes.FacialHair.Moustache + face.FaceAttributes.FacialHair.Beard + face.FaceAttributes.FacialHair.Sideburns > 0 ? "Yes" : "No"),
                    Glasses = string.Format("GlassesType: {0}", face.FaceAttributes.Glasses.ToString()),
                    Emotion = $"{GetEmotion(face.FaceAttributes.Emotion)}",
                    Hair = string.Format("Hair: {0}", GetHair(face.FaceAttributes.Hair)),
                    Makeup = string.Format("Makeup: {0}", ((face.FaceAttributes.Makeup.EyeMakeup || face.FaceAttributes.Makeup.LipMakeup) ? "Yes" : "No")),
                    EyeOcclusion = string.Format("EyeOccluded: {0}", ((face.FaceAttributes.Occlusion.EyeOccluded) ? "Yes" : "No")),
                    ForeheadOcclusion = string.Format("ForeheadOccluded: {0}", (face.FaceAttributes.Occlusion.ForeheadOccluded ? "Yes" : "No")),
                    MouthOcclusion = string.Format("MouthOccluded: {0}", (face.FaceAttributes.Occlusion.MouthOccluded ? "Yes" : "No")),
                    Accessories = $"{GetAccessories(face.FaceAttributes.Accessories)}",
                    Blur = string.Format("Blur: {0}", face.FaceAttributes.Blur.BlurLevel.ToString()),
                    Exposure = string.Format("{0}", face.FaceAttributes.Exposure.ExposureLevel.ToString()),
                    Noise = string.Format("Noise: {0}", face.FaceAttributes.Noise.NoiseLevel.ToString()),
                });
            }
            // Convert detection result into UI binding object for rendering
            foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
            {
                ResultCollection.Add(face);
            }
            // Identify the detected faces against the selected group.
            await GoGetMatches();
        }
        catch (FaceAPIException ex)
        {
            MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
            GC.Collect();
            return;
        }
        GC.Collect();
    }
    btnNext.IsEnabled = true;
}