/// <summary>
/// Draws per-face annotations (cropped face image, bounding box, and a text label) over
/// <paramref name="baseImage"/> and returns the composited bitmap via DrawOverlay.
/// Faces whose matched person is "Unknown" or that have no rectangle are skipped.
/// Side effects: increments the matched person's Times counter, marks the matching
/// image-wall tile as colorful, rebinds the data grid, and refreshes the wall canvas.
/// </summary>
/// <param name="baseImage">Source bitmap to annotate.</param>
/// <param name="faces">Detected faces; when null the base image is returned unchanged.</param>
/// <param name="targetFaces">Identified faces, parallel to <paramref name="faces"/> by index.</param>
/// <param name="emotionScores">Optional per-face emotion scores, parallel by index (may be null).</param>
/// <param name="celebName">Optional per-face celebrity names, parallel by index (may be null).</param>
/// <param name="personData">Person statistics; the entry matching each face's id gets Times++.</param>
/// <param name="dataTable">Grid host whose ItemsSource is rebound to <paramref name="personData"/>.</param>
/// <param name="imageWall">Image wall whose tiles are highlighted and redrawn.</param>
/// <returns>The annotated bitmap, or <paramref name="baseImage"/> when <paramref name="faces"/> is null.</returns>
public static BitmapSource DrawFaces(BitmapSource baseImage, FaceAPI.Face[] faces, ObservableCollection<Microsoft.ProjectOxford.Face.Controls.Face> targetFaces, EmotionScores[] emotionScores, string[] celebName, List<PersonData> personData, DataTable dataTable, ImageWall imageWall)
{
    if (faces == null)
    {
        return baseImage;
    }

    Action<DrawingContext, BitmapSource, double> drawAction = (drawingContext, oriImage, annotationScale) =>
    {
        for (int i = 0; i < faces.Length; i++)
        {
            // Unidentified faces are not annotated.
            if (targetFaces[i].PersonName == "Unknown")
            {
                continue;
            }

            var face = faces[i];
            imageWall.colorful[imageWall.id.IndexOf(targetFaces[i].FaceId)] = true;
            if (face.FaceRectangle == null)
            {
                continue;
            }

            // FIX: the original wrapped pD.Times++ in try/catch(Exception) as control
            // flow (swallowing the NullReferenceException from a missed Find and then
            // re-running the identical lookup, discarding its result). List<T>.Find
            // returns null on a miss, so guard explicitly instead. As in the original,
            // pD may be null past this point; SummarizeFaceAttributes is assumed to
            // tolerate that (it received null on the original's failure path too).
            PersonData pD = personData.Find(x => x.ID == targetFaces[i].FaceId);
            if (pD != null)
            {
                pD.Times++;
            }

            Rect faceRect = new Rect(
                face.FaceRectangle.Left, face.FaceRectangle.Top,
                face.FaceRectangle.Width, face.FaceRectangle.Height);
            Int32Rect faceRectInt32 = new Int32Rect(
                face.FaceRectangle.Left, face.FaceRectangle.Top,
                face.FaceRectangle.Width, face.FaceRectangle.Height);

            string text = "";

            // Paint the cropped face back over its own rectangle.
            drawingContext.DrawImage(new CroppedBitmap(oriImage, faceRectInt32), faceRect);

            if (face.FaceAttributes != null)
            {
                text += Aggregation.SummarizeFaceAttributes(face.FaceAttributes, targetFaces[i].PersonName, pD);
            }

            if (emotionScores?[i] != null)
            {
                text += Aggregation.SummarizeEmotion(emotionScores[i]);
            }

            if (celebName?[i] != null)
            {
                text += celebName[i];
            }

            faceRect.Inflate(6 * annotationScale, 6 * annotationScale);

            double lineThickness = 4 * annotationScale;

            drawingContext.DrawRectangle(
                Brushes.Transparent,
                new Pen(s_lineBrush, lineThickness),
                faceRect);

            if (text != "")
            {
                FormattedText ft = new FormattedText(text,
                    CultureInfo.CurrentCulture, FlowDirection.LeftToRight, s_typeface,
                    16 * annotationScale, Brushes.Black);

                // Place the label just above the face box, aligned with its left edge.
                var pad = 3 * annotationScale;
                var ypad = pad;
                var xpad = pad + 4 * annotationScale;
                var origin = new System.Windows.Point(
                    faceRect.Left + xpad - lineThickness / 2,
                    faceRect.Top - ft.Height - ypad + lineThickness / 2);
                var rect = ft.BuildHighlightGeometry(origin).GetRenderBounds(null);
                rect.Inflate(xpad, ypad);

                drawingContext.DrawRectangle(s_lineBrush, null, rect);
                drawingContext.DrawText(ft, origin);
            }
        }

        dataTable.dataGrid.ItemsSource = personData;
        imageWall.UpdateCanvas();
    };

    return DrawOverlay(baseImage, drawAction);
}
/// <summary>
/// Builds a person database from the folder configured in settings and trains the
/// large person group: verifies/creates the group, creates (or reuses) one person per
/// sub-directory, uploads every image in parallel as a person face — re-queueing on
/// throttling/conflict — then starts training and polls once per second until done.
/// </summary>
/// <remarks>
/// NOTE(review): async void means exceptions after the first await are unobservable
/// to callers; kept because the signature is part of the existing interface
/// (presumably wired as a fire-and-forget UI action — confirm call sites).
/// </remarks>
private async void FolderPicker()
{
    // "path here" is the sentinel default meaning no folder has been configured.
    if (Properties.Settings.Default.FacePath == "path here")
    {
        return;
    }

    _faceClient = new FaceAPI.FaceServiceClient(Properties.Settings.Default.FaceAPIKey, Properties.Settings.Default.FaceAPIHost);
    bool groupExists = false;
    var faceServiceClient = _faceClient;

    // Test whether the group already exists.
    try
    {
        Log("Request: Group {0} will be used to build a person database. Checking whether the group exists.", this.GroupId);
        await faceServiceClient.GetLargePersonGroupAsync(this.GroupId);
        groupExists = true;
        Log("Response: Group {0} exists.", this.GroupId);
    }
    catch (FaceAPIException ex)
    {
        if (ex.ErrorCode != "LargePersonGroupNotFound")
        {
            Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
            return;
        }
        else
        {
            Log("Response: Group {0} did not exist previously.", this.GroupId);
        }
    }

    // The suggestion count only throttles the local data-preparation step;
    // it does not correspond to a service-side constraint.
    const int SuggestionCount = 15;

    if (Properties.Settings.Default.FacePath != "path here")
    {
        // Clear the person database built by any previous run.
        Persons.Clear();
        TargetFaces.Clear();
        SelectedFile = null;

        // Create-group fails if a group with the same name already exists,
        // so reuse the existing one when we detected it above.
        if (groupExists)
        {
            Log("Request: Loading group \"{0}\"", this.GroupId);
            load_Persons = await faceServiceClient.ListPersonsInLargePersonGroupAsync(this.GroupId);
        }
        else
        {
            Log("Request: Creating group \"{0}\"", this.GroupId);
            try
            {
                await faceServiceClient.CreateLargePersonGroupAsync(this.GroupId, this.GroupId);
                Log("Response: Success. Group \"{0}\" created.", this.GroupId);
            }
            catch (FaceAPIException ex)
            {
                Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                return;
            }
        }

        int processCount = 0;
        bool forceContinue = false;

        Log("Request: Preparing faces for identification, detecting faces in chosen folder.");

        int invalidImageCount = 0;
        personData.Clear();

        // Size the image wall to the number of person sub-directories.
        // (Replaces the original manual foreach/i++ counting loop.)
        imageWall.Init(System.IO.Directory.EnumerateDirectories(Properties.Settings.Default.FacePath).Count());

        // Each top-level directory contains one person's images; i indexes the wall tile.
        int i = 0;
        foreach (var dir in System.IO.Directory.EnumerateDirectories(Properties.Settings.Default.FacePath))
        {
            var tasks = new List<Task>();
            var tag = System.IO.Path.GetFileName(dir);

            Person p = new Person();
            p.PersonName = tag;
            var faces = new ObservableCollection<Face>();
            p.Faces = faces;

            // Create (or look up) the server-side person; the service returns its id.
            Log("Request: Creating person \"{0}\"", p.PersonName);
            Guid personid = Guid.NewGuid();
            bool isFound = false;
            if (groupExists)
            {
                isFound = FindPersonByName(load_Persons, p.PersonName, out personid);
            }

            if (groupExists && isFound)
            {
                p.PersonId = (await faceServiceClient.GetPersonInLargePersonGroupAsync(this.GroupId, personid)).PersonId.ToString();
            }
            else
            {
                p.PersonId = (await faceServiceClient.CreatePersonInLargePersonGroupAsync(this.GroupId, p.PersonName)).PersonId.ToString();
            }

            Log("Response: Success. Person \"{0}\" (PersonID:{1}) created. Please wait for training.", p.PersonName, p.PersonId);
            personData.Add(new PersonData(p.PersonId, p.PersonName));

            string img;
            // Enumerate images under the person folder; a ConcurrentBag lets the
            // upload tasks re-queue an image for retry on transient failures.
            var imageList = new ConcurrentBag<string>(
                Directory.EnumerateFiles(dir, "*.*", SearchOption.AllDirectories)
                    .Where(s => s.ToLower().EndsWith(".jpg") || s.ToLower().EndsWith(".png") || s.ToLower().EndsWith(".bmp") || s.ToLower().EndsWith(".gif")));

            int j = 0;
            while (imageList.TryTake(out img))
            {
                // Use the first image of each person as that person's wall thumbnail.
                if (j == 0 && i < ImageWall.num)
                {
                    imageWall.faceBitmaps[i] = new BitmapImage(new Uri(img));
                    imageWall.id.Add(p.PersonId);
                }
                j++;

                tasks.Add(Task.Factory.StartNew(
                    async (obj) =>
                    {
                        var imgPath = obj as string;

                        using (var fStream = File.OpenRead(imgPath))
                        {
                            try
                            {
                                // Update person faces on the server side.
                                var persistFace = await faceServiceClient.AddPersonFaceInLargePersonGroupAsync(this.GroupId, Guid.Parse(p.PersonId), fStream, imgPath);
                                return new Tuple<string, ClientContract.AddPersistedFaceResult>(imgPath, persistFace);
                            }
                            catch (FaceAPIException ex)
                            {
                                if (ex.ErrorCode.Equals("ConcurrentOperationConflict"))
                                {
                                    // Operation conflict: re-queue and retry.
                                    imageList.Add(imgPath);
                                    return null;
                                }
                                else if (ex.ErrorCode.Equals("RateLimitExceeded"))
                                {
                                    // Rate limit exceeded: re-queue and retry.
                                    imageList.Add(imgPath);
                                    return null;
                                }
                                else if (ex.ErrorMessage.Contains("more than 1 face in the image."))
                                {
                                    Interlocked.Increment(ref invalidImageCount);
                                }

                                // Here we simply ignore all detection failure in this sample.
                                // You may handle these exceptions by checking the
                                // Error.Error.Code and Error.Message properties of the
                                // ClientException object.
                                return new Tuple<string, ClientContract.AddPersistedFaceResult>(imgPath, null);
                            }
                        }
                    },
                    img).Unwrap().ContinueWith((detectTask) =>
                    {
                        // Update detected faces for rendering (marshalled to the UI thread).
                        var detectionResult = detectTask?.Result;
                        if (detectionResult == null || detectionResult.Item2 == null)
                        {
                            return;
                        }

                        this.Dispatcher.Invoke(
                            new Action<ObservableCollection<Face>, string, ClientContract.AddPersistedFaceResult>(UIHelper.UpdateFace),
                            faces,
                            detectionResult.Item1,
                            detectionResult.Item2);
                    }));

                // FIX: processCount was declared and compared below but never
                // incremented, so the "recommended count reached" prompt never fired.
                processCount++;

                if (processCount >= SuggestionCount && !forceContinue)
                {
                    var continueProcess = System.Windows.Forms.MessageBox.Show(
                        "The images loaded have reached the recommended count, may take long time if proceed. Would you like to continue to load images?",
                        "Warning",
                        System.Windows.Forms.MessageBoxButtons.YesNo);
                    if (continueProcess == System.Windows.Forms.DialogResult.Yes)
                    {
                        forceContinue = true;
                    }
                    else
                    {
                        break;
                    }
                }

                // Throttle the number of concurrent uploads; also drain at end of bag.
                if (tasks.Count >= _maxConcurrentProcesses || imageList.IsEmpty)
                {
                    await Task.WhenAll(tasks);
                    tasks.Clear();
                }
            }

            Persons.Add(p);
            i++;
        }

        PersonDataUpdate();
        imageWall.UpdateCanvas();

        if (invalidImageCount > 0)
        {
            Log("Warning: more or less than one face is detected in {0} images, can not add to face list.", invalidImageCount);
        }

        Log("Response: Success. Total {0} faces are detected.", Persons.Sum(p => p.Faces.Count));

        try
        {
            // Start training the large person group.
            Log("Request: Training group \"{0}\"", this.GroupId);
            await faceServiceClient.TrainLargePersonGroupAsync(this.GroupId);

            // Poll once per second until training leaves the Running state.
            while (true)
            {
                await Task.Delay(1000);
                var status = await faceServiceClient.GetLargePersonGroupTrainingStatusAsync(this.GroupId);
                Log("Response: {0}. Group \"{1}\" training process is {2}", "Success", this.GroupId, status.Status);
                if (status.Status != ClientContract.Status.Running)
                {
                    break;
                }
            }
        }
        catch (FaceAPIException ex)
        {
            Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
        }
    }

    // NOTE(review): forcing a collection is normally an anti-pattern; kept to
    // preserve the original behavior of reclaiming bitmap memory after a bulk load.
    GC.Collect();
}