private async void AddFaces(object sender, RoutedEventArgs e)
{
    var storageAccount = CloudStorageAccount.Parse(_storageConnectionString);
    var blobClient = storageAccount.CreateCloudBlobClient();
    var container = blobClient.GetContainerReference(_containerName);
    await container.SetPermissionsAsync(new BlobContainerPermissions { PublicAccess = BlobContainerPublicAccessType.Blob });

    var detectedFaces = 0;
    var currentFaceListId = "";
    var faceServiceClient = new FaceServiceClient(_subscriptionKey);

    foreach (var blob in await ListBlobsAsync(container))
    {
        Debug.WriteLine(blob.Uri);
        try
        {
            var faces = await faceServiceClient.DetectAsync(blob.Uri.ToString(), true, true, null);
            foreach (var face in faces)
            {
                // A face list holds at most 1000 faces, so a new list is created for the
                // first face and every time the counter below wraps around.
                if (detectedFaces++ == 0)
                {
                    currentFaceListId = await CreateFaceListAsync(faceServiceClient);
                    Debug.WriteLine(currentFaceListId);
                }

                try
                {
                    var faceData = new FaceData { BlobUrl = blob.Uri.ToString(), FaceRectangle = face.FaceRectangle };
                    var faceDataJS = JsonConvert.SerializeObject(faceData);
                    var faceResult = await faceServiceClient.AddFaceToFaceListAsync(currentFaceListId, blob.Uri.ToString(), faceDataJS, face.FaceRectangle);
                    Debug.WriteLine(faceResult.PersistedFaceId);
                }
                catch (Exception ex)
                {
                    Debug.WriteLine(ex.Message);
                }

                if (detectedFaces >= 1000)
                {
                    detectedFaces = 0;
                }
            }
        }
        catch (Exception ex)
        {
            Debug.WriteLine(ex.Message);
        }
    }
}
/// <summary>
/// Detects the faces in a photo and displays the estimated age and gender.
/// </summary>
/// <param name="url">Url of the image</param>
/// <returns></returns>
public static async Task<ResultFace> DetecFacesAndDisplayResult(string url)
{
    var subscriptionKey = "";
    try
    {
        ResultFace result = new ResultFace();
        var client = new FaceServiceClient(subscriptionKey);
        var faces = await client.DetectAsync(url, false, true, true);
        Console.WriteLine(" > " + faces.Length + " detected.");
        foreach (var face in faces)
        {
            Console.WriteLine(" >> age: " + face.Attributes.Age + " gender:" + face.Attributes.Gender);
            result.Age = face.Attributes.Age;
            result.Gender = face.Attributes.Gender;
        }
        return result;
    }
    catch (Exception exception)
    {
        // Log before returning; the original returned first, which made the log unreachable.
        Console.WriteLine(exception.ToString());
        return new ResultFace();
    }
}
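A minimal caller sketch for the helper above (hypothetical: it assumes ResultFace exposes the Age and Gender properties used in the method, and the image URL is a placeholder):

// Hypothetical usage, from inside another async method:
var result = await DetecFacesAndDisplayResult("https://example.com/photo.jpg");
Console.WriteLine("Estimated age: " + result.Age + ", gender: " + result.Gender);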
public async Task<Tuple<ObservableCollection<Face>, ObservableCollection<Face>>> StartFaceDetection(string selectedFile, string subscriptionKeyFace, string subscriptionKeyEmotions)
{
    var detectedFaces = new ObservableCollection<Face>();
    var facesRect = new ObservableCollection<Face>();
    Debug.WriteLine("Request: Detecting {0}", selectedFile);

    using (var fileStreamFace = File.OpenRead(selectedFile))
    {
        try
        {
            var client = new FaceServiceClient(subscriptionKeyFace);
            var faces = await client.DetectAsync(fileStreamFace, false, true, true);
            Debug.WriteLine("Response: Success. Detected {0} face(s) in {1}", faces.Length, selectedFile);
            var imageInfo = GetImageInfoForRendering(selectedFile);
            Debug.WriteLine("{0} face(s) have been detected", faces.Length);

            foreach (var face in faces)
            {
                var detectedFace = new Face()
                {
                    ImagePath = selectedFile,
                    Left = face.FaceRectangle.Left,
                    Top = face.FaceRectangle.Top,
                    Width = face.FaceRectangle.Width,
                    Height = face.FaceRectangle.Height,
                    FaceId = face.FaceId,
                    Gender = face.Attributes.Gender,
                    Age = face.Attributes.Age.ToString(),
                };
                detectedFaces.Add(detectedFace);
            }

            // Convert detection result into UI binding object for rendering
            foreach (var face in CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
            {
                facesRect.Add(face);
            }

            // Update emotions, then copy them onto the matching rendering rectangles
            detectedFaces = await UpdateEmotions(selectedFile, detectedFaces, subscriptionKeyEmotions);
            foreach (var faceRect in facesRect)
            {
                foreach (var detectedFace in detectedFaces.Where(detectedFace => faceRect.FaceId == detectedFace.FaceId))
                {
                    faceRect.Scores = detectedFace.Scores;
                    faceRect.Age = detectedFace.Age;
                    faceRect.Gender = detectedFace.Gender;
                }
            }
        }
        catch (Exception ex)
        {
            Debug.WriteLine(ex.ToString());
        }

        var returnData = new Tuple<ObservableCollection<Face>, ObservableCollection<Face>>(detectedFaces, facesRect);
        return returnData;
    }
}
public static async void DetecFacesAndDisplayResult(string url, string id)
{
    var subscriptionKey = "";
    try
    {
        var client = new FaceServiceClient(subscriptionKey);
        var faces = await client.DetectAsync(url, false, true, true);
        Console.WriteLine(" > " + faces.Length + " detected.");
        if (faces.Length == 0)
        {
            UpdateSharePoint(id, "0", "Sin identificar");
        }
        foreach (var face in faces)
        {
            Console.WriteLine(" >> age: " + face.Attributes.Age + " gender:" + face.Attributes.Gender);
            UpdateSharePoint(id, face.Attributes.Age.ToString(), face.Attributes.Gender);
        }
    }
    catch (Exception exception)
    {
        UpdateSharePoint(id, "0", "Sin identificar");
        Console.WriteLine(exception.ToString());
    }
}
public static async void DetecFacesAndDisplayResult(string url, string urlComparation, string id, string campo, int value)
{
    var subscriptionKey = "idSupscription";
    try
    {
        var client = new FaceServiceClient(subscriptionKey);
        var faces1 = await client.DetectAsync(url, false, true, true);
        var faces2 = await client.DetectAsync(urlComparation, false, true, true);

        // Verification needs exactly one face on each side: report a zero score and
        // stop in every other case. (The original fell through to faces1[0]/faces2[0]
        // and could throw or report twice.)
        if (faces1 == null || faces2 == null ||
            faces1.Count() == 0 || faces2.Count() == 0 ||
            faces1.Count() > 1 || faces2.Count() > 1)
        {
            UpdateSharePoint(id, 0, campo, value);
            return;
        }

        var res = await client.VerifyAsync(faces1[0].FaceId, faces2[0].FaceId);
        double score = 0;
        if (res.IsIdentical)
        {
            score = 100;
        }
        else
        {
            // Map the verify confidence onto a 0-100 scale, treating 0.5 (the default
            // "identical" threshold) as 100%; e.g. a confidence of 0.35 becomes 70.
            score = Math.Round((res.Confidence / 0.5) * 100);
        }
        UpdateSharePoint(id, score, campo, value);
    }
    catch (Exception exception)
    {
        UpdateSharePoint(id, 0, campo, value);
        Console.WriteLine(exception.ToString());
    }
}
private static async void DetecFacesAndDisplayResult(string fileLocation, string subscriptionKeyFace, string subscriptionKeyEmotion)
{
    using (var fileStreamFace = File.OpenRead(fileLocation))
    using (var fileStreamEmotions = File.OpenRead(fileLocation))
    {
        try
        {
            var faceServiceClient = new FaceServiceClient(subscriptionKeyFace);
            var emotionServiceClient = new EmotionServiceClient(subscriptionKeyEmotion);
            var faces = await faceServiceClient.DetectAsync(fileStreamFace, false, true, true);
            Console.WriteLine(" > " + faces.Length + " detected.");
            if (faces.Length > 0)
            {
                var faceRectangles = new List<Rectangle>();
                foreach (var face in faces)
                {
                    Console.WriteLine(" >> age: " + face.Attributes.Age + " gender:" + face.Attributes.Gender);
                    var rectangle = new Rectangle
                    {
                        Height = face.FaceRectangle.Height,
                        Left = face.FaceRectangle.Left,
                        Top = face.FaceRectangle.Top,
                        Width = face.FaceRectangle.Width
                    };
                    faceRectangles.Add(rectangle);
                }

                // Once faces are detected, start the emotion analysis
                var emotions = await emotionServiceClient.RecognizeAsync(fileStreamEmotions, faceRectangles.ToArray());
                var emotionsDetails = "";
                foreach (var emotion in emotions)
                {
                    emotionsDetails += $@"
Anger: {emotion.Scores.Anger}
Contempt: {emotion.Scores.Contempt}
Disgust: {emotion.Scores.Disgust}
Fear: {emotion.Scores.Fear}
Happiness: {emotion.Scores.Happiness}
Neutral: {emotion.Scores.Neutral}
Sadness: {emotion.Scores.Sadness}
Surprise: {emotion.Scores.Surprise}
";
                }
                Console.WriteLine(" >> emotions: " + emotionsDetails);
            }
        }
        catch (Exception exception)
        {
            Console.WriteLine(exception.ToString());
        }
    }
}
public async Task<Face[]> GetAge()
{
    var faceServiceClient = new FaceServiceClient("key");
    var face = await faceServiceClient.DetectAsync(this.ImageResult.Url, true, true, true, true);
    this.FacesCollection = face;
    var image = new ImageView
    {
        Edad = face[0].Attributes.Age.ToString(),
        Nombre = ImageResult.Nombre,
        Url = ImageResult.Url,
        Sexo = (face[0].Attributes.Gender.Equals("male") ? "Hombre" : "Mujer")
    };
    var urlComparation = image.Sexo.Equals("Hombre")
        ? "http://aimworkout.com/wp-content/uploads/2014/11/Chuck_Norris.jpg"
        : "http://www.beevoz.com/wp-content/uploads/2015/08/angelinajolie.jpg";
    var face1 = await faceServiceClient.DetectAsync(urlComparation);
    var result = await faceServiceClient.VerifyAsync(face[0].FaceId, face1[0].FaceId);
    image.Similar = (Convert.ToInt32(result.Confidence * 100)).ToString();
    ImageCollection.Add(image);
    return face;
}
private async void ButtonGetFacesImage1_Click(object sender, RoutedEventArgs e)
{
    try
    {
        var subscriptionKey = "<your-subscription-key>"; // never publish a real key
        var fileUrl = GetSelectedItemUrl();
        var client = new FaceServiceClient(subscriptionKey);
        var faces = await client.DetectAsync(fileUrl, false, true, true);
    }
    catch (Exception exception)
    {
        TextBlockOutput.Text = exception.ToString();
    }
}
private static async void GetFaces(FaceServiceClient client)
{
    string imagePath = @"C:\SD\OneDrive\Event Materials\2015 05 30 MalagaDotNet Coding4Fun\Face Samples\princesas.jpg";
    using (var img = File.OpenRead(imagePath))
    {
        var faces = await client.DetectAsync(img, false, true, true);
        foreach (var face in faces)
        {
            Console.WriteLine("age:" + face.Attributes.Age);
            Console.WriteLine("gender:" + face.Attributes.Gender);
        }
    }
}
public async Task<ObservableCollection<Face>> StartFaceDetection(string selectedFile, StorageFile file, Tuple<int, int> imageInfo, string subscriptionKey)
{
    var detectedFaces = new ObservableCollection<Face>();
    Debug.WriteLine("Request: Detecting {0}", selectedFile);
    var sampleFile = await StorageFile.GetFileFromPathAsync(selectedFile);
    var fs = await FileIO.ReadBufferAsync(sampleFile);
    using (var stream = fs.AsStream())
    {
        try
        {
            var client = new FaceServiceClient(subscriptionKey);
            var faces = await client.DetectAsync(stream, true, true, true);
            Debug.WriteLine("Response: Success. Detected {0} face(s) in {1}", faces.Length, selectedFile);
            Debug.WriteLine("{0} face(s) have been detected", faces.Length);
            foreach (var face in faces)
            {
                var fileFaceImage = await FileHelper.SaveFaceImageFile(file, face);
                var newFace = new Face
                {
                    ImagePath = selectedFile,
                    Left = face.FaceRectangle.Left,
                    Top = face.FaceRectangle.Top,
                    Width = face.FaceRectangle.Width,
                    Height = face.FaceRectangle.Height,
                    FaceId = face.FaceId.ToString(),
                    Gender = face.Attributes.Gender,
                    Age = face.Attributes.Age,
                    AgeComplete = string.Format("{0:#} years old", face.Attributes.Age),
                    ImageFacePath = fileFaceImage.Path
                };
                // Calculate the rectangle for rendering
                newFace = CalculateFaceRectangleForRendering(newFace, MaxImageSize, imageInfo);
                detectedFaces.Add(newFace);
            }
        }
        catch (Exception ex)
        {
            Debug.WriteLine(ex.ToString());
        }
    }
    return detectedFaces;
}
public async Task<Tuple<ObservableCollection<Face>, ObservableCollection<Face>>> StartFaceDetection(string selectedFile, string subscriptionKey)
{
    var detectedFaces = new ObservableCollection<Face>();
    var facesRect = new ObservableCollection<Face>();
    Debug.WriteLine("Request: Detecting {0}", selectedFile);

    // Call the detection REST API
    using (var fileStream = File.OpenRead(selectedFile))
    {
        try
        {
            var client = new FaceServiceClient(subscriptionKey);
            var faces = await client.DetectAsync(fileStream, false, true, true);
            Debug.WriteLine("Response: Success. Detected {0} face(s) in {1}", faces.Length, selectedFile);
            var imageInfo = GetImageInfoForRendering(selectedFile);
            Debug.WriteLine("{0} face(s) have been detected", faces.Length);
            foreach (var face in faces)
            {
                detectedFaces.Add(item: new Face()
                {
                    ImagePath = selectedFile,
                    Left = face.FaceRectangle.Left,
                    Top = face.FaceRectangle.Top,
                    Width = face.FaceRectangle.Width,
                    Height = face.FaceRectangle.Height,
                    FaceId = face.FaceId.ToString(),
                    Gender = face.Attributes.Gender,
                    Age = string.Format("{0:#} years old", face.Attributes.Age),
                });
            }

            // Convert detection result into UI binding object for rendering
            foreach (var face in CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
            {
                facesRect.Add(face);
            }
        }
        catch (Exception ex)
        {
            Debug.WriteLine(ex.ToString());
        }

        var returnData = new Tuple<ObservableCollection<Face>, ObservableCollection<Face>>(detectedFaces, facesRect);
        return returnData;
    }
}
private static async void DetecFacesAndDisplayResult(string fileLocation, string subscriptionKey)
{
    using (var fileStream = File.OpenRead(fileLocation))
    {
        try
        {
            var client = new FaceServiceClient(subscriptionKey);
            var faces = await client.DetectAsync(fileStream, false, true, true);
            Console.WriteLine(" > " + faces.Length + " detected.");
            foreach (var face in faces)
            {
                Console.WriteLine(" >> age: " + face.Attributes.Age + " gender:" + face.Attributes.Gender);
            }
        }
        catch (Exception exception)
        {
            Console.WriteLine(exception.ToString());
        }
    }
}
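The snippet above, like several earlier ones, targets the early ProjectOxford client whose DetectAsync overload takes landmark/age/gender flags and surfaces results on face.Attributes. Later client versions, used by the samples further down, take a list of FaceAttributeType values and surface results on face.FaceAttributes instead. A minimal sketch of the same console output against that overload (the subscription key and file path are placeholders):

private static async Task DetectFacesWithAttributeList(string fileLocation, string subscriptionKey)
{
    using (var fileStream = File.OpenRead(fileLocation))
    {
        var client = new FaceServiceClient(subscriptionKey);
        // returnFaceId: true, returnFaceLandmarks: false, then the attributes to analyze
        var faces = await client.DetectAsync(fileStream, true, false,
            new FaceAttributeType[] { FaceAttributeType.Age, FaceAttributeType.Gender });
        foreach (var face in faces)
        {
            Console.WriteLine(" >> age: " + face.FaceAttributes.Age + " gender:" + face.FaceAttributes.Gender);
        }
    }
}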
/// <summary>
/// Pick the root person database folder. To keep the data preparation logic minimal, the folder should have the following structure:
/// each person's images go into one folder named after that person,
/// and all person folders sit directly under the root person database folder.
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event argument</param>
private async void FolderPicker_Click(object sender, RoutedEventArgs e)
{
    bool groupExists = false;
    MainWindow mainWindow = Window.GetWindow(this) as MainWindow;
    string subscriptionKey = mainWindow.SubscriptionKey;
    var faceServiceClient = new FaceServiceClient(subscriptionKey);

    // Test whether the group already exists
    try
    {
        Output = Output.AppendLine(string.Format("Request: Group {0} will be used to build the person database. Checking whether the group exists.", GroupName));
        await faceServiceClient.GetPersonGroupAsync(GroupName);
        groupExists = true;
        Output = Output.AppendLine(string.Format("Response: Group {0} exists.", GroupName));
    }
    catch (ClientException ex)
    {
        if (ex.Error.Code != "PersonGroupNotFound")
        {
            Output = Output.AppendLine(string.Format("Response: {0}. {1}", ex.Error.Code, ex.Error.Message));
            return;
        }
        else
        {
            Output = Output.AppendLine(string.Format("Response: Group {0} did not exist previously.", GroupName));
        }
    }

    if (groupExists)
    {
        var cleanGroup = System.Windows.MessageBox.Show(string.Format("Group \"{0}\" needs a clean-up before setting up a new person database. Click OK to proceed; group \"{0}\" will be fully cleaned up.", GroupName), "Warning", MessageBoxButton.OKCancel);
        if (cleanGroup == MessageBoxResult.OK)
        {
            await faceServiceClient.DeletePersonGroupAsync(GroupName);
        }
        else
        {
            return;
        }
    }

    // Show folder picker
    FolderBrowserDialog dlg = new FolderBrowserDialog();
    var result = dlg.ShowDialog();

    // The suggestion count is only intended to keep the data preparation step small;
    // it does not correspond to a service-side constraint.
    const int SuggestionCount = 15;

    if (result == DialogResult.OK)
    {
        // User picked a root person database folder
        // Clear person database
        Persons.Clear();
        TargetFaces.Clear();
        SelectedFile = null;

        // Call the create person group REST API.
        // The call will fail if a group with the same name already exists.
        Output = Output.AppendLine(string.Format("Request: Creating group \"{0}\"", GroupName));
        try
        {
            await faceServiceClient.CreatePersonGroupAsync(GroupName, GroupName);
            Output = Output.AppendLine(string.Format("Response: Success. Group \"{0}\" created", GroupName));
        }
        catch (ClientException ex)
        {
            Output = Output.AppendLine(string.Format("Response: {0}. {1}", ex.Error.Code, ex.Error.Message));
            return;
        }

        int processCount = 0;
        bool forceContinue = false;
        Output = Output.AppendLine("Request: Preparing faces for identification, detecting faces in the chosen folder.");

        // Enumerate top level directories; each directory contains one person's images
        foreach (var dir in System.IO.Directory.EnumerateDirectories(dlg.SelectedPath))
        {
            var tasks = new List<Task>();
            var tag = System.IO.Path.GetFileName(dir);
            Person p = new Person();
            p.PersonName = tag;

            var faces = new ObservableCollection<Face>();
            p.Faces = faces;
            Persons.Add(p);

            // Enumerate images under the person folder and call detection
            foreach (var img in System.IO.Directory.EnumerateFiles(dir, "*.jpg", System.IO.SearchOption.AllDirectories))
            {
                tasks.Add(Task.Factory.StartNew(
                    async (obj) =>
                    {
                        var imgPath = obj as string;
                        // Call the detection REST API
                        using (var fStream = File.OpenRead(imgPath))
                        {
                            try
                            {
                                var face = await faceServiceClient.DetectAsync(fStream);
                                return new Tuple<string, ClientContract.Face[]>(imgPath, face);
                            }
                            catch (ClientException)
                            {
                                // This sample simply ignores all detection failures.
                                // You may handle these exceptions by checking the Error.Code
                                // and Error.Message properties of the ClientException object.
                                return new Tuple<string, ClientContract.Face[]>(imgPath, null);
                            }
                        }
                    }, img).Unwrap().ContinueWith((detectTask) =>
                    {
                        // Update detected faces for rendering
                        var detectionResult = detectTask.Result;
                        if (detectionResult == null || detectionResult.Item2 == null)
                        {
                            return;
                        }
                        foreach (var f in detectionResult.Item2)
                        {
                            this.Dispatcher.Invoke(
                                new Action<ObservableCollection<Face>, string, ClientContract.Face>(UIHelper.UpdateFace),
                                faces,
                                detectionResult.Item1,
                                f);
                        }
                    }));

                processCount++; // the original never incremented this, so the warning below could not trigger

                if (processCount >= SuggestionCount && !forceContinue)
                {
                    var continueProcess = System.Windows.Forms.MessageBox.Show("The images loaded have reached the recommended count; proceeding may take a long time. Would you like to continue loading images?", "Warning", MessageBoxButtons.YesNo);
                    if (continueProcess == DialogResult.Yes)
                    {
                        forceContinue = true;
                    }
                    else
                    {
                        break;
                    }
                }
            }
            await Task.WhenAll(tasks);
        }

        Output = Output.AppendLine(string.Format("Response: Success. A total of {0} faces were detected.", Persons.Sum(p => p.Faces.Count)));

        try
        {
            // Update person faces on the server side
            foreach (var p in Persons)
            {
                // Call the create person REST API; the new person id is returned
                Output = Output.AppendLine(string.Format("Request: Creating person \"{0}\"", p.PersonName));
                p.PersonId = (await faceServiceClient.CreatePersonAsync(GroupName, p.Faces.Select(face => Guid.Parse(face.FaceId)).ToArray(), p.PersonName)).PersonId.ToString();
                Output = Output.AppendLine(string.Format("Response: Success. Person \"{0}\" (PersonID:{1}) created, {2} face(s) added.", p.PersonName, p.PersonId, p.Faces.Count));
            }

            // Start training the person group
            Output = Output.AppendLine(string.Format("Request: Training group \"{0}\"", GroupName));
            await faceServiceClient.TrainPersonGroupAsync(GroupName);

            // Wait until training completes
            while (true)
            {
                await Task.Delay(1000);
                var status = await faceServiceClient.GetPersonGroupTrainingStatusAsync(GroupName);
                Output = Output.AppendLine(string.Format("Response: {0}. Group \"{1}\" training process is {2}", "Success", GroupName, status.Status));
                if (status.Status != "running")
                {
                    break;
                }
            }
        }
        catch (ClientException ex)
        {
            Output = Output.AppendLine(string.Format("Response: {0}. {1}", ex.Error.Code, ex.Error.Message));
        }
    }
}
private async void faceIdentifyBtn_Click(object sender, RoutedEventArgs e)
{
    try
    {
        faceIdentifyBtn.IsEnabled = false;

        // Capture the photo, then save it.
        captureImage.Source = webImage.Source;
        Helper.SaveImageCapture((BitmapSource)captureImage.Source);
        string getDirectory = Directory.GetCurrentDirectory();
        string filePath = getDirectory + "\\test1.jpg";
        System.Drawing.Image image1 = System.Drawing.Image.FromFile(filePath);

        var faceServiceClient = new FaceServiceClient(faceAPISubscriptionKey);
        try
        {
            Title = String.Format("Request: Training group \"{0}\"", GroupName);
            await faceServiceClient.TrainPersonGroupAsync(GroupName);
            TrainingStatus trainingStatus = null;
            while (true)
            {
                await Task.Delay(1000);
                trainingStatus = await faceServiceClient.GetPersonGroupTrainingStatusAsync(GroupName);
                Title = String.Format("Response: {0}. Group \"{1}\" training process is {2}", "Success", GroupName, trainingStatus.Status);
                if (trainingStatus.Status.ToString() != "running")
                {
                    break;
                }
            }
        }
        catch (FaceAPIException ex)
        {
            Title = String.Format("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
            faceIdentifyBtn.IsEnabled = true;
        }

        Title = "Detecting...";
        using (Stream s = File.OpenRead(filePath))
        {
            var faces = await faceServiceClient.DetectAsync(s);
            var faceIds = faces.Select(face => face.FaceId).ToArray();
            var faceRects = faces.Select(face => face.FaceRectangle);
            FaceRectangle[] faceRect = faceRects.ToArray();

            if (faceRect.Length == 1)
            {
                Title = String.Format("Detection Finished. {0} face(s) detected", faceRect.Length);
                speechSynthesizer.SpeakAsync("We have detected.");
                speechSynthesizer.SpeakAsync(faceRect.Length.ToString());
                speechSynthesizer.SpeakAsync("face.");
                speechSynthesizer.SpeakAsync("Please wait, we are identifying your face.");
                await Task.Delay(3000);
                Title = "Identifying...";
                try
                {
                    Console.WriteLine("Group Name is : {0}, faceIds is : {1}", GroupName, faceIds);
                    var results = await faceServiceClient.IdentifyAsync(GroupName, faceIds);
                    foreach (var identifyResult in results)
                    {
                        Title = String.Format("Result of face: {0}", identifyResult.FaceId);
                        if (identifyResult.Candidates.Length == 0)
                        {
                            Title = String.Format("No one identified");
                            MessageBox.Show("Hi, make sure you have registered your face. Try to register now.");
                            speechSynthesizer.SpeakAsync("Sorry. No one identified.");
                            speechSynthesizer.SpeakAsync("Please make sure you have previously registered your face with us.");
                            registerBtn.IsEnabled = true;
                            faceIdentifyBtn.IsEnabled = false;
                            return;
                        }
                        else
                        {
                            // Take the top candidate among all returned
                            var candidateId = identifyResult.Candidates[0].PersonId;
                            var person = await faceServiceClient.GetPersonAsync(GroupName, candidateId);
                            faceIdentifiedUserName = person.Name.ToString();
                            Title = String.Format("Identified as {0}", person.Name);
                            speechSynthesizer.SpeakAsync("Hi.");
                            speechSynthesizer.Speak(person.Name.ToString());
                            speechSynthesizer.SpeakAsync("Now you need to verify your voice.");
                            speechSynthesizer.SpeakAsync("To verify your voice, say the following.");
                            speechSynthesizer.SpeakAsync("My voice is stronger than my password. Verify my voice.");
                            faceIdentifyBtn.IsEnabled = false;
                            identifyRecord.IsEnabled = true;
                        }
                    }
                    GC.Collect();
                }
                catch (FaceAPIException ex)
                {
                    Title = String.Format("Failed... Try again.");
                    speechSynthesizer.SpeakAsync("First register your face.");
                    Console.WriteLine("Error : {0} ", ex.Message);
                    image1.Dispose();
                    File.Delete(filePath);
                    GC.Collect();
                    registerBtn.IsEnabled = true;
                    return;
                }
            }
            else if (faceRect.Length > 1)
            {
                Title = String.Format("More than one face detected. Make sure only one face is in the photo. Try again.");
                speechSynthesizer.SpeakAsync("More than one face detected. Make sure only one face is in the photo. Try again.");
                faceIdentifyBtn.IsEnabled = true;
                return;
            }
            else
            {
                Title = String.Format("No one detected in the photo. Please make sure your face is in front of the webcam. Try again with the correct photo.");
                speechSynthesizer.SpeakAsync("No one detected. Please make sure your face is in front of the webcam. Try again with the correct photo.");
                faceIdentifyBtn.IsEnabled = true;
                return;
            }

            image1.Dispose();
            File.Delete(filePath);
            GC.Collect();
            await Task.Delay(2000);
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine("Error : {0}", ex.Message);
        faceIdentifyBtn.IsEnabled = true;
        GC.Collect();
    }
}
/// <summary>
/// Pick an image and call Find Similar for each face detected
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event arguments</param>
private async void FindSimilar_Click(object sender, RoutedEventArgs e)
{
    // Show file picker
    Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
    dlg.DefaultExt = ".jpg";
    dlg.Filter = "Image files(*.jpg) | *.jpg";
    var filePicker = dlg.ShowDialog();
    if (filePicker.HasValue && filePicker.Value)
    {
        // User picked an image
        // Clear previous detection and find similar results
        TargetFaces.Clear();
        FindSimilarCollection.Clear();
        var sw = Stopwatch.StartNew();
        SelectedFile = dlg.FileName;
        var imageInfo = UIHelper.GetImageInfoForRendering(SelectedFile);

        // Detect all faces in the picked image
        using (var fileStream = File.OpenRead(SelectedFile))
        {
            MainWindow.Log("Request: Detecting faces in {0}", SelectedFile);
            MainWindow mainWindow = Window.GetWindow(this) as MainWindow;
            string subscriptionKey = mainWindow._scenariosControl.SubscriptionKey;
            var faceServiceClient = new FaceServiceClient(subscriptionKey);
            var faces = await faceServiceClient.DetectAsync(fileStream);

            // Update detected faces on UI
            foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
            {
                TargetFaces.Add(face);
            }
            MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Length, SelectedFile);

            // Find similar faces for each face
            foreach (var f in faces)
            {
                var faceId = f.FaceId;
                MainWindow.Log("Request: Finding similar faces for face {0}", faceId);
                try
                {
                    // Call the Find Similar REST API; the result contains the face ids similar to the query face
                    const int requestCandidatesCount = 3;
                    var result = await faceServiceClient.FindSimilarAsync(faceId, _faceListName, requestCandidatesCount);

                    // Update the find similar results collection for rendering
                    var gg = new FindSimilarResult();
                    gg.Faces = new ObservableCollection<Face>();
                    gg.QueryFace = new Face()
                    {
                        ImagePath = SelectedFile,
                        Top = f.FaceRectangle.Top,
                        Left = f.FaceRectangle.Left,
                        Width = f.FaceRectangle.Width,
                        Height = f.FaceRectangle.Height,
                        FaceId = faceId.ToString(),
                    };
                    foreach (var fr in result)
                    {
                        gg.Faces.Add(FacesCollection.First(ff => ff.FaceId == fr.PersistedFaceId.ToString()));
                    }
                    MainWindow.Log("Response: Found {0} similar faces for face {1}", gg.Faces.Count, faceId);
                    FindSimilarCollection.Add(gg);
                }
                catch (FaceAPIException ex)
                {
                    MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                }
            }
        }
    }
}
public async Task<JsonResult> SaveCandidateFiles()
{
    string message = string.Empty, fileName = string.Empty, actualFileName = string.Empty;
    bool flag = false;

    // Requested file collection
    HttpFileCollection fileRequested = System.Web.HttpContext.Current.Request.Files;
    if (fileRequested != null)
    {
        // Create a new folder
        CreateDirectory();
        // Clear existing files in the folder
        ClearDirectory();

        for (int i = 0; i < fileRequested.Count; i++)
        {
            var file = Request.Files[i];
            actualFileName = file.FileName;
            fileName = Guid.NewGuid() + Path.GetExtension(file.FileName);
            int size = file.ContentLength;
            string FullImgPath = Path.Combine(Server.MapPath(directory), fileName);
            try
            {
                file.SaveAs(FullImgPath);
                message = "File uploaded successfully";
                flag = true;
                if (FullImgPath != "")
                {
                    using (var fStream = System.IO.File.OpenRead(FullImgPath))
                    {
                        // User picked one image
                        var imageInfo = UIHelper.GetImageInfoForRendering(FullImgPath);

                        // Create an instance of the service client by passing the service key to the constructor
                        var faceServiceClient = new FaceServiceClient(ServiceKey);
                        Face[] faces = await faceServiceClient.DetectAsync(fStream, true, true,
                            new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Glasses });
                        if (faces.Count() > 0)
                        {
                            Bitmap CroppedFace = null;
                            foreach (var face in faces)
                            {
                                // Create and save cropped images
                                var croppedImg = Convert.ToString(Guid.NewGuid()) + ".jpeg" as string;
                                var croppedImgPath = directory + '\\' + croppedImg as string;
                                var croppedImgFullPath = Server.MapPath(directory) + '\\' + croppedImg as string;
                                CroppedFace = CropBitmap(
                                    (Bitmap)Image.FromFile(FullImgPath),
                                    face.FaceRectangle.Left,
                                    face.FaceRectangle.Top,
                                    face.FaceRectangle.Width,
                                    face.FaceRectangle.Height);
                                CroppedFace.Save(croppedImgFullPath, ImageFormat.Jpeg);
                                if (CroppedFace != null)
                                {
                                    ((IDisposable)CroppedFace).Dispose();
                                }
                            }
                            // Clear the query file
                            DeleteFile(FullImgPath);
                        }
                    }
                }
            }
            catch (Exception)
            {
                message = "File upload failed! Please try again";
            }
        }
    }
    return (new JsonResult { Data = new { Message = message, Status = flag } });
}
async Task AddFaceBasedTagsToPhotoAsync(PhotoResult photoResult)
{
    // See comment at bottom of file.
    if (!string.IsNullOrEmpty(cognitiveServiceFaceServiceKey))
    {
        FaceServiceClient client = new FaceServiceClient(cognitiveServiceFaceServiceKey);
        using (var stream = await photoResult.PhotoFile.OpenStreamForReadAsync())
        {
            var attributes = new FaceAttributeType[]
            {
                FaceAttributeType.Age,
                FaceAttributeType.FacialHair,
                FaceAttributeType.Gender,
                FaceAttributeType.Glasses,
                FaceAttributeType.Smile
            };
            var results = await client.DetectAsync(stream, true, false, attributes);
            var firstFace = results?.FirstOrDefault();
            if (firstFace != null)
            {
                var automaticTags = new List<string>();
                automaticTags.Add($"age {firstFace.FaceAttributes.Age}");
                automaticTags.Add(firstFace.FaceAttributes.Gender.ToString());
                automaticTags.Add(firstFace.FaceAttributes.Glasses.ToString());

                // Tag any attribute whose score passes a 0.5 threshold
                Action<double, string> compareFunc = (double value, string name) =>
                {
                    if (value > 0.5)
                        automaticTags.Add(name);
                };
                compareFunc(firstFace.FaceAttributes.Smile, "smile");
                compareFunc(firstFace.FaceAttributes.FacialHair.Beard, "beard");
                compareFunc(firstFace.FaceAttributes.FacialHair.Moustache, "moustache");
                compareFunc(firstFace.FaceAttributes.FacialHair.Sideburns, "sideburns");

                await this.photoControl.AddTagsToPhotoAsync(photoResult.PhotoId, automaticTags);
            }
        }
    }
}
private async void btn_Find_Click(object sender, EventArgs e)
{
    OpenFileDialog dialog = new OpenFileDialog();
    dialog.DefaultExt = ".jpg";
    dialog.Filter = "Image files(*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif";
    if (dialog.ShowDialog() == DialogResult.OK)
    {
        var imagePath = dialog.FileName;

        // Create the client
        var faceServiceClient = new FaceServiceClient(FaceAPIKey, FaceAPIEndPoint);
        using (var fStream = File.OpenRead(imagePath))
        {
            // Load the image into the PictureBox
            pct_Imagen.Image = new Bitmap(fStream);
            // Rewind the stream
            fStream.Seek(0, SeekOrigin.Begin);
            try
            {
                // Detect the faces
                var faces = await faceServiceClient.DetectAsync(fStream);
                // Identify the people
                var results = await faceServiceClient.IdentifyAsync(GroupGUID, faces.Select(ff => ff.FaceId).ToArray());

                // Build a list of faces and their associated names
                List<(Guid, string)> detections = new List<(Guid, string)>();
                foreach (var result in results)
                {
                    // If no candidate was found, skip this face
                    if (result.Candidates.Length == 0)
                    {
                        continue;
                    }
                    var faceId = faces.FirstOrDefault(f => f.FaceId == result.FaceId).FaceId;
                    // Look up the identified person's details
                    var person = await faceServiceClient.GetPersonAsync(GroupGUID, result.Candidates[0].PersonId);
                    // Add the face/name pair to the list
                    detections.Add((faceId, person.Name));
                }

                var faceBitmap = new Bitmap(pct_Imagen.Image);
                using (var g = Graphics.FromImage(faceBitmap))
                {
                    var br = new SolidBrush(Color.FromArgb(200, Color.LightGreen));
                    // For each detected face
                    foreach (var face in faces)
                    {
                        var fr = face.FaceRectangle;
                        var fa = face.FaceAttributes;
                        var faceRect = new Rectangle(fr.Left, fr.Top, fr.Width, fr.Height);
                        Pen p = new Pen(br);
                        p.Width = 50;
                        g.DrawRectangle(p, faceRect);

                        // Compute the label rectangle position
                        int rectTop = fr.Top + fr.Height + 10;
                        if (rectTop + 45 > faceBitmap.Height)
                        {
                            rectTop = fr.Top - 30;
                        }
                        // Compute the label rectangle dimensions
                        g.FillRectangle(br, fr.Left - 10, rectTop, fr.Width < 120 ? 120 : fr.Width + 20, 125);

                        // Look up the face/person pair in the detections list
                        var person = detections.Where(x => x.Item1 == face.FaceId).FirstOrDefault();
                        var personName = person.Item2;
                        Font font = new Font(Font.FontFamily, 90);
                        // Draw the name on the image
                        g.DrawString($"{personName}", font, Brushes.Black, fr.Left - 8, rectTop + 4);
                    }
                }
                pct_Imagen.Image = faceBitmap;
            }
            catch (FaceAPIException ex)
            {
                // Errors are swallowed in this sample
            }
        }
    }
}
async Task<AiResult> MakeRequest(string imageToCheck)
{
    AiResult res = new AiResult();
    // imageToCheck = "https://www.liberationnews.org/wp-content/uploads/2015/07/donaldtrump61815.jpg";

    EmotionServiceClient emotionServiceClient = new EmotionServiceClient(emotionKey);
    Emotion[] imageEmotion = await emotionServiceClient.RecognizeAsync(imageToCheck);
    Console.WriteLine("Feeling: " + imageEmotion[0].Scores.ToRankedList().First().Key);
    Console.WriteLine("Top score: " + imageEmotion[0].Scores.ToRankedList().First().Value);

    res.Emotion = string.Format("Unknown ({0:P2})", 0);
    float bestScore = 0;
    foreach (var em in imageEmotion[0].Scores.ToRankedList())
    {
        if (em.Value > bestScore)
        {
            bestScore = em.Value;
            res.Emotion = string.Format("{0} ({1:P2})", em.Key, em.Value);
        }
    }

    FaceServiceClient faceServiceClient = new FaceServiceClient(faceKey);
    FaceList trumpList = null;
    try
    {
        trumpList = await faceServiceClient.GetFaceListAsync(faceList);
    }
    catch (FaceAPIException apiExp)
    {
        if (apiExp.ErrorCode == "FaceListNotFound")
        {
            await faceServiceClient.CreateFaceListAsync(faceList, faceList, "A collection of trumps");
            trumpList = await faceServiceClient.GetFaceListAsync(faceList);
        }
        else
        {
            throw;
        }
    }

    if (trumpList.PersistedFaces.Count() < 5)
    {
        await faceServiceClient.AddFaceToFaceListAsync(faceList, "https://www.liberationnews.org/wp-content/uploads/2015/07/donaldtrump61815.jpg");
        await faceServiceClient.AddFaceToFaceListAsync(faceList, "http://thefederalist.com/wp-content/uploads/2016/02/trumpie.jpg");
        await faceServiceClient.AddFaceToFaceListAsync(faceList, "http://www.redstate.com/uploads/2016/02/donald-trump-is-still-soaring-in-iowa-but-there-are-now-some-clear-warning-signs.jpg");
        await faceServiceClient.AddFaceToFaceListAsync(faceList, "http://i.huffpost.com/gen/3706868/images/o-DONALD-TRUMP-FUNNY-facebook.jpg");
        await faceServiceClient.AddFaceToFaceListAsync(faceList, "http://media.salon.com/2015/04/donald_trump_thumbsup.jpg");
        trumpList = await faceServiceClient.GetFaceListAsync(faceList);
    }

    Face[] faceToCompare = await faceServiceClient.DetectAsync(imageToCheck);
    SimilarPersistedFace[] faces = await faceServiceClient.FindSimilarAsync(faceToCompare[0].FaceId, faceList, FindSimilarMatchMode.matchFace);

    res.TrumpMatch = String.Format("{0:P2}", 0);
    if (faces.Count() == 0)
    {
        Console.WriteLine("Sorry, nothing compares to you");
    }
    else
    {
        double totalConfidence = 0;
        foreach (SimilarPersistedFace matching in faces)
        {
            totalConfidence += matching.Confidence;
        }
        double averageConfidence = totalConfidence / faces.Count();
        res.TrumpMatch = String.Format("{0:P2}", averageConfidence);
        Console.WriteLine("Trump comparison: " + res.TrumpMatch);
    }
    return (res);
}
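The TrumpMatch score above is simply the arithmetic mean of the confidences returned by Find Similar: for example, two matches at 0.60 and 0.80 confidence yield (0.60 + 0.80) / 2 = 0.70, reported as 70.00% by the {0:P2} format.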
/// <summary>
/// Identify a list of photos based on an existing training group.
/// </summary>
/// <param name="PersonGroupID">Name of the training group</param>
/// <param name="Photos">List of photos to be tagged</param>
/// <returns></returns>
public async Task identifyPhotosInGroup(string PersonGroupID, List<Photo> Photos)
{
    IFaceServiceClient faceClient = new FaceServiceClient(SubscriptionKey);
    try
    {
        foreach (Photo photo in Photos)
        {
            photo.NumberOfMatchedFaces = 0;
            photo.NumberOfUnmatchedFaces = 0;
            photo.PeopleInPhoto.Clear();

            // Convert image bytes into a stream
            Stream stream = new MemoryStream(photo.Image);

            // Identify faces in the image (an image could have multiple faces in it)
            var faces = await faceClient.DetectAsync(stream);
            if (faces.Length > 0)
            {
                // Match each face to the training group photos
                var identifyResult = await faceClient.IdentifyAsync(PersonGroupID, faces.Select(ff => ff.FaceId).ToArray());
                for (int idx = 0; idx < faces.Length; idx++)
                {
                    var res = identifyResult[idx];
                    if (res.Candidates.Length > 0)
                    {
                        if (TrainingPhotos.Keys.Contains(res.Candidates[0].PersonId))
                        {
                            // Found a match, so add the original ID of the training person to the photo
                            photo.PeopleInPhoto.Add(TrainingPhotos[res.Candidates[0].PersonId]);
                            photo.NumberOfMatchedFaces += 1;
                        }
                        else
                        {
                            // The candidate is not in the training set, so count it as unmatched
                            photo.NumberOfUnmatchedFaces += 1;
                        }
                    }
                    else
                    {
                        // Didn't find a match, so count it as an unmatched face
                        photo.NumberOfUnmatchedFaces += 1;
                    }
                }
            }
        }
    }
    catch (ClientException)
    {
        throw;
    }
}
/// <summary>
/// Add photos to the training group using the Microsoft Face API.
/// </summary>
/// <param name="Photos">List of photos to add</param>
/// <param name="PersonGroupID">Name of the training group</param>
/// <returns></returns>
public async Task addPhotosToTrainingGroup(Dictionary<string, PhotoPerson> Photos, string PersonGroupID)
{
    IFaceServiceClient faceClient = new FaceServiceClient(SubscriptionKey);

    // Get the group and add photos to it.
    // The input dictionary is keyed by person ID; the output dictionary (TrainingPhotos)
    // is keyed by the person GUID returned by the API.
    try
    {
        await faceClient.GetPersonGroupAsync(PersonGroupID);

        // Training supports multiple pictures per person (more pictures make training more effective).
        // Each photo is added as a Face object within the Face API and attached to a person.
        foreach (PhotoPerson person in Photos.Values)
        {
            Person p = new Person();
            p.Name = person.Name;
            p.PersonId = Guid.NewGuid();

            List<Guid> faceIDs = new List<Guid>();
            foreach (Photo photo in person.Photos)
            {
                Stream stream = new MemoryStream(photo.Image);
                Face[] face = await faceClient.DetectAsync(stream);

                // Check for multiple faces - a training image should contain exactly one
                if (face.Length != 1)
                    throw new FaceDetectionException("Expected to detect 1 face but found " + face.Length + " faces for person " + p.Name);
                else
                    faceIDs.Add(face[0].FaceId);
            }
            Guid[] faceIDarray = faceIDs.ToArray();

            // Create the person in the training group with the array of faces
            CreatePersonResult result = await faceClient.CreatePersonAsync(PersonGroupID, faceIDarray, p.Name, null);
            p.PersonId = result.PersonId;
            TrainingPhotos.Add(p.PersonId, person);
        }

        await faceClient.TrainPersonGroupAsync(PersonGroupID);

        // Wait until training completes
        while (true)
        {
            await Task.Delay(1000);
            var status = await faceClient.GetPersonGroupTrainingStatusAsync(PersonGroupID);
            if (status.Status != "running")
            {
                break;
            }
        }
    }
    catch (ClientException)
    {
        throw;
    }
}
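A minimal end-to-end sketch of how these two helpers might be wired together, from inside the same class (hypothetical: the Photo and PhotoPerson shapes are inferred from their usage above, Photo is assumed to initialize its PeopleInPhoto list, and the group name and file names are placeholders):

// Hypothetical usage: train on one person, then tag a batch of photos.
var trainingSet = new Dictionary<string, PhotoPerson>
{
    ["1"] = new PhotoPerson
    {
        Name = "Alice",
        Photos = new List<Photo> { new Photo { Image = File.ReadAllBytes("alice.jpg") } }
    }
};
await addPhotosToTrainingGroup(trainingSet, "myPersonGroup");

var batch = new List<Photo> { new Photo { Image = File.ReadAllBytes("party.jpg") } };
await identifyPhotosInGroup("myPersonGroup", batch);
Console.WriteLine("Matched: " + batch[0].NumberOfMatchedFaces + ", unmatched: " + batch[0].NumberOfUnmatchedFaces);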
/// <summary>
/// This is testing the Face module
/// </summary>
public async Task<String> Testpicture(String testImageFile)
{
    try
    {
        People people = new People();
        List<string> rslist = new List<string>();
        string[] HeadRandom;
        StringBuilder Mount_path = new StringBuilder();
        FaceServiceClient fc = new FaceServiceClient(ApiKey, "https://southeastasia.api.cognitive.microsoft.com/face/v1.0");
        string personGroupId = "test";
        // await fc.CreatePersonGroupAsync(personGroupId, "My Family");
        // (Commented-out smoke test omitted: open the file and return "yes"/"no".)

        using (Stream s = File.OpenRead(testImageFile))
        {
            var requiredFaceAttributes = new FaceAttributeType[]
            {
                FaceAttributeType.Age,
                FaceAttributeType.Gender,
                FaceAttributeType.Smile,
                FaceAttributeType.FacialHair,
                FaceAttributeType.HeadPose,
                FaceAttributeType.Glasses,
                FaceAttributeType.Emotion
            };
            var faces = await fc.DetectAsync(s, returnFaceLandmarks: true, returnFaceAttributes: requiredFaceAttributes);
            var faceIds = faces.Select(face => face.FaceId).ToArray();
            try
            {
                var results = await fc.IdentifyAsync(personGroupId, faceIds);
                var fspicture = new FileStream(testImageFile, FileMode.Open);
                Bitmap bmp = new Bitmap(fspicture);
                Graphics g = Graphics.FromImage(bmp);
                int isM = 0, isF = 0;
                string sex = "";
                int age;
                String age_s = "";
                String emr = "";
                String Top_Emotion = "";
                Dictionary<string, float> Emotion = new Dictionary<string, float>();

                foreach (var face in faces)
                {
                    var faceRect = face.FaceRectangle;
                    var attributes = face.FaceAttributes;
                    float Happiness = attributes.Emotion.Happiness;
                    float Anger = attributes.Emotion.Anger;
                    float Neutral = attributes.Emotion.Neutral;
                    float Contempt = attributes.Emotion.Contempt;
                    float Disgust = attributes.Emotion.Disgust;
                    float Fear = attributes.Emotion.Fear;
                    float Sadness = attributes.Emotion.Sadness;
                    float Surprise = attributes.Emotion.Surprise;
                    String[] Emotion_string = { "Anger", "Happiness", "Neutral", "Contempt", "Disgust", "Fear", "Sadness", "Surprise" };
                    float[] Emotion_array = { Anger, Happiness, Neutral, Contempt, Disgust, Fear, Sadness, Surprise };

                    // (Commented-out drawing code omitted: ellipse/rectangle/label rendering around the face.)

                    for (int i = 0; i < Emotion_string.Length; i++)
                    {
                        Emotion.Add(Emotion_string[i], Emotion_array[i]);
                    }
                    if (attributes.Gender.StartsWith("male"))
                    {
                        isM += 1;
                    }
                    else
                    {
                        isF += 1;
                    }
                    age = Convert.ToInt32(attributes.Age);
                    age_s = age.ToString();
                    sex = attributes.Gender.ToString();
                    Top_Emotion = GetEmotion(attributes.Emotion);
                    Console.WriteLine(Top_Emotion);

                    Bitmap CroppedImage = null;
                    if (face.FaceAttributes.HeadPose.Roll >= 10 || face.FaceAttributes.HeadPose.Roll <= -10)
                    {
                        System.Drawing.Rectangle rect = new System.Drawing.Rectangle(
                            Convert.ToInt32(face.FaceRectangle.Left - 200),
                            Convert.ToInt32(face.FaceRectangle.Top - 200),
                            face.FaceRectangle.Width + 200,
                            face.FaceRectangle.Height + 200);
                        CroppedImage = new Bitmap(CropRotatedRect(bmp, rect, Convert.ToSingle(face.FaceAttributes.HeadPose.Roll * -1), true));
                    }
                    else
                    {
                        try
                        {
                            CroppedImage = new Bitmap(bmp.Clone(new System.Drawing.Rectangle(
                                face.FaceRectangle.Left - 150,
                                face.FaceRectangle.Top - 150,
                                face.FaceRectangle.Width + 300,
                                face.FaceRectangle.Height + 300), bmp.PixelFormat));
                        }
                        catch (Exception e)
                        {
                        }
                    }
                    bmp.Dispose();
                    CroppedImage.Save(@"C:\Users\v-altsai\Pictures\allfix.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                    CroppedImage.Dispose();
                    Console.WriteLine("Age " + age);
                }
                Console.WriteLine("Female: " + isF);
                Console.WriteLine("Male: " + isM);

                String name = "";
                foreach (var identifyResult in results)
                {
                    if (identifyResult.Candidates.Length == 0)
                    {
                        Console.WriteLine("No one identified");
                        name = "none";
                    }
                    else
                    {
                        var candidateId = identifyResult.Candidates[0].PersonId;
                        var person = await fc.GetPersonAsync(personGroupId, candidateId);
                        Console.WriteLine("Identified as {0}", person.Name);
                        name = person.Name;
                        Bitmap oribmp = new Bitmap(@"C:\Users\v-altsai\Pictures\allfix.jpg");
                        using (Bitmap tmpBmp = new Bitmap(oribmp))
                        {
                            Mount_path.Clear();
                            Mount_path.Append("C:\\Users\\v-altsai\\Pictures\\Family\\");
                            Mount_path.Append(person.Name);
                            Mount_path.Append(".jpg");
                            tmpBmp.Save(Mount_path.ToString(), System.Drawing.Imaging.ImageFormat.Jpeg);
                            Console.WriteLine("{0}", Mount_path.ToString());
                        }
                    }
                }

                people.Name = name;
                people.Age = age_s;
                people.Gender = sex;
                people.Emotion = Top_Emotion;
                people.Emotionlistscore = Emotion;
                JSONHelper helper = new JSONHelper();
                String jsonResult = helper.ConvertObjectToJSon(people);
                fspicture.Close();
                s.Close();
                Emotion.Clear();
                return (jsonResult);
            }
            catch (FaceAPIException fs)
            {
                Console.WriteLine(fs.ToString());
                Console.WriteLine("error results");
                return (null);
            }
        }
    }
    catch (Exception e)
    {
        String msg = "Oops! Something went wrong. Try again later";
        if (e is ClientException && (e as ClientException).Error.Message.ToLowerInvariant().Contains("access denied"))
        {
            msg += " (access denied - hint: check your APIKEY ).";
            Console.Write(msg);
        }
        Console.Write(e.ToString());
        return (null);
    }
}
public async Task<dynamic> GetDetectedFaces()
{
    ResultCollection.Clear();
    DetectedFaces.Clear();
    var DetectedResultsInText = string.Format("Detecting...");
    var FullImgPath = Server.MapPath(directory) + '/' + UplImageName as string;
    var QueryFaceImageUrl = directory + '/' + UplImageName;
    if (UplImageName != "")
    {
        // Create a new folder
        CreateDirectory();
        try
        {
            // Call the detection REST API
            using (var fStream = System.IO.File.OpenRead(FullImgPath))
            {
                // User picked one image
                var imageInfo = UIHelper.GetImageInfoForRendering(FullImgPath);

                // Create an instance of the service client by passing the service key to the constructor
                var faceServiceClient = new FaceServiceClient(ServiceKey, "https://westcentralus.api.cognitive.microsoft.com/face/v1.0");
                Face[] faces = await faceServiceClient.DetectAsync(fStream, true, true,
                    new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Glasses });
                DetectedResultsInText = string.Format("{0} face(s) have been detected!", faces.Length);

                Bitmap CroppedFace = null;
                foreach (var face in faces)
                {
                    // Create and save cropped images
                    var croppedImg = Convert.ToString(Guid.NewGuid()) + ".jpeg" as string;
                    var croppedImgPath = directory + '/' + croppedImg as string;
                    var croppedImgFullPath = Server.MapPath(directory) + '/' + croppedImg as string;
                    CroppedFace = CropBitmap(
                        (Bitmap)Image.FromFile(FullImgPath),
                        face.FaceRectangle.Left,
                        face.FaceRectangle.Top,
                        face.FaceRectangle.Width,
                        face.FaceRectangle.Height);
                    CroppedFace.Save(croppedImgFullPath, ImageFormat.Jpeg);
                    if (CroppedFace != null)
                    {
                        ((IDisposable)CroppedFace).Dispose();
                    }

                    DetectedFaces.Add(new vmFace()
                    {
                        ImagePath = FullImgPath,
                        FileName = croppedImg,
                        FilePath = croppedImgPath,
                        Left = face.FaceRectangle.Left,
                        Top = face.FaceRectangle.Top,
                        Width = face.FaceRectangle.Width,
                        Height = face.FaceRectangle.Height,
                        FaceId = face.FaceId.ToString(),
                        Gender = face.FaceAttributes.Gender,
                        Age = string.Format("{0:#} years old", face.FaceAttributes.Age),
                        IsSmiling = face.FaceAttributes.Smile > 0.0 ? "Smile" : "Not Smile",
                        Glasses = face.FaceAttributes.Glasses.ToString(),
                    });
                }

                // Convert detection result into UI binding object for rendering
                var rectFaces = UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo);
                foreach (var face in rectFaces)
                {
                    ResultCollection.Add(face);
                }
            }
        }
        catch (FaceAPIException)
        {
            // Do exception work here
        }
    }
    return (new JsonResult
    {
        Data = new
        {
            QueryFaceImage = QueryFaceImageUrl,
            MaxImageSize = MaxImageSize,
            FaceInfo = DetectedFaces,
            FaceRectangles = ResultCollection,
            DetectedResults = DetectedResultsInText
        },
        JsonRequestBehavior = JsonRequestBehavior.AllowGet
    });
}
private async void SendToBlobAsync()
{
    bool matchFound = false;

    // Create the blob client.
    CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
    // Retrieve a reference to a previously created container.
    CloudBlobContainer container = blobClient.GetContainerReference("imgcontainer");
    // Retrieve a reference to a blob named "Visitor.jpg".
    CloudBlockBlob blockBlob = container.GetBlockBlobReference("Visitor.jpg");
    await blockBlob.UploadFromFileAsync(photo);
    Status += "Pic Uploaded\n";

    string personName = "New Visitor";
    Stream image = await photo.OpenStreamForReadAsync();
    try
    {
        var faces = await faceServiceClient.DetectAsync(image);
        var faceIds = faces.Select(face => face.FaceId).ToArray();
        var results = await faceServiceClient.IdentifyAsync("group1", faceIds);
        foreach (var identifyResult in results)
        {
            if (identifyResult.Candidates.Length == 0)
            {
                matchFound = false;
            }
            else
            {
                var candidateId = identifyResult.Candidates[0].PersonId;
                var person = await faceServiceClient.GetPersonAsync("group1", candidateId);
                personName = person.Name;
                matchFound = true;
                break;
            }
        }

        if (matchFound)
        {
            var telemetryDataPoint = new { deviceId = "myFirstDevice", status = "Match Found", name = personName };
            var messageString = JsonConvert.SerializeObject(telemetryDataPoint);
            var message = new Message(Encoding.ASCII.GetBytes(messageString));
            await deviceClient.SendEventAsync(message);
        }
        else
        {
            var telemetryDataPoint = new { deviceId = "myFirstDevice", status = "Pic Uploaded", name = personName };
            var messageString = JsonConvert.SerializeObject(telemetryDataPoint);
            var message = new Message(Encoding.ASCII.GetBytes(messageString));
            await deviceClient.SendEventAsync(message);
        }
    }
    catch
    {
        var telemetryDataPoint = new { deviceId = "myFirstDevice", status = "Pic Uploaded", name = personName };
        var messageString = JsonConvert.SerializeObject(telemetryDataPoint);
        var message = new Message(Encoding.ASCII.GetBytes(messageString));
        await deviceClient.SendEventAsync(message);
        Status += "\nThe Image is Not Clear\n";
    }
}
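All three branches send the same telemetry shape; a successful match serializes to something like {"deviceId":"myFirstDevice","status":"Match Found","name":"..."} with the identified person's name filled in, which follows directly from JsonConvert.SerializeObject applied to the anonymous object above.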
private async Task ShowPersonalizedInformation(FaceServiceClient client, Stream photoStream)
{
    Face[] faces;
    try
    {
        faces = await client.DetectAsync(photoStream, true, true);
    }
    catch (Exception ex)
    {
        faces = new Face[0];
        HockeyClient.Current.TrackEvent("ShowPersonalizedInformation (DetectAsync) - Exception",
            new Dictionary<string, string> { { "Message", ex.Message }, { "Stack", ex.StackTrace } });
    }

    if (!faces.Any())
    {
        await ClearScrean();
        return;
    }
    HockeyClient.Current.TrackEvent("Face Detected Remotely (Oxford)");

    IdentifyResult[] identifyResults;
    try
    {
        identifyResults = await client.IdentifyAsync(PersonGroupId, faces.Select(face => face.FaceId).ToArray());
    }
    catch (Exception ex)
    {
        HockeyClient.Current.TrackEvent("ShowPersonalizedInformation (IdentifyAsync) - Exception",
            new Dictionary<string, string> { { "Message", ex.Message }, { "Stack", ex.StackTrace } });
        return;
    }

    // Skip results without candidates; the original called First() unconditionally and could throw.
    Guid[] personIds = identifyResults.Where(r => r.Candidates.Any()).Select(r => r.Candidates.First().PersonId).ToArray();
    Task<Person>[] personTasks = personIds.Select(async p => await client.GetPersonAsync(PersonGroupId, p)).ToArray();
    // Await instead of the original blocking Task.WaitAll
    await Task.WhenAll(personTasks);

    if (personTasks.Any() && personTasks.First().Result != null)
    {
        Person person = personTasks.First().Result;
        HockeyClient.Current.TrackEvent("Face Recognized (Oxford)",
            new Dictionary<string, string> { { "Person ID", person.PersonId.ToString() }, { "Graph User ID", person.Name } });
        try
        {
            await ShowPersonalizedInfoPanel(person);
        }
        catch (Exception ex)
        {
            HockeyClient.Current.TrackEvent("ShowPersonalizedInformation (ShowPersonalizedInfoPanel) - Exception",
                new Dictionary<string, string> { { "Message", ex.Message }, { "Stack", ex.StackTrace } });
        }
    }
    else
    {
        await ClearScrean();
    }
}
// GroupTest is really the main function. I still have to implement creating a new
// person mid-session; I was working on that when the Pi died...
private async void GroupTest()
{
    var photodir = await KnownFolders.PicturesLibrary.GetFileAsync(PHOTO_FILE_NAME);
    string photo = photodir.Path;
    string picdir = photo.Substring(0, photo.Length - 9);

    // (Commented-out setup code omitted: create the person group if it does not exist.)

    try
    {
        if (nofaces)
        {
            return;
        }
        else
        {
            // (Commented-out cleanup code omitted: delete persons that were accidentally
            // created without any faces, then retrain the group.)

            TrainingStatus trainingStatus = null;
            while (true)
            {
                trainingStatus = await faceServiceClient.GetPersonGroupTrainingStatusAsync(personGroupId);
                if (trainingStatus.Status.ToString() != "running")
                {
                    break;
                }
                await Task.Delay(1000);
            }

            string testImageFile = photo;
            bool firstface = true;
            using (Stream s = File.OpenRead(await GetPhoto()))
            {
                var faces = await faceServiceClient.DetectAsync(s, returnFaceLandmarks: true, returnFaceAttributes: requiredFaceAttributes);
                foreach (var faceinfo in faces)
                {
                    var id = faceinfo.FaceId;
                    var attributes = faceinfo.FaceAttributes;
                    var age = attributes.Age;
                    var gender = attributes.Gender;
                    var smile = attributes.Smile;
                    var facialHair = attributes.FacialHair;
                    var headPose = attributes.HeadPose;
                    var glasses = attributes.Glasses;
                    var emotion = attributes.Emotion;
                    var emotionlist = emotion.ToRankedList().First();
                    if (firstface)
                    {
                        mood = emotionlist.Key;
                        hasglasses = glasses.ToString();
                        personId = id.ToString();
                        persongender = gender;
                        personage = age;
                        personsmile = smile;
                        firstface = false;
                    }
                    updadeInfoList();
                    Infostring = mood;
                }

                var faceIds = faces.Select(face => face.FaceId).ToArray();
                var results = await faceServiceClient.IdentifyAsync(personGroupId, faceIds);
                foreach (var identifyResult in results)
                {
                    if (identifyResult.Candidates.Length == 0)
                    {
                        activeId = "I don't know you... but I'd like to ;)";

                        // (Commented-out work in progress omitted: ask for a name, create a new
                        // person, capture six photos, add them as person faces, retrain the
                        // group, then re-run the face check.)
                    }
                    else
                    {
                        FillNotAloneList();
                        // Work in progress: possible lines shown on the mirror when more than one face is present
                        PunMaker();
                        var candidateId = identifyResult.Candidates[0].PersonId;
                        var person = await faceServiceClient.GetPersonAsync(personGroupId, candidateId);
                        personname = person.Name;
                        if (VisitedPersons.Count() != 0)
                        {
                            foreach (var check in VisitedPersons)
                            {
                                if (check[0] == activeperson[0])
                                {
                                    int comeandgo = Convert.ToInt32(check[8]);
                                    comeandgo++;
                                    check[3] = mood;
                                    check[4] = hasglasses;
                                    check[6] = personsmile;
                                    check[8] = comeandgo;
                                    check[7] = true;
                                    activeperson = check;
                                    if (check[2].ToString().ToLower() == "female")
                                    {
                                        activeId = "You are looking good gurl ";
                                    }
                                    if (check[2].ToString().ToLower() == "male")
                                    {
                                        activeId = "You are looking good man! ";
                                    }
                                    else if (comeandgo > 1)
                                    {
                                        activeId = "Welcome Back " + check[0];
                                    }
                                    activeId = greeting() + personname;
                                }
                                else
                                {
                                    AddPersonToVisited(personname, personage, persongender, mood, hasglasses, personId, personsmile, false, 0);
                                    activeId = greeting() + person.Name.ToString();
                                }
                            }
                        }
                        else
                        {
                            AddPersonToVisited(personname, personage, persongender, mood, hasglasses, personId, personsmile, false, 0);
                            activeId = greeting() + activeperson[0].ToString();
                        }
                    }
                }
            }
        }
        grouptestFinished = true;
    }
    catch (Exception e)
    {
        activeId = "Nice weather we are having...";
        grouptestFinished = true;
    }
}
private async void ImagePicker_Click(object sender, RoutedEventArgs e)
{
    var fileOpenPicker = new FileOpenPicker();
    fileOpenPicker.FileTypeFilter.Add(".jpg");
    fileOpenPicker.FileTypeFilter.Add(".jpeg");
    fileOpenPicker.FileTypeFilter.Add(".png");
    fileOpenPicker.ViewMode = PickerViewMode.Thumbnail;
    fileOpenPicker.SuggestedStartLocation = PickerLocationId.PicturesLibrary;
    var storageFile = await fileOpenPicker.PickSingleFileAsync();

    FaceButtons.Children.Clear();
    using (var stream = await storageFile.OpenAsync(FileAccessMode.Read))
    {
        var image = new BitmapImage();
        image.SetSource(stream);
        SelectedImage.Source = image;
        stream.Seek(0);

        var faceServiceClient = new FaceServiceClient(_subscriptionKey);
        var faces = await faceServiceClient.DetectAsync(stream.AsStream(), true, true,
            new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.FacialHair, FaceAttributeType.Smile });

        // Overlay a clickable button on each detected face, scaled to the rendered image
        foreach (var face in faces)
        {
            var scale = SelectedImage.ActualHeight / image.PixelHeight;
            var faceButton = new Button();
            faceButton.Width = face.FaceRectangle.Width * scale;
            faceButton.Height = face.FaceRectangle.Height * scale;
            faceButton.Margin = new Thickness(face.FaceRectangle.Left * scale + 25, face.FaceRectangle.Top * scale + 25, 0, 0);
            faceButton.Tag = face.FaceId;
            faceButton.Click += FaceBox_Click;
            FaceButtons.Children.Add(faceButton);
        }
    }
}
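The scale factor maps face coordinates from image pixels to rendered pixels: for instance, if a 1000-pixel-tall image is rendered at 500 device-independent pixels, scale is 0.5, so a face rectangle 100 pixels wide gets a 50-pixel-wide button. The fixed +25 offsets presumably compensate for this sample's layout margins.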
/// <summary> /// Pick image folder and detect all faces in these images /// </summary> /// <param name="sender">Event sender</param> /// <param name="e">Event arguments</param> private async void FolderPicker_Click(object sender, RoutedEventArgs e) { // Show folder picker FolderBrowserDialog dlg = new FolderBrowserDialog(); var result = dlg.ShowDialog(); bool forceContinue = false; if (result == DialogResult.OK) { // Enumerate all ".jpg" files in the folder, call detect List<Task> tasks = new List<Task>(); FacesCollection.Clear(); TargetFaces.Clear(); FindSimilarCollection.Clear(); SelectedFile = null; // The suggestion count is intended only to keep the data preparation step small; // it does not correspond to any service-side constraint const int SuggestionCount = 10; int processCount = 0; Output = Output.AppendLine("Request: Preparing, detecting faces in chosen folder."); MainWindow mainWindow = Window.GetWindow(this) as MainWindow; string subscriptionKey = mainWindow.SubscriptionKey; var faceServiceClient = new FaceServiceClient(subscriptionKey); foreach (var img in Directory.EnumerateFiles(dlg.SelectedPath, "*.jpg", SearchOption.AllDirectories)) { tasks.Add(Task.Factory.StartNew( async (obj) => { var imgPath = obj as string; // Call detection using (var fStream = File.OpenRead(imgPath)) { try { var faces = await faceServiceClient.DetectAsync(fStream); return new Tuple<string, ClientContract.Face[]>(imgPath, faces); } catch (ClientException) { // Here we simply ignore all detection failures in this sample // You may handle these exceptions by checking the Error.Code and Error.Message properties of the ClientException object return new Tuple<string, ClientContract.Face[]>(imgPath, null); } } }, img).Unwrap().ContinueWith((detectTask) => { var res = detectTask.Result; if (res.Item2 == null) { return; } foreach (var f in res.Item2) { // Update detected faces on UI this.Dispatcher.Invoke( new Action<ObservableCollection<Face>, string, ClientContract.Face>(UIHelper.UpdateFace), FacesCollection, res.Item1, f); } })); processCount++; if (processCount >= SuggestionCount && !forceContinue) { var continueProcess = System.Windows.Forms.MessageBox.Show("The number of loaded images has reached the recommended count; continuing may take a long time. Would you like to keep loading images?", "Warning", MessageBoxButtons.YesNo); if (continueProcess == DialogResult.Yes) { forceContinue = true; } else { break; } } } await Task.WhenAll(tasks); Output = Output.AppendLine(string.Format("Response: Success. Total {0} faces detected.", FacesCollection.Count)); } }
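A side note on the detection tasks above: Task.Factory.StartNew with an async lambda returns a Task<Task<...>>, hence the Unwrap() call. Task.Run performs that unwrapping automatically, so an equivalent, purely illustrative form of the per-image detection task would be:

// Illustrative sketch only; imgPath and faceServiceClient stand for the same locals used above.
var detectTask = Task.Run(async () =>
{
    using (var fStream = File.OpenRead(imgPath))
    {
        // DetectAsync returns the detected faces for this image.
        return await faceServiceClient.DetectAsync(fStream);
    }
});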
/// <summary> /// Pick image and call Find Similar in both modes for each detected face /// </summary> /// <param name="sender">Event sender</param> /// <param name="e">Event arguments</param> private async void FindSimilar_Click(object sender, RoutedEventArgs e) { // Show file picker Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog(); dlg.DefaultExt = ".jpg"; dlg.Filter = "Image files (*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif"; var filePicker = dlg.ShowDialog(); if (filePicker.HasValue && filePicker.Value) { // User picked image // Clear previous detection and find similar results TargetFaces.Clear(); FindSimilarMatchPersonCollection.Clear(); FindSimilarMatchFaceCollection.Clear(); var sw = Stopwatch.StartNew(); var pickedImagePath = dlg.FileName; var renderingImage = UIHelper.LoadImageAppliedOrientation(pickedImagePath); var imageInfo = UIHelper.GetImageInfoForRendering(renderingImage); SelectedFile = renderingImage; // Detect all faces in the picked image using (var fStream = File.OpenRead(pickedImagePath)) { MainWindow.Log("Request: Detecting faces in {0}", SelectedFile); MainWindow mainWindow = Window.GetWindow(this) as MainWindow; string subscriptionKey = mainWindow._scenariosControl.SubscriptionKey; string endpoint = mainWindow._scenariosControl.SubscriptionEndpoint; var faceServiceClient = new FaceServiceClient(subscriptionKey, endpoint); var faces = await faceServiceClient.DetectAsync(fStream); // Update detected faces on UI foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo)) { TargetFaces.Add(face); } MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Length, SelectedFile); // Find similar faces for each face, in both modes foreach (var f in faces) { var faceId = f.FaceId; MainWindow.Log("Request: Finding similar faces in Personal Match Mode for face {0}", faceId); try { // Default mode: call the Find Similar REST API in matchPerson mode; the result contains the face ids of the same person as the query face const int requestCandidatesCount = 4; var result = await faceServiceClient.FindSimilarAsync(faceId, largeFaceListId: this._largeFaceListId, maxNumOfCandidatesReturned: requestCandidatesCount); // Update find matchPerson similar results collection for rendering var personSimilarResult = new FindSimilarResult(); personSimilarResult.Faces = new ObservableCollection<Face>(); personSimilarResult.QueryFace = new Face() { ImageFile = SelectedFile, Top = f.FaceRectangle.Top, Left = f.FaceRectangle.Left, Width = f.FaceRectangle.Width, Height = f.FaceRectangle.Height, FaceId = faceId.ToString(), }; foreach (var fr in result) { var candidateFace = FacesCollection.First(ff => ff.FaceId == fr.PersistedFaceId.ToString()); Face newFace = new Face(); newFace.ImageFile = candidateFace.ImageFile; newFace.Confidence = fr.Confidence; newFace.FaceId = candidateFace.FaceId; personSimilarResult.Faces.Add(newFace); } MainWindow.Log("Response: Found {0} similar faces for face {1}", personSimilarResult.Faces.Count, faceId); FindSimilarMatchPersonCollection.Add(personSimilarResult); } catch (FaceAPIException ex) { MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage); } try { // Call the Find Similar REST API in matchFace mode; the result contains the top N faces with the highest similarity confidence const int requestCandidatesCount = 4; var result = await faceServiceClient.FindSimilarAsync(faceId, largeFaceListId: this._largeFaceListId, mode: FindSimilarMatchMode.matchFace, maxNumOfCandidatesReturned: requestCandidatesCount); // Update "matchFace" similar results collection for rendering var faceSimilarResults = new FindSimilarResult(); faceSimilarResults.Faces = new ObservableCollection<Face>(); faceSimilarResults.QueryFace = new Face() { ImageFile = SelectedFile, Top = f.FaceRectangle.Top, Left = f.FaceRectangle.Left, Width = f.FaceRectangle.Width, Height = f.FaceRectangle.Height, FaceId = faceId.ToString(), }; foreach (var fr in result) { var candidateFace = FacesCollection.First(ff => ff.FaceId == fr.PersistedFaceId.ToString()); Face newFace = new Face(); newFace.ImageFile = candidateFace.ImageFile; newFace.Confidence = fr.Confidence; newFace.FaceId = candidateFace.FaceId; faceSimilarResults.Faces.Add(newFace); } MainWindow.Log("Response: Found {0} similar faces for face {1}", faceSimilarResults.Faces.Count, faceId); FindSimilarMatchFaceCollection.Add(faceSimilarResults); } catch (FaceAPIException ex) { MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage); } } } } GC.Collect(); }
public async Task<string[]> UploadAndDetectFaceGender(string imageFilePath, FaceServiceClient faceServiceClient) { try { using (Stream imageFileStream = File.OpenRead(imageFilePath)) { // Detect with the age and gender attributes enabled var faces = await faceServiceClient.DetectAsync(imageFileStream, false, true, true, false); var faceGender = faces.Select(face => face.Attributes.Gender); return faceGender.ToArray(); } } catch (Exception) { // On any failure, return a single-element placeholder array rather than surfacing the exception return new string[1]; } }
private async Task<FaceRectangle[]> UploadAndDetectFaces(Stream photoStream) { FaceServiceClient = new FaceServiceClient(Constants.FaceApiKey); try { using (Stream imageFileStream = photoStream) { var faces = await FaceServiceClient.DetectAsync(imageFileStream); var faceRects = faces.Select(face => face.FaceRectangle); return faceRects.ToArray(); } } catch (Exception) { // On any failure, return an empty result rather than surfacing the exception return new FaceRectangle[0]; } }
/// <summary> /// Pick image, detect and identify all faces detected /// </summary> /// <param name="sender">Event sender</param> /// <param name="e">Event arguments</param> private async void Identify_Click(object sender, RoutedEventArgs e) { // Show file picker Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog(); dlg.DefaultExt = ".jpg"; dlg.Filter = "Image files(*.jpg) | *.jpg"; var result = dlg.ShowDialog(); if (result.HasValue && result.Value) { // User picked one image // Clear previous detection and identification results TargetFaces.Clear(); SelectedFile = dlg.FileName; var sw = Stopwatch.StartNew(); var imageInfo = UIHelper.GetImageInfoForRendering(dlg.FileName); MainWindow mainWindow = Window.GetWindow(this) as MainWindow; string subscriptionKey = mainWindow._scenariosControl.SubscriptionKey; var faceServiceClient = new FaceServiceClient(subscriptionKey); // Call detection REST API using (var fileStream = File.OpenRead(dlg.FileName)) { try { var faces = await faceServiceClient.DetectAsync(fileStream); // Convert detection result into UI binding object for rendering foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo)) { TargetFaces.Add(face); } MainWindow.Log("Request: Identifying {0} face(s) in group \"{1}\"", faces.Length, GroupName); // Identify each face // Call identify REST API, the result contains identified person information var identifyResult = await faceServiceClient.IdentifyAsync(GroupName, faces.Select(ff => ff.FaceId).ToArray()); for (int idx = 0; idx < faces.Length; idx++) { // Update identification result for rendering var face = TargetFaces[idx]; var res = identifyResult[idx]; if (res.Candidates.Length > 0 && Persons.Any(p => p.PersonId == res.Candidates[0].PersonId.ToString())) { face.PersonName = Persons.Where(p => p.PersonId == res.Candidates[0].PersonId.ToString()).First().PersonName; } else { face.PersonName = "Unknown"; } } var outString = new StringBuilder(); foreach (var face in TargetFaces) { outString.AppendFormat("Face {0} is identified as {1}. ", face.FaceId, face.PersonName); } MainWindow.Log("Response: Success. {0}", outString); } catch (FaceAPIException ex) { MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage); } } } }
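The identification above takes Candidates[0] directly; candidates come back ordered by confidence, so a threshold check avoids accepting weak matches. A hedged sketch (the 0.5 threshold is illustrative, not from the sample):

// Sketch; res stands for one IdentifyResult as in the loop above (requires System.Linq).
var best = res.Candidates.FirstOrDefault();
if (best != null && best.Confidence >= 0.5)
{
    // Accept best.PersonId as the identified person.
}
else
{
    // Treat the face as unknown.
}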
/// <summary> /// Detect faces from a bitmap, and directly mark information on this bitmap /// </summary> /// <param name="originalBitmap">The bitmap to analyze</param> /// <returns>The bitmap with face rectangles drawn on it</returns> private static async Task<Bitmap> detectFacesAndMarkThem(Bitmap originalBitmap) { FaceServiceClient client = new FaceServiceClient(FaceAPIKey); MemoryStream stream = new MemoryStream(); originalBitmap.Compress(Bitmap.CompressFormat.Jpeg, imageQuality, stream); // DetectAsync expects a stream (or a URL), so wrap the compressed bytes in a new MemoryStream Face[] faces = await client.DetectAsync(new MemoryStream(stream.ToArray())); Bitmap resultBitmap = drawFaceRectanglesOnBitmap(originalBitmap, faces); return resultBitmap; }
public async Task<dynamic> FindSimilar() { string message = string.Empty, fileName = string.Empty, actualFileName = string.Empty; bool flag = false; var faceServiceClient = new FaceServiceClient(ServiceKey); FindSimilarCollection.Clear(); //Requested File Collection HttpFileCollection fileRequested = System.Web.HttpContext.Current.Request.Files; if (fileRequested != null) { for (int i = 0; i < fileRequested.Count; i++) { var file = Request.Files[i]; actualFileName = file.FileName; fileName = Guid.NewGuid() + Path.GetExtension(file.FileName); int size = file.ContentLength; try { file.SaveAs(Path.Combine(Server.MapPath(directory), fileName)); var imgPath = Server.MapPath(directory) + '/' + fileName as string; using (var fStream = System.IO.File.OpenRead(imgPath)) { var faces = await faceServiceClient.DetectAsync(fStream); //Find similar faces for each face foreach (var f in faces) { var faceId = f.FaceId; try { //Call the Find Similar REST API; the result contains all the face ids which are similar to the query face const int requestCandidatesCount = 10; var result = await faceServiceClient.FindSimilarAsync(faceId, _faceListName, requestCandidatesCount); var findResult = new vmFindSimilarResult(); findResult.Faces = new ObservableCollection<vmFace>(); findResult.QueryFace = new vmFace() { ImagePath = imgPath, FileName = fileName, FilePath = directory + '/' + fileName, Top = f.FaceRectangle.Top, Left = f.FaceRectangle.Left, Width = f.FaceRectangle.Width, Height = f.FaceRectangle.Height, FaceId = faceId.ToString(), }; //Update find similar results collection for rendering foreach (var fr in result) { findResult.Faces.Add(FacesCollection.First(ff => ff.FaceId == fr.PersistedFaceId.ToString())); } //Update UI FindSimilarCollection.Add(findResult); message = "Total " + findResult.Faces.Count() + " similar faces found."; flag = true; } catch (FaceAPIException fex) { message = fex.ErrorMessage; } } } } catch (Exception ex) { // Per-file failures are logged and otherwise ignored in this sample System.Diagnostics.Debug.WriteLine(ex.ToString()); } } } return new JsonResult { Data = new { Message = message, SimilarFace = FindSimilarCollection, Status = flag } }; }
/// <summary> /// Pick image for face detection and set detection result to result container /// </summary> /// <param name="sender">Event sender</param> /// <param name="e">Event argument</param> private async void ImagePicker_Click(object sender, RoutedEventArgs e) { // Show file picker dialog Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog(); dlg.DefaultExt = ".jpg"; dlg.Filter = "Image files(*.jpg) | *.jpg"; var result = dlg.ShowDialog(); if (result.HasValue && result.Value) { // User picked one image var imageInfo = UIHelper.GetImageInfoForRendering(dlg.FileName); SelectedFile = dlg.FileName; // Clear last detection result ResultCollection.Clear(); DetectedFaces.Clear(); DetectedResultsInText = "Detecting..."; MainWindow.Log("Request: Detecting {0}", SelectedFile); var sw = Stopwatch.StartNew(); // Call detection REST API using (var fileStream = File.OpenRead(SelectedFile)) { try { MainWindow mainWindow = Window.GetWindow(this) as MainWindow; string subscriptionKey = mainWindow._scenariosControl.SubscriptionKey; var faceServiceClient = new FaceServiceClient(subscriptionKey); Contract.Face[] faces = await faceServiceClient.DetectAsync(fileStream, false, true, new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Glasses }); MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Length, SelectedFile); DetectedResultsInText = string.Format("{0} face(s) have been detected", faces.Length); foreach (var face in faces) { DetectedFaces.Add(new Face() { ImagePath = SelectedFile, Left = face.FaceRectangle.Left, Top = face.FaceRectangle.Top, Width = face.FaceRectangle.Width, Height = face.FaceRectangle.Height, FaceId = face.FaceId.ToString(), Gender = face.FaceAttributes.Gender, Age = string.Format("{0:#} years old", face.FaceAttributes.Age), IsSmiling = face.FaceAttributes.Smile > 0.0 ? "Smile" : "Not Smile", Glasses = face.FaceAttributes.Glasses.ToString(), }); } // Convert detection result into UI binding object for rendering foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo)) { ResultCollection.Add(face); } } catch (FaceAPIException ex) { MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage); return; } } } }
/// <summary> /// Pick image for face detection and set detection result to result container /// </summary> /// <param name="sender">Event sender</param> /// <param name="e">Event argument</param> private async void ImagePicker_Click(object sender, RoutedEventArgs e) { // Show file picker dialog Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog(); dlg.DefaultExt = ".jpg"; dlg.Filter = "Image files (*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif"; var result = dlg.ShowDialog(); if (result.HasValue && result.Value) { // User picked one image var pickedImagePath = dlg.FileName; var renderingImage = UIHelper.LoadImageAppliedOrientation(pickedImagePath); var imageInfo = UIHelper.GetImageInfoForRendering(renderingImage); SelectedFile = renderingImage; // Clear last detection result ResultCollection.Clear(); DetectedFaces.Clear(); DetectedResultsInText = "Detecting..."; MainWindow.Log("Request: Detecting {0}", pickedImagePath); var sw = Stopwatch.StartNew(); // Call detection REST API using (var fStream = File.OpenRead(pickedImagePath)) { try { MainWindow mainWindow = Window.GetWindow(this) as MainWindow; string subscriptionKey = mainWindow._scenariosControl.SubscriptionKey; string endpoint = mainWindow._scenariosControl.SubscriptionEndpoint; FaceServiceClient faceServiceClient = new FaceServiceClient(subscriptionKey, endpoint); ProjectOxford.Face.Contract.Face[] faces = await faceServiceClient.DetectAsync(fStream, false, true, new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Glasses, FaceAttributeType.HeadPose, FaceAttributeType.FacialHair, FaceAttributeType.Emotion, FaceAttributeType.Hair, FaceAttributeType.Makeup, FaceAttributeType.Occlusion, FaceAttributeType.Accessories, FaceAttributeType.Noise, FaceAttributeType.Exposure, FaceAttributeType.Blur }); MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Length, pickedImagePath); DetectedResultsInText = string.Format("{0} face(s) have been detected", faces.Length); foreach (var face in faces) { DetectedFaces.Add(new Face() { ImageFile = renderingImage, Left = face.FaceRectangle.Left, Top = face.FaceRectangle.Top, Width = face.FaceRectangle.Width, Height = face.FaceRectangle.Height, FaceId = face.FaceId.ToString(), Age = string.Format("{0:#} years old", face.FaceAttributes.Age), Gender = face.FaceAttributes.Gender, HeadPose = string.Format("Pitch: {0}, Roll: {1}, Yaw: {2}", Math.Round(face.FaceAttributes.HeadPose.Pitch, 2), Math.Round(face.FaceAttributes.HeadPose.Roll, 2), Math.Round(face.FaceAttributes.HeadPose.Yaw, 2)), FacialHair = string.Format("FacialHair: {0}", face.FaceAttributes.FacialHair.Moustache + face.FaceAttributes.FacialHair.Beard + face.FaceAttributes.FacialHair.Sideburns > 0 ? "Yes" : "No"), Glasses = string.Format("GlassesType: {0}", face.FaceAttributes.Glasses.ToString()), Emotion = $"{GetEmotion(face.FaceAttributes.Emotion)}", Hair = string.Format("Hair: {0}", GetHair(face.FaceAttributes.Hair)), Makeup = string.Format("Makeup: {0}", ((face.FaceAttributes.Makeup.EyeMakeup || face.FaceAttributes.Makeup.LipMakeup) ? "Yes" : "No")), EyeOcclusion = string.Format("EyeOccluded: {0}", ((face.FaceAttributes.Occlusion.EyeOccluded) ? "Yes" : "No")), ForeheadOcclusion = string.Format("ForeheadOccluded: {0}", (face.FaceAttributes.Occlusion.ForeheadOccluded ? "Yes" : "No")), MouthOcclusion = string.Format("MouthOccluded: {0}", (face.FaceAttributes.Occlusion.MouthOccluded ?
"Yes" : "No")), Accessories = $"{GetAccessories(face.FaceAttributes.Accessories)}", Blur = string.Format("Blur: {0}", face.FaceAttributes.Blur.BlurLevel.ToString()), Exposure = string.Format("{0}", face.FaceAttributes.Exposure.ExposureLevel.ToString()), Noise = string.Format("Noise: {0}", face.FaceAttributes.Noise.NoiseLevel.ToString()), Moustache = string.Format("Moustache: {0}", face.FaceAttributes.FacialHair.Moustache.ToString()), Beard = string.Format("Beard: {0}", face.FaceAttributes.FacialHair.Beard.ToString()), }); } // Convert detection result into UI binding object for rendering foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo)) { ResultCollection.Add(face); } } catch (FaceAPIException ex) { MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage); GC.Collect(); return; } GC.Collect(); } } }
public async Task<string[]> FaceUpload(string DeviceId) { // Buffer the request body into memory Stream req = null; req = await Request.Content.ReadAsStreamAsync(); byte[] bytes = null; MemoryStream ms = new MemoryStream(); int count = 0; do { byte[] buf = new byte[1024]; count = req.Read(buf, 0, 1024); ms.Write(buf, 0, count); } while (req.CanRead && count > 0); bytes = ms.ToArray(); Stream stream = new MemoryStream(bytes); FaceServiceClient faceclient = new FaceServiceClient(ConfigurationManager.AppSettings["OxfordSubscriptionKeyPrimary"]); Face[] faceresult = null; try { faceresult = await faceclient.DetectAsync(stream, false, false, false, false); } catch (Exception ex) { Debug.WriteLine(ex.Message); } // Guard against both a failed call (faceresult still null) and an image with no faces if (faceresult == null || faceresult.Length == 0) { return new string[]{"Invalid"}; } Guid[] FaceIdSet = new Guid[faceresult.Length]; for (int i = 0; i < faceresult.Length; i++) { FaceIdSet[i] = faceresult[i].FaceId; } IdentifyResult[] identityresultnew = await faceclient.IdentifyAsync(ConfigurationManager.AppSettings["MemberGroupId"], FaceIdSet, 1); string IdentifyResultName = null; string[] IdentifyResultJson = new String[identityresultnew.Length]; int StrangerNum = 0; for (int j = 0; j < identityresultnew.Length; j++) { if (identityresultnew[j].Candidates.Length == 0) { IdentifyResultJson[j] = "Stranger"; StrangerNum++; } else { string candidateid = identityresultnew[j].Candidates[0].PersonId.ToString(); Person candidate = await faceclient.GetPersonAsync(ConfigurationManager.AppSettings["MemberGroupId"], new Guid(candidateid)); IdentifyResultName += candidate.Name + "_"; IdentifyResultJson[j] = candidate.Name; } } DateTime temp = DateTime.Now; string ImageNameDate = temp.Year.ToString() + "Y" + temp.Month.ToString() + "M" + temp.Day.ToString() + "D" + temp.Hour.ToString() + "h" + temp.Minute.ToString() + "m" + temp.Second.ToString() + "s"; string ImagePath = await StorageUpload("visitorcapture", ImageNameDate + "_" + IdentifyResultName + StrangerNum.ToString() + "Strangers", bytes); return IdentifyResultJson; }
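The manual read loop at the top of FaceUpload buffers the request body by hand; Stream.CopyToAsync does the same thing more simply. A minimal equivalent sketch:

// Sketch of an equivalent way to buffer the request body (req stands for the same stream as above).
byte[] bytes;
using (var ms = new MemoryStream())
{
    await req.CopyToAsync(ms);
    bytes = ms.ToArray();
}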
private async void Button_Click(object sender, RoutedEventArgs e) { var client = new FaceServiceClient("5fe605566efd44d6aacbdb51b7719cb0"); // Await instead of .Wait(): blocking the UI thread on an async call can deadlock await client.DetectAsync("https://oxfordportal.blob.core.windows.net/face/demo/detection%201%20thumbnail.jpg"); }
public async Task<Guid[]> UploadAndDetectFaceId(string imageFilePath, FaceServiceClient faceServiceClient) { try { using (Stream imageFileStream = File.OpenRead(imageFilePath)) { var faces = await faceServiceClient.DetectAsync(imageFileStream, false, true, true, false); var faceId = faces.Select(face => face.FaceId); return faceId.ToArray(); } } catch (Exception) { // On any failure, return a single-element placeholder array rather than surfacing the exception return new Guid[1]; } }
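UploadAndDetectFaceGender and UploadAndDetectFaceId each call DetectAsync separately; one call can return both values. A hedged sketch combining them, using the same overload and attribute access as the two helpers above (the helper name and tuple return type are illustrative):

// Sketch; requires System.Linq and the same Microsoft.ProjectOxford.Face SDK as above.
public async Task<Tuple<Guid, string>[]> UploadAndDetectIdAndGender(string imageFilePath, FaceServiceClient faceServiceClient)
{
    using (Stream imageFileStream = File.OpenRead(imageFilePath))
    {
        var faces = await faceServiceClient.DetectAsync(imageFileStream, false, true, true, false);
        // Pair each face id with its detected gender.
        return faces.Select(face => Tuple.Create(face.FaceId, face.Attributes.Gender)).ToArray();
    }
}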
/// <summary> /// Pick folder, then group detected faces by similarity /// </summary> /// <param name="sender">Event sender</param> /// <param name="e">Event arguments</param> private async void Grouping_Click(object sender, RoutedEventArgs e) { // Show folder picker FolderBrowserDialog dlg = new FolderBrowserDialog(); var result = dlg.ShowDialog(); // The suggestion count is intended only to keep the data preparation step small; // it does not correspond to any service-side constraint const int SuggestionCount = 10; if (result == DialogResult.OK) { // User picked one folder List<Task> tasks = new List<Task>(); int processCount = 0; bool forceContinue = false; // Clear previous grouping result GroupedFaces.Clear(); Faces.Clear(); MainWindow mainWindow = Window.GetWindow(this) as MainWindow; string subscriptionKey = mainWindow.SubscriptionKey; var faceServiceClient = new FaceServiceClient(subscriptionKey); Output = Output.AppendLine("Request: Preparing faces for grouping, detecting faces in chosen folder."); foreach (var img in Directory.EnumerateFiles(dlg.SelectedPath, "*.jpg", SearchOption.AllDirectories)) { tasks.Add(Task.Factory.StartNew( async (obj) => { var imgPath = obj as string; // Detect faces in image using (var fStream = File.OpenRead(imgPath)) { try { var faces = await faceServiceClient.DetectAsync(fStream); return new Tuple<string, ClientContract.Face[]>(imgPath, faces); } catch (ClientException) { // Here we simply ignore all detection failures in this sample // You may handle these exceptions by checking the Error.Code and Error.Message properties of the ClientException object return new Tuple<string, ClientContract.Face[]>(imgPath, null); } } }, img).Unwrap().ContinueWith((detectTask) => { // Update detected faces on UI var res = detectTask.Result; if (res.Item2 == null) { return; } foreach (var f in res.Item2) { this.Dispatcher.Invoke( new Action<ObservableCollection<Face>, string, ClientContract.Face>(UIHelper.UpdateFace), Faces, res.Item1, f); } })); processCount++; if (processCount >= SuggestionCount && !forceContinue) { var continueProcess = System.Windows.Forms.MessageBox.Show("Found many images under the chosen folder; continuing may take a long time. Continue?", "Warning", MessageBoxButtons.YesNo); if (continueProcess == DialogResult.Yes) { forceContinue = true; } else { break; } } } await Task.WhenAll(tasks); Output = Output.AppendLine(string.Format("Response: Success. Total {0} faces detected.", Faces.Count)); try { Output = Output.AppendLine(string.Format("Request: Grouping {0} faces.", Faces.Count)); // Call grouping; the result is a collection of groups, each containing similar faces var groupRes = await faceServiceClient.GroupAsync(Faces.Select(f => Guid.Parse(f.FaceId)).ToArray()); // Update grouping results for rendering foreach (var g in groupRes.Groups) { var gg = new GroupingResult() { Faces = new ObservableCollection<Face>(), IsMessyGroup = false, }; foreach (var fr in g) { gg.Faces.Add(Faces.First(f => f.FaceId == fr.ToString())); } GroupedFaces.Add(gg); } // MessyGroup contains all faces which are not similar to any other faces.
// Take an extreme case for example: // When grouping faces that are not similar to any other faces, the grouping result will contain only one messy group if (groupRes.MessyGroup.Length > 0) { var messyGroup = new GroupingResult() { Faces = new ObservableCollection<Face>(), IsMessyGroup = true }; foreach (var messy in groupRes.MessyGroup) { messyGroup.Faces.Add(Faces.First(f => f.FaceId == messy.ToString())); } GroupedFaces.Add(messyGroup); } Output = Output.AppendLine(string.Format("Response: Success. {0} faces are grouped into {1} groups.", Faces.Count, GroupedFaces.Count)); } catch (ClientException ex) { Output = Output.AppendLine(string.Format("Response: {0}. {1}", ex.Error.Code, ex.Error.Message)); } } }
public async Task<string> FindSimilarImages() { // Create a face list and add all the images we are going to match against _faceListName = Guid.NewGuid().ToString(); // Generate a unique id for the face list var faceServiceClients = new FaceServiceClient(subscriptionKeyValue); // Call the Face API using the subscription key try { await faceServiceClients.CreateFaceListAsync(_faceListName, _faceListName, "face_Images"); // Call 'CreateFaceListAsync' to create a face list with id/name _faceListName } catch (FaceAPIException ex) { Errormsg = ex.ErrorMessage; return RedirectToAction("Error", "Home", new { Errormsg = Errormsg }).ToString(); } DirectoryInfo DirInfo = new DirectoryInfo(@"C:\Image"); Dictionary<string, string> DictionaryListofPersistanceIDAndImagePath = new Dictionary<string, string>(); // Maps the persisted face id returned by the Face API to the image path try { foreach (var file in DirInfo.GetFiles("*.jpg")) { string imgPath = @"C:\Image\" + file.ToString(); using (FileStream fStream = new FileStream(imgPath, FileMode.Open, FileAccess.Read)) { var faces = await faceServiceClients.AddFaceToFaceListAsync(_faceListName, fStream); // Add each image to the created face list via 'AddFaceToFaceListAsync' DictionaryListofPersistanceIDAndImagePath.Add(faces.PersistedFaceId.ToString(), imgPath); // Store the returned PersistedFaceId and the image path in the dictionary } } } catch (FaceAPIException ex) { ViewData["ExceptionMsg"] = ex.ErrorMessage; } // Match the captured image against the images in the face list // string CapturedImgName = Server.MapPath("~/Image/CapturedImg.jpg"); string CapturedImgName = Server.MapPath("~/test.jpg"); string[] MatchedImgpath; int MatchedImgcount = 0; // Declared here so the count is still visible after the using block below using (var fileStream = System.IO.File.OpenRead(CapturedImgName)) { var faceServiceClient = new FaceServiceClient(subscriptionKeyValue); var faces = await faceServiceClient.DetectAsync(fileStream); // 'DetectAsync' detects faces in the captured image and returns a face id for each foreach (var f in faces) { var faceId = f.FaceId; // Retrieve the face id of the captured image const int requestCandidatesCount = 20; // The number of top candidates to return; better matches are assigned higher confidence values by the Face API try { var result = await faceServiceClient.FindSimilarAsync(faceId, _faceListName, requestCandidatesCount); // Match the captured image by sending faceId and _faceListName to 'FindSimilarAsync'; the result contains the matched images' PersistedFaceIds MatchedImgpath = new string[requestCandidatesCount]; // Array of size 'requestCandidatesCount' to store the matched image paths foreach (var fr in result) // Loop through the PersistedFaceIds of the matched faces { if (fr.Confidence >= 0.8) // Keep only matches whose confidence value is >= 0.8 { if (DictionaryListofPersistanceIDAndImagePath.ContainsKey(fr.PersistedFaceId.ToString())) // If the persisted id is present in the dictionary, retrieve the corresponding image path
{ MatchedImgpath[MatchedImgcount] = DictionaryListofPersistanceIDAndImagePath[fr.PersistedFaceId.ToString()]; // Store the image path; this array holds every matched image path with confidence >= 0.8 MatchedImgcount = MatchedImgcount + 1; } } } } catch (FaceAPIException ex) { ViewData["ExceptionMsg"] = ex.ErrorMessage; } } } if (MatchedImgcount != 0) { return "found"; } else { return "notfound"; } }
async void GetEmotions(object sender, object e) { var ms = new MemoryStream(); // Uri uri = new Uri("ms-appx:///Assets/WIN_20160205_23_45_55_Pro.jpg"); StorageFile file = await ApplicationData.Current.LocalFolder.CreateFileAsync( "TestPhoto.jpg", CreationCollisionOption.GenerateUniqueName); await MC.CapturePhotoToStorageFileAsync(ImageEncodingProperties.CreateJpeg(), file); //.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), ms.AsRandomAccessStream()); ms.Position = 0L; var ms1 = new MemoryStream(); await ms.CopyToAsync(ms1); ms.Position = 0L; var ms2 = new MemoryStream(); var randomAccessStream = await file.OpenReadAsync(); Stream stream = randomAccessStream.AsStreamForRead(); Microsoft.ProjectOxford.Face.Contract.Face[] faces = await faceServiceClient.DetectAsync(stream, false, true, new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.FacialHair, FaceAttributeType.Smile, FaceAttributeType.Glasses }); var randomAccessStream2 = await file.OpenReadAsync(); Stream stream2 = randomAccessStream2.AsStreamForRead(); var Emo = await Oxford.RecognizeAsync(stream2); if (Emo != null && Emo.Length > 0 && faces.Length > 0) { var Face = Emo[0]; var s = Face.Scores; if (faces[0].FaceAttributes.Gender.Equals("male")) { // Speak "man" in Russian faces[0].FaceAttributes.Gender = "мужчина"; } else { // Speak "woman" in Russian faces[0].FaceAttributes.Gender = "женщина"; } Speak(faces); //Wait(); //if (s.Surprise > 0.8) //{ // if (!SentSurprize) // { // ms1.Position = 0L; // var u = await SendPicture(ms1); // await RoverServices.InsertSF(u, s.Surprise); // SentSurprize = true; // } //} var T = new Thickness(); T.Left = Face.FaceRectangle.Left; T.Top = Face.FaceRectangle.Top; MyEmo.Update(Face.Scores); //await RoverServices.Insert(Face.Scores); } }
/// <summary> /// Pick image and call find similar for each face detected /// </summary> /// <param name="sender">Event sender</param> /// <param name="e">Event arguments</param> private async void FindSimilar_Click(object sender, RoutedEventArgs e) { // Show file picker Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog(); dlg.DefaultExt = ".jpg"; dlg.Filter = "Image files(*.jpg) | *.jpg"; var filePicker = dlg.ShowDialog(); if (filePicker.HasValue && filePicker.Value) { // User picked image // Clear previous detection and find similar results TargetFaces.Clear(); FindSimilarCollection.Clear(); var sw = Stopwatch.StartNew(); SelectedFile = dlg.FileName; var imageInfo = UIHelper.GetImageInfoForRendering(SelectedFile); // Detect all faces in the picked image using (var fileStream = File.OpenRead(SelectedFile)) { MainWindow.Log("Request: Detecting faces in {0}", SelectedFile); MainWindow mainWindow = Window.GetWindow(this) as MainWindow; string subscriptionKey = mainWindow._scenariosControl.SubscriptionKey; var faceServiceClient = new FaceServiceClient(subscriptionKey); var faces = await faceServiceClient.DetectAsync(fileStream); // Update detected faces on UI foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo)) { TargetFaces.Add(face); } MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Length, SelectedFile); // Find similar faces for each face foreach (var f in faces) { var faceId = f.FaceId; MainWindow.Log("Request: Finding similar faces for face {0}", faceId); try { // Call the Find Similar REST API; the result contains all the face ids which are similar to the query face const int requestCandidatesCount = 3; var result = await faceServiceClient.FindSimilarAsync(faceId, _faceListName, requestCandidatesCount); // Update find similar results collection for rendering var gg = new FindSimilarResult(); gg.Faces = new ObservableCollection<Face>(); gg.QueryFace = new Face() { ImagePath = SelectedFile, Top = f.FaceRectangle.Top, Left = f.FaceRectangle.Left, Width = f.FaceRectangle.Width, Height = f.FaceRectangle.Height, FaceId = faceId.ToString(), }; foreach (var fr in result) { gg.Faces.Add(FacesCollection.First(ff => ff.FaceId == fr.PersistedFaceId.ToString())); } MainWindow.Log("Response: Found {0} similar faces for face {1}", gg.Faces.Count, faceId); FindSimilarCollection.Add(gg); } catch (FaceAPIException ex) { MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage); } } } } }
// method to take a still image, send to APIs, and display result public async void takePhoto_Click(object sender, RoutedEventArgs e) { try { takePhoto.IsEnabled = false; photoFile = await KnownFolders.PicturesLibrary.CreateFileAsync( PHOTO_FILE_NAME, CreationCollisionOption.GenerateUniqueName); ImageEncodingProperties imageProperties = ImageEncodingProperties.CreateJpeg(); await mediaCapture.CapturePhotoToStorageFileAsync(imageProperties, photoFile); takePhoto.IsEnabled = true; statusBox.Text = "Take Photo succeeded: " + photoFile.Path; IRandomAccessStream photoStream = await photoFile.OpenReadAsync(); IRandomAccessStream photoStream2 = await photoFile.OpenReadAsync(); BitmapImage bitmap = new BitmapImage(); bitmap.SetSource(photoStream); await writeableBitmap.SetSourceAsync(photoStream2); // and now for the face API call statusBox.Text = "Uploading image for Face API"; Stream fs1 = await photoFile.OpenStreamForReadAsync(); Stream fs2 = await photoFile.OpenStreamForReadAsync(); var faceClient = new FaceServiceClient("9725d03742394560be3ff295e1e435a2"); var emotionClient = new EmotionServiceClient("c9306a1f134749759f1f4f9ae8838e1a"); faceResult = await faceClient.DetectAsync(fs1); emotionResult = await emotionClient.RecognizeAsync(fs2); numFaces = faceResult.Length; statusBox.Text = "Number of faces detected: " + numFaces.ToString(); currentFace = 0; if (numFaces > 0) // if faces were returned in the result, display the first one { displayFaceInfo(); displayImage(); } } catch (Exception ex) { statusBox.Text = ex.Message; Cleanup(); } finally { takePhoto.IsEnabled = true; } }
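The Face and Emotion calls in takePhoto_Click are independent, so they could run concurrently instead of sequentially. An illustrative variant, not from the sample:

// Sketch; fs1/fs2 and the two clients stand for the same locals used above.
var faceTask = faceClient.DetectAsync(fs1);
var emotionTask = emotionClient.RecognizeAsync(fs2);
await Task.WhenAll(faceTask, emotionTask);
faceResult = faceTask.Result;
emotionResult = emotionTask.Result;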
/// <summary> /// Pick image for detection, get detection result and put detection results into RightResultCollection /// </summary> /// <param name="sender">Event sender</param> /// <param name="e">Event argument</param> private async void RightImagePicker_Click(object sender, RoutedEventArgs e) { // Show image picker, show jpg type files only Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog(); dlg.DefaultExt = ".jpg"; dlg.Filter = "Image files(*.jpg) | *.jpg"; var result = dlg.ShowDialog(); if (result.HasValue && result.Value) { VerifyResult = string.Empty; // User already picked one image var pickedImagePath = dlg.FileName; var imageInfo = UIHelper.GetImageInfoForRendering(pickedImagePath); RightImageDisplay.Source = new BitmapImage(new Uri(pickedImagePath)); // Clear last time detection results RightResultCollection.Clear(); MainWindow.Log("Request: Detecting in {0}", pickedImagePath); var sw = Stopwatch.StartNew(); // Call detection REST API, detect faces inside the image using (var fileStream = File.OpenRead(pickedImagePath)) { try { MainWindow mainWindow = Window.GetWindow(this) as MainWindow; string subscriptionKey = mainWindow._scenariosControl.SubscriptionKey; var faceServiceClient = new FaceServiceClient(subscriptionKey); var faces = await faceServiceClient.DetectAsync(fileStream); // Handle REST API calling error if (faces == null) { return; } MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Length, pickedImagePath); // Convert detection results into UI binding object for rendering foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo)) { // Detected faces are hosted in result container, will be used in the verification later RightResultCollection.Add(face); } } catch (FaceAPIException ex) { MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage); return; } } } }
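The faces collected into RightResultCollection (together with a matching left-side collection) typically feed a face-to-face verification call from the same SDK. A minimal hedged sketch:

// Sketch; leftFaceId/rightFaceId are illustrative Guids taken from the two detection results.
var verifyResult = await faceServiceClient.VerifyAsync(leftFaceId, rightFaceId);
// VerifyResult exposes whether the two faces belong to the same person, and with what confidence.
MainWindow.Log("Response: Confidence = {0}, Identical = {1}", verifyResult.Confidence, verifyResult.IsIdentical);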
public async Task <CognitiveResult> AnalyzeAsync(Stream stream, string language, RecognitionType recognitionType = RecognitionType.All, Func <RecognitionPhase, Task> onProgress = null) { var result = new CognitiveResult(); var imageBytes = await stream.ToArrayAsync().ConfigureAwait(false); await RaiseOnProgressAsync(onProgress, RecognitionPhase.QueryingService).ConfigureAwait(false); var visionService = new VisionServiceClient(Settings.VisionSubscriptionKey); AnalysisResult analyzeImageResult = null; if (recognitionType.HasFlag(RecognitionType.Vision)) { var features = new HashSet <VisualFeature> { VisualFeature.Description }; if (recognitionType.HasFlag(RecognitionType.Face) || recognitionType.HasFlag(RecognitionType.Emotion)) { // If recognition types include face or emotions, add also the Faces Visual Feature, so Face and Emotion services are called // only if really needed. features.Add(VisualFeature.Faces); } try { analyzeImageResult = await visionService.AnalyzeImageAsync(stream, features).ConfigureAwait(false); } catch (Microsoft.ProjectOxford.Vision.ClientException ex) { var exception = await CreateExceptionAsync(ex.Error.Code, ex.Error.Message, "Vision", ex.GetHttpStatusCode(), ex, language, onProgress).ConfigureAwait(false); throw exception; } Caption originalDescription; Caption filteredDescription; var visionSettings = VisionSettingsProvider != null ? await VisionSettingsProvider.GetSettingsAsync().ConfigureAwait(false) : null; var isValid = analyzeImageResult.IsValid(out originalDescription, out filteredDescription, visionSettings); var visionResult = result.VisionResult; visionResult.IsValid = isValid; visionResult.RawDescription = originalDescription.Text; visionResult.Confidence = originalDescription.Confidence; if (isValid) { visionResult.Description = filteredDescription.Text; visionResult.TranslatedDescription = await TranslateAsync(filteredDescription.Text, language, onProgress).ConfigureAwait(false); } } if ((recognitionType.HasFlag(RecognitionType.Face) || recognitionType.HasFlag(RecognitionType.Emotion)) && (analyzeImageResult?.Faces.Any() ?? true)) // If Vision service was previously called, checks if any face was detected. { var faceService = new FaceServiceClient(Settings.FaceSubscriptionKey, "https://westus.api.cognitive.microsoft.com/face/v1.0"); var emotionService = new EmotionServiceClient(Settings.EmotionSubscriptionKey); await RaiseOnProgressAsync(onProgress, RecognitionPhase.RecognizingFaces).ConfigureAwait(false); try { stream.Position = 0; var faces = await faceService.DetectAsync(stream, returnFaceAttributes : new[] { FaceAttributeType.Gender, FaceAttributeType.Age /*, FaceAttributeType.Smile, FaceAttributeType.Glasses */ }).ConfigureAwait(false); if (faces.Any()) { if (!faceServiceInitialized) { // If necessary, initializes face service by obtaining the face group used for identification, if any. await InitializeFaceServiceAsync(faceService).ConfigureAwait(false); } // Tries to identify faces in the image. IdentifyResult[] faceIdentificationResult = null; if (!string.IsNullOrWhiteSpace(identifyPersonGroupId)) { var faceIds = faces.Select(face => face.FaceId).ToArray(); faceIdentificationResult = await faceService.IdentifyAsync(identifyPersonGroupId, faceIds).ConfigureAwait(false); } var faceTasks = new List <Task>(); foreach (var face in faces) { // Runs face identification in parallel. var task = Task.Run(async() => { var faceResult = face.GetFaceResult(); var faceRecognitionTask = Task.Run(async() => { // Checks if there is a candidate (i.e. 
// a known person) in the identification result. var candidate = faceIdentificationResult?.FirstOrDefault(r => r.FaceId == face.FaceId)?.Candidates.FirstOrDefault(); if (candidate != null) { // Gets the person name. var person = await faceService.GetPersonAsync(identifyPersonGroupId, candidate.PersonId).ConfigureAwait(false); faceResult.IdentifyConfidence = candidate.Confidence; faceResult.Name = person?.Name; } }); var emotionTask = Task.Run(async() => { if (recognitionType.HasFlag(RecognitionType.Emotion)) { // If required, for each face gets the corresponding emotion. try { using (var ms = new MemoryStream(imageBytes)) { var emotions = await emotionService.RecognizeAsync(ms, face.FaceRectangle.ToRectangle()).ConfigureAwait(false); var bestEmotion = emotions.GetBestEmotion(); faceResult.Emotion = bestEmotion; } } catch (Microsoft.ProjectOxford.Common.ClientException ex) { var exception = await CreateExceptionAsync(ex.Error.Code, ex.Error.Message, "Emotion", ex.HttpStatus, ex, language, onProgress).ConfigureAwait(false); throw exception; } } }); await Task.WhenAll(faceRecognitionTask, emotionTask).ConfigureAwait(false); result.FaceResults.Add(faceResult); }); faceTasks.Add(task); } await Task.WhenAll(faceTasks).ConfigureAwait(false); } } catch (FaceAPIException ex) { var exception = await CreateExceptionAsync(ex.ErrorCode, ex.ErrorMessage, "Face", ex.HttpStatus, ex, language, onProgress).ConfigureAwait(false); throw exception; } } if (recognitionType.HasFlag(RecognitionType.Text)) { await RaiseOnProgressAsync(onProgress, RecognitionPhase.RecognizingText).ConfigureAwait(false); try { using (var ms = new MemoryStream(imageBytes)) { var results = await visionService.RecognizeTextAsync(ms).ConfigureAwait(false); var text = results.GetRecognizedText(); result.OcrResult.Text = text; } } catch (Microsoft.ProjectOxford.Vision.ClientException ex) { var exception = await CreateExceptionAsync(ex.Error.Code, ex.Error.Message, "Vision", ex.GetHttpStatusCode(), ex, language, onProgress).ConfigureAwait(false); throw exception; } } return result; }