/// <summary>
/// Takes a snapshot from the media player, detects faces with the Face API,
/// looks each face up in the persisted face list "21122012", and renders the
/// best match (confidence, id, glasses attribute) on the UI.
/// </summary>
private async Task OnSubmitToOxfordAsync()
{
    try
    {
        progress1.Visibility = Visibility.Visible;

        // Reset all result fields before a new query.
        txtAge.Text = "";
        txtConf.Text = "";
        txtId.Text = "";
        txtGlasses.Text = "";
        txtDescription.Text = "";
        txtName.Text = "";
        txtAlert.Text = "";

        var requiredFaceAttributes = new FaceAttributeType[]
        {
            FaceAttributeType.Age,
            FaceAttributeType.Gender,
            FaceAttributeType.Smile,
            FaceAttributeType.FacialHair,
            FaceAttributeType.HeadPose,
            FaceAttributeType.Glasses
        };

        // Snapshot the current video frame to a temp file and analyze it.
        string path = Path.Combine(Path.GetTempPath(), "file1.jpg");
        MPEAdaptive.MediaPlayer.takeSnapshot(0, path, 1280, 720);

        using (Stream s = File.OpenRead(path))
        {
            Face[] faces = await FaceService1.DetectAsync(s, returnFaceLandmarks: true, returnFaceAttributes: requiredFaceAttributes);
            if (faces.Length > 0)
            {
                foreach (var face in faces)
                {
                    // Here we get the range of similarity with respect to our data on the Azure cloud.
                    var idGuid = face.FaceId;
                    var facescomp = await FaceService1.FindSimilarAsync(idGuid, "21122012", 1);

                    // BUG FIX: guard the result set — indexing [0] on an empty
                    // array threw when the service found no similar face.
                    if (facescomp.Length == 0)
                    {
                        txtId.Text = "No match";
                        continue;
                    }

                    // BUG FIX: removed the redundant (and culture-sensitive)
                    // double -> string -> double round-trip for the confidence.
                    var confidence = facescomp[0].Confidence * 100;
                    var lentes = face.FaceAttributes.Glasses.ToString();
                    Debug.WriteLine("Conf: " + confidence.ToString());
                    txtConf.Text = confidence.ToString() + " %";
                    txtId.Text = "ID: " + idGuid.ToString();
                    txtGlasses.Text = lentes;
                    Query(facescomp[0].PersistedFaceId.ToString());
                }
            }
            else
            {
                txtId.Text = "No match";
            }
        }
    }
    catch (Exception ex)
    {
        // BUG FIX: the dialog was created but never displayed.
        MessageDialog msj = new MessageDialog(ex.Message, "Error");
        await msj.ShowAsync();
    }
    finally
    {
        // BUG FIX: hide the progress ring even when an exception occurs
        // (previously it stayed visible forever after any failure).
        progress1.Visibility = Visibility.Collapsed;
    }
}
/// <summary>
/// Looks the given face up in the specified face list and returns the persisted
/// id of the first candidate whose confidence exceeds 0.5, or null when none does.
/// </summary>
private static async Task <Guid?> FindSimilarAsync(Guid faceId, Guid faceListId)
{
    var candidates = await faceServiceClient.FindSimilarAsync(faceId, faceListId.ToString());
    var match = candidates.Where(c => c.Confidence > 0.5).FirstOrDefault();
    return match?.PersistedFaceId;
}
/// <summary>
/// Creates a Face API client from configuration and returns the persisted id of
/// the first face in the list similar to <paramref name="faceId"/> with
/// confidence above 0.5, or null when there is no such face.
/// </summary>
private static async Task <Guid?> FindSimilarAsync(Guid faceId, Guid faceListId)
{
    // NOTE(review): a fresh client is built on every call with a hard-coded
    // East US endpoint — presumably fine for this sample; confirm before reuse.
    faceServiceClient = new FaceServiceClient(Configuration["FaceAPIKey"], "https://eastus.api.cognitive.microsoft.com/face/v1.0");
    var candidates = await faceServiceClient.FindSimilarAsync(faceId, faceListId.ToString());
    var confident = candidates.Where(c => c.Confidence > 0.5).FirstOrDefault();
    return confident?.PersistedFaceId;
}
/// <summary>
/// Drains the queue of detected users: for each one, searches the face list for
/// a similar face. A hit links the user to the existing record; a miss adds the
/// face to the list as a new user. The user is then persisted and dequeued.
/// </summary>
public async Task FindSimilarsAsync()
{
    // Skip if the previous persistence task is still in flight.
    if (_lastTask != null && !_lastTask.IsCompleted)
    {
        return;
    }

    while (true)
    {
        User user;
        User userRetrived = null;

        lock (_detectedUsers)
        {
            user = _detectedUsers.FirstOrDefault();
        }

        // BUG FIX: the original spun forever at 100% CPU once the queue was
        // drained (empty body, no delay). Exit instead; the caller re-invokes.
        if (user == null)
        {
            break;
        }

        // BUG FIX: was `.Result` — sync-over-async blocking inside an async
        // method (deadlock risk, thread-pool starvation). Await it instead.
        var containsAnyFaceOnList = await UpsertFaceListAndCheckIfContainsFaceAsync();
        if (containsAnyFaceOnList)
        {
            SendLog($"Buscando similaridades do user \"{user.UserId.ToString()}\"");
            var similarsFaces = await _faceServiceClient.FindSimilarAsync(user.UserId, _faceListId, 20);
            var referenceFace = similarsFaces.FirstOrDefault(x => x.Confidence >= 0.5);
            if (referenceFace == null)
            {
                // No match: register this face as a brand-new user.
                SendLog($"Novo User detectado - \"{user.UserId.ToString()}\" ");
                user.PersistedId = await AddUserToFaceListAsync(user.FaceBase64);
            }
            else
            {
                // Match: link this detection to the already-known user.
                userRetrived = await GetUserByPersistedId(referenceFace.PersistedFaceId);
                SendLog($"Similaridade encontrada de User - \"{user.UserId.ToString()}\" para User já existente {userRetrived.UserId.ToString()} ");
                user.GuidReference = userRetrived.UserId.ToString();
                user.Restrictions = userRetrived.Restrictions;
            }
        }
        else
        {
            // Face list is empty: every detection is a new user.
            user.PersistedId = await AddUserToFaceListAsync(user.FaceBase64);
        }

        _lastTask = _userRepository.UpdateUserAsync(user);
        await _lastTask;
        HandleException(_userService.UserRetrieved(userRetrived));
        // NOTE(review): removal happens outside the lock that guarded the read —
        // assumes a single consumer; confirm against callers.
        _detectedUsers.Remove(user);
    }
}
/// <summary>
/// Best-effort similarity lookup: returns the persisted id of the first face in
/// <paramref name="faceListId"/> similar to <paramref name="faceId"/> with
/// confidence above 0.5, or null when none matches or the service call fails.
/// </summary>
public async Task <Guid?> FindSimilarAsync(Guid faceId, string faceListId)
{
    try
    {
        // FIX: faceListId is already a string — dropped the redundant ToString().
        var similarFaces = await _faceServiceClient.FindSimilarAsync(faceId, faceListId);
        var similarFace = similarFaces.FirstOrDefault(f => f.Confidence > 0.5);
        return similarFace?.PersistedFaceId;
    }
    catch (Exception)
    {
        // Deliberate best-effort: any service failure is treated as "no match".
        // (FIX: removed the unused `ex` local that triggered a compiler warning.)
        return null;
    }
}
/// <summary>
/// Compares the detected face against the configured face list and returns the
/// highest similarity confidence found. (Original comment: compare by the
/// target image's face id.)
/// </summary>
/// <param name="targetFaceId">Face id from a previous detection call.</param>
/// <returns>Maximum confidence among up to 3 candidates; 0 when no face id was
/// supplied or no candidate was returned.</returns>
public async Task <double> FindSimilar(Guid targetFaceId)
{
    double best = 0;
    if (targetFaceId != Guid.Empty)
    {
        var candidates = await faceServiceClient.FindSimilarAsync(targetFaceId, FaceListId, 3);
        if (candidates.Any())
        {
            best = candidates.Max(c => c.Confidence);
        }
    }
    return best;
}
/// <summary>
/// Function to find similar faces. Will first detect any faces in the given
/// photo, and use this result for the similar faces search.
/// </summary>
/// <param name="obj">Command parameter (unused).</param>
private async void FindSimilarFace(object obj)
{
    if (!_faceListExists || FaceIds.Count == 0)
    {
        SimilarResult = "Face list does not exist";
        // BUG FIX: the original fell through and queried the service anyway.
        return;
    }

    Guid findFaceGuid = await BrowseAndDetectFaces();
    if (findFaceGuid.Equals(Guid.Empty))
    {
        return;
    }

    try
    {
        SimilarPersistedFace[] similarFaces = await _faceServiceClient.FindSimilarAsync(findFaceGuid, FaceListName.ToLower(), 3);
        if (similarFaces == null || similarFaces.Length == 0)
        {
            // BUG FIX: the original set the message and then dereferenced a
            // possibly-null array (NullReferenceException on null results).
            SimilarResult = "No faces were similar";
            return;
        }

        StringBuilder result = new StringBuilder();
        result.Append("Similar faces:\n");
        foreach (SimilarPersistedFace similarFace in similarFaces)
        {
            result.AppendFormat("Face ID: {0}\n", similarFace.PersistedFaceId);
            result.AppendFormat("Probability: {0}\n\n", similarFace.Confidence);
        }
        SimilarResult = result.ToString();
    }
    catch (Exception ex)
    {
        Debug.WriteLine(ex.Message);
    }
}
/// <summary>
/// Searches the supplied face ids for one similar to <paramref name="faceId"/>.
/// (Original Spanish summary: checks whether any currently registered faces match.)
/// </summary>
/// <param name="faceId">Face id of the newly detected face.</param>
/// <param name="facesIds">Candidate face ids to compare against.</param>
/// <param name="mode">Similarity matching mode (matchPerson / matchFace).</param>
/// <returns>A Coincidence with the matched id and confidence, or Guid.Empty / 0
/// when no candidate exceeds the 0.30 confidence threshold.</returns>
private async Task <Coincidence> FindSimilarFaceAsync(Guid faceId, Guid[] facesIds, FindSimilarMatchMode mode)
{
    // BUG FIX: removed the `catch (Exception ex) { throw ex; }` wrapper — it
    // only destroyed the original stack trace. Letting the exception propagate
    // unchanged is equivalent and preserves diagnostics.
    SimilarFace[] similarFaces = await _faceClient.FindSimilarAsync(faceId, facesIds, mode);

    // FIX: collapsed .ToList().Where(...).FirstOrDefault() into a single
    // predicate overload — same result, no intermediate list allocation.
    SimilarFace similarFace = similarFaces.FirstOrDefault(x => x.Confidence > 0.30);

    return new Entities.Coincidence
    {
        MatchId = similarFace != null ? similarFace.FaceId : Guid.Empty,
        Confidence = similarFace != null ? similarFace.Confidence : 0,
        NewId = faceId
    };
}
// metodo 2
/// <summary>
/// Detects the face in <paramref name="fileLocation"/>, then queries persisted
/// face list "21122012" for the most similar stored face and prints the result.
/// NOTE(review): async void — exceptions are unobservable by callers; kept only
/// for signature compatibility.
/// </summary>
public async void SimilarFinder(string fileLocation, string subscriptionKey, string name, int edad)
{
    using (var fileStream = File.OpenRead(fileLocation))
    {
        try
        {
            var client = new FaceServiceClient(subscriptionKey);
            var faces = await client.DetectAsync(fileStream, true);
            Console.WriteLine(" > " + faces.Length + " detected.");
            // BUG FIX: faces[0] was indexed unconditionally — zero detections
            // threw, and the stale `idlist` from a previous run was then parsed.
            if (faces.Length == 0)
            {
                return;
            }
            Console.WriteLine(" >> IdActual: " + faces[0].FaceId.ToString());
            idlist = faces[0].FaceId.ToString();
        }
        catch (Exception exception)
        {
            Console.WriteLine(exception.ToString());
        }
        // FIX: removed redundant fileStream.Close() — the using block disposes it.
    }

    Guid guidId = Guid.Parse(idlist);
    Console.WriteLine("Entrando a Find Similar\n" + guidId);

    // BUG FIX: Thread.Sleep blocked the thread inside an async method; use a
    // non-blocking delay instead (same 4 s pause before the similarity query).
    await Task.Delay(4000);

    // findsimilar
    using (var fileStream = File.OpenRead(fileLocation))
    {
        try
        {
            var client = new FaceServiceClient(subscriptionKey);
            var faces = await client.FindSimilarAsync(guidId, "21122012", 1);
            // BUG FIX: the original printed faces[1] although only ONE candidate
            // was requested — an IndexOutOfRangeException on every match.
            if (faces.Length > 0)
            {
                Console.WriteLine(" >> PId: " + faces[0].ToString());
            }
            //Comparer.idActual = faces.PersistedFaceId.ToString();
        }
        catch (Exception exception)
        {
            Console.WriteLine(exception.ToString());
        }
    }
}
/// <summary>
/// Pick image and call find similar for each faces detected
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event arguments</param>
private async void FindSimilar_Click(object sender, RoutedEventArgs e)
{
    // Show file picker
    Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
    dlg.DefaultExt = ".jpg";
    dlg.Filter = "Image files(*.jpg) | *.jpg";
    var filePicker = dlg.ShowDialog();
    if (filePicker.HasValue && filePicker.Value)
    {
        // User picked image.
        // Clear previous detection and find similar results.
        TargetFaces.Clear();
        FindSimilarCollection.Clear();
        var sw = Stopwatch.StartNew();
        SelectedFile = dlg.FileName;
        var imageInfo = UIHelper.GetImageInfoForRendering(SelectedFile);

        // Detect all faces in the picked image.
        using (var fileStream = File.OpenRead(SelectedFile))
        {
            MainWindow.Log("Request: Detecting faces in {0}", SelectedFile);
            MainWindow mainWindow = Window.GetWindow(this) as MainWindow;
            string subscriptionKey = mainWindow._scenariosControl.SubscriptionKey;
            var faceServiceClient = new FaceServiceClient(subscriptionKey);
            var faces = await faceServiceClient.DetectAsync(fileStream);

            // Update detected faces on UI.
            foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
            {
                TargetFaces.Add(face);
            }

            // BUG FIX: second placeholder was {0}, so the log repeated the face
            // count instead of printing the file name; it must be {1}.
            MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Length, SelectedFile);

            // Find similar faces for each face.
            foreach (var f in faces)
            {
                var faceId = f.FaceId;
                MainWindow.Log("Request: Finding similar faces for face {0}", faceId);
                try
                {
                    // Call find similar REST API; the result contains the face ids
                    // which are similar to the query face.
                    const int requestCandidatesCount = 3;
                    var result = await faceServiceClient.FindSimilarAsync(faceId, _faceListName, requestCandidatesCount);

                    // Update find similar results collection for rendering.
                    var gg = new FindSimilarResult();
                    gg.Faces = new ObservableCollection <Face>();
                    gg.QueryFace = new Face()
                    {
                        ImagePath = SelectedFile,
                        Top = f.FaceRectangle.Top,
                        Left = f.FaceRectangle.Left,
                        Width = f.FaceRectangle.Width,
                        Height = f.FaceRectangle.Height,
                        FaceId = faceId.ToString(),
                    };
                    foreach (var fr in result)
                    {
                        gg.Faces.Add(FacesCollection.First(ff => ff.FaceId == fr.PersistedFaceId.ToString()));
                    }
                    MainWindow.Log("Response: Found {0} similar faces for face {1}", gg.Faces.Count, faceId);
                    FindSimilarCollection.Add(gg);
                }
                catch (FaceAPIException ex)
                {
                    MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                }
            }
        }
    }
}
/// <summary>
/// Detects faces in the given image stream, reports age/gender/glasses for the
/// first face, and — when the subject wears no glasses — matches it against
/// persisted face list "21122011", updating the UI with the verdict.
/// </summary>
/// <param name="image">Image stream to analyze.</param>
/// <returns>Attributes of the first detected face (empty result when none).</returns>
private async Task <FaceDetectionResult> DetectFacesAsync(Stream image)
{
    Result.Text = "";
    // SECURITY NOTE(review): subscription key is hard-coded in source — move it
    // to configuration/secret storage and rotate this key.
    FaceServiceClient faceService = new FaceServiceClient("2bddec152651472a8cb690e00db31a43");
    FaceDetectionResult faceDetectionResult = new FaceDetectionResult();
    var requiredFaceAttributes = new FaceAttributeType[] { FaceAttributeType.Age, FaceAttributeType.Gender, FaceAttributeType.Glasses };
    Face[] faces = await faceService.DetectAsync(image, returnFaceLandmarks: true, returnFaceAttributes: requiredFaceAttributes);
    if (faces.Length >= 1)
    {
        var edad = faces[0].FaceAttributes.Age;
        var genero = faces[0].FaceAttributes.Gender;
        faceDetectionResult.FaceId = faces[0].FaceId.ToString();
        faceDetectionResult.Age = faces[0].FaceAttributes.Age;
        faceDetectionResult.Glasses = faces[0].FaceAttributes.Glasses.ToString();
        Debug.WriteLine("ID de rostro: " + faces[0].FaceId);
        Debug.WriteLine("Edad: " + edad);
        Debug.WriteLine("Género: " + genero);
        Debug.WriteLine("Lentes: " + faces[0].FaceAttributes.Glasses);
        if (faceDetectionResult.Glasses == "NoGlasses")
        {
            // FIX: removed Guid -> string -> Guid and double -> string -> double
            // round-trips (redundant and culture-sensitive).
            Guid idGuid = faces[0].FaceId;
            SimilarPersistedFace[] facescomp = await faceService.FindSimilarAsync(idGuid, "21122011", 1);
            // BUG FIX: guard against an empty result before indexing [0].
            if (facescomp.Length > 0)
            {
                double conf = facescomp[0].Confidence;
                string pid = facescomp[0].PersistedFaceId.ToString();
                Debug.WriteLine("conf: " + conf);
                if (conf >= .67)
                {
                    Result.Text = "Posible coincidencia";
                    try
                    {
                        Query(pid);
                    }
                    catch (Exception ex)
                    {
                        Debug.WriteLine(" ex: " + ex);
                    }
                }
                else
                {
                    Result.Text = "No hay coincidencias";
                }
            }
            else
            {
                Result.Text = "No hay coincidencias";
            }
        }
        else
        {
            Result.Text = "Try again without glasses!";
        }
    }
    else
    {
        Debug.WriteLine("No faces detected: {0} ", faces.Length);
        Result.Text = faces.Length + " faces detected";
    }
    return faceDetectionResult;
}
/// <summary>
/// Accepts an uploaded photo, stores the original and a 192px PNG thumbnail in
/// blob storage, then matches the single face in it against persisted face list
/// "4976" (matchFace mode) and redirects to Index with the match result.
/// </summary>
/// <param name="file">The uploaded image file (may be null).</param>
public async Task <ActionResult> UploadUserPic(HttpPostedFileBase file)
{
    int percent = -1;
    string persistedFaceId = null;
    string photoUriStr = null;
    string errorMsg = null;
    if (file != null && file.ContentLength > 0)
    {
        // Make sure the user selected an image file.
        if (!file.ContentType.StartsWith("image"))
        {
            TempData["Message"] = "Only image files may be uploaded";
        }
        else
        {
            // Save the original image in the "photos" container.
            CloudStorageAccount account = CloudStorageAccount.Parse(CloudConfigurationManager.GetSetting("StorageConnectionString"));
            CloudBlobClient client = account.CreateCloudBlobClient();
            CloudBlobContainer container = client.GetContainerReference("photos");
            CloudBlockBlob photo = container.GetBlockBlobReference(Path.GetFileName(file.FileName));
            await photo.UploadFromStreamAsync(file.InputStream);
            file.InputStream.Seek(0L, SeekOrigin.Begin);

            // Generate a thumbnail and save it in the "userthumbnails" container.
            using (var outputStream = new MemoryStream())
            {
                var settings = new ResizeSettings { MaxWidth = 192, Format = "png" };
                ImageBuilder.Current.Build(file.InputStream, outputStream, settings);
                outputStream.Seek(0L, SeekOrigin.Begin);
                container = client.GetContainerReference("userthumbnails");
                CloudBlockBlob thumbnail = container.GetBlockBlobReference(Path.GetFileName(file.FileName));
                await thumbnail.UploadFromStreamAsync(outputStream);
            }

            // Submit the image to Azure's Face API.
            FaceServiceClient faceServiceClient = new FaceServiceClient(CloudConfigurationManager.GetSetting("SubscriptionKey"));
            photoUriStr = photo.Uri.ToString();
            var faces = await faceServiceClient.DetectAsync(photoUriStr);
            var faceIds = faces.Select(face => face.FaceId).ToArray();
            if (faceIds.Length == 1)
            {
                var results = await faceServiceClient.FindSimilarAsync(faceIds[0], "4976", FindSimilarMatchMode.matchFace);
                // BUG FIX: FindSimilar can return an empty array; indexing
                // results[0] unconditionally threw when there was no match.
                if (results.Length > 0)
                {
                    percent = (int)(results[0].Confidence * 100);
                    persistedFaceId = results[0].PersistedFaceId.ToString();
                    errorMsg = null;
                }
                else
                {
                    errorMsg = "No similar face was found!";
                }
            }
            else if (faceIds.Length == 0)
            {
                errorMsg = "No face detected in the image!";
            }
            else
            {
                errorMsg = "There is more than 1 face in the image or in the specified targetFace area!";
            }
        }
    }
    // Redirect back to the index action to show the form once again.
    return RedirectToAction("Index", new { userPhotoURL = photoUriStr, similarPercent = percent, celebrityFaceId = persistedFaceId, error = errorMsg });
}
/// <summary>
/// Saves each uploaded image, detects the faces it contains, and searches the
/// configured face list for similar faces, returning the matches as JSON.
/// </summary>
/// <returns>JsonResult with a status flag, message, and the similar-face collection.</returns>
public async Task <dynamic> FindSimilar()
{
    string message = string.Empty, fileName = string.Empty, actualFileName = string.Empty;
    bool flag = false;
    var faceServiceClient = new FaceServiceClient(ServiceKey);
    FindSimilarCollection.Clear();

    // Requested file collection.
    HttpFileCollection fileRequested = System.Web.HttpContext.Current.Request.Files;
    if (fileRequested != null)
    {
        for (int i = 0; i < fileRequested.Count; i++)
        {
            var file = Request.Files[i];
            actualFileName = file.FileName;
            // Unique name so concurrent uploads never collide on disk.
            fileName = Guid.NewGuid() + Path.GetExtension(file.FileName);
            int size = file.ContentLength;
            try
            {
                file.SaveAs(Path.Combine(Server.MapPath(directory), fileName));
                var imgPath = Server.MapPath(directory) + '/' + fileName as string;
                using (var fStream = System.IO.File.OpenRead(imgPath))
                {
                    var faces = await faceServiceClient.DetectAsync(fStream);
                    // Find similar faces for each face.
                    foreach (var f in faces)
                    {
                        var faceId = f.FaceId;
                        try
                        {
                            // Call find similar REST API; the result contains the
                            // face ids which are similar to the query face.
                            const int requestCandidatesCount = 10;
                            var result = await faceServiceClient.FindSimilarAsync(faceId, _faceListName, requestCandidatesCount);
                            var findResult = new vmFindSimilarResult();
                            findResult.Faces = new ObservableCollection <vmFace>();
                            findResult.QueryFace = new vmFace()
                            {
                                ImagePath = imgPath,
                                FileName = fileName,
                                FilePath = directory + '/' + fileName,
                                Top = f.FaceRectangle.Top,
                                Left = f.FaceRectangle.Left,
                                Width = f.FaceRectangle.Width,
                                Height = f.FaceRectangle.Height,
                                FaceId = faceId.ToString(),
                            };
                            // Update find similar results collection for rendering.
                            foreach (var fr in result)
                            {
                                findResult.Faces.Add(FacesCollection.First(ff => ff.FaceId == fr.PersistedFaceId.ToString()));
                            }
                            // Update UI.
                            FindSimilarCollection.Add(findResult);
                            // FIX: dropped the pointless Convert.ToString wrapper
                            // around what is already a string expression.
                            message = "Total " + findResult.Faces.Count + " faces are detected.";
                            flag = true;
                        }
                        catch (FaceAPIException fex)
                        {
                            message = fex.ErrorMessage;
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                // BUG FIX: the original executed `ex.ToString();` and discarded
                // the result, silently swallowing the error; surface it instead.
                message = ex.Message;
            }
        }
    }
    return new JsonResult { Data = new { Message = message, SimilarFace = FindSimilarCollection, Status = flag } };
}
/// <summary>
/// Matches each face id detected in the current frame (CurrentFaceIds) against
/// the faces of known visitors; a match updates that visitor's face list and
/// last-seen time, a miss registers a brand-new visitor. CurrentVisitorId is
/// set to the last visitor touched.
/// </summary>
public async Task FindSimilarFace()
{
    try
    {
        var faceIdsToCompare = new List <Guid>();
        var faceServiceClient = new FaceServiceClient(FaceApiKey);
        List <Guid> identifiedPeople = new List <Guid>();
        if (Visitors.Count > 0)
        {
            // Compare against at most 3 stored face ids per known visitor.
            foreach (var vis in Visitors)
            {
                var cnt = vis.FaceIds.Count > 3 ? 3 : vis.FaceIds.Count;
                for (int i = 0; i < cnt; i++)
                {
                    faceIdsToCompare.Add(vis.FaceIds[i]);
                }
            }
        }
        else
        {
            // No known visitors yet: compare against the raw detections instead.
            // NOTE(review): this aliases the DetectedFaceIds list rather than
            // copying it — presumably intentional; confirm.
            faceIdsToCompare = DetectedFaceIds;
        }
        foreach (var currentFaceId in CurrentFaceIds)
        {
            // Ask the service for up to 10 faces similar to this detection.
            var similarFace = await faceServiceClient.FindSimilarAsync(currentFaceId, faceIdsToCompare.ToArray(), 10);
            if (similarFace.Count() > 0)
            {
                foreach (var visitor in Visitors)
                {
                    // A visitor matches when any of their stored face ids appears
                    // among the similar faces returned by the service.
                    if (visitor.FaceIds.AsParallel().Any(x => similarFace.AsParallel().Any(y => y.FaceId == x)))
                    {
                        if (!visitor.FaceIds.Contains(currentFaceId))
                        {
                            visitor.FaceIds.Add(currentFaceId);
                            visitor.LastSeen = DateTime.Now;
                        }
                        CurrentVisitorId = visitor.Id;
                    }
                    // Accumulate every known face id for the "unclaimed" check below.
                    identifiedPeople.AddRange(visitor.FaceIds);
                }
                // Similar faces that belong to no known visitor → new visitor.
                if (!identifiedPeople.AsParallel().Any(x => similarFace.AsParallel().Any(y => y.FaceId == x)))
                {
                    var cnt = Visitors.Count;
                    var newVisitor = new Visitor { Id = cnt + 1, FaceIds = new List <Guid> { currentFaceId }, LastSeen = DateTime.Now, DwellingTime = new Stopwatch() };
                    CurrentVisitorId = newVisitor.Id;
                    Visitors.Add(newVisitor);
                }
            }
            else
            {
                // No similar face at all: register a brand-new visitor.
                var cnt = Visitors.Count;
                var newVisitor = new Visitor { Id = cnt + 1, FaceIds = new List <Guid> { currentFaceId }, LastSeen = DateTime.Now, DwellingTime = new Stopwatch() };
                CurrentVisitorId = newVisitor.Id;
                Visitors.Add(newVisitor);
            }
        }
    }
    catch (FaceAPIException ex)
    {
        System.Diagnostics.Debug.WriteLine(ex.ErrorMessage);
    }
    catch (Exception ex)
    {
        // NOTE(review): all non-API exceptions are swallowed silently here —
        // consider at least logging them.
    }
}
/// <summary>
/// Wraps the Face API FindSimilar call with automatic retry when the request
/// quota limit is exceeded.
/// </summary>
/// <param name="faceId">Face id of the query face.</param>
/// <param name="faceListId">Id of the face list to search.</param>
/// <param name="maxNumOfCandidatesReturned">Upper bound on returned candidates (default 1).</param>
public async Task <SimilarPersistedFace[]> FindSimilarAsync(Guid faceId, string faceListId, int maxNumOfCandidatesReturned = 1) =>
    await RunTaskWithAutoRetryOnQuotaLimitExceededError <SimilarPersistedFace[]>(
        () => faceClient.FindSimilarAsync(faceId, faceListId, maxNumOfCandidatesReturned));
/// <summary>
/// Builds a fresh face list from every .jpg under C:\Image, then matches the
/// captured test image against it. Returns "found" when at least one stored
/// face matches with confidence >= 0.8, otherwise "notfound".
/// </summary>
public async Task <string> FindSimilarImages()
{
    // Create the face list and upload all reference images to it.
    _faceListName = Guid.NewGuid().ToString(); // unique group id for this batch
    var faceServiceClients = new FaceServiceClient(subscriptionKeyValue);
    try
    {
        await faceServiceClients.CreateFaceListAsync(_faceListName, _faceListName, "face_Images");
    }
    catch (FaceAPIException ex)
    {
        Errormsg = ex.ErrorMessage;
        return RedirectToAction("Error", "Home", new { Errormsg = Errormsg }).ToString();
    }

    DirectoryInfo DirInfo = new DirectoryInfo(@"C:\Image");
    // Maps each persisted face id returned by the service to its image path.
    Dictionary <string, string> DictionaryListofPersistanceIDAndImagePath = new Dictionary <string, string>();
    try
    {
        foreach (var file in DirInfo.GetFiles("*.jpg"))
        {
            string imgPath = @"C:\Image\" + file.ToString();
            // BUG FIX: the FileStream was never disposed, leaking one handle per
            // reference image; wrap it in a using block.
            using (FileStream fStream = new FileStream(imgPath, FileMode.Open, FileAccess.Read))
            {
                var faces = await faceServiceClients.AddFaceToFaceListAsync(_faceListName, fStream);
                DictionaryListofPersistanceIDAndImagePath.Add(faces.PersistedFaceId.ToString(), imgPath);
            }
        }
    }
    catch (FaceAPIException ex)
    {
        ViewData["ExceptionMsg"] = ex.ErrorMessage;
    }

    // Match the captured image against the faces uploaded above.
    string CapturedImgName = Server.MapPath("~/test.jpg");
    string[] MatchedImgpath;
    using (var fileStream = System.IO.File.OpenRead(CapturedImgName))
    {
        var faceServiceClient = new FaceServiceClient(subscriptionKeyValue);
        // DetectAsync assigns a transient face id to each face in the capture.
        var faces = await faceServiceClient.DetectAsync(fileStream);
        foreach (var f in faces)
        {
            var faceId = f.FaceId;
            // Number of highest-confidence candidates to request per face.
            const int requestCandidatesCount = 20;
            try
            {
                var result = await faceServiceClient.FindSimilarAsync(faceId, _faceListName, requestCandidatesCount);
                MatchedImgpath = new string[requestCandidatesCount];
                foreach (var fr in result)
                {
                    // Keep only confident matches (>= 0.8).
                    if (fr.Confidence >= 0.8)
                    {
                        // FIX: TryGetValue replaces ContainsKey + indexer (single
                        // dictionary lookup instead of two).
                        if (DictionaryListofPersistanceIDAndImagePath.TryGetValue(fr.PersistedFaceId.ToString(), out var matchedPath))
                        {
                            MatchedImgpath[MatchedImgcount] = matchedPath;
                            MatchedImgcount = MatchedImgcount + 1;
                        }
                    }
                }
            }
            catch (FaceAPIException ex)
            {
                ViewData["ExceptionMsg"] = ex.ErrorMessage;
            }
        }
    }

    // NOTE(review): MatchedImgcount appears to be a field (its local declaration
    // is commented out in the original) — confirm it is reset between requests.
    if (MatchedImgcount != 0)
    {
        return "found";
    }
    else
    {
        return "notfound";
    }
}
// Scans the current camera frame: encodes a copied video frame to JPEG, detects
// a face with the Face API, matches it against persisted face list "21122012",
// and drives the alert UI (red = match, green = unknown, yellow = glasses).
// NOTE(review): async void — exceptions escape unobserved; presumably invoked
// fire-and-forget from a scan loop. TODO confirm.
async void ProcessAll()
{
    // Reset every alert/visual indicator before a new scan.
    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => { stackpanelAlert.Visibility = Visibility.Collapsed; stackpanel.Visibility = Visibility.Collapsed; imgCaution.Visibility = Visibility.Collapsed; imgGlasses.Visibility = Visibility.Collapsed; imgClean.Visibility = Visibility.Collapsed; imgNoFaces.Visibility = Visibility.Collapsed; stackpanelNames.Visibility = Visibility.Collapsed; stackpanelInternet.Visibility = Visibility.Collapsed; });
    if (CheckNetwork == true)
    {
        await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => { ScanModeTxt.Text = "SCAN MODE: ENABLED"; MyMap.Width = 100; MyMap.Height = 100; MyMap.Margin = new Thickness(5, -300, 0, 0); });
        try
        {
            // Wait for the video pipeline to hand over a copied frame, then
            // clear the completion source so the next frame can be requested.
            copiedVideoFrameComplete = new TaskCompletionSource <SoftwareBitmap>();
            var bgra16CopiedFrame = await copiedVideoFrameComplete.Task;
            copiedVideoFrameComplete = null;

            // Encode the frame as JPEG into an in-memory stream for the Face API.
            InMemoryRandomAccessStream destStream = new InMemoryRandomAccessStream();
            BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, destStream);
            encoder.SetSoftwareBitmap(bgra16CopiedFrame);
            await encoder.FlushAsync();

            // NOTE(review): two clients are built with the same key; one is used
            // for detection, the other for the similarity lookup — one would do.
            FaceServiceClient faceService = new FaceServiceClient(OxfordApiKey);
            FaceServiceClient faceService1 = new FaceServiceClient(OxfordApiKey);
            var requiredFaceAttributes = new FaceAttributeType[] { FaceAttributeType.Age, FaceAttributeType.Gender, FaceAttributeType.Glasses };
            Face[] faces = await faceService.DetectAsync(destStream.AsStream(), returnFaceLandmarks : true, returnFaceAttributes : requiredFaceAttributes);
            try
            {
                if (faces.Length >= 1)
                {
                    Debug.WriteLine("ID de rostro: " + faces[0].FaceId);
                    Guid idGuid = Guid.Parse(faces[0].FaceId.ToString());
                    // Look the detected face up in the persisted list (1 candidate).
                    // NOTE(review): facescomp[0] is unguarded — an empty result
                    // throws here and is absorbed by the catch below, which
                    // deliberately renders the "not identified" (green) state.
                    SimilarPersistedFace[] facescomp = await faceService1.FindSimilarAsync(idGuid, "21122012", 1);
                    double confidence = Double.Parse(facescomp[0].Confidence.ToString());
                    string persistentID = facescomp[0].PersistedFaceId.ToString();
                    Debug.WriteLine("PID: " + facescomp[0].PersistedFaceId);
                    Debug.WriteLine("conf: " + facescomp[0].Confidence);
                    string lentes = faces[0].FaceAttributes.Glasses.ToString();
                    try
                    {
                        if (lentes == "NoGlasses")
                        {
                            try
                            {
                                if (confidence >= .67)
                                {
                                    // Match above threshold: red alert banner,
                                    // query the record and speak its details.
                                    await StartTracking();
                                    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => { stackpanel.Visibility = Visibility.Visible; stackpanelNames.Visibility = Visibility.Visible; stackpanelAlert.Width = 496; stackpanelAlert.Visibility = Visibility.Visible; stackpanelAlert.Background = new SolidColorBrush(Colors.Red); imgCaution.Visibility = Visibility.Visible; imgGlasses.Visibility = Visibility.Collapsed; imgClean.Visibility = Visibility.Collapsed; imgNoFaces.Visibility = Visibility.Collapsed; Debug.WriteLine("Usuario encontrado"); Query(facescomp[0].PersistedFaceId.ToString()); });
                                    sinth.StartSpeaking(media, "Nombre:, , , , ," + (list_Name.SelectedItems[0]) + "Edad:, , , , ," + (list_Name.SelectedItems[0]) + "Descripcion:, , , , ," + (list_Name.SelectedItems[0]));
                                    Debug.WriteLine((list_Name.SelectedItems[0]) + "\n" + (list_Name.SelectedItems[0]) + "\n" + (list_Name.SelectedItems[0]));
                                    //Video Stream
                                    //await StartListener();
                                    //await BeginRecording();
                                    //Mapping();
                                }
                                else
                                {
                                    // Below threshold: green "not identified" state.
                                    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => { stackpanelNames.Visibility = Visibility.Collapsed; stackpanelAlert.Width = 550; stackpanelAlert.Visibility = Visibility.Visible; stackpanelAlert.Background = new SolidColorBrush(Colors.Green); imgCaution.Visibility = Visibility.Collapsed; imgClean.Visibility = Visibility.Visible; imgGlasses.Visibility = Visibility.Collapsed; Debug.WriteLine("Usuario no identificado"); sinth.StartSpeaking(media, "Usuario no identificado"); stackpanel.Visibility = Visibility.Collapsed; imgNoFaces.Visibility = Visibility.Collapsed; });
                                }
                            }
                            catch (Exception)
                            {
                                // NOTE(review): swallowed silently — UI failures
                                // inside the threshold branch are ignored.
                            }
                        }
                        else
                        {
                            // Subject wears glasses: yellow banner, ask to retry
                            // without them (matching is skipped).
                            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => { sinth.StartSpeaking(media, "No se puede realizar el proceso con lentes"); stackpanelNames.Visibility = Visibility.Collapsed; stackpanelAlert.Width = 616; stackpanelAlert.Visibility = Visibility.Visible; imgCaution.Visibility = Visibility.Collapsed; imgClean.Visibility = Visibility.Collapsed; imgGlasses.Visibility = Visibility.Visible; stackpanelAlert.Background = new SolidColorBrush(Colors.LightYellow); stackpanel.Visibility = Visibility.Collapsed; imgNoFaces.Visibility = Visibility.Collapsed; });
                        }
                    }
                    catch (Exception e)
                    {
                        // NOTE(review): swallowed silently.
                    }
                }
            }
            catch (Exception eex)
            {
                // No similar face (or lookup failure): render the green
                // "not identified" state.
                await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () => { stackpanelNames.Visibility = Visibility.Collapsed; stackpanelAlert.Width = 550; stackpanelAlert.Visibility = Visibility.Visible; stackpanelAlert.Background = new SolidColorBrush(Colors.Green); imgCaution.Visibility = Visibility.Collapsed; imgClean.Visibility = Visibility.Visible; imgGlasses.Visibility = Visibility.Collapsed; Debug.WriteLine("Usuario no identificado"); sinth.StartSpeaking(media, "Usuario no identificado"); stackpanel.Visibility = Visibility.Collapsed; imgNoFaces.Visibility = Visibility.Collapsed; });
            }
        }
        catch (Exception ex)
        {
            // Frame capture/encode/detect failure: same green fallback state.
            await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () => { stackpanelNames.Visibility = Visibility.Collapsed; stackpanelAlert.Width = 550; stackpanelAlert.Visibility = Visibility.Visible; stackpanelAlert.Background = new SolidColorBrush(Colors.Green); imgCaution.Visibility = Visibility.Collapsed; imgClean.Visibility = Visibility.Visible; imgGlasses.Visibility = Visibility.Collapsed; Debug.WriteLine("Usuario no identificado"); sinth.StartSpeaking(media, "Usuario no identificado"); stackpanel.Visibility = Visibility.Collapsed; imgNoFaces.Visibility = Visibility.Collapsed; });
        }
    }
    else
    {
        // No network connectivity: show the connectivity indicator only.
        await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => { stackpanel.Visibility = Visibility.Collapsed; imgCaution.Visibility = Visibility.Collapsed; imgGlasses.Visibility = Visibility.Collapsed; imgClean.Visibility = Visibility.Collapsed; imgNoFaces.Visibility = Visibility.Collapsed; stackpanelNames.Visibility = Visibility.Collapsed; stackpanelInternet.Visibility = Visibility.Visible; imgConnectivity.Visibility = Visibility.Visible; Debug.WriteLine("No hay internet"); });
    }
    // Scan pass finished either way.
    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () => { ScanModeTxt.Text = "SCAN MODE: DISABLED"; });
}
/// <summary>
/// Pick image and call find similar with both two modes for each faces detected
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event arguments</param>
private async void FindSimilar_Click(object sender, RoutedEventArgs e)
{
    // Show file picker
    Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
    dlg.DefaultExt = ".jpg";
    dlg.Filter = "Image files (*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif";
    var filePicker = dlg.ShowDialog();
    if (filePicker.HasValue && filePicker.Value)
    {
        // User picked image.
        // Clear previous detection and find similar results.
        TargetFaces.Clear();
        FindSimilarMatchPersonCollection.Clear();
        FindSimilarMatchFaceCollection.Clear();
        var sw = Stopwatch.StartNew();

        var pickedImagePath = dlg.FileName;
        var renderingImage = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
        var imageInfo = UIHelper.GetImageInfoForRendering(renderingImage);
        SelectedFile = renderingImage;

        // Detect all faces in the picked image.
        using (var fStream = File.OpenRead(pickedImagePath))
        {
            MainWindow.Log("Request: Detecting faces in {0}", SelectedFile);
            MainWindow mainWindow = Window.GetWindow(this) as MainWindow;
            string subscriptionKey = mainWindow._scenariosControl.SubscriptionKey;
            var faceServiceClient = new FaceServiceClient(subscriptionKey);
            var faces = await faceServiceClient.DetectAsync(fStream);

            // Update detected faces on UI.
            foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
            {
                TargetFaces.Add(face);
            }
            MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Length, SelectedFile);

            // Find similar faces for each face, in both matching modes.
            foreach (var f in faces)
            {
                var faceId = f.FaceId;
                MainWindow.Log("Request: Finding similar faces in Personal Match Mode for face {0}", faceId);
                try
                {
                    // Default mode (matchPerson): candidates that look like the
                    // same person as the query face.
                    const int requestCandidatesCount = 4;
                    var result = await faceServiceClient.FindSimilarAsync(faceId, _faceListName, requestCandidatesCount);

                    // Update find matchPerson similar results collection for rendering.
                    var personSimilarResult = new FindSimilarResult();
                    personSimilarResult.Faces = new ObservableCollection <Face>();
                    personSimilarResult.QueryFace = new Face()
                    {
                        ImageFile = SelectedFile,
                        Top = f.FaceRectangle.Top,
                        Left = f.FaceRectangle.Left,
                        Width = f.FaceRectangle.Width,
                        Height = f.FaceRectangle.Height,
                        FaceId = faceId.ToString(),
                    };
                    foreach (var fr in result)
                    {
                        var candidateFace = FacesCollection.First(ff => ff.FaceId == fr.PersistedFaceId.ToString());
                        Face newFace = new Face();
                        newFace.ImageFile = candidateFace.ImageFile;
                        newFace.Confidence = fr.Confidence;
                        newFace.FaceId = candidateFace.FaceId;
                        personSimilarResult.Faces.Add(newFace);
                    }
                    MainWindow.Log("Response: Found {0} similar faces for face {1}", personSimilarResult.Faces.Count, faceId);
                    FindSimilarMatchPersonCollection.Add(personSimilarResult);
                }
                catch (FaceAPIException ex)
                {
                    MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                }
                try
                {
                    // matchFace mode: top-N candidates by raw facial similarity.
                    const int requestCandidatesCount = 4;
                    var result = await faceServiceClient.FindSimilarAsync(faceId, _faceListName, FindSimilarMatchMode.matchFace, requestCandidatesCount);

                    // Update "matchFace" similar results collection for rendering.
                    var faceSimilarResults = new FindSimilarResult();
                    faceSimilarResults.Faces = new ObservableCollection <Face>();
                    faceSimilarResults.QueryFace = new Face()
                    {
                        ImageFile = SelectedFile,
                        Top = f.FaceRectangle.Top,
                        Left = f.FaceRectangle.Left,
                        Width = f.FaceRectangle.Width,
                        Height = f.FaceRectangle.Height,
                        FaceId = faceId.ToString(),
                    };
                    foreach (var fr in result)
                    {
                        var candidateFace = FacesCollection.First(ff => ff.FaceId == fr.PersistedFaceId.ToString());
                        Face newFace = new Face();
                        newFace.ImageFile = candidateFace.ImageFile;
                        newFace.Confidence = fr.Confidence;
                        newFace.FaceId = candidateFace.FaceId;
                        faceSimilarResults.Faces.Add(newFace);
                    }
                    MainWindow.Log("Response: Found {0} similar faces for face {1}", faceSimilarResults.Faces.Count, faceId);
                    FindSimilarMatchFaceCollection.Add(faceSimilarResults);
                }
                catch (FaceAPIException ex)
                {
                    MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                }
            }
        }
    }
    // FIX: removed the explicit GC.Collect() — forcing a full blocking garbage
    // collection after every click hurts responsiveness and is unnecessary; the
    // runtime manages collection on its own (CA1821-style guidance).
}
/// <summary>
/// Handles a click on a face thumbnail: looks up the clicked face (whose Guid travels on
/// the button's Tag) against the first face list whose name starts with the configured
/// root, then binds the ranked matches to the SimilarFaces items control.
/// </summary>
/// <param name="sender">The clicked face <see cref="Button"/>.</param>
/// <param name="e">Routed event arguments (unused).</param>
private async void FaceBox_Click(object sender, RoutedEventArgs e)
{
    var clickedButton = (Button)sender;
    var client = new FaceServiceClient(_subscriptionKey);

    // Locate the face list this app maintains (its name is prefixed with _faceListNameRoot).
    var allLists = await client.ListFaceListsAsync();
    var targetList = allLists.First(l => l.Name.StartsWith(_faceListNameRoot));
    var listDetails = await client.GetFaceListAsync(targetList.FaceListId);

    // The query face id is stored as a string on the button's Tag.
    var queryFaceId = new Guid(clickedButton.Tag.ToString());
    var matches = await client.FindSimilarAsync(queryFaceId, targetList.FaceListId);

    // Pair each similar-face hit with its persisted metadata and rank by confidence,
    // highest first; confidence is shown as a whole-number percentage.
    SimilarFaces.ItemsSource = listDetails.PersistedFaces
        .Join(matches,
              persisted => persisted.PersistedFaceId,
              similar => similar.PersistedFaceId,
              (persisted, similar) => new { persisted, similar })
        .OrderByDescending(pair => pair.similar.Confidence)
        .Select(pair => new SimilarFaceResult(pair.persisted.UserData)
        {
            Confidence = (int)(pair.similar.Confidence * 100)
        });
}
/// <summary>
/// Pick an image via a file dialog, detect all faces in it, and run Find Similar
/// against the app's face list for each detected face, populating
/// TargetFaces and FindSimilarCollection for rendering.
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event arguments</param>
private async void FindSimilar_Click(object sender, RoutedEventArgs e)
{
    // Show file picker
    Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
    dlg.DefaultExt = ".jpg";
    dlg.Filter = "Image files(*.jpg) | *.jpg";
    var filePicker = dlg.ShowDialog();
    if (filePicker.HasValue && filePicker.Value)
    {
        // User picked an image: clear previous detection and find-similar results.
        TargetFaces.Clear();
        FindSimilarCollection.Clear();
        var sw = Stopwatch.StartNew();
        SelectedFile = dlg.FileName;
        var imageInfo = UIHelper.GetImageInfoForRendering(SelectedFile);

        // Detect all faces in the picked image
        using (var fileStream = File.OpenRead(SelectedFile))
        {
            MainWindow.Log("Request: Detecting faces in {0}", SelectedFile);
            MainWindow mainWindow = Window.GetWindow(this) as MainWindow;
            string subscriptionKey = mainWindow._scenariosControl.SubscriptionKey;
            var faceServiceClient = new FaceServiceClient(subscriptionKey);
            var faces = await faceServiceClient.DetectAsync(fileStream);

            // Update detected faces on UI
            foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
            {
                TargetFaces.Add(face);
            }

            // BUG FIX: the second format item was "{0}", which re-printed the face
            // count instead of the file name; it must be "{1}" to consume SelectedFile.
            MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Length, SelectedFile);

            // Find similar faces for each detected face
            foreach (var f in faces)
            {
                var faceId = f.FaceId;
                MainWindow.Log("Request: Finding similar faces for face {0}", faceId);
                try
                {
                    // Call the Find Similar REST API; the result contains the ids of the
                    // persisted faces most similar to the query face.
                    const int requestCandidatesCount = 3;
                    var result = await faceServiceClient.FindSimilarAsync(faceId, _faceListName, requestCandidatesCount);

                    // Update the find-similar results collection for rendering.
                    var similarResult = new FindSimilarResult();
                    similarResult.Faces = new ObservableCollection<Face>();
                    similarResult.QueryFace = new Face()
                    {
                        ImagePath = SelectedFile,
                        Top = f.FaceRectangle.Top,
                        Left = f.FaceRectangle.Left,
                        Width = f.FaceRectangle.Width,
                        Height = f.FaceRectangle.Height,
                        FaceId = faceId.ToString(),
                    };
                    foreach (var fr in result)
                    {
                        // Map each hit back to its stored thumbnail by persisted face id.
                        similarResult.Faces.Add(FacesCollection.First(ff => ff.FaceId == fr.PersistedFaceId.ToString()));
                    }
                    MainWindow.Log("Response: Found {0} similar faces for face {1}", similarResult.Faces.Count, faceId);
                    FindSimilarCollection.Add(similarResult);
                }
                catch (FaceAPIException ex)
                {
                    MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
                }
            }
        }
    }
}
/// <summary>
/// open camera dialog
/// Opens the camera capture dialog, then runs face detection on a hard-coded snapshot
/// path and performs Find Similar in both matchPerson and matchFace modes, saving a
/// composited "face swap" image to disk and updating the rendering collections.
/// </summary>
/// <param name="sender">Event sender</param>
/// <param name="e">Event arguments</param>
private async void OpenCamera_Click(object sender, RoutedEventArgs e)
{
    //OpenFaceButton.IsEnabled = false;
    // Show the camera capture dialog (modal: blocks until the user closes it).
    CameraOpen camera = new CameraOpen();
    camera.ShowDialog();
    // NOTE(review): the original file-picker flow below was disabled in favor of a
    // hard-coded image path; presumably the camera dialog writes its snapshot there
    // — TODO confirm against CameraOpen.
    //Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
    //dlg.DefaultExt = ".jpg";
    //dlg.Filter = "Image files (*.jpg, *.png, *.bmp, *.gif) | *.jpg; *.png; *.bmp; *.gif";
    //var filePicker = dlg.ShowDialog();
    //if (filePicker.HasValue && filePicker.Value)
    //{
    // User picked image
    // Clear previous detection and find similar results
    TargetFaces.Clear();
    FindSimilarMatchPersonCollection.Clear();
    FindSimilarMatchFaceCollection.Clear();
    var sw = Stopwatch.StartNew();
    // NOTE(review): hard-coded input path will only work on this one machine.
    var pickedImagePath = @"D:\3.jpg"; //dlg.FileName;
    var renderingImage = UIHelper.LoadImageAppliedOrientation(pickedImagePath);
    var imageInfo = UIHelper.GetImageInfoForRendering(renderingImage);
    SelectedFile = renderingImage;
    // Detect all faces in the picked image
    using (var fStream = File.OpenRead(pickedImagePath))
    {
        MainWindow.Log("Request: Detecting faces in {0}", SelectedFile);
        MainWindow mainWindow = Window.GetWindow(this) as MainWindow;
        string subscriptionKey = mainWindow._scenariosControl.SubscriptionKey;
        string endpoint = mainWindow._scenariosControl.SubscriptionEndpoint;
        var faceServiceClient = new FaceServiceClient(subscriptionKey, endpoint);
        var faces = await faceServiceClient.DetectAsync(fStream);
        // Update detected faces on UI
        foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
        {
            TargetFaces.Add(face);
        }
        MainWindow.Log("Response: Success. Detected {0} face(s) in {1}", faces.Length, SelectedFile);
        // Find two modes similar faces for each face
        foreach (var f in faces)
        {
            var faceId = f.FaceId;
            MainWindow.Log("Request: Finding similar faces in Personal Match Mode for face {0}", faceId);
            try
            {
                // Default mode, call find matchPerson similar REST API, the result contains all the face ids which is personal similar to the query face
                const int requestCandidatesCount = 4;
                var result = await faceServiceClient.FindSimilarAsync(faceId, faceid_list, requestCandidatesCount); //faceServiceClient.F
                // Update find matchPerson similar results collection for rendering
                var personSimilarResult = new FindSimilarResult();
                personSimilarResult.Faces = new ObservableCollection <Face>();
                personSimilarResult.QueryFace = new Face()
                {
                    ImageFile = SelectedFile,
                    Top = f.FaceRectangle.Top,
                    Left = f.FaceRectangle.Left,
                    Width = f.FaceRectangle.Width,
                    Height = f.FaceRectangle.Height,
                    FaceId = faceId.ToString(),
                };
                foreach (var fr in result)
                {
                    // Look up the stored thumbnail whose id matches this candidate hit.
                    var candidateFace = FacesCollection.First(ff => ff.FaceId == fr.FaceId.ToString());
                    Face newFace = new Face();
                    newFace.ImageFile = candidateFace.ImageFile;
                    newFace.Confidence = fr.Confidence;
                    newFace.FaceId = candidateFace.FaceId;
                    personSimilarResult.Faces.Add(newFace);
                }
                MainWindow.Log("Response: Found {0} similar faces for face {1}", personSimilarResult.Faces.Count, faceId);
                FindSimilarMatchPersonCollection.Add(personSimilarResult);
            }
            catch (FaceAPIException ex)
            {
                MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
            }
            try
            {
                // Call find facial match similar REST API, the result faces the top N with the highest similar confidence
                const int requestCandidatesCount = 4;
                var result = await faceServiceClient.FindSimilarAsync(faceId, faceid_list, FindSimilarMatchMode.matchFace, requestCandidatesCount);
                // Update "matchFace" similar results collection for rendering
                var faceSimilarResults = new FindSimilarResult();
                faceSimilarResults.Faces = new ObservableCollection <Face>();
                faceSimilarResults.QueryFace = new Face()
                {
                    ImageFile = SelectedFile,
                    Top = f.FaceRectangle.Top,
                    Left = f.FaceRectangle.Left,
                    Width = f.FaceRectangle.Width,
                    Height = f.FaceRectangle.Height,
                    FaceId = faceId.ToString(),
                };
                foreach (var fr in result)
                {
                    var candidateFace = FacesCollection.First(ff => ff.FaceId == fr.FaceId.ToString());
                    Face newFace = new Face();
                    newFace.ImageFile = candidateFace.ImageFile;
                    //Bitmap imag = new Bitmap(); //(candidateFace.ImageFile); //g2.
                    // MainWindow.Log("Response: Found {0} similar faces for face {1}", , faceId);
                    newFace.Confidence = fr.Confidence;
                    newFace.Top = candidateFace.Top;
                    newFace.Left = candidateFace.Left;
                    newFace.Width = candidateFace.Width;
                    newFace.Height = candidateFace.Height;
                    newFace.FaceId = fr.FaceId.ToString();//candidateFace.FaceId;
                    faceSimilarResults.Faces.Add(newFace);
                }
                // Compose a copy of the best candidate's image with the query face drawn
                // over the candidate's face rectangle, then save it to disk.
                var candidate1 = FacesCollection.First(ff => ff.FaceId == result[0].FaceId.ToString());
                Bitmap graph = new Bitmap(UIHelper.ImageSourceToBitmap(candidate1.ImageFile).Width, UIHelper.ImageSourceToBitmap(candidate1.ImageFile).Height);
                Graphics g2 = Graphics.FromImage(graph);
                g2.DrawImage(UIHelper.ImageSourceToBitmap(candidate1.ImageFile), 0, 0);
                // Rectangle zuibiao = new Rectangle(f.FaceRectangle.Left, f.FaceRectangle.Top, f.FaceRectangle.Width, f.FaceRectangle.Height);
                // Destination rectangle: the candidate's face area within its own image.
                Rectangle zuibiao = new Rectangle(candidate1.Left, candidate1.Top, candidate1.Width, candidate1.Height);
                //g2.DrawImageUnscaled(UIHelper.ImageSourceToBitmap(candidateFace.ImageFile),0,0);
                g2.DrawImage(UIHelper.ImageSourceToBitmap(SelectedFile), zuibiao, f.FaceRectangle.Left, f.FaceRectangle.Top, f.FaceRectangle.Width, f.FaceRectangle.Height, GraphicsUnit.Pixel);
                // NOTE(review): hard-coded output paths — these saves will throw on any
                // machine without this exact directory layout.
                System.Drawing.Image saveImage = System.Drawing.Image.FromHbitmap(graph.GetHbitmap());
                saveImage.Save(@"E:\hackathon\ls\cognitive-Face-Windows\data1\image1.jpg", ImageFormat.Jpeg);
                Bitmap graph1 = new Bitmap(UIHelper.ImageSourceToBitmap(candidate1.ImageFile).Width, UIHelper.ImageSourceToBitmap(candidate1.ImageFile).Height);
                Graphics g3 = Graphics.FromImage(graph1);
                g3.DrawImage(UIHelper.ImageSourceToBitmap(candidate1.ImageFile), 0, 0);
                System.Drawing.Image saveImage1 = System.Drawing.Image.FromHbitmap(graph1.GetHbitmap());
                saveImage1.Save(@"E:\hackathon\ls\cognitive-Face-Windows\image1.jpg", ImageFormat.Jpeg);
                MainWindow.Log("Response: Found {0} similar faces for face {1}", faceSimilarResults.Faces.Count, faceId);
                MergeImage1 = getMergedPicture(@"D:\3.jpg", @"E:\hackathon\ls\cognitive-Face-Windows\image1.jpg");
                //MergeImage1 = getMergedPicture("D:\\3.jpg", "D:\\1.jpg");
                FindSimilarMatchFaceCollection.Add(faceSimilarResults);
                /* MediaPlayer player = new MediaPlayer();
                 * player.Open(new Uri(media_name[candidate1.FaceId].Substring(0, media_name[candidate1.FaceId].Length - 4) + ".WAV", UriKind.Relative));
                 * player.Play();*/
                // NOTE(review): Thread.Sleep blocks the UI thread for 4 s inside an async
                // handler — `await Task.Delay(4000)` would keep the UI responsive.
                Thread.Sleep(4000);
            }
            catch (FaceAPIException ex)
            {
                MainWindow.Log("Response: {0}. {1}", ex.ErrorCode, ex.ErrorMessage);
            }
        }
    }
    //}
    //GC.Collect();
    // OpenFaceButton.IsEnabled = false;
    // NOTE(review): forced full GC is rarely appropriate in production; kept as-is.
    GC.Collect();
}
/// <summary>
/// Find similar face in a reference set
/// </summary>
/// <param name="subscriptionId">Cognitive Services Subscription ID</param>
/// <param name="referenceFaces">List of faces to compare against</param>
/// <param name="lookupFaceUrl">URL of lookup face to find</param>
/// <returns>New collection of comparison faces with confidence ratings</returns>
public async Task<List<ReferenceFace>> DetectFacesAsync(string subscriptionId, List<ReferenceFace> referenceFaces, string lookupFaceUrl)
{
    var resultFaces = new List<ReferenceFace>();
    FaceIds = new List<Guid>();
    var faceServiceClient = new FaceServiceClient(subscriptionId);
    try
    {
        // Find faces and collect face ids in the reference images.
        foreach (var refface in referenceFaces)
        {
            var faces = await faceServiceClient.DetectAsync(refface.ImageUrl);
            foreach (var face in faces)
            {
                refface.FaceId = face.FaceId;
                FaceIds.Add(face.FaceId);
            }
        }

        // Find faces in the lookup image.
        var lookupFace = await faceServiceClient.DetectAsync(lookupFaceUrl);

        // ROBUSTNESS: previously an unguarded lookupFace[0] threw (and was swallowed)
        // when no face was detected; return the empty result set explicitly instead.
        if (lookupFace == null || lookupFace.Length == 0)
        {
            return resultFaces;
        }

        // IDIOM: List<T>.ToArray() replaces the manual count-and-copy loop.
        var faceIdGuids = FaceIds.ToArray();

        // Get the confidence ranking for all faces.
        var results = await faceServiceClient.FindSimilarAsync(lookupFace[0].FaceId, faceIdGuids);
        if (results == null)
        {
            // BUG FIX: was ArgumentNullException, which is reserved for null *arguments*;
            // a null API result is an invalid-operation condition.
            throw new InvalidOperationException("FindSimilarAsync returned null.");
        }

        // Merge the confidence ranking back into a new reference set.
        foreach (var refface in referenceFaces)
        {
            var returnFace = new ReferenceFace
            {
                FaceId = refface.FaceId,
                ImageUrl = refface.ImageUrl,
                // Default: 0.00 when rankings exist but none match this face;
                // the original confidence is kept only when there are no rankings at all
                // (preserves the original loop's net effect without per-iteration writes).
                Confidence = results.Length > 0 ? 0.00 : refface.Confidence
            };
            foreach (var result in results)
            {
                if (result.FaceId == refface.FaceId)
                {
                    returnFace.Confidence = result.Confidence;
                    break;
                }
            }
            resultFaces.Add(returnFace);
        }
    }
    catch (Exception ex)
    {
        // Best-effort by design (callers get whatever was resolved so far), but the
        // failure is no longer silently discarded.
        System.Diagnostics.Debug.WriteLine("DetectFacesAsync failed: " + ex);
    }
    return resultFaces;
}
/// <summary>
/// Scores an image for its dominant emotion and its average similarity to a persisted
/// face list of reference photos, seeding the list with five images on first use.
/// </summary>
/// <param name="imageToCheck">URL of the image to analyze.</param>
/// <returns>An AiResult carrying formatted emotion and match percentages.</returns>
async Task<AiResult> MakeRequest(string imageToCheck)
{
    AiResult res = new AiResult();
    // imageToCheck = "https://www.liberationnews.org/wp-content/uploads/2015/07/donaldtrump61815.jpg";
    EmotionServiceClient emotionServiceClient = new EmotionServiceClient(emotionKey);
    Emotion[] imageEmotion = await emotionServiceClient.RecognizeAsync(imageToCheck);

    // Default shown when no face/emotion is detected.
    res.Emotion = string.Format("Unknwn ({0:P2})", 0);

    // ROBUSTNESS: guard imageEmotion[0] — RecognizeAsync returns an empty array when
    // no face is found, which previously crashed with IndexOutOfRangeException.
    if (imageEmotion.Length > 0)
    {
        Console.WriteLine("Feeling: " + imageEmotion[0].Scores.ToRankedList().First().Key);
        Console.WriteLine("Top score: " + imageEmotion[0].Scores.ToRankedList().First().Value);
        float bestScore = 0;
        foreach (var em in imageEmotion[0].Scores.ToRankedList())
        {
            if (em.Value > bestScore)
            {
                bestScore = em.Value;
                // BUG FIX: was a redundant double assignment (res.Emotion = res.Emotion = ...).
                res.Emotion = string.Format("{0} ({1:P2})", em.Key, em.Value);
            }
        }
    }

    FaceServiceClient faceServiceClient = new FaceServiceClient(faceKey);
    FaceList trumpList = null;
    try
    {
        trumpList = await faceServiceClient.GetFaceListAsync(faceList);
    }
    catch (FaceAPIException apiExp)
    {
        if (apiExp.ErrorCode == "FaceListNotFound")
        {
            // Lazily create the face list on first run.
            await faceServiceClient.CreateFaceListAsync(faceList, faceList, "A collection of trumps");
            trumpList = await faceServiceClient.GetFaceListAsync(faceList);
        }
        else
        {
            // BUG FIX: `throw apiExp;` resets the stack trace; bare `throw;` preserves it.
            throw;
        }
    }

    // Seed the list with the five reference images if it is underpopulated.
    if (trumpList.PersistedFaces.Count() < 5)
    {
        await faceServiceClient.AddFaceToFaceListAsync(faceList, "https://www.liberationnews.org/wp-content/uploads/2015/07/donaldtrump61815.jpg");
        await faceServiceClient.AddFaceToFaceListAsync(faceList, "http://thefederalist.com/wp-content/uploads/2016/02/trumpie.jpg");
        await faceServiceClient.AddFaceToFaceListAsync(faceList, "http://www.redstate.com/uploads/2016/02/donald-trump-is-still-soaring-in-iowa-but-there-are-now-some-clear-warning-signs.jpg");
        await faceServiceClient.AddFaceToFaceListAsync(faceList, "http://i.huffpost.com/gen/3706868/images/o-DONALD-TRUMP-FUNNY-facebook.jpg");
        await faceServiceClient.AddFaceToFaceListAsync(faceList, "http://media.salon.com/2015/04/donald_trump_thumbsup.jpg");
        trumpList = await faceServiceClient.GetFaceListAsync(faceList);
    }

    Face[] faceToCompare = await faceServiceClient.DetectAsync(imageToCheck);
    res.TrumpMatch = String.Format("{0:P2}", 0);

    // ROBUSTNESS: guard faceToCompare[0] — no face in the query image means there is
    // nothing to compare; previously this threw IndexOutOfRangeException.
    if (faceToCompare.Length == 0)
    {
        Console.WriteLine("Sorry, nothing compares to you");
        return res;
    }

    SimilarPersistedFace[] faces = await faceServiceClient.FindSimilarAsync(faceToCompare[0].FaceId, faceList, FindSimilarMatchMode.matchFace);
    // IDIOM: use Array.Length instead of the LINQ Count() extension on arrays.
    if (faces.Length == 0)
    {
        Console.WriteLine("Sorry, nothing compares to you");
    }
    else
    {
        // Average the confidence over all matches for the final percentage.
        double totalConfidence = 0;
        foreach (SimilarPersistedFace matching in faces)
        {
            totalConfidence += matching.Confidence;
        }
        double averageConfidence = totalConfidence / faces.Length;
        res.TrumpMatch = String.Format("{0:P2}", averageConfidence);
        Console.WriteLine("Trump comparison: " + res.TrumpMatch);
    }
    return res;
}