// Decodes a base64 image (optionally carrying a "data:image/...;base64," prefix),
// runs Face API detection with a standard attribute set, and returns the faces.
// Best-effort: any failure is logged and an empty list is returned.
public async Task<IList<DetectedFace>> DetectFaces(string imageBase64)
{
    // Attributes requested from the Face API.
    IList<FaceAttributeType> faceAttributes = new FaceAttributeType[]
    {
        FaceAttributeType.Gender,
        FaceAttributeType.Age,
        FaceAttributeType.Smile,
        FaceAttributeType.Emotion,
        FaceAttributeType.Glasses,
        FaceAttributeType.Hair
    };

    try
    {
        // Strip everything up to and including the last ',' (data-URI prefix, if any).
        int separator = imageBase64.LastIndexOf(',');
        byte[] imageBytes = Convert.FromBase64String(imageBase64.Substring(separator + 1));

        using (var imageStream = new MemoryStream(imageBytes))
        {
            IList<DetectedFace> detected = await FaceClient.Face.DetectWithStreamAsync(
                imageStream, true, true, faceAttributes);
            return detected;
        }
    }
    catch (Exception ex)
    {
        // Swallow and return empty rather than propagate (original behavior).
        Console.WriteLine(ex.Message);
        return new List<DetectedFace>();
    }
}
// Uploads the image and calls Detect Faces.
private async Task <Face[]> UploadAndDetectFaces(string imageFilePath)
{
    // The list of face attributes to return.
    IEnumerable <FaceAttributeType> faceAttributes =
        new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Emotion, FaceAttributeType.Glasses, FaceAttributeType.Hair };

    // Call the Face API.
    try
    {
        using (Stream imageFileStream = File.OpenRead(imageFilePath))
        {
            Face[] faces = await faceServiceClient.DetectAsync(imageFileStream,
                returnFaceId : true,
                returnFaceLandmarks : false,
                returnFaceAttributes : faceAttributes);
            return(faces);
        }
    }
    // Face API error handling: show the service's error message and return empty.
    catch (FaceAPIException f)
    {
        MessageBox.Show(f.ErrorMessage, f.ErrorCode);
        return(new Face[0]);
    }
    // General error handling (I/O, network, ...): show the message and return empty.
    catch (Exception e)
    {
        MessageBox.Show(e.Message, "Error");
        return(new Face[0]);
    }
}
// Fires once the photo has loaded: records its pixel size, then calls the Face
// API with the URL in txtLocation and renders the face and emotion data.
// (async void is acceptable here because this is a UI event handler.)
private async void imgPhoto_ImageOpened(object sender, RoutedEventArgs e)
{
    var bitmap = imgPhoto.Source as BitmapImage;
    size_image = new Size(bitmap.PixelWidth, bitmap.PixelHeight);

    var client = new FaceClient(
        new ApiKeyServiceClientCredentials(key_face),
        new System.Net.Http.DelegatingHandler[] { });
    client.Endpoint = face_apiroot;

    var requiredAttributes = new FaceAttributeType[]
    {
        FaceAttributeType.Age,
        FaceAttributeType.Gender,
        FaceAttributeType.Smile,
        FaceAttributeType.FacialHair,
        FaceAttributeType.HeadPose,
        FaceAttributeType.Emotion,
        FaceAttributeType.Glasses
    };

    var detected = await client.Face.DetectWithUrlAsync(txtLocation.Text, true, true, requiredAttributes);
    faces = detected.ToArray();

    if (faces != null)
    {
        DisplayFacesData(faces);
        DisplayEmotionsData(faces);
    }

    ringLoading.IsActive = false;
}
// Opens the image file and runs Face API detection, requesting a fixed set of
// attributes. Errors are shown to the user and an empty array is returned.
private async Task<Face[]> UploadAndDetectFaces(string imageFilePath)
{
    // Attributes requested from the detection call.
    IEnumerable<FaceAttributeType> requestedAttributes = new FaceAttributeType[]
    {
        FaceAttributeType.Gender,
        FaceAttributeType.Age,
        FaceAttributeType.Smile,
        FaceAttributeType.Emotion,
        FaceAttributeType.Glasses,
        FaceAttributeType.Hair
    };

    try
    {
        using (Stream imageStream = File.OpenRead(imageFilePath))
        {
            return await faceServiceClient.DetectAsync(
                imageStream,
                returnFaceId: true,
                returnFaceLandmarks: false,
                returnFaceAttributes: requestedAttributes);
        }
    }
    catch (FaceAPIException f)
    {
        // Service-reported failure: surface the API's error details.
        MessageBox.Show(f.ErrorMessage, f.ErrorCode);
        return new Face[0];
    }
    catch (Exception e)
    {
        // Anything else (I/O, network, ...).
        MessageBox.Show(e.Message, "Error");
        return new Face[0];
    }
}
// Receives a photo payload, runs Face API detection, and returns the age of the
// first detected face. Responds with validacion=false when the request is empty
// or no face is found.
public async Task<Respuesta> SetFoto([FromBody] FotoRequest value) // public async Task<IHttpActionResult>
{
    if (value == null)
    {
        return new Respuesta { mensaje = "Error no se obtuvo datos", validacion = false };
    }

    // Attributes requested from the Face API.
    IEnumerable<FaceAttributeType> faceAttributes = new FaceAttributeType[]
    {
        FaceAttributeType.Gender,
        FaceAttributeType.Age,
        FaceAttributeType.Smile,
        FaceAttributeType.Emotion,
        FaceAttributeType.Glasses,
        FaceAttributeType.Hair
    };

    // Call the Face API.
    using (Stream imageFileStream = new MemoryStream(value.Array))
    {
        faces = await faceServiceClient.DetectAsync(imageFileStream,
            returnFaceId: true,
            returnFaceLandmarks: false,
            returnFaceAttributes: faceAttributes);
    }

    // FIX: the original indexed faces[0] unconditionally, throwing
    // IndexOutOfRangeException whenever no face was detected.
    if (faces == null || faces.Length == 0)
    {
        return new Respuesta { mensaje = "No se detectaron rostros", validacion = false };
    }

    return new Respuesta { mensaje = faces[0].FaceAttributes.Age.ToString(), validacion = true };
}
// Opens the image file and runs face detection with the standard attribute set.
// Errors are shown to the user and an empty list is returned.
private async Task<IList<DetectedFace>> UploadAndDetectFaces(string imageFilePath)
{
    // Attributes requested from the detection call.
    IList<FaceAttributeType> requestedAttributes = new FaceAttributeType[]
    {
        FaceAttributeType.Gender,
        FaceAttributeType.Age,
        FaceAttributeType.Smile,
        FaceAttributeType.Emotion,
        FaceAttributeType.Glasses,
        FaceAttributeType.Hair
    };

    try
    {
        using (Stream imageStream = File.OpenRead(imageFilePath))
        {
            return await faceClient.Face.DetectWithStreamAsync(
                imageStream, true, false, requestedAttributes);
        }
    }
    catch (APIErrorException f)
    {
        // Service-reported failure.
        MessageBox.Show(f.Message);
        return new List<DetectedFace>();
    }
    catch (Exception e)
    {
        // Anything else (I/O, network, ...).
        MessageBox.Show(e.Message, "Error");
        return new List<DetectedFace>();
    }
}
// </snippet_browsebuttonclick_end>
// <snippet_mousemove_start>
// Uploads the image file and detects faces, requesting age, emotion, hair and
// head-pose attributes. Returns an empty list on any failure.
// (FIX: the previous leading comment described mouse-over display, which this
// method does not do; unused catch locals removed — they produced CS0168
// warnings while the exceptions were silently discarded.)
private async Task<IList<DetectedFace>> UploadAndDetectFaces(string imageFilePath)
{
    // The list of Face attributes to return.
    IList<FaceAttributeType> faceAttributes = new FaceAttributeType[]
    {
        //FaceAttributeType.Gender,
        FaceAttributeType.Age,
        //FaceAttributeType.Smile,
        FaceAttributeType.Emotion,
        //FaceAttributeType.Glasses,
        FaceAttributeType.Hair,
        FaceAttributeType.HeadPose
    };

    // Call the Face API.
    try
    {
        using (Stream imageFileStream = File.OpenRead(imageFilePath))
        {
            // The second argument specifies to return the faceId, while
            // the third argument specifies not to return face landmarks.
            IList<DetectedFace> faceList = await faceClient.Face.DetectWithStreamAsync(
                imageFileStream, true, false, faceAttributes);
            return faceList;
        }
    }
    // Face API errors: swallowed; consider logging before returning empty.
    catch (APIErrorException)
    {
        return new List<DetectedFace>();
    }
    // All other errors: swallowed likewise.
    catch (Exception)
    {
        return new List<DetectedFace>();
    }
}
// Reads a previously-uploaded image from wwwroot/uploads and runs face
// detection (face IDs only, no extra attributes). On failure the error is
// recorded in `errors` and null is returned.
private async Task<Face[]> UploadAndDetectFaces(string filename)
{
    // No extra attributes are requested for this call.
    var faceAttributes = new FaceAttributeType[] { };

    try
    {
        var path = Path.Combine(_environment.WebRootPath, "uploads", filename);
        byte[] bytes = System.IO.File.ReadAllBytes(path);

        using (var stream = new MemoryStream(bytes))
        {
            return await faceServiceClient.DetectAsync(stream, returnFaceId: true);
        }
    }
    catch (FaceAPIException f)
    {
        // Service-reported failure.
        errors.Add($"(unknown), {f.ErrorCode}: {f.ErrorMessage}");
    }
    catch (Exception e)
    {
        // Anything else (I/O, network, ...).
        errors.Add($"(unknown), Error: {e.Message}");
    }

    // null signals failure to callers (existing contract — kept).
    return null;
}
// Runs Face API detection on the photo at the given SAS URI, requesting the
// Age and Gender attributes, and maps the first detected face onto a
// Demographics result. Returns null when no face is detected.
private async Task<Demographics> GetPhotoDemographics(string sasUri)
{
    Demographics d = null;

    // Invoke the Face API with the URI of the photo.
    IFaceServiceClient faceServiceClient = new FaceServiceClient(_faceApiKey, _faceEndpoint);

    // Configure the desired attributes: Age and Gender.
    IEnumerable<FaceAttributeType> desiredAttributes = new FaceAttributeType[]
    {
        FaceAttributeType.Age,
        FaceAttributeType.Gender
    };

    // Invoke the Face API Detect operation (no faceId, with landmarks).
    Face[] faces = await faceServiceClient.DetectAsync(sasUri, false, true, desiredAttributes);

    if (faces.Length > 0)
    {
        // FIX: removed dead locals computedAge/computedGender — they were
        // assigned from the same expressions but never used.
        d = new Demographics()
        {
            age = faces[0].FaceAttributes.Age,
            gender = faces[0].FaceAttributes.Gender
        };
    }

    return d;
}
// Detects faces (with landmarks and a standard attribute set) in the given image.
// Returns an empty array on any failure.
private async Task <Face[]> UploadAndDetectFaces(string imageFilePath)
{
    try
    {
        // SECURITY NOTE(review): the subscription key is hard-coded in source;
        // move it to configuration / a secret store and rotate the key.
        var faceServiceClient = new FaceServiceClient("cc6905d4d8414b5ebe8341f17d776e94");

        // Attributes requested from the detection call.
        var requiredFaceAttributes = new FaceAttributeType[]
        {
            FaceAttributeType.Age, FaceAttributeType.Gender, FaceAttributeType.Smile,
            FaceAttributeType.FacialHair, FaceAttributeType.HeadPose, FaceAttributeType.Glasses
        };

        // NOTE(review): the string overload of DetectAsync treats its argument as
        // a URL; the parameter name suggests a local file path — confirm callers
        // pass a URL, otherwise the Stream overload is needed.
        var faces = await faceServiceClient.DetectAsync(
            imageFilePath,
            returnFaceLandmarks : true,
            returnFaceAttributes : requiredFaceAttributes);
        return(faces);
    }
    catch (Exception)
    {
        // Any failure is swallowed and reported as "no faces".
        return(new Face[0]);
    }
}
// Downloads the image at the given (bot-service-protected) URL and returns the
// first detected face with age/gender/hair attributes, or null when none found.
public async Task<Face> GetFaceAttributes(string url)
{
    var requiredFaceAttributes = new FaceAttributeType[]
    {
        FaceAttributeType.Age,
        FaceAttributeType.Gender,
        FaceAttributeType.FacialHair,
        FaceAttributeType.Hair,
    };

    var token = await new MicrosoftAppCredentials(BotServiceUser, BotServicePassword).GetTokenAsync();

    // FIX: the HttpClient and the downloaded stream were never disposed.
    // NOTE(review): an HttpClient per call risks socket exhaustion under load —
    // prefer a shared instance or IHttpClientFactory if this runs frequently.
    using (HttpClient httpClient = new HttpClient())
    {
        httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", token);

        using (var imageStream = await httpClient.GetStreamAsync(url))
        {
            var face = await faceClient.DetectAsync(imageStream, true, false, requiredFaceAttributes);
            if (face != null && face.Length > 0)
            {
                return face[0];
            }
            return null;
        }
    }
}
// Benchmark/smoke-test helper: detects faces in s_image via the SDK requesting
// every attribute, sanity-checks the first face's age, and returns a hash of
// the result so the work cannot be optimized away.
public static async Task<int> DetectOverSdk()
{
    var attributes = new FaceAttributeType[]
    {
        FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile,
        FaceAttributeType.Glasses, FaceAttributeType.HeadPose, FaceAttributeType.FacialHair,
        FaceAttributeType.Emotion, FaceAttributeType.Hair, FaceAttributeType.Makeup,
        FaceAttributeType.Occlusion, FaceAttributeType.Accessories, FaceAttributeType.Noise,
        FaceAttributeType.Exposure, FaceAttributeType.Blur
    };

    var results = await s_sdkService.DetectAsync(s_image, true, false, attributes);

    // FIX: guard the empty case (previously an unguarded results[0]) and throw
    // a specific, descriptive exception instead of a bare `new Exception()`.
    if (results == null || results.Length == 0)
    {
        throw new InvalidOperationException("No faces detected in the test image.");
    }

    var result = results[0];
    if (result.FaceAttributes.Age < 30 || result.FaceAttributes.Age > 100)
    {
        throw new InvalidOperationException(
            $"Age sanity check failed: {result.FaceAttributes.Age}");
    }

    return result.GetHashCode();
}
// Detects faces in the supplied image stream and formats each face's rectangle,
// gender, age, smile and glasses attributes into a multi-line report string.
private async Task<String> ExtractFace(Stream image)
{
    String responseString = string.Empty;

    using (var client = new FaceClient(new ApiKeyServiceClientCredentials(ApiKey)) { Endpoint = Endpoint })
    {
        var attributes = new FaceAttributeType[]
        {
            FaceAttributeType.Gender, FaceAttributeType.Age,
            FaceAttributeType.Smile, FaceAttributeType.Glasses
        };

        var detectedFaces = await client.Face.DetectWithStreamAsync(image, returnFaceAttributes: attributes);

        // FIX: the original test `detectedFaces?.Count == 0` evaluates to false
        // when the result is null, after which the foreach crashed with a
        // NullReferenceException. Check null explicitly.
        if (detectedFaces == null || detectedFaces.Count == 0)
        {
            responseString = ">No faces detected from image.";
        }
        else
        {
            foreach (var face in detectedFaces)
            {
                var rect = face.FaceRectangle;
                // FIX: the rectangle line used `=` instead of `+=`, so with more
                // than one face every iteration discarded the accumulated text.
                responseString += $">Rectangle: {rect.Left} {rect.Top} {rect.Width} {rect.Height}\n";
                responseString += $">Gender: {face.FaceAttributes.Gender}\n";
                responseString += $">Age: {face.FaceAttributes.Age}\n";
                responseString += $">Smile: {face.FaceAttributes.Smile}\n";
                responseString += $">Glasses: {face.FaceAttributes.Glasses}\n";
            }
        }
    }

    return responseString;
}
// Uploads the photo and returns the Emotion attributes of the first detected
// face, or null when no face is found or the service call fails.
public async Task<Emotion> UploadFaceAndGetEmotions(StorageFile file)
{
    // Only the Emotion attribute is needed.
    var faceAttributesToAnalyze = new FaceAttributeType[] { FaceAttributeType.Emotion };

    // Call the Face API.
    try
    {
        using (var stream = await file.OpenStreamForReadAsync())
        {
            // Second argument requests the faceId; third skips face landmarks.
            // (The original comment claimed faceId was NOT returned, contradicting the code.)
            IList<DetectedFace> faceList =
                await _client.Face.DetectWithStreamAsync(stream, true, false, faceAttributesToAnalyze);

            // FIX: FirstOrDefault() returns null when no face is detected; the
            // original dereferenced it unconditionally (NullReferenceException).
            return faceList.FirstOrDefault()?.FaceAttributes.Emotion;
        }
    }
    // Face API errors: log and fall through to null.
    catch (APIErrorException f)
    {
        Debug.WriteLine(f.Message);
    }
    // All other errors: log and fall through to null.
    catch (Exception e)
    {
        Debug.WriteLine(e.Message);
    }
    return null;
}
// Serializes the bitmap to an in-memory BMP stream and runs Face API detection
// with a broad attribute set. Errors are shown to the user and an empty array
// is returned.
public async Task<Face[]> UploadAndDetectFaces(Bitmap image)
{
    IEnumerable<FaceAttributeType> requestedAttributes = new FaceAttributeType[]
    {
        FaceAttributeType.Gender,
        FaceAttributeType.Age,
        FaceAttributeType.Smile,
        FaceAttributeType.Emotion,
        FaceAttributeType.Glasses,
        FaceAttributeType.Hair,
        FaceAttributeType.FacialHair
    };

    try
    {
        using (Stream bmpStream = new MemoryStream())
        {
            // Re-encode as BMP and rewind so the service reads from the start.
            image.Save(bmpStream, System.Drawing.Imaging.ImageFormat.Bmp);
            bmpStream.Seek(0, SeekOrigin.Begin);

            return await faceServiceClient.DetectAsync(bmpStream,
                returnFaceId: true,
                returnFaceLandmarks: false,
                returnFaceAttributes: requestedAttributes);
        }
    }
    catch (FaceAPIException f)
    {
        // Service-reported failure.
        MessageBox.Show(f.ErrorMessage, f.ErrorCode);
        return new Face[0];
    }
    catch (Exception e)
    {
        // Anything else.
        MessageBox.Show(e.Message, "Error");
        return new Face[0];
    }
}
// Runs Face API detection on the uploaded file's input stream, requesting a
// broad attribute set. Returns an empty array on any failure.
private async Task<Face[]> DetectFacesFrom(HttpPostedFileBase file)
{
    // Attributes requested from the detection call.
    IEnumerable<FaceAttributeType> requestedAttributes = new FaceAttributeType[]
    {
        FaceAttributeType.Gender,
        FaceAttributeType.Age,
        FaceAttributeType.Smile,
        FaceAttributeType.Emotion,
        FaceAttributeType.Glasses,
        FaceAttributeType.Hair,
        FaceAttributeType.FacialHair
    };

    try
    {
        Log("Detecting Faces");
        Face[] faces = await _faceServiceClient.DetectAsync(file.InputStream,
            returnFaceId: true,
            returnFaceLandmarks: false,
            returnFaceAttributes: requestedAttributes);
        Log("Faces length: " + faces.Length);
        return faces;
    }
    catch (FaceAPIException)
    {
        // Service-reported error: swallow and return empty (existing behavior).
        return new Face[0];
    }
    catch (Exception)
    {
        // Any other failure: swallow and return empty.
        return new Face[0];
    }
}
// Uploads the image and calls Detect Faces.
private async Task <Face[]> UploadAndDetectFaces(string imageFilePath)
{
    // The list of face attributes to return.
    IEnumerable <FaceAttributeType> faceAttributes =
        new FaceAttributeType[] { FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile, FaceAttributeType.Emotion, FaceAttributeType.Glasses, FaceAttributeType.Hair };

    // Call the Face API.
    try
    {
        using (Stream imageFileStream = File.OpenRead(imageFilePath))
        {
            Face[] faces = await faceServiceClient.DetectAsync(imageFileStream,
                returnFaceId : true,
                returnFaceLandmarks : false,
                returnFaceAttributes : faceAttributes);
            return(faces);
        }
    }
    // Catch and display Face API errors, returning an empty result.
    catch (FaceAPIException f)
    {
        MessageBox.Show(f.ErrorMessage, f.ErrorCode);
        return(new Face[0]);
    }
    // Catch and display all other errors, returning an empty result.
    catch (Exception e)
    {
        MessageBox.Show(e.Message, "Errore!");
        return(new Face[0]);
    }
}
// Demo: detects faces (with landmarks and common attributes) in a fixed image
// URL and reads back each detected face's attributes.
// FIX: changed `async void` to `async Task` — async void methods cannot be
// awaited and let exceptions escape unobserved; statement-style call sites
// remain source-compatible.
static async Task FaceTest()
{
    FaceServiceClient faceServiceClient = new FaceServiceClient(
        subscriptionKey.ToString(),
        "https://northeurope.api.cognitive.microsoft.com/face/v1.0/");

    // NOTE(review): hard-coded, likely-expiring CDN URL; replace with a stable test asset.
    string imageUrl = "https://scontent-lht6-1.xx.fbcdn.net/v/t31.0-8/23215886_10154914800207233_6176823990811634886_o.jpg?oh=76552a0d5e6afe0bebf4ec8b09f69a4f&oe=5AA49AE7";

    var requiredFaceAttributes = new FaceAttributeType[]
    {
        FaceAttributeType.Age, FaceAttributeType.Gender, FaceAttributeType.Smile,
        FaceAttributeType.FacialHair, FaceAttributeType.HeadPose,
        FaceAttributeType.Glasses, FaceAttributeType.Emotion
    };

    var faces = await faceServiceClient.DetectAsync(imageUrl,
        returnFaceLandmarks: true,
        returnFaceAttributes: requiredFaceAttributes);

    foreach (var face in faces)
    {
        // Demo-only locals: kept so the values can be inspected in a debugger.
        var id = face.FaceId;
        var attributes = face.FaceAttributes;
        var age = attributes.Age;
        var gender = attributes.Gender;
        var smile = attributes.Smile;
        var facialHair = attributes.FacialHair;
        var headPose = attributes.HeadPose;
        var glasses = attributes.Glasses;
    }
}
// Detects the first face in the photo at photoPath and returns its
// age / gender / smile values as a FaceDetectResult.
public static async Task<FaceDetectResult> DetectFaceAsync(string photoPath)
{
    // Prepare the Face API client.
    // SECURITY NOTE(review): key/endpoint placeholders are hard-coded — load
    // them from configuration instead of source.
    var apiKey = "YOUR_API_KEY";
    var apiEndpoint = "https://YOUR_API_LOCATION.api.cognitive.microsoft.com/face/v1.0";
    var client = new FaceServiceClient(apiKey, apiEndpoint);

    // Attributes requested from the detection call.
    var attributes = new FaceAttributeType[]
    {
        FaceAttributeType.Age,
        FaceAttributeType.Gender,
        FaceAttributeType.Smile,
    };

    // FIX: the opened image stream was never disposed — wrap it in `using`.
    var file = new FileFromPath(photoPath);
    using (var imageStream = file.Open(FileAccess.Read))
    {
        // Detect with the Face API (no faceId, no landmarks).
        var result = await client.DetectAsync(imageStream, false, false, attributes);

        // FIX: guard "no face detected" instead of crashing on result[0].
        if (result == null || result.Length == 0)
        {
            throw new InvalidOperationException("No face detected in the photo.");
        }

        // Map the first face onto the result object.
        var detectResult = new FaceDetectResult();
        detectResult.Age = result[0].FaceAttributes.Age;
        detectResult.Gender = result[0].FaceAttributes.Gender;
        detectResult.Smile = result[0].FaceAttributes.Smile;
        return detectResult;
    }
}
// Runs Face API detection on the supplied image stream. Returns true when the
// call succeeds; any service/IO failure propagates to the caller.
private async Task<bool> BuscarCara(MemoryStream streamFace)
{
    // SECURITY NOTE(review): subscription key hard-coded in source — move to
    // configuration / a secret store and rotate the key.
    string subscriptionKey = "a6fa05b6601b4ea398aa2039d601d983";
    string subscriptionEndpoint = "https://southcentralus.api.cognitive.microsoft.com/face/v1.0";
    var faceServiceClient = new FaceServiceClient(subscriptionKey, subscriptionEndpoint);

    IEnumerable<FaceAttributeType> faceAttributes = new FaceAttributeType[]
    {
        FaceAttributeType.Gender, FaceAttributeType.Age, FaceAttributeType.Smile,
        FaceAttributeType.Emotion, FaceAttributeType.Glasses, FaceAttributeType.Hair
    };

    // FIX: the attribute list above was declared but never passed to DetectAsync,
    // and the locals `edad`/`genero` were dead. The attributes are now requested;
    // the dead locals and the catch-log-rethrow wrapper (which only re-threw)
    // were removed.
    await faceServiceClient.DetectAsync(streamFace, true, true, returnFaceAttributes: faceAttributes);
    return true;
}
// Detects faces (with Emotion attributes) in the image stream and returns an
// EmoPicture whose Faces collection is extracted from the raw Face results.
public async Task<EmoPicture> DetectAndExtracFacesAsync(Stream vImageString)
{
    var emoPicture = new EmoPicture();

    // Only the Emotion attribute is requested.
    IEnumerable<FaceAttributeType> faceAttributes = new FaceAttributeType[]
    {
        FaceAttributeType.Emotion
    };

    // FIX: removed the commented-out legacy Emotion-API code and the pointless
    // `catch (Exception exc) { throw; }` wrapper (unused local, identical
    // propagation behavior without it).
    Face[] faces = await emoClientFace.DetectAsync(vImageString, false, false, faceAttributes);
    emoPicture.Faces = ExtractFaces(faces, emoPicture);

    return emoPicture;
}
// Downloads the image at the given URL, runs Face API detection (age, gender,
// emotion), and formats one summary line per face. Returns an empty string on
// any failure.
public static async Task<string> UploadAndDetectFaces(string imageFilePath)
{
    try
    {
        var requestedAttributes = new FaceAttributeType[]
        {
            FaceAttributeType.Age,
            FaceAttributeType.Gender,
            FaceAttributeType.Emotion
        };

        using (WebClient webClient = new WebClient())
        using (Stream imageFileStream = webClient.OpenRead(imageFilePath))
        {
            var faces = await faceServiceClient.DetectAsync(imageFileStream,
                returnFaceLandmarks: true,
                returnFaceAttributes: requestedAttributes);

            string result = string.Empty;
            foreach (var f in faces.Select(face => face.FaceAttributes))
            {
                result += $"Age: {f.Age.ToString()} Years Gender: {f.Gender} Emotion: {f.Emotion.ToString()}{Environment.NewLine}{Environment.NewLine}";
            }
            return result;
        }
    }
    catch (Exception)
    {
        // Swallow and signal failure with an empty string (existing behavior).
        return string.Empty;
    }
}
// Detects the highest-ranked emotion (plus age) of the first face in the photo
// stream. Best-effort: returns null when there is no photo, no face, or any
// service failure.
public static async Task<Emocion> ObtenerEmocion(Stream foto)
{
    Emocion emocion = null;
    try
    {
        if (foto != null)
        {
            var clienteFace = new FaceServiceClient(Constantes.FaceApiKey, Constantes.FaceApiURL);
            var atributosFace = new FaceAttributeType[]
            {
                FaceAttributeType.Emotion,
                FaceAttributeType.Age
            };

            // FIX: removed the redundant foto.Dispose() that sat inside this
            // block — `using (var stream = foto)` already disposes that same
            // stream on exit, so it was disposed twice.
            using (var stream = foto)
            {
                Face[] rostros = await clienteFace.DetectAsync(stream, false, false, atributosFace);
                if (rostros.Any())
                {
                    // Take the top-ranked emotion of the first detected face.
                    var analisisEmocion = rostros.FirstOrDefault().FaceAttributes.Emotion.ToRankedList().FirstOrDefault();
                    emocion = new Emocion() { Nombre = analisisEmocion.Key, Score = analisisEmocion.Value };
                }
            }
        }
    }
    catch (Exception)
    {
        // Deliberate best-effort swallow; callers treat null as "no emotion".
    }
    return emocion;
}
// Detects faces (gender + age) in the photo at the given URL. The initial
// delay throttles successive calls (e.g. to stay under the service rate limit).
// Returns an empty list on any failure.
public async Task<List<Face>> DetectFaces(string photoUrl)
{
    IEnumerable<FaceAttributeType> faceAttributes = new FaceAttributeType[]
    {
        FaceAttributeType.Gender,
        FaceAttributeType.Age
    };

    List<Face> faces = new List<Face>();
    try
    {
        // FIX: Thread.Sleep blocked the calling thread inside an async method;
        // Task.Delay awaits the same interval without blocking.
        await Task.Delay(3500);

        var detectionResult = await faceServiceClient.DetectAsync(photoUrl,
            returnFaceLandmarks: false,
            returnFaceAttributes: faceAttributes);
        faces.AddRange(detectionResult);
        return faces;
    }
    catch (FaceAPIException)
    {
        // Service error: return the (empty) list, matching prior behavior.
        return faces;
    }
    catch (Exception)
    {
        // Any other failure: likewise return the empty list.
        return faces;
    }
}
// Returns true when the local image file contains at least one face the
// service classifies as female and younger than 30; false on any failure.
// NOTE(review): `.Result` blocks synchronously on an async call — deadlock risk
// on a UI/ASP.NET synchronization context and thread-pool pressure elsewhere;
// consider making this method async. The Sleep presumably throttles request
// rate — confirm against the service tier's limits.
public bool MakeRequestLocalFile(String file)
{
    // Attributes requested from the detection call (others left disabled).
    var requiedFaceAttributes = new FaceAttributeType[]
    {
        FaceAttributeType.Age,
        FaceAttributeType.Gender,
        //FaceAttributeType.Smile,
        //FaceAttributeType.FacialHair,
        //FaceAttributeType.HeadPose
    };

    try
    {
        // Blocking delay between requests.
        Thread.Sleep(3 * 1000);
        using (Stream s = File.OpenRead(file))
        {
            var faces = new List <Face>(faceServiceClient.DetectAsync(s, true, false, requiedFaceAttributes).Result);
            return(faces.Any(face => face.FaceAttributes.Gender == "female" && face.FaceAttributes.Age < 30));
        }
    }
    catch
    {
        // Best-effort: any failure (I/O, service, parse) is reported as "no match".
        return(false);
    }
}
// Opens the image file and runs a simple Face API detection with the standard
// attribute set. Returns an empty array on any failure.
public async Task<Face[]> SimpleDetectFace(string imageFilePath)
{
    // Attributes requested from the detection call.
    IEnumerable<FaceAttributeType> requestedAttributes = new FaceAttributeType[]
    {
        FaceAttributeType.Gender,
        FaceAttributeType.Age,
        FaceAttributeType.Smile,
        FaceAttributeType.Emotion,
        FaceAttributeType.Glasses,
        FaceAttributeType.Hair
    };

    try
    {
        using (Stream imageStream = File.OpenRead(imageFilePath))
        {
            return await FaceServiceHelper.DetectAsync(imageStream,
                returnFaceId: true,
                returnFaceLandmarks: false,
                returnFaceAttributes: requestedAttributes);
        }
    }
    catch (FaceAPIException)
    {
        // Service-reported error: swallow and return empty (existing behavior).
        return new Face[0];
    }
    catch (Exception)
    {
        // Any other failure: likewise.
        return new Face[0];
    }
}
// Runs Face API detection on the supplied photo and returns the first face
// found (with age, gender, head-pose and emotion attributes). Best-effort:
// returns null when there is no photo, no face, or any failure.
public static async Task<Face> DetectarRostro(MediaFile foto)
{
    if (foto == null)
    {
        return null;
    }

    try
    {
        var clienteFace = new FaceServiceClient(Constantes.FaceApiKey, Constantes.FaceApiURL);
        var atributos = new FaceAttributeType[]
        {
            FaceAttributeType.Age,
            FaceAttributeType.Gender,
            FaceAttributeType.HeadPose,
            FaceAttributeType.Emotion
        };

        using (var stream = foto.GetStream())
        {
            Face[] rostros = await clienteFace.DetectAsync(stream, false, true, atributos);
            return rostros.FirstOrDefault();
        }
    }
    catch (Exception)
    {
        // Deliberate best-effort swallow; fall through to null.
        return null;
    }
}
// Detects the first face in the photo bytes via the shared FaceClient,
// requesting age/gender/head-pose/emotion. Best-effort: returns null when
// there is no photo, no face, or any failure.
public static async Task<Face> DetectarRostro(byte[] foto)
{
    FaceClient.Shared.Endpoint = Constantes.FaceApiURL;
    FaceClient.Shared.SubscriptionKey = Constantes.FaceApiKey;

    if (foto == null)
    {
        return null;
    }

    try
    {
        var atributos = new FaceAttributeType[]
        {
            FaceAttributeType.Age,
            FaceAttributeType.Gender,
            FaceAttributeType.HeadPose,
            FaceAttributeType.Emotion
        };

        using (var stream = new MemoryStream(foto))
        {
            var rostros = await FaceClient.Shared.DetectFacesInPhoto(stream, true, atributos);
            if (rostros.Count > 0)
            {
                return rostros.FirstOrDefault();
            }
        }
    }
    catch (Exception)
    {
        // Deliberate best-effort swallow; fall through to null.
    }

    return null;
}
// Fires once the photo has loaded: records its pixel size, then calls the Face
// API with the URL in txtLocation and renders the face and emotion data.
// (async void is acceptable here because this is a UI event handler.)
private async void imgPhoto_ImageOpened(object sender, RoutedEventArgs e)
{
    var bitmap = imgPhoto.Source as BitmapImage;
    size_image = new Size(bitmap.PixelWidth, bitmap.PixelHeight);

    FaceServiceClient client = new FaceServiceClient(key_face);

    var requiredAttributes = new FaceAttributeType[]
    {
        FaceAttributeType.Age,
        FaceAttributeType.Gender,
        FaceAttributeType.Smile,
        FaceAttributeType.FacialHair,
        FaceAttributeType.HeadPose,
        FaceAttributeType.Emotion,
        FaceAttributeType.Glasses
    };

    faces = await client.DetectAsync(txtLocation.Text, true, true, requiredAttributes);

    if (faces != null)
    {
        DisplayFacesData(faces);
        DisplayEmotionsData(faces);
    }

    ringLoading.IsActive = false;
}
// Uploads the image file and calls DetectWithStreamAsync, honoring the
// caller's cancellation token. Returns an empty list on any failure.
private async Task<IList<DetectedFace>> UploadAndDetectFaces(string imageFilePath, CancellationToken token)
{
    // Only gender and age are requested.
    IList<FaceAttributeType> requestedAttributes = new FaceAttributeType[]
    {
        FaceAttributeType.Gender,
        FaceAttributeType.Age
    };

    try
    {
        using (Stream imageStream = File.OpenRead(imageFilePath))
        {
            // true => return faceId; false => skip face landmarks.
            return await faceClient.Face.DetectWithStreamAsync(
                imageStream, true, false, requestedAttributes, token);
        }
    }
    catch (APIErrorException f)
    {
        // Service-reported failure: show it, then return empty.
        MessageBox.Show(f.Message);
        return new List<DetectedFace>();
    }
    catch (Exception)
    {
        // All other failures are swallowed silently (existing behavior).
        return new List<DetectedFace>();
    }
}
// Detects faces in the image stream (requesting age and gender) and maps the
// raw Face API results onto ProfileResult objects.
public async Task<IEnumerable<ProfileResult>> GetProfileFromImageAsync(Stream imageStream)
{
    // Requested attributes, declared with an array initializer.
    var attributes = new FaceAttributeType[]
    {
        FaceAttributeType.Age,
        FaceAttributeType.Gender
    };

    // faceId requested; landmarks skipped.
    var profile = await _faceServiceClient.DetectAsync(imageStream, true, false, attributes);

    return ConvertProfileToProfileResults(profile);
}
// Runs Face API detection on the captured photo and, when a face is found,
// attaches automatically-derived tags (age, gender, glasses, and any of
// smile/beard/moustache/sideburns whose score exceeds 0.5) to the photo.
// No-op when the Face service key is not configured.
async Task AddFaceBasedTagsToPhotoAsync(PhotoResult photoResult)
{
    // See comment at bottom of file.
    if (!string.IsNullOrEmpty(cognitiveServiceFaceServiceKey))
    {
        FaceServiceClient client = new FaceServiceClient(cognitiveServiceFaceServiceKey);

        using (var stream = await photoResult.PhotoFile.OpenStreamForReadAsync())
        {
            // Attributes requested from the detection call.
            var attributes = new FaceAttributeType[]
            {
                FaceAttributeType.Age,
                FaceAttributeType.FacialHair,
                FaceAttributeType.Gender,
                FaceAttributeType.Glasses,
                FaceAttributeType.Smile
            };

            // faceId requested; landmarks skipped.
            var results = await client.DetectAsync(stream, true, false, attributes);

            // Only the first detected face contributes tags.
            var firstFace = results?.FirstOrDefault();

            if (firstFace != null)
            {
                var automaticTags = new List<string>();
                automaticTags.Add($"age {firstFace.FaceAttributes.Age}");
                automaticTags.Add(firstFace.FaceAttributes.Gender.ToString());
                automaticTags.Add(firstFace.FaceAttributes.Glasses.ToString());

                // Adds `name` as a tag when the 0..1 confidence score passes 0.5.
                Action<double, string> compareFunc =
                    (double value, string name) =>
                {
                    if (value > 0.5)
                    {
                        automaticTags.Add(name);
                    }
                };
                compareFunc(firstFace.FaceAttributes.Smile, "smile");
                compareFunc(firstFace.FaceAttributes.FacialHair.Beard, "beard");
                compareFunc(firstFace.FaceAttributes.FacialHair.Moustache, "moustache");
                compareFunc(firstFace.FaceAttributes.FacialHair.Sideburns, "sideburns");

                // Persist the derived tags against the photo.
                await this.photoControl.AddTagsToPhotoAsync(
                    photoResult.PhotoId, automaticTags);
            }
        }
    }
}