/// <summary>
/// Derives a human-readable hair description from the detected hair attributes.
/// </summary>
/// <param name="hair">The hair attributes reported by the Face API.</param>
/// <returns>
/// "Invisible" or "Bald" when no hair color was detected; otherwise the name of
/// the hair color with the highest confidence ("Unknown" if none scores above zero).
/// </returns>
private string GetHair(Hair hair)
{
    // No color entries: either the hair is hidden or there is none.
    if (hair.HairColor.Length == 0)
    {
        return hair.Invisible ? "Invisible" : "Bald";
    }

    // Scan for the color with the highest confidence score.
    HairColorType bestColor = HairColorType.Unknown;
    double bestConfidence = 0.0;
    foreach (var candidate in hair.HairColor)
    {
        if (candidate.Confidence > bestConfidence)
        {
            bestConfidence = candidate.Confidence;
            bestColor = candidate.Color;
        }
    }
    return bestColor.ToString();
}
/// <summary>
/// Maps the detected hair attributes to a display string: "Invisible"/"Bald"
/// when no color data exists, otherwise the highest-confidence hair color name
/// ("Unknown" if no entry scores above zero).
/// </summary>
/// <param name="hair">The hair attributes reported by the Face API.</param>
/// <returns>The hair description string.</returns>
private static string GetHair(Hair hair)
{
    if (hair.HairColor.Count == 0)
    {
        if (hair.Invisible)
        {
            return "Invisible";
        }
        return "Bald";
    }

    // Track the best-scoring color seen so far.
    HairColorType bestColor = HairColorType.Unknown;
    double bestConfidence = 0.0;
    for (int i = 0; i < hair.HairColor.Count; ++i)
    {
        if (hair.HairColor[i].Confidence > bestConfidence)
        {
            bestConfidence = hair.HairColor[i].Confidence;
            bestColor = hair.HairColor[i].Color;
        }
    }
    return bestColor.ToString();
}
/*
 * Detect faces from URL images and print every face attribute returned by the service.
 */
public static async Task DetectFaceExtract(IFaceClient client, string recognitionModel)
{
    Console.WriteLine("========Sample of face detection========");

    // Create a list of images
    const string IMAGE_BASE_URL = "https://csdx.blob.core.windows.net/resources/Face/Images/";
    List<string> imageFileNames = new List<string>
    {
        "detection1.jpg", // single female with glasses
        // "detection2.jpg", // (optional: single man)
        // "detection3.jpg", // (optional: single male construction worker)
        // "detection4.jpg", // (optional: 3 people at cafe, 1 is blurred)
        "detection5.jpg", // black family, woman child man
        "detection6.jpg"  // elderly couple, male female
    };

    foreach (var imageFileName in imageFileNames)
    {
        // Detect faces with all attributes from image url.
        IList<DetectedFace> detectedFaces = await client.Face.DetectWithUrlAsync(
            $"{IMAGE_BASE_URL}{imageFileName}",
            returnFaceAttributes: new List<FaceAttributeType>
            {
                FaceAttributeType.Accessories, FaceAttributeType.Age, FaceAttributeType.Blur,
                FaceAttributeType.Emotion, FaceAttributeType.Exposure, FaceAttributeType.FacialHair,
                FaceAttributeType.Gender, FaceAttributeType.Glasses, FaceAttributeType.Hair,
                FaceAttributeType.HeadPose, FaceAttributeType.Makeup, FaceAttributeType.Noise,
                FaceAttributeType.Occlusion, FaceAttributeType.Smile
            },
            recognitionModel: recognitionModel);

        Console.WriteLine($"{detectedFaces.Count} face(s) detected from image `{imageFileName}`.");

        // Parse and print all attributes of each detected face.
        foreach (var face in detectedFaces)
        {
            Console.WriteLine($"Face attributes for {imageFileName}:");

            // Get bounding box of the faces
            Console.WriteLine($"Rectangle(Left/Top/Width/Height) : {face.FaceRectangle.Left} {face.FaceRectangle.Top} {face.FaceRectangle.Width} {face.FaceRectangle.Height}");

            // Get accessories of the faces, joined as a comma-separated list.
            List<Accessory> accessoriesList = (List<Accessory>)face.FaceAttributes.Accessories;
            int count = face.FaceAttributes.Accessories.Count;
            string accessory;
            string[] accessoryArray = new string[count];
            if (count == 0)
            {
                accessory = "NoAccessories";
            }
            else
            {
                for (int i = 0; i < count; ++i)
                {
                    accessoryArray[i] = accessoriesList[i].Type.ToString();
                }
                accessory = string.Join(",", accessoryArray);
            }
            Console.WriteLine($"Accessories : {accessory}");

            // Get face other attributes
            Console.WriteLine($"Age : {face.FaceAttributes.Age}");
            Console.WriteLine($"Blur : {face.FaceAttributes.Blur.BlurLevel}");

            // Get emotion on the face: keep the highest-scoring category.
            string emotionType = string.Empty;
            double emotionValue = 0.0;
            Emotion emotion = face.FaceAttributes.Emotion;
            if (emotion.Anger > emotionValue) { emotionValue = emotion.Anger; emotionType = "Anger"; }
            if (emotion.Contempt > emotionValue) { emotionValue = emotion.Contempt; emotionType = "Contempt"; }
            if (emotion.Disgust > emotionValue) { emotionValue = emotion.Disgust; emotionType = "Disgust"; }
            if (emotion.Fear > emotionValue) { emotionValue = emotion.Fear; emotionType = "Fear"; }
            if (emotion.Happiness > emotionValue) { emotionValue = emotion.Happiness; emotionType = "Happiness"; }
            if (emotion.Neutral > emotionValue) { emotionValue = emotion.Neutral; emotionType = "Neutral"; }
            if (emotion.Sadness > emotionValue) { emotionValue = emotion.Sadness; emotionType = "Sadness"; }
            if (emotion.Surprise > emotionValue) { emotionType = "Surprise"; }
            Console.WriteLine($"Emotion : {emotionType}");

            // Get more face attributes
            Console.WriteLine($"Exposure : {face.FaceAttributes.Exposure.ExposureLevel}");
            Console.WriteLine($"FacialHair : {(face.FaceAttributes.FacialHair.Moustache + face.FaceAttributes.FacialHair.Beard + face.FaceAttributes.FacialHair.Sideburns > 0 ? "Yes" : "No")}");
            Console.WriteLine($"Gender : {face.FaceAttributes.Gender}");
            Console.WriteLine($"Glasses : {face.FaceAttributes.Glasses}");

            // Get hair color: "Invisible"/"Bald" when no color data, otherwise the
            // most confident color. FIX: previously `color` stayed null (printed as
            // empty) when the color list was non-empty but no entry exceeded zero
            // confidence; it now falls back to "Unknown", and ToString() is no
            // longer re-evaluated on every loop iteration.
            Hair hair = face.FaceAttributes.Hair;
            string color;
            if (hair.HairColor.Count == 0)
            {
                color = hair.Invisible ? "Invisible" : "Bald";
            }
            else
            {
                HairColorType returnColor = HairColorType.Unknown;
                double maxConfidence = 0.0;
                foreach (HairColor hairColor in hair.HairColor)
                {
                    if (hairColor.Confidence > maxConfidence)
                    {
                        maxConfidence = hairColor.Confidence;
                        returnColor = hairColor.Color;
                    }
                }
                color = returnColor.ToString();
            }
            Console.WriteLine($"Hair : {color}");

            // Get more attributes
            Console.WriteLine($"HeadPose : Pitch: {Math.Round(face.FaceAttributes.HeadPose.Pitch, 2)}, Roll: {Math.Round(face.FaceAttributes.HeadPose.Roll, 2)}, Yaw: {Math.Round(face.FaceAttributes.HeadPose.Yaw, 2)}");
            Console.WriteLine($"Makeup : {((face.FaceAttributes.Makeup.EyeMakeup || face.FaceAttributes.Makeup.LipMakeup) ? "Yes" : "No")}");
            Console.WriteLine($"Noise : {face.FaceAttributes.Noise.NoiseLevel}");
            Console.WriteLine($"Occlusion : EyeOccluded: {(face.FaceAttributes.Occlusion.EyeOccluded ? "Yes" : "No")} " +
                $" ForeheadOccluded: {(face.FaceAttributes.Occlusion.ForeheadOccluded ? "Yes" : "No")} MouthOccluded: {(face.FaceAttributes.Occlusion.MouthOccluded ? "Yes" : "No")}");
            Console.WriteLine($"Smile : {face.FaceAttributes.Smile}");
            Console.WriteLine();
        }
    }
}
/// <summary>
/// Detects faces in every image under a fixed local directory and persists the
/// parsed attributes (plus the raw image) through DbService.
/// </summary>
public static async Task DetectFaceExtract()
{
    Console.WriteLine("========DETECT FACES========");
    Console.WriteLine();

    var client = AuthenticateService.AuthenticateFaceClient();

    // Create a list of images
    string[] filePaths = Directory.GetFiles(@"D:\Work\Mimobod\dir_001", "*", SearchOption.AllDirectories);

    foreach (var filePath in filePaths)
    {
        try
        {
            byte[] byteData = ParserService.GetImageAsByteArray(filePath);
            using (var stream = new MemoryStream(byteData))
            {
                // Detect faces with the subset of attributes we persist.
                IList<DetectedFace> detectedFaces = await client.Face.DetectWithStreamAsync(
                    stream,
                    returnFaceAttributes: new List<FaceAttributeType>
                    {
                        FaceAttributeType.Accessories, FaceAttributeType.Age, FaceAttributeType.Emotion,
                        FaceAttributeType.FacialHair, FaceAttributeType.Gender, FaceAttributeType.Glasses,
                        FaceAttributeType.Hair, FaceAttributeType.Makeup, FaceAttributeType.Smile
                    });

                Console.WriteLine($"{detectedFaces.Count} face(s) detected from image `{filePath}`.");

                if (detectedFaces.Count > 0)
                {
                    // Save the raw image once; each face row references its index.
                    var imageIndex = DbService.SaveImage(byteData);

                    // Parse and save all attributes of each detected face.
                    var faceNumber = 1;
                    foreach (var face in detectedFaces)
                    {
                        // Accessories joined as a comma-separated list.
                        List<Accessory> accessoriesList = (List<Accessory>)face.FaceAttributes.Accessories;
                        int count = face.FaceAttributes.Accessories.Count;
                        string accessory;
                        string[] accessoryArray = new string[count];
                        if (count == 0)
                        {
                            accessory = "NoAccessories";
                        }
                        else
                        {
                            for (int i = 0; i < count; ++i)
                            {
                                accessoryArray[i] = accessoriesList[i].Type.ToString();
                            }
                            accessory = string.Join(",", accessoryArray);
                        }

                        // Get emotion on the face: keep the highest-scoring category.
                        string emotionType = string.Empty;
                        double emotionValue = 0.0;
                        Emotion emotion = face.FaceAttributes.Emotion;
                        if (emotion.Anger > emotionValue) { emotionValue = emotion.Anger; emotionType = "Anger"; }
                        if (emotion.Contempt > emotionValue) { emotionValue = emotion.Contempt; emotionType = "Contempt"; }
                        if (emotion.Disgust > emotionValue) { emotionValue = emotion.Disgust; emotionType = "Disgust"; }
                        if (emotion.Fear > emotionValue) { emotionValue = emotion.Fear; emotionType = "Fear"; }
                        if (emotion.Happiness > emotionValue) { emotionValue = emotion.Happiness; emotionType = "Happiness"; }
                        if (emotion.Neutral > emotionValue) { emotionValue = emotion.Neutral; emotionType = "Neutral"; }
                        if (emotion.Sadness > emotionValue) { emotionValue = emotion.Sadness; emotionType = "Sadness"; }
                        if (emotion.Surprise > emotionValue) { emotionType = "Surprise"; }

                        // Any positive sum of the facial-hair scores counts as "Yes".
                        var facialHair = face.FaceAttributes.FacialHair.Moustache + face.FaceAttributes.FacialHair.Beard + face.FaceAttributes.FacialHair.Sideburns > 0 ? "Yes" : "No";

                        // Get hair color. FIX: previously `color` stayed null when the
                        // color list was non-empty but no entry exceeded zero
                        // confidence; now falls back to "Unknown".
                        Hair hair = face.FaceAttributes.Hair;
                        string color;
                        if (hair.HairColor.Count == 0)
                        {
                            color = hair.Invisible ? "Invisible" : "Bald";
                        }
                        else
                        {
                            HairColorType returnColor = HairColorType.Unknown;
                            double maxConfidence = 0.0;
                            foreach (HairColor hairColor in hair.HairColor)
                            {
                                if (hairColor.Confidence > maxConfidence)
                                {
                                    maxConfidence = hairColor.Confidence;
                                    returnColor = hairColor.Color;
                                }
                            }
                            color = returnColor.ToString();
                        }

                        DbService.SaveToDatabase(
                            imageIndex,
                            accessory,
                            face.FaceAttributes.Age,
                            emotionType,
                            facialHair,
                            (int)face.FaceAttributes.Gender,
                            (int)face.FaceAttributes.Glasses,
                            color,
                            face.FaceAttributes.Smile,
                            faceNumber);
                        faceNumber++;
                    }
                }
            }
        }
        catch (Exception ex)
        {
            // FIX: the exception was swallowed silently (only a separator was
            // printed). Surface the failure reason while still continuing with
            // the remaining files (deliberate best-effort behavior).
            Console.WriteLine($"Failed to process `{filePath}`: {ex.Message}");
            Console.WriteLine("----------------------------------------------------------");
        }
    }
}
// Detect a face from an image URL using detection model 1 (required when
// retrieving attributes) and print every returned attribute.
public static async Task DetectFaceExtract(IFaceClient client, string url, string recognitionModel)
{
    Console.WriteLine("======== DETECT FACE ========\n======== Detection Mode 1 ======== ");
    Console.WriteLine();

    IList<DetectedFace> detectedFaces = await client.Face.DetectWithUrlAsync(
        url,
        returnFaceAttributes: new List<FaceAttributeType?>
        {
            FaceAttributeType.Accessories, FaceAttributeType.Age, FaceAttributeType.Blur,
            FaceAttributeType.Emotion, FaceAttributeType.Exposure, FaceAttributeType.FacialHair,
            FaceAttributeType.Gender, FaceAttributeType.Glasses, FaceAttributeType.Hair,
            FaceAttributeType.HeadPose, FaceAttributeType.Makeup, FaceAttributeType.Noise,
            FaceAttributeType.Occlusion, FaceAttributeType.Smile
        },
        // We specify detection model 1 because we are retrieving attributes.
        detectionModel: DetectionModel.Detection01,
        recognitionModel: recognitionModel);

    Console.WriteLine($"{detectedFaces.Count} face detected from the link.");

    foreach (var face in detectedFaces)
    {
        Console.WriteLine("Face attributes for the link:");

        // Get bounding box of the faces
        Console.WriteLine($"Rectangle(Left/Top/Width/Height) : {face.FaceRectangle.Left} {face.FaceRectangle.Top} {face.FaceRectangle.Width} {face.FaceRectangle.Height}");

        // Get accessories of the faces, joined as a comma-separated list.
        List<Accessory> accessoriesList = (List<Accessory>)face.FaceAttributes.Accessories;
        int count = face.FaceAttributes.Accessories.Count;
        string accessory;
        string[] accessoryArray = new string[count];
        if (count == 0)
        {
            accessory = "NoAccessories";
        }
        else
        {
            for (int i = 0; i < count; ++i)
            {
                accessoryArray[i] = accessoriesList[i].Type.ToString();
            }
            accessory = string.Join(",", accessoryArray);
        }
        Console.WriteLine($"Accessories : {accessory}");

        // Get face other attributes
        Console.WriteLine($"Age : {face.FaceAttributes.Age}");
        Console.WriteLine($"Blur : {face.FaceAttributes.Blur.BlurLevel}");

        // Get emotion on the face: keep the highest-scoring category.
        string emotionType = string.Empty;
        double emotionValue = 0.0;
        Emotion emotion = face.FaceAttributes.Emotion;
        if (emotion.Anger > emotionValue) { emotionValue = emotion.Anger; emotionType = "Anger"; }
        if (emotion.Contempt > emotionValue) { emotionValue = emotion.Contempt; emotionType = "Contempt"; }
        if (emotion.Disgust > emotionValue) { emotionValue = emotion.Disgust; emotionType = "Disgust"; }
        if (emotion.Fear > emotionValue) { emotionValue = emotion.Fear; emotionType = "Fear"; }
        if (emotion.Happiness > emotionValue) { emotionValue = emotion.Happiness; emotionType = "Happiness"; }
        if (emotion.Neutral > emotionValue) { emotionValue = emotion.Neutral; emotionType = "Neutral"; }
        if (emotion.Sadness > emotionValue) { emotionValue = emotion.Sadness; emotionType = "Sadness"; }
        if (emotion.Surprise > emotionValue) { emotionType = "Surprise"; }
        Console.WriteLine($"Emotion : {emotionType}");

        // Get more face attributes
        Console.WriteLine($"Exposure : {face.FaceAttributes.Exposure.ExposureLevel}");
        Console.WriteLine($"FacialHair : {(face.FaceAttributes.FacialHair.Moustache + face.FaceAttributes.FacialHair.Beard + face.FaceAttributes.FacialHair.Sideburns > 0 ? "Yes" : "No")}");
        Console.WriteLine($"Gender : {face.FaceAttributes.Gender}");
        Console.WriteLine($"Glasses : {face.FaceAttributes.Glasses}");

        // Get hair color. FIX: previously `color` stayed null (printed as empty)
        // when the color list was non-empty but no entry exceeded zero confidence;
        // now falls back to "Unknown".
        Hair hair = face.FaceAttributes.Hair;
        string color;
        if (hair.HairColor.Count == 0)
        {
            color = hair.Invisible ? "Invisible" : "Bald";
        }
        else
        {
            HairColorType returnColor = HairColorType.Unknown;
            double maxConfidence = 0.0;
            foreach (HairColor hairColor in hair.HairColor)
            {
                if (hairColor.Confidence > maxConfidence)
                {
                    maxConfidence = hairColor.Confidence;
                    returnColor = hairColor.Color;
                }
            }
            color = returnColor.ToString();
        }
        Console.WriteLine($"Hair : {color}");

        // Get more attributes
        Console.WriteLine($"HeadPose : Pitch: {Math.Round(face.FaceAttributes.HeadPose.Pitch, 2)}, Roll: {Math.Round(face.FaceAttributes.HeadPose.Roll, 2)}, Yaw: {Math.Round(face.FaceAttributes.HeadPose.Yaw, 2)}");
        Console.WriteLine($"Makeup : {((face.FaceAttributes.Makeup.EyeMakeup || face.FaceAttributes.Makeup.LipMakeup) ? "Yes" : "No")}");
        Console.WriteLine($"Noise : {face.FaceAttributes.Noise.NoiseLevel}");
        Console.WriteLine($"Occlusion : EyeOccluded: {(face.FaceAttributes.Occlusion.EyeOccluded ? "Yes" : "No")} " +
            $" ForeheadOccluded: {(face.FaceAttributes.Occlusion.ForeheadOccluded ? "Yes" : "No")} MouthOccluded: {(face.FaceAttributes.Occlusion.MouthOccluded ? "Yes" : "No")}");
        Console.WriteLine($"Smile : {face.FaceAttributes.Smile}");
        Console.WriteLine();
    }
}
/// <summary>
/// Detects faces in the listed images under <paramref name="url"/> using
/// detection model 1 and prints every returned attribute.
/// </summary>
public static async Task DetectFaceExtract(IFaceClient client, string url, string recognitionModel)
{
    List<string> imageFileNames = new List<string>
    {
        "detection1.jpg" // single female with glasses
        //"detection5.jpg", // family, woman child man
        //"detection6.jpg" // elderly couple, male female
    };

    foreach (var imageFileName in imageFileNames)
    {
        // Request the listed attributes for the image at {url}{imageFileName};
        // the analysis result is stored in detectedFaces.
        IList<DetectedFace> detectedFaces = await client.Face.DetectWithUrlAsync(
            $"{url}{imageFileName}",
            returnFaceAttributes: new List<FaceAttributeType?>
            {
                FaceAttributeType.Accessories, FaceAttributeType.Age, FaceAttributeType.Blur,
                FaceAttributeType.Emotion, FaceAttributeType.Exposure, FaceAttributeType.FacialHair,
                FaceAttributeType.Gender, FaceAttributeType.Glasses, FaceAttributeType.Hair,
                FaceAttributeType.HeadPose, FaceAttributeType.Makeup, FaceAttributeType.Noise,
                FaceAttributeType.Occlusion, FaceAttributeType.Smile
            },
            // We specify detection model 1 because we are retrieving attributes.
            detectionModel: DetectionModel.Detection01,
            recognitionModel: recognitionModel);

        Console.WriteLine($"{detectedFaces.Count} face(s) detected from image `{imageFileName}`.");

        foreach (var face in detectedFaces)
        {
            Console.WriteLine($"Face attributes for {imageFileName}:");

            // Face bounding box.
            Console.WriteLine($"Rectangle(Left/Top/Width/Height) : {face.FaceRectangle.Left} {face.FaceRectangle.Top} {face.FaceRectangle.Width} {face.FaceRectangle.Height}");

            // Accessories joined as a comma-separated list.
            List<Accessory> accessoriesList = (List<Accessory>)face.FaceAttributes.Accessories;
            int count = face.FaceAttributes.Accessories.Count;
            string accessory;
            string[] accessoryArray = new string[count];
            if (count == 0)
            {
                accessory = "NoAccessories";
            }
            else
            {
                for (int i = 0; i < count; ++i)
                {
                    accessoryArray[i] = accessoriesList[i].Type.ToString();
                }
                accessory = string.Join(",", accessoryArray);
            }
            Console.WriteLine($"Accessories : {accessory}");

            // Age and image blur level.
            Console.WriteLine($"Age : {face.FaceAttributes.Age}");
            Console.WriteLine($"Blur : {face.FaceAttributes.Blur.BlurLevel}");

            // Emotion analysis: keep the highest-scoring category.
            string emotionType = string.Empty;
            double emotionValue = 0.0;
            Emotion emotion = face.FaceAttributes.Emotion;
            if (emotion.Anger > emotionValue) { emotionValue = emotion.Anger; emotionType = "Anger"; }
            if (emotion.Contempt > emotionValue) { emotionValue = emotion.Contempt; emotionType = "Contempt"; }
            if (emotion.Disgust > emotionValue) { emotionValue = emotion.Disgust; emotionType = "Disgust"; }
            if (emotion.Fear > emotionValue) { emotionValue = emotion.Fear; emotionType = "Fear"; }
            if (emotion.Happiness > emotionValue) { emotionValue = emotion.Happiness; emotionType = "Happiness"; }
            if (emotion.Neutral > emotionValue) { emotionValue = emotion.Neutral; emotionType = "Neutral"; }
            if (emotion.Sadness > emotionValue) { emotionValue = emotion.Sadness; emotionType = "Sadness"; }
            if (emotion.Surprise > emotionValue) { emotionType = "Surprise"; }
            Console.WriteLine($"Emotion : {emotionType}"); // final emotion verdict

            // Additional attributes (exposure, facial hair, gender, glasses).
            Console.WriteLine($"Exposure : {face.FaceAttributes.Exposure.ExposureLevel}");
            Console.WriteLine($"FacialHair : {(face.FaceAttributes.FacialHair.Moustache + face.FaceAttributes.FacialHair.Beard + face.FaceAttributes.FacialHair.Sideburns > 0 ? "Yes" : "No")}");
            Console.WriteLine($"Gender : {face.FaceAttributes.Gender}");
            Console.WriteLine($"Glasses : {face.FaceAttributes.Glasses}");

            // Hair color. FIX: previously `color` stayed null (printed as empty)
            // when the color list was non-empty but no entry exceeded zero
            // confidence; now falls back to "Unknown".
            Hair hair = face.FaceAttributes.Hair;
            string color;
            if (hair.HairColor.Count == 0)
            {
                color = hair.Invisible ? "Invisible" : "Bald";
            }
            else
            {
                HairColorType returnColor = HairColorType.Unknown;
                double maxConfidence = 0.0;
                foreach (HairColor hairColor in hair.HairColor)
                {
                    if (hairColor.Confidence > maxConfidence)
                    {
                        maxConfidence = hairColor.Confidence;
                        returnColor = hairColor.Color;
                    }
                }
                color = returnColor.ToString();
            }
            Console.WriteLine($"Hair : {color}");

            // Head pose angles, makeup, noise, occlusion, smile.
            Console.WriteLine($"HeadPose : Pitch: {Math.Round(face.FaceAttributes.HeadPose.Pitch, 2)}, Roll: {Math.Round(face.FaceAttributes.HeadPose.Roll, 2)}, Yaw: {Math.Round(face.FaceAttributes.HeadPose.Yaw, 2)}");
            Console.WriteLine($"Makeup : {((face.FaceAttributes.Makeup.EyeMakeup || face.FaceAttributes.Makeup.LipMakeup) ? "Yes" : "No")}");
            Console.WriteLine($"Noise : {face.FaceAttributes.Noise.NoiseLevel}");
            Console.WriteLine($"Occlusion : EyeOccluded: {(face.FaceAttributes.Occlusion.EyeOccluded ? "Yes" : "No")} " +
                $" ForeheadOccluded: {(face.FaceAttributes.Occlusion.ForeheadOccluded ? "Yes" : "No")} MouthOccluded: {(face.FaceAttributes.Occlusion.MouthOccluded ? "Yes" : "No")}");
            Console.WriteLine($"Smile : {face.FaceAttributes.Smile}");
            Console.WriteLine();
        }
    }
}
/// <summary>
/// Builds a printable description of a detected face's attributes.
/// </summary>
/// <param name="face">The detected face whose attributes are rendered.</param>
/// <param name="headerText">Optional header line prepended to the result.</param>
/// <param name="verbose">When true, includes the full attribute set; otherwise only the core lines.</param>
/// <returns>The formatted attribute text.</returns>
public static string GetAttributeText(this DetectedFace face, string headerText = null, bool verbose = true)
{
    var resultBuilder = new StringBuilder();
    if (headerText != null)
    {
        Append(resultBuilder, verbose, headerText);
    }

    // Get bounding box of the faces
    if (verbose)
    {
        Append(resultBuilder, verbose, $"Rectangle(Left/Top/Width/Height) : {face.FaceRectangle.Left} {face.FaceRectangle.Top} {face.FaceRectangle.Width} {face.FaceRectangle.Height}");
    }

    // Get accessories of the faces, joined as a comma-separated list.
    List<Accessory> accessoriesList = (List<Accessory>)face.FaceAttributes.Accessories;
    int count = face.FaceAttributes.Accessories.Count;
    string accessory;
    string[] accessoryArray = new string[count];
    if (count == 0)
    {
        accessory = "NoAccessories";
    }
    else
    {
        for (int i = 0; i < count; ++i)
        {
            accessoryArray[i] = accessoriesList[i].Type.ToString();
        }
        accessory = string.Join(",", accessoryArray);
    }
    if (verbose)
    {
        Append(resultBuilder, verbose, $"Accessories : {accessory}");
    }

    // Get face other attributes
    Append(resultBuilder, verbose, $"Age : {face.FaceAttributes.Age}");
    if (verbose)
    {
        Append(resultBuilder, verbose, $"Blur : {face.FaceAttributes.Blur.BlurLevel}");
    }

    // Get emotion on the face: keep the highest-scoring category.
    string emotionType = string.Empty;
    double emotionValue = 0.0;
    Emotion emotion = face.FaceAttributes.Emotion;
    if (emotion.Anger > emotionValue) { emotionValue = emotion.Anger; emotionType = "Anger"; }
    if (emotion.Contempt > emotionValue) { emotionValue = emotion.Contempt; emotionType = "Contempt"; }
    if (emotion.Disgust > emotionValue) { emotionValue = emotion.Disgust; emotionType = "Disgust"; }
    if (emotion.Fear > emotionValue) { emotionValue = emotion.Fear; emotionType = "Fear"; }
    if (emotion.Happiness > emotionValue) { emotionValue = emotion.Happiness; emotionType = "Happiness"; }
    if (emotion.Neutral > emotionValue) { emotionValue = emotion.Neutral; emotionType = "Neutral"; }
    if (emotion.Sadness > emotionValue) { emotionValue = emotion.Sadness; emotionType = "Sadness"; }
    if (emotion.Surprise > emotionValue) { emotionType = "Surprise"; }
    Append(resultBuilder, verbose, $"Emotion : {emotionType}");

    // Get more face attributes
    if (verbose)
    {
        Append(resultBuilder, verbose, $"Exposure : {face.FaceAttributes.Exposure.ExposureLevel}");
    }
    Append(resultBuilder, verbose, $"FacialHair : {(face.FaceAttributes.FacialHair.Moustache + face.FaceAttributes.FacialHair.Beard + face.FaceAttributes.FacialHair.Sideburns > 0 ? "Yes" : "No")}");
    Append(resultBuilder, verbose, $"Gender : {face.FaceAttributes.Gender}");
    Append(resultBuilder, verbose, $"Glasses : {face.FaceAttributes.Glasses}");

    // Get hair color. FIX: previously `color` stayed null (rendered as "Hair : ")
    // when the color list was non-empty but no entry exceeded zero confidence;
    // now falls back to "Unknown".
    var hair = face.FaceAttributes.Hair;
    string color;
    if (hair.HairColor.Count == 0)
    {
        color = hair.Invisible ? "Invisible" : "Bald";
    }
    else
    {
        HairColorType returnColor = HairColorType.Unknown;
        double maxConfidence = 0.0;
        foreach (HairColor hairColor in hair.HairColor)
        {
            if (hairColor.Confidence > maxConfidence)
            {
                maxConfidence = hairColor.Confidence;
                returnColor = hairColor.Color;
            }
        }
        color = returnColor.ToString();
    }
    // NOTE(review): this line bypasses the Append helper (unconditional
    // AppendLine), unlike every other attribute — kept as-is to preserve
    // output; confirm whether it should use Append for consistency.
    resultBuilder.AppendLine($"Hair : {color}");

    // Get more attributes
    if (verbose)
    {
        Append(resultBuilder, verbose, $"HeadPose : Pitch: {Math.Round(face.FaceAttributes.HeadPose.Pitch, 2)}, Roll: {Math.Round(face.FaceAttributes.HeadPose.Roll, 2)}, Yaw: {Math.Round(face.FaceAttributes.HeadPose.Yaw, 2)}");
        Append(resultBuilder, verbose, $"Makeup : {((face.FaceAttributes.Makeup.EyeMakeup || face.FaceAttributes.Makeup.LipMakeup) ? "Yes" : "No")}");
        Append(resultBuilder, verbose, $"Noise : {face.FaceAttributes.Noise.NoiseLevel}");
        Append(resultBuilder, verbose, $"Occlusion : EyeOccluded: {(face.FaceAttributes.Occlusion.EyeOccluded ? "Yes" : "No")} " +
            $" ForeheadOccluded: {(face.FaceAttributes.Occlusion.ForeheadOccluded ? "Yes" : "No")} MouthOccluded: {(face.FaceAttributes.Occlusion.MouthOccluded ? "Yes" : "No")}");
        Append(resultBuilder, verbose, $"Smile : {face.FaceAttributes.Smile}");
    }
    return resultBuilder.ToString();
}
/// <summary>
/// Detects faces in an uploaded image and returns a <see cref="FaceImage"/>
/// populated with the parsed attributes of every face.
/// </summary>
/// <param name="client">The authenticated Face API client.</param>
/// <param name="file">The uploaded image file.</param>
/// <param name="recognitionModel">The recognition model to use for detection.</param>
/// <returns>The populated face-image result.</returns>
public static async Task<FaceImage> DetectFaceExtract(IFaceClient client, IFormFile file, string recognitionModel)
{
    FaceImage faceImage = new FaceImage();
    IList<DetectedFace> detectedFaces;

    // FIX: the upload stream was never disposed; release it once detection completes.
    using (var stream = file.OpenReadStream())
    {
        // Detect faces with all attributes from the image.
        detectedFaces = await client.Face.DetectWithStreamAsync(
            stream,
            returnFaceAttributes: new List<FaceAttributeType>
            {
                FaceAttributeType.Accessories, FaceAttributeType.Age, FaceAttributeType.Blur,
                FaceAttributeType.Emotion, FaceAttributeType.Exposure, FaceAttributeType.FacialHair,
                FaceAttributeType.Gender, FaceAttributeType.Glasses, FaceAttributeType.Hair,
                FaceAttributeType.HeadPose, FaceAttributeType.Makeup, FaceAttributeType.Noise,
                FaceAttributeType.Occlusion, FaceAttributeType.Smile
            },
            recognitionModel: recognitionModel);
    }

    faceImage.FileName = file.FileName;
    faceImage.FaceCount = detectedFaces.Count;

    // Parse all attributes of each detected face.
    foreach (var face in detectedFaces.Select((value, i) => new { i, value }))
    {
        Attributes attributes = new Attributes();
        attributes.FaceNumber = face.i + 1;

        // Get bounding box of the faces
        attributes.Rectangle.Left = face.value.FaceRectangle.Left;
        attributes.Rectangle.Top = face.value.FaceRectangle.Top;
        attributes.Rectangle.Width = face.value.FaceRectangle.Width;
        attributes.Rectangle.Height = face.value.FaceRectangle.Height;

        // Accessories joined as a comma-separated list.
        List<Accessory> accessoriesList = (List<Accessory>)face.value.FaceAttributes.Accessories;
        int count = face.value.FaceAttributes.Accessories.Count;
        string accessory;
        string[] accessoryArray = new string[count];
        if (count == 0)
        {
            accessory = "NoAccessories";
        }
        else
        {
            for (int i = 0; i < count; ++i)
            {
                accessoryArray[i] = accessoriesList[i].Type.ToString();
            }
            accessory = string.Join(",", accessoryArray);
        }
        attributes.Accessories = accessory;

        // Get face other attributes
        attributes.Age = face.value.FaceAttributes.Age;
        attributes.Blur = face.value.FaceAttributes.Blur.BlurLevel.ToString();

        // Get emotion on the face: keep the highest-scoring category.
        string emotionType = string.Empty;
        double emotionValue = 0.0;
        Emotion emotion = face.value.FaceAttributes.Emotion;
        if (emotion.Anger > emotionValue) { emotionValue = emotion.Anger; emotionType = "Anger"; }
        if (emotion.Contempt > emotionValue) { emotionValue = emotion.Contempt; emotionType = "Contempt"; }
        if (emotion.Disgust > emotionValue) { emotionValue = emotion.Disgust; emotionType = "Disgust"; }
        if (emotion.Fear > emotionValue) { emotionValue = emotion.Fear; emotionType = "Fear"; }
        if (emotion.Happiness > emotionValue) { emotionValue = emotion.Happiness; emotionType = "Happiness"; }
        if (emotion.Neutral > emotionValue) { emotionValue = emotion.Neutral; emotionType = "Neutral"; }
        if (emotion.Sadness > emotionValue) { emotionValue = emotion.Sadness; emotionType = "Sadness"; }
        if (emotion.Surprise > emotionValue) { emotionType = "Surprise"; }
        attributes.Emotion = emotionType;

        // Get more face attributes
        attributes.Exposure = face.value.FaceAttributes.Exposure.ExposureLevel.ToString();
        attributes.FacialHair = face.value.FaceAttributes.FacialHair.Moustache + face.value.FaceAttributes.FacialHair.Beard + face.value.FaceAttributes.FacialHair.Sideburns > 0 ? "Yes" : "No";
        attributes.Gender = face.value.FaceAttributes.Gender.ToString();
        attributes.Glasses = face.value.FaceAttributes.Glasses.ToString();

        // Get hair color. FIX: previously `attributes.Hair` stayed null when the
        // color list was non-empty but no entry exceeded zero confidence; now
        // falls back to "Unknown".
        Hair hair = face.value.FaceAttributes.Hair;
        string color;
        if (hair.HairColor.Count == 0)
        {
            color = hair.Invisible ? "Invisible" : "Bald";
        }
        else
        {
            HairColorType returnColor = HairColorType.Unknown;
            double maxConfidence = 0.0;
            foreach (HairColor hairColor in hair.HairColor)
            {
                if (hairColor.Confidence > maxConfidence)
                {
                    maxConfidence = hairColor.Confidence;
                    returnColor = hairColor.Color;
                }
            }
            color = returnColor.ToString();
        }
        attributes.Hair = color;

        // Get more attributes
        attributes.HeadPose.Pitch = Math.Round(face.value.FaceAttributes.HeadPose.Pitch, 2);
        attributes.HeadPose.Roll = Math.Round(face.value.FaceAttributes.HeadPose.Roll, 2);
        attributes.HeadPose.Yaw = Math.Round(face.value.FaceAttributes.HeadPose.Yaw, 2);
        attributes.Makeup = (face.value.FaceAttributes.Makeup.EyeMakeup || face.value.FaceAttributes.Makeup.LipMakeup) ? "Yes" : "No";
        attributes.Noise = face.value.FaceAttributes.Noise.NoiseLevel.ToString();
        attributes.Occlusion.EyeOccluded = face.value.FaceAttributes.Occlusion.EyeOccluded;
        attributes.Occlusion.ForeheadOccluded = face.value.FaceAttributes.Occlusion.ForeheadOccluded;
        attributes.Occlusion.MouthOccluded = face.value.FaceAttributes.Occlusion.MouthOccluded;
        attributes.Smile = face.value.FaceAttributes.Smile.ToString();

        faceImage.Attributes.Add(attributes);
    }
    return faceImage;
}
/// <summary>
/// Detects faces (with landmarks and all attributes) in every local webcam
/// capture under C:\imagenes and prints the results.
/// </summary>
/// <param name="client">The authenticated Face API client.</param>
/// <param name="url">Unused; kept for signature compatibility with the other overloads.</param>
/// <param name="recognitionModel">The recognition model to use for detection.</param>
public static async Task DetectAllFaceAtributes(IFaceClient client, string url, string recognitionModel)
{
    Console.WriteLine("========DETECT FACES========");
    Console.WriteLine();

    // Create a list of images, keeping only webcam captures.
    List<string> imageFileNames = Directory.GetFiles("C:\\imagenes", "*.*", SearchOption.AllDirectories).ToList();
    imageFileNames = imageFileNames.Where(item => item.Contains("WebCam")).ToList();

    // NOTE(review): the guard rejects exactly 10 images while the message says
    // "more than 10" — confirm whether the intended limit is 10 or 9.
    if (imageFileNames.Count >= 10)
    {
        // Log before throwing so the reason also reaches the console.
        string message = "Límite de imágenes excedido, no se puede mandar más de 10 imágenes";
        Console.WriteLine("ERROR: " + message);
        throw new Exception(message);
    }

    foreach (var imageFileName in imageFileNames)
    {
        IList<DetectedFace> detectedFaces;

        // FIX: the FileStream was never disposed, leaking a file handle per image.
        using (FileStream file = new FileStream(imageFileName, FileMode.Open))
        {
            // Detect faces with landmarks and all attributes from the local file.
            detectedFaces = await client.Face.DetectWithStreamAsync(
                file,
                returnFaceLandmarks: true,
                returnFaceAttributes: new List<FaceAttributeType>
                {
                    FaceAttributeType.Accessories, FaceAttributeType.Age, FaceAttributeType.Blur,
                    FaceAttributeType.Emotion, FaceAttributeType.Exposure, FaceAttributeType.FacialHair,
                    FaceAttributeType.Gender, FaceAttributeType.Glasses, FaceAttributeType.Hair,
                    FaceAttributeType.HeadPose, FaceAttributeType.Makeup, FaceAttributeType.Noise,
                    FaceAttributeType.Occlusion, FaceAttributeType.Smile,
                },
                recognitionModel: recognitionModel);
        }

        Console.WriteLine($"{detectedFaces.Count} face(s) detected from image `{imageFileName}`.");

        // Parse and print all attributes of each detected face.
        foreach (var face in detectedFaces)
        {
            Console.WriteLine($"Face attributes for {imageFileName}:");

            // Get bounding box of the faces
            Console.WriteLine($"Rectangle(Left/Top/Width/Height) : {face.FaceRectangle.Left} {face.FaceRectangle.Top} {face.FaceRectangle.Width} {face.FaceRectangle.Height}");

            // Get accessories of the faces, joined as a comma-separated list.
            List<Accessory> accessoriesList = (List<Accessory>)face.FaceAttributes.Accessories;
            int count = face.FaceAttributes.Accessories.Count;
            string accessory;
            string[] accessoryArray = new string[count];
            if (count == 0)
            {
                accessory = "NoAccessories";
            }
            else
            {
                for (int i = 0; i < count; ++i)
                {
                    accessoryArray[i] = accessoriesList[i].Type.ToString();
                }
                accessory = string.Join(",", accessoryArray);
            }
            Console.WriteLine($"Accessories : {accessory}");

            // Get face other attributes
            Console.WriteLine($"Age : {face.FaceAttributes.Age}");
            Console.WriteLine($"Blur : {face.FaceAttributes.Blur.BlurLevel}");

            // Get emotion on the face: keep the highest-scoring category.
            string emotionType = string.Empty;
            double emotionValue = 0.0;
            Emotion emotion = face.FaceAttributes.Emotion;
            if (emotion.Anger > emotionValue) { emotionValue = emotion.Anger; emotionType = "Anger"; }
            if (emotion.Contempt > emotionValue) { emotionValue = emotion.Contempt; emotionType = "Contempt"; }
            if (emotion.Disgust > emotionValue) { emotionValue = emotion.Disgust; emotionType = "Disgust"; }
            if (emotion.Fear > emotionValue) { emotionValue = emotion.Fear; emotionType = "Fear"; }
            if (emotion.Happiness > emotionValue) { emotionValue = emotion.Happiness; emotionType = "Happiness"; }
            if (emotion.Neutral > emotionValue) { emotionValue = emotion.Neutral; emotionType = "Neutral"; }
            if (emotion.Sadness > emotionValue) { emotionValue = emotion.Sadness; emotionType = "Sadness"; }
            if (emotion.Surprise > emotionValue) { emotionType = "Surprise"; }
            Console.WriteLine($"Emotion : {emotionType}");

            // Get more face attributes
            Console.WriteLine($"Exposure : {face.FaceAttributes.Exposure.ExposureLevel}");
            Console.WriteLine($"FacialHair : {(face.FaceAttributes.FacialHair.Moustache + face.FaceAttributes.FacialHair.Beard + face.FaceAttributes.FacialHair.Sideburns > 0 ? "Yes" : "No")}");
            Console.WriteLine($"Gender : {face.FaceAttributes.Gender}");
            Console.WriteLine($"Glasses : {face.FaceAttributes.Glasses}");

            // Get hair color. FIX: previously `color` stayed null (printed as
            // empty) when the color list was non-empty but no entry exceeded
            // zero confidence; now falls back to "Unknown".
            Hair hair = face.FaceAttributes.Hair;
            string color;
            if (hair.HairColor.Count == 0)
            {
                color = hair.Invisible ? "Invisible" : "Bald";
            }
            else
            {
                HairColorType returnColor = HairColorType.Unknown;
                double maxConfidence = 0.0;
                foreach (HairColor hairColor in hair.HairColor)
                {
                    if (hairColor.Confidence > maxConfidence)
                    {
                        maxConfidence = hairColor.Confidence;
                        returnColor = hairColor.Color;
                    }
                }
                color = returnColor.ToString();
            }
            Console.WriteLine($"Hair : {color}");

            // Get more attributes
            Console.WriteLine($"HeadPose : Pitch: {Math.Round(face.FaceAttributes.HeadPose.Pitch, 2)}, Roll: {Math.Round(face.FaceAttributes.HeadPose.Roll, 2)}, Yaw: {Math.Round(face.FaceAttributes.HeadPose.Yaw, 2)}");
            Console.WriteLine($"Makeup : {((face.FaceAttributes.Makeup.EyeMakeup || face.FaceAttributes.Makeup.LipMakeup) ? "Yes" : "No")}");
            Console.WriteLine($"Noise : {face.FaceAttributes.Noise.NoiseLevel}");
            Console.WriteLine($"Occlusion : EyeOccluded: {(face.FaceAttributes.Occlusion.EyeOccluded ? "Yes" : "No")} " +
                $" ForeheadOccluded: {(face.FaceAttributes.Occlusion.ForeheadOccluded ? "Yes" : "No")} MouthOccluded: {(face.FaceAttributes.Occlusion.MouthOccluded ? "Yes" : "No")}");
            Console.WriteLine($"Smile : {face.FaceAttributes.Smile}");
            Console.WriteLine();
        }
    }
}