/// <summary>
/// Runs vision, face and emotion analysis over the given image and packages the
/// results into an <see cref="ImageInsights"/> instance.
/// </summary>
/// <param name="imageStream">Factory that produces a fresh stream over the image bytes.</param>
/// <param name="imageId">Caller-supplied identifier copied onto the result.</param>
/// <returns>The aggregated insights for the image.</returns>
public static async Task<ImageInsights> ProcessImageAsync(Func<Task<Stream>> imageStream, string imageId)
{
    ImageAnalyzer analyzer = new ImageAnalyzer(imageStream);
    analyzer.ShowDialogOnFaceApiErrors = true;

    // trigger vision, face and emotion requests in parallel
    await Task.WhenAll(
        analyzer.AnalyzeImageAsync(detectCelebrities: false, visualFeatures: DefaultVisualFeatureTypes),
        analyzer.DetectFacesAsync(detectFaceAttributes: true));

    // trigger face match against previously seen faces
    await analyzer.FindSimilarPersistedFacesAsync();

    ImageInsights result = new ImageInsights { ImageId = imageId };

    // assign computer vision results.
    // BUG FIX: the original indexed Captions[0] directly, which throws when the
    // service returns a description with no captions; use FirstOrDefault instead.
    result.VisionInsights = new VisionInsights
    {
        Caption = analyzer.AnalysisResult.Description?.Captions?.FirstOrDefault()?.Text,
        Tags = analyzer.AnalysisResult.Tags != null
            ? analyzer.AnalysisResult.Tags.Select(t => t.Name).ToArray()
            : new string[0]
    };

    // assign face api and emotion api results
    List<FaceInsights> faceInsightsList = new List<FaceInsights>();
    foreach (var face in analyzer.DetectedFaces)
    {
        FaceInsights faceInsights = new FaceInsights
        {
            FaceRectangle = face.FaceRectangle,
            Age = face.FaceAttributes.Age,
            Gender = face.FaceAttributes.Gender
        };

        // carry over the persisted-face id when this face matched a previously seen one
        SimilarFaceMatch similarFaceMatch = analyzer.SimilarFaceMatches.FirstOrDefault(s => s.Face.FaceId == face.FaceId);
        if (similarFaceMatch != null)
        {
            faceInsights.UniqueFaceId = similarFaceMatch.SimilarPersistedFace.PersistedFaceId;
        }

        // emotion results come from a separate call; pair them to faces by region overlap
        Emotion faceEmotion = CoreUtil.FindFaceClosestToRegion(analyzer.DetectedEmotion, face.FaceRectangle);
        if (faceEmotion != null)
        {
            faceInsights.TopEmotion = faceEmotion.Scores.ToRankedList().First().Key;
        }

        faceInsightsList.Add(faceInsights);
    }

    result.FaceInsights = faceInsightsList.ToArray();
    return result;
}
/// <summary>
/// Finds, among the most recent batch of similar-face matches, the persisted face
/// whose detected rectangle best lines up with the given bounding box.
/// </summary>
/// <param name="faceBox">Bounding box of the face to look up.</param>
/// <returns>The matching persisted face, or null when there is no plausible match.</returns>
public SimilarFace GetLastSimilarPersistedFaceForFace(BitmapBounds faceBox)
{
    // Nothing captured yet means there is nothing to match against.
    if (this.lastSimilarPersistedFaceSample == null || !this.lastSimilarPersistedFaceSample.Any())
    {
        return null;
    }

    // Keep only candidates that plausibly overlap the box, then prefer the one whose
    // top-left corner is closest (Manhattan distance) to the box's origin.
    SimilarFaceMatch bestCandidate = this.lastSimilarPersistedFaceSample
        .Where(candidate => Util.AreFacesPotentiallyTheSame(faceBox, candidate.Face.FaceRectangle))
        .OrderBy(candidate =>
            Math.Abs(faceBox.X - candidate.Face.FaceRectangle.Left) +
            Math.Abs(faceBox.Y - candidate.Face.FaceRectangle.Top))
        .FirstOrDefault();

    return bestCandidate?.SimilarPersistedFace;
}
/// <summary>
/// Folds one matched visitor into the running demographics: bumps the gender total,
/// increments the matching age bucket, and forwards to the emotion tally.
/// </summary>
/// <param name="item">The similar-face match whose attributes are recorded.</param>
public void UpdateVisitorDemographics(SimilarFaceMatch item)
{
    bool isMale = item.Face.FaceAttributes.Gender == Gender.Male;

    if (isMale)
    {
        Demographics.TotalMales++;
    }
    else
    {
        Demographics.TotalFemales++;
    }

    // Age buckets are tracked per gender.
    AgeDistribution ageBuckets = isMale
        ? Demographics.AgeGenderDistribution.MaleDistribution
        : Demographics.AgeGenderDistribution.FemaleDistribution;

    var age = item.Face.FaceAttributes.Age;
    if (age < 16)
    {
        ageBuckets.Age0To15++;
    }
    else if (age < 20)
    {
        ageBuckets.Age16To19++;
    }
    else if (age < 30)
    {
        ageBuckets.Age20s++;
    }
    else if (age < 40)
    {
        ageBuckets.Age30s++;
    }
    else if (age < 50)
    {
        ageBuckets.Age40s++;
    }
    else if (age < 60)
    {
        ageBuckets.Age50s++;
    }
    else
    {
        ageBuckets.Age60sAndOlder++;
    }

    // updating emotions as well:
    UpdateVisitorEmotions(item);
}
/// <summary>
/// Identifies the driver from the analyzed frame and updates the driver-id label.
/// async void is acceptable here only because this is an event-handler-style entry point.
/// </summary>
/// <param name="e">Analyzer holding the detected faces for the current frame.</param>
private async void StartDriverIdAsync(ImageAnalyzer e)
{
    // Re-entrancy guard: skip new frames while one is still in flight.
    if (this.isProcessingDriverId)
    {
        return;
    }

    if (!e.DetectedFaces.Any())
    {
        this.UpdateUIForNoDriverDetected();
        return;
    }

    // BUG FIX: the original never set this flag to true, so the guard above was dead
    // and overlapping frames could race. try/finally guarantees the flag is cleared
    // even if one of the service calls throws.
    this.isProcessingDriverId = true;
    try
    {
        await Task.WhenAll(e.IdentifyFacesAsync(), e.FindSimilarPersistedFacesAsync());

        SimilarFaceMatch faceMatch = e.SimilarFaceMatches.FirstOrDefault();
        if (faceMatch != null)
        {
            string name = "Unknown";
            IdentifiedPerson p = e.IdentifiedPersons.FirstOrDefault(f => f.FaceId == faceMatch.Face.FaceId);
            if (p != null)
            {
                name = p.Person.Name;
            }
            else if (SettingsHelper.Instance.ShowAgeAndGender)
            {
                switch (faceMatch.Face.FaceAttributes.Gender)
                {
                    case Gender.Male:
                        name = "Unknown male";
                        break;
                    case Gender.Female:
                        name = "Unknown female";
                        break;
                }
            }

            // BUG FIX: the original passed the persisted-face-id substring as a second
            // argument to string.Format("{0}", ...) where it was silently ignored.
            // The displayed text is preserved; the dead argument is removed.
            // NOTE(review): a sibling implementation formats "{0}\nFace Id: {1}" —
            // confirm whether the face id was meant to be shown here too.
            this.driverId.Text = name;
        }
    }
    finally
    {
        this.isProcessingDriverId = false;
    }
}
/// <summary>
/// Identifies the driver from the analyzed frame and updates the driver-id label
/// with the name (or a gender-based placeholder) and a short face-id suffix.
/// async void is acceptable here only because this is an event-handler-style entry point.
/// </summary>
/// <param name="e">Analyzer holding the detected faces for the current frame.</param>
private async void StartDriverIdAsync(ImageAnalyzer e)
{
    // Re-entrancy guard: skip new frames while one is still in flight.
    if (this.isProcessingDriverId)
    {
        return;
    }

    if (!e.DetectedFaces.Any())
    {
        this.UpdateUIForNoDriverDetected();
        return;
    }

    // BUG FIX: the original never set this flag to true, so the guard above was dead
    // and overlapping frames could race. try/finally guarantees the flag is cleared
    // even if one of the service calls throws.
    this.isProcessingDriverId = true;
    try
    {
        await Task.WhenAll(e.IdentifyFacesAsync(), e.FindSimilarPersistedFacesAsync());

        SimilarFaceMatch faceMatch = e.SimilarFaceMatches.FirstOrDefault();
        if (faceMatch != null)
        {
            string name = "Unknown";
            IdentifiedPerson p = e.IdentifiedPersons.FirstOrDefault(f => f.FaceId == faceMatch.Face.FaceId);
            if (p != null)
            {
                name = p.Person.Name;
            }
            else
            {
                // ROBUSTNESS: case-insensitive comparison, consistent with the
                // demographics code elsewhere in this file; the service's casing
                // of "male"/"female" should not be relied upon.
                if (string.Equals(faceMatch.Face.FaceAttributes.Gender, "male", StringComparison.OrdinalIgnoreCase))
                {
                    name = "Unknown male";
                }
                else if (string.Equals(faceMatch.Face.FaceAttributes.Gender, "female", StringComparison.OrdinalIgnoreCase))
                {
                    name = "Unknown female";
                }
            }

            // Show a short, human-readable prefix of the persisted face id.
            this.driverId.Text = string.Format(
                "{0}\nFace Id: {1}",
                name,
                faceMatch.SimilarPersistedFace.PersistedFaceId.ToString("N").Substring(0, 4));
        }
    }
    finally
    {
        this.isProcessingDriverId = false;
    }
}
/// <summary>
/// Folds one matched face into the running demographics: bumps the gender total
/// and increments the matching per-gender age bucket.
/// </summary>
/// <param name="item">The similar-face match whose attributes are recorded.</param>
private void UpdateDemographics(SimilarFaceMatch item)
{
    AgeDistribution genderBasedAgeDistribution = null;
    switch (item.Face.FaceAttributes.Gender)
    {
        case Microsoft.Azure.CognitiveServices.Vision.Face.Models.Gender.Male:
            this.demographics.OverallMaleCount++;
            genderBasedAgeDistribution = this.demographics.AgeGenderDistribution.MaleDistribution;
            break;
        case Microsoft.Azure.CognitiveServices.Vision.Face.Models.Gender.Female:
            this.demographics.OverallFemaleCount++;
            genderBasedAgeDistribution = this.demographics.AgeGenderDistribution.FemaleDistribution;
            break;
    }

    // BUG FIX: the switch above has no default, so a missing/unrecognized gender
    // left this null and the increments below threw NullReferenceException.
    if (genderBasedAgeDistribution == null)
    {
        return;
    }

    if (item.Face.FaceAttributes.Age < 16)
    {
        genderBasedAgeDistribution.Age0To15++;
    }
    else if (item.Face.FaceAttributes.Age < 20)
    {
        genderBasedAgeDistribution.Age16To19++;
    }
    else if (item.Face.FaceAttributes.Age < 30)
    {
        genderBasedAgeDistribution.Age20s++;
    }
    else if (item.Face.FaceAttributes.Age < 40)
    {
        genderBasedAgeDistribution.Age30s++;
    }
    else if (item.Face.FaceAttributes.Age < 50)
    {
        genderBasedAgeDistribution.Age40s++;
    }
    else
    {
        genderBasedAgeDistribution.Age50sAndOlder++;
    }
}
/// <summary>
/// Resolves a display name for a matched face: the identified person's name when a
/// trained model recognized them, otherwise a celebrity name from the vision result's
/// category detail, otherwise an empty string.
/// NOTE(review): the name ends in "Async" but the method is synchronous; kept as-is
/// because callers depend on the name.
/// </summary>
/// <param name="analyzer">Analyzer holding identification and vision results.</param>
/// <param name="item">The face match to describe.</param>
/// <returns>A display name, or string.Empty when none is found.</returns>
private static string GetDisplayTextForPersonAsync(ImageAnalyzer analyzer, SimilarFaceMatch item)
{
    // See if we identified this person against a trained model
    IdentifiedPerson identifiedPerson = analyzer.IdentifiedPersons.FirstOrDefault(p => p.FaceId == item.Face.FaceId);
    if (identifiedPerson != null)
    {
        return identifiedPerson.Person.Name;
    }

    // SIMPLIFICATION: the original re-checked "identifiedPerson == null" here, which
    // is always true after the early return above; the redundant nesting is removed.

    // Let's see if this is a celebrity
    if (analyzer.AnalysisResult?.Categories != null)
    {
        foreach (var category in analyzer.AnalysisResult.Categories.Where(c => c.Detail != null))
        {
            // Category detail is untyped JSON; parse it to look for celebrity entries.
            dynamic detail = JObject.Parse(category.Detail.ToString());
            if (detail.celebrities != null)
            {
                foreach (var celebrity in detail.celebrities)
                {
                    uint left = UInt32.Parse(celebrity.faceRectangle.left.ToString());
                    uint top = UInt32.Parse(celebrity.faceRectangle.top.ToString());
                    uint height = UInt32.Parse(celebrity.faceRectangle.height.ToString());
                    uint width = UInt32.Parse(celebrity.faceRectangle.width.ToString());

                    // Match the celebrity's rectangle against this face's rectangle.
                    if (Util.AreFacesPotentiallyTheSame(
                            new BitmapBounds { Height = height, Width = width, X = left, Y = top },
                            item.Face.FaceRectangle))
                    {
                        return celebrity.name.ToString();
                    }
                }
            }
        }
    }

    return string.Empty;
}
/// <summary>
/// Folds one matched face into the running demographics: bumps the gender total
/// and increments the matching per-gender age bucket.
/// </summary>
/// <param name="item">The similar-face match whose attributes are recorded.</param>
private void UpdateDemographics(SimilarFaceMatch item)
{
    // Gender arrives as a free-form string from the service; compare case-insensitively.
    bool isMale = string.Compare(item.Face.FaceAttributes.Gender, "male", StringComparison.OrdinalIgnoreCase) == 0;

    AgeDistribution ageBuckets;
    if (isMale)
    {
        this.demographics.OverallMaleCount++;
        ageBuckets = this.demographics.AgeGenderDistribution.MaleDistribution;
    }
    else
    {
        this.demographics.OverallFemaleCount++;
        ageBuckets = this.demographics.AgeGenderDistribution.FemaleDistribution;
    }

    var age = item.Face.FaceAttributes.Age;
    if (age < 16)
    {
        ageBuckets.Age0To15++;
    }
    else if (age < 20)
    {
        ageBuckets.Age16To19++;
    }
    else if (age < 30)
    {
        ageBuckets.Age20s++;
    }
    else if (age < 40)
    {
        ageBuckets.Age30s++;
    }
    else if (age < 50)
    {
        ageBuckets.Age40s++;
    }
    else
    {
        ageBuckets.Age50sAndOlder++;
    }
}
/// <summary>
/// Resolves a display name for a matched face: the identified person's name when a
/// trained model recognized them, otherwise a celebrity name from the vision result's
/// category detail, otherwise an empty string.
/// NOTE(review): the name ends in "Async" but the method is synchronous; kept as-is
/// because callers depend on the name.
/// </summary>
/// <param name="analyzer">Analyzer holding identification and vision results.</param>
/// <param name="item">The face match to describe.</param>
/// <returns>A display name, or string.Empty when none is found.</returns>
private static string GetDisplayTextForPersonAsync(ImageAnalyzer analyzer, SimilarFaceMatch item)
{
    // See if we identified this person against a trained model
    IdentifiedPerson identifiedPerson = analyzer.IdentifiedPersons.FirstOrDefault(p => p.FaceId == item.Face.FaceId);
    if (identifiedPerson != null)
    {
        return identifiedPerson.Person.Name;
    }

    // SIMPLIFICATION: the original re-checked "identifiedPerson == null" here, which
    // is always true after the early return above; the redundant nesting is removed.

    // Let's see if this is a celebrity
    if (analyzer.AnalysisResult?.Categories != null)
    {
        // BUG FIX: only Detail was null-filtered; Celebrities itself can be null for
        // a category with detail but no celebrity data, and foreach over null throws.
        foreach (var category in analyzer.AnalysisResult.Categories.Where(c => c.Detail?.Celebrities != null))
        {
            foreach (var celebrity in category.Detail.Celebrities)
            {
                // Convert the vision SDK rectangle into the face SDK's rectangle type
                // so the two can be compared.
                var celebrityFaceRectangle = new Microsoft.Azure.CognitiveServices.Vision.Face.Models.FaceRectangle(
                    celebrity.FaceRectangle.Width,
                    celebrity.FaceRectangle.Height,
                    celebrity.FaceRectangle.Left,
                    celebrity.FaceRectangle.Top);

                if (CoreUtil.AreFacesPotentiallyTheSame(celebrityFaceRectangle, item.Face.FaceRectangle))
                {
                    return celebrity.Name.ToString();
                }
            }
        }
    }

    return string.Empty;
}
/// <summary>
/// Records the visitor's strongest emotion in the per-gender emotion distribution.
/// </summary>
/// <param name="item">The similar-face match whose emotion scores are recorded.</param>
public void UpdateVisitorEmotions(SimilarFaceMatch item)
{
    // Emotion counts are tracked per gender.
    EmotionDistribution emotionBuckets = item.Face.FaceAttributes.Gender == Gender.Male
        ? Demographics.EmotionGenderDistribution.MaleDistribution
        : Demographics.EmotionGenderDistribution.FemaleDistribution;

    var scores = item.Face.FaceAttributes.Emotion;

    // Ordered list (same insertion order as the original dictionary) so that
    // ties between equal scores resolve the same way.
    var rankedEmotions = new List<KeyValuePair<string, double>>
    {
        new KeyValuePair<string, double>("Anger", scores.Anger),
        new KeyValuePair<string, double>("Contempt", scores.Contempt),
        new KeyValuePair<string, double>("Disgust", scores.Disgust),
        new KeyValuePair<string, double>("Fear", scores.Fear),
        new KeyValuePair<string, double>("Happiness", scores.Happiness),
        new KeyValuePair<string, double>("Neutral", scores.Neutral),
        new KeyValuePair<string, double>("Sadness", scores.Sadness),
        new KeyValuePair<string, double>("Surprise", scores.Surprise)
    };

    string topEmotion = rankedEmotions.OrderByDescending(e => e.Value).First().Key;

    switch (topEmotion)
    {
        case "Anger":
            emotionBuckets.Anger++;
            break;
        case "Contempt":
            emotionBuckets.Contempt++;
            break;
        case "Disgust":
            emotionBuckets.Disgust++;
            break;
        case "Fear":
            emotionBuckets.Fear++;
            break;
        case "Happiness":
            emotionBuckets.Happiness++;
            break;
        case "Neutral":
            emotionBuckets.Neutral++;
            break;
        case "Sadness":
            emotionBuckets.Sadness++;
            break;
        case "Surprise":
            emotionBuckets.Surprise++;
            break;
        default:
            break;
    }
}