/// <summary>
/// Combines face detection, identification and emotion results into one FaceInfo entry per face.
/// </summary>
/// <param name="DetectedFaces">Faces from the Face API; may be null.</param>
/// <param name="DetectedEmotion">Emotion results; matched to faces by rectangle proximity. May be null.</param>
/// <param name="IdentifiedPersons">Identification results keyed by FaceId; may be null.</param>
/// <returns>One FaceInfo per detected face; if only emotion data is available, one entry per emotion result.</returns>
public FaceInfo[] GetFaceInfo(IEnumerable<Face> DetectedFaces, IEnumerable<Emotion> DetectedEmotion, IEnumerable<IdentifyResult> IdentifiedPersons)
{
    List<FaceInfo> faceInfoList = new List<FaceInfo>();
    if (DetectedFaces != null)
    {
        foreach (var detectedFace in DetectedFaces)
        {
            FaceInfo faceInfo = new FaceInfo();

            // Age/gender attributes for this face, if present.
            if (detectedFace?.FaceAttributes != null)
            {
                faceInfo.Attributes = detectedFace.FaceAttributes;
            }

            // If we identified this face, send the name along.
            // Note: the original code null-guarded detectedFace with '?.' above but then
            // dereferenced it unguarded here; guard consistently to avoid an NRE.
            if (IdentifiedPersons != null && detectedFace != null)
            {
                var matchingPerson = IdentifiedPersons.FirstOrDefault(p => p.FaceId == detectedFace.FaceId);
                string name;
                if (matchingPerson != null && matchingPerson.Candidates.Length > 0 &&
                    personMap.TryGetValue(matchingPerson.Candidates[0].PersonId, out name))
                {
                    faceInfo.Name = name;
                }
            }

            // If we have emotion for this face (matched by rectangle proximity), send it along.
            if (DetectedEmotion != null && detectedFace != null)
            {
                Emotion matchingEmotion = CoreUtil.FindFaceClosestToRegion(DetectedEmotion, detectedFace.FaceRectangle);
                if (matchingEmotion != null)
                {
                    faceInfo.Emotion = matchingEmotion.Scores;
                }
            }

            faceInfoList.Add(faceInfo);
        }
    }
    else if (DetectedEmotion != null)
    {
        // Emotion-only results: no age/gender or identity available.
        faceInfoList.AddRange(DetectedEmotion.Select(emotion => new FaceInfo { Emotion = emotion.Scores }));
    }

    return faceInfoList.ToArray();
}
/// <summary>
/// Returns the face from the most recent detection sample whose rectangle is closest to
/// the given region, or null when no sample has been captured yet.
/// </summary>
public Face GetLastFaceAttributesForFace(BitmapBounds faceBox)
{
    var sample = this.lastDetectedFaceSample;

    // No detection sample yet, or it came back empty — nothing to match against.
    if (sample == null || !sample.Any())
    {
        return null;
    }

    return CoreUtil.FindFaceClosestToRegion(sample, faceBox);
}
/// <summary>
/// Runs computer vision, face and emotion analysis on an image and folds the results
/// into a single ImageInsights instance.
/// </summary>
/// <param name="imageStream">Callback that produces a fresh stream over the image bytes.</param>
/// <param name="imageId">Identifier to stamp onto the resulting insights.</param>
/// <returns>The combined insights for the image.</returns>
public static async Task<ImageInsights> ProcessImageAsync(Func<Task<Stream>> imageStream, string imageId)
{
    ImageAnalyzer analyzer = new ImageAnalyzer(imageStream);
    analyzer.ShowDialogOnFaceApiErrors = true;

    // Trigger vision, face and emotion requests in parallel.
    await Task.WhenAll(
        analyzer.AnalyzeImageAsync(detectCelebrities: false, visualFeatures: DefaultVisualFeatureTypes),
        analyzer.DetectFacesAsync(detectFaceAttributes: true));

    // Trigger face match against previously seen faces.
    await analyzer.FindSimilarPersistedFacesAsync();

    ImageInsights result = new ImageInsights { ImageId = imageId };

    // Assign computer vision results. FirstOrDefault() guards against a null or empty
    // caption list — the original 'Description?.Captions[0]' only null-guarded Description
    // and would throw on an empty Captions collection.
    result.VisionInsights = new VisionInsights
    {
        Caption = analyzer.AnalysisResult.Description?.Captions?.FirstOrDefault()?.Text,
        Tags = analyzer.AnalysisResult.Tags != null
            ? analyzer.AnalysisResult.Tags.Select(t => t.Name).ToArray()
            : new string[0]
    };

    // Assign Face API and Emotion API results: one FaceInsights per detected face.
    List<FaceInsights> faceInsightsList = new List<FaceInsights>();
    foreach (var face in analyzer.DetectedFaces)
    {
        FaceInsights faceInsights = new FaceInsights
        {
            FaceRectangle = face.FaceRectangle,
            Age = face.FaceAttributes.Age,
            Gender = face.FaceAttributes.Gender
        };

        // Carry the persisted unique face id over when a similar-face match exists.
        SimilarFaceMatch similarFaceMatch = analyzer.SimilarFaceMatches.FirstOrDefault(s => s.Face.FaceId == face.FaceId);
        if (similarFaceMatch != null)
        {
            faceInsights.UniqueFaceId = similarFaceMatch.SimilarPersistedFace.PersistedFaceId;
        }

        // Match emotion to this face by rectangle proximity and keep the top-ranked score.
        Emotion faceEmotion = CoreUtil.FindFaceClosestToRegion(analyzer.DetectedEmotion, face.FaceRectangle);
        if (faceEmotion != null)
        {
            faceInsights.TopEmotion = faceEmotion.Scores.ToRankedList().First().Key;
        }

        faceInsightsList.Add(faceInsights);
    }

    result.FaceInsights = faceInsightsList.ToArray();
    return result;
}
/// <summary>
/// Runs computer vision, face and emotion analysis in parallel for an image, then joins
/// emotion results onto the detected faces by rectangle proximity.
/// </summary>
/// <param name="imageStreamCallback">Callback that produces a fresh stream over the image bytes.</param>
/// <param name="imageId">Identifier to stamp onto the resulting insights.</param>
/// <returns>The combined insights for the image.</returns>
public static async Task<ImageInsights> ProcessImageAsync(Func<Task<Stream>> imageStreamCallback, string imageId)
{
    ImageInsights insights = new ImageInsights { ImageId = imageId };

    // Kick off all three analyses concurrently; emotion results land in a local list
    // so they can be merged with faces afterwards.
    List<Emotion> emotions = new List<Emotion>();
    await Task.WhenAll(
        AnalyzeImageFeaturesAsync(imageStreamCallback, insights),
        AnalyzeFacesAsync(imageStreamCallback, insights),
        AnalyzeEmotionAsync(imageStreamCallback, emotions));

    // Join emotion and face results: for each face, pick the emotion entry whose
    // rectangle is closest in location/size, and record its top-ranked emotion.
    foreach (var face in insights.FaceInsights)
    {
        Emotion closestEmotion = CoreUtil.FindFaceClosestToRegion(emotions, face.FaceRectangle);
        if (closestEmotion != null)
        {
            face.TopEmotion = closestEmotion.Scores.ToRankedList().First().Key;
        }
    }

    return insights;
}
/// <summary>
/// Updates the people timeline for the current video frame: increments counts and retries
/// identification for known visitors, catalogs newly seen faces (cropped thumbnail + track),
/// records per-frame emotion on each track, and refreshes the demographics controls.
/// </summary>
/// <param name="analyzer">Analyzer holding the current frame's face/emotion/match results.</param>
/// <param name="frameNumber">Frame index used when stamping emotion state onto the timeline.</param>
private async Task ProcessPeopleInsightsAsync(ImageAnalyzer analyzer, int frameNumber)
{
    foreach (var faceMatch in analyzer.SimilarFaceMatches)
    {
        Guid faceId = faceMatch.SimilarPersistedFace.PersistedFaceId;
        bool statsNeedRefresh = false;

        Visitor visitor;
        if (this.peopleInVideo.TryGetValue(faceId, out visitor))
        {
            // Known visitor: bump the sighting count.
            visitor.Count++;

            // If identification is still pending for this face, retry a limited number of times.
            if (this.pendingIdentificationAttemptCount.ContainsKey(faceId))
            {
                if (this.pendingIdentificationAttemptCount[faceId] <= 5)
                {
                    // NOTE(review): not awaited despite the Async suffix — the sibling video-frame
                    // processor awaits a call of the same name; confirm this overload is synchronous.
                    string personName = GetDisplayTextForPersonAsync(analyzer, faceMatch);
                    if (string.IsNullOrEmpty(personName))
                    {
                        // Still unidentified; record the failed attempt.
                        this.pendingIdentificationAttemptCount[faceId]++;
                    }
                    else
                    {
                        // Identified — drop from the pending list and relabel the existing track.
                        this.pendingIdentificationAttemptCount.Remove(faceId);

                        VideoTrack existingTrack = (VideoTrack)this.peopleListView.Children
                            .FirstOrDefault(f => (Guid)((FrameworkElement)f).Tag == faceId);
                        if (existingTrack != null)
                        {
                            existingTrack.DisplayText = string.Format("{0}, {1}", personName, Math.Floor(faceMatch.Face.FaceAttributes.Age));
                        }
                    }
                }
                else
                {
                    // Exceeded the retry budget — give up on identifying this face.
                    this.pendingIdentificationAttemptCount.Remove(faceId);
                }
            }
        }
        else
        {
            // New person — catalog them. Enlarge the face rectangle so the crop frames the
            // whole head, clamped to the frame bounds.
            double heightScaleFactor = 1.8;
            double widthScaleFactor = 1.8;
            Rectangle biggerRectangle = new Rectangle
            {
                Height = Math.Min((int)(faceMatch.Face.FaceRectangle.Height * heightScaleFactor), FrameRelayVideoEffect.LatestSoftwareBitmap.PixelHeight),
                Width = Math.Min((int)(faceMatch.Face.FaceRectangle.Width * widthScaleFactor), FrameRelayVideoEffect.LatestSoftwareBitmap.PixelWidth)
            };
            biggerRectangle.Left = Math.Max(0, faceMatch.Face.FaceRectangle.Left - (int)(faceMatch.Face.FaceRectangle.Width * ((widthScaleFactor - 1) / 2)));
            biggerRectangle.Top = Math.Max(0, faceMatch.Face.FaceRectangle.Top - (int)(faceMatch.Face.FaceRectangle.Height * ((heightScaleFactor - 1) / 1.4)));

            var croppedImage = await Util.GetCroppedBitmapAsync(analyzer.GetImageStreamCallback, biggerRectangle);
            if (croppedImage == null || (biggerRectangle.Height == 0 && biggerRectangle.Width == 0))
            {
                // Couldn't get a usable shot of this person.
                continue;
            }

            statsNeedRefresh = true;

            // NOTE(review): not awaited despite the Async suffix — see note above; confirm.
            string personName = GetDisplayTextForPersonAsync(analyzer, faceMatch);
            if (string.IsNullOrEmpty(personName))
            {
                // Fall back to gender as the label and queue this face for identification retries.
                personName = faceMatch.Face.FaceAttributes.Gender;
                this.pendingIdentificationAttemptCount.Add(faceId, 1);
            }

            visitor = new Visitor { UniqueId = faceId };
            this.peopleInVideo.Add(faceId, visitor);
            this.demographics.Visitors.Add(visitor);

            // Update the demographics stats.
            this.UpdateDemographics(faceMatch);

            VideoTrack videoTrack = new VideoTrack
            {
                Tag = faceId,
                CroppedFace = croppedImage,
                DisplayText = string.Format("{0}, {1}", personName, Math.Floor(faceMatch.Face.FaceAttributes.Age)),
                Duration = (int)this.videoPlayer.NaturalDuration.TimeSpan.TotalSeconds,
            };
            videoTrack.Tapped += this.TimelineTapped;
            this.peopleListView.Children.Insert(0, videoTrack);
        }

        // Update the timeline entry for this person with the emotion seen in this frame.
        VideoTrack track = (VideoTrack)this.peopleListView.Children
            .FirstOrDefault(f => (Guid)((FrameworkElement)f).Tag == faceId);
        if (track != null)
        {
            Emotion matchingEmotion = CoreUtil.FindFaceClosestToRegion(analyzer.DetectedEmotion, faceMatch.Face.FaceRectangle);
            if (matchingEmotion == null)
            {
                // No emotion match for this face — default to fully neutral.
                matchingEmotion = new Emotion { Scores = new EmotionScores { Neutral = 1 } };
            }
            track.SetVideoFrameState(frameNumber, matchingEmotion.Scores);

            // Keep active tracks near the top so they stay visible.
            uint childIndex = (uint)this.peopleListView.Children.IndexOf(track);
            if (childIndex > 5)
            {
                this.peopleListView.Children.Move(childIndex, 5);
            }
        }

        if (statsNeedRefresh)
        {
            this.ageGenderDistributionControl.UpdateData(this.demographics);
        }

        this.overallStatsControl.UpdateData(this.demographics);
    }
}
/// <summary>
/// Analyzes the current video frame once per second of playback: runs emotion/face detection
/// plus identification and similar-face matching, then catalogs new visitors, retries pending
/// identifications, records per-frame emotion on each timeline track, and refreshes stats.
/// </summary>
private async Task ProcessCurrentVideoFrame()
{
    int frameNumber = (int)this.videoPlayer.Position.TotalSeconds;
    if (this.processedFrames.Contains(frameNumber))
    {
        // This second of video was already analyzed.
        return;
    }

    // Remember which video we started on so we can detect a source change mid-processing.
    Guid videoIdBeforeProcessing = this.currentVideoId;

    var analyzer = new ImageAnalyzer(await Util.GetPixelBytesFromSoftwareBitmapAsync(FrameRelayVideoEffect.LatestSoftwareBitmap));

    DateTime start = DateTime.Now;

    // Compute Emotion, Age and Gender.
    await Task.WhenAll(analyzer.DetectEmotionAsync(), analyzer.DetectFacesAsync(detectFaceAttributes: true));

    // Compute Face Identification and Unique Face Ids.
    await Task.WhenAll(analyzer.IdentifyFacesAsync(), analyzer.FindSimilarPersistedFacesAsync());

    foreach (var faceMatch in analyzer.SimilarFaceMatches)
    {
        if (videoIdBeforeProcessing != this.currentVideoId)
        {
            // Media source changed while we were processing. Make sure we are in a clear state again.
            await this.ResetStateAsync();
            break;
        }

        Guid faceId = faceMatch.SimilarPersistedFace.PersistedFaceId;
        bool statsNeedRefresh = false;

        Visitor visitor;
        if (this.peopleInVideo.TryGetValue(faceId, out visitor))
        {
            // Known visitor: bump the sighting count.
            visitor.Count++;

            // If identification is still pending for this face, retry a limited number of times.
            if (this.pendingIdentificationAttemptCount.ContainsKey(faceId))
            {
                if (this.pendingIdentificationAttemptCount[faceId] <= 5)
                {
                    string personName = await GetDisplayTextForPersonAsync(analyzer, faceMatch);
                    if (string.IsNullOrEmpty(personName))
                    {
                        // Still unidentified; record the failed attempt.
                        this.pendingIdentificationAttemptCount[faceId]++;
                    }
                    else
                    {
                        // Identified — drop from the pending list and relabel the existing track.
                        this.pendingIdentificationAttemptCount.Remove(faceId);

                        VideoTrack existingTrack = (VideoTrack)this.peopleListView.Children
                            .FirstOrDefault(f => (Guid)((FrameworkElement)f).Tag == faceId);
                        if (existingTrack != null)
                        {
                            existingTrack.DisplayText = string.Format("{0}, {1}", personName, Math.Floor(faceMatch.Face.FaceAttributes.Age));
                        }
                    }
                }
                else
                {
                    // Exceeded the retry budget — give up on identifying this face.
                    this.pendingIdentificationAttemptCount.Remove(faceId);
                }
            }
        }
        else
        {
            // New person — catalog them. Enlarge the face rectangle so the crop frames the
            // whole head, clamped to the frame bounds.
            double heightScaleFactor = 1.8;
            double widthScaleFactor = 1.8;
            Rectangle biggerRectangle = new Rectangle
            {
                Height = Math.Min((int)(faceMatch.Face.FaceRectangle.Height * heightScaleFactor), FrameRelayVideoEffect.LatestSoftwareBitmap.PixelHeight),
                Width = Math.Min((int)(faceMatch.Face.FaceRectangle.Width * widthScaleFactor), FrameRelayVideoEffect.LatestSoftwareBitmap.PixelWidth)
            };
            biggerRectangle.Left = Math.Max(0, faceMatch.Face.FaceRectangle.Left - (int)(faceMatch.Face.FaceRectangle.Width * ((widthScaleFactor - 1) / 2)));
            biggerRectangle.Top = Math.Max(0, faceMatch.Face.FaceRectangle.Top - (int)(faceMatch.Face.FaceRectangle.Height * ((heightScaleFactor - 1) / 1.4)));

            var croppedImage = await Util.GetCroppedBitmapAsync(analyzer.GetImageStreamCallback, biggerRectangle);
            if (croppedImage == null || (biggerRectangle.Height == 0 && biggerRectangle.Width == 0))
            {
                // Couldn't get a usable shot of this person.
                continue;
            }

            statsNeedRefresh = true;

            string personName = await GetDisplayTextForPersonAsync(analyzer, faceMatch);
            if (string.IsNullOrEmpty(personName))
            {
                // Fall back to gender as the label and queue this face for identification retries.
                personName = faceMatch.Face.FaceAttributes.Gender;
                this.pendingIdentificationAttemptCount.Add(faceId, 1);
            }

            visitor = new Visitor { UniqueId = faceId };
            this.peopleInVideo.Add(faceId, visitor);
            this.demographics.Visitors.Add(visitor);

            // Update the demographics stats.
            this.UpdateDemographics(faceMatch);

            VideoTrack videoTrack = new VideoTrack
            {
                Tag = faceId,
                CroppedFace = croppedImage,
                DisplayText = string.Format("{0}, {1}", personName, Math.Floor(faceMatch.Face.FaceAttributes.Age)),
                Duration = (int)this.videoPlayer.NaturalDuration.TimeSpan.TotalSeconds,
            };
            videoTrack.Tapped += this.TimelineTapped;
            this.peopleListView.Children.Add(videoTrack);
        }

        // Update the timeline entry for this person with the emotion seen in this frame.
        VideoTrack track = (VideoTrack)this.peopleListView.Children
            .FirstOrDefault(f => (Guid)((FrameworkElement)f).Tag == faceId);
        if (track != null)
        {
            Emotion matchingEmotion = CoreUtil.FindFaceClosestToRegion(analyzer.DetectedEmotion, faceMatch.Face.FaceRectangle);
            if (matchingEmotion == null)
            {
                // No emotion match for this face — default to fully neutral.
                matchingEmotion = new Emotion { Scores = new Scores { Neutral = 1 } };
            }
            track.SetVideoFrameState(frameNumber, matchingEmotion.Scores);
        }

        if (statsNeedRefresh)
        {
            this.ageGenderDistributionControl.UpdateData(this.demographics);
        }

        this.overallStatsControl.UpdateData(this.demographics);
    }

    debugText.Text = string.Format("Latency: {0:0}ms", (DateTime.Now - start).TotalMilliseconds);
    this.processedFrames.Add(frameNumber);
}