        // Aggregates object-detection insights for a single video frame: tallies how often each object
        // label appears in the video and keeps a timeline track per label up to date.
        private async Task ProcessObjectDetectionInsightsAsync(ImageAnalyzer analyzer, int frameNumber)
        {
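            // Cache this frame's analysis result so detections can be looked up later by frame number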
            this.detectedObjectsInFrame.Add(frameNumber, analyzer);

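            // Tally each detected object label; the first sighting of a label creates a timeline track
            // that uses a crop of the detected object as its thumbnail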
            foreach (var detectedObject in analyzer.DetectedObjects)
            {
                if (this.detectedObjectsInVideo.ContainsKey(detectedObject.ObjectProperty))
                {
                    this.detectedObjectsInVideo[detectedObject.ObjectProperty]++;
                }
                else
                {
                    this.detectedObjectsInVideo[detectedObject.ObjectProperty] = 1;

                    ImageSource croppedContent = await Util.GetCroppedBitmapAsync(analyzer.GetImageStreamCallback,
                                                                                  new Microsoft.Azure.CognitiveServices.Vision.Face.Models.FaceRectangle
                    {
                        Left   = detectedObject.Rectangle.X,
                        Top    = detectedObject.Rectangle.Y,
                        Width  = detectedObject.Rectangle.W,
                        Height = detectedObject.Rectangle.H
                    });

                    BitmapImage frameBitmap = new BitmapImage();
                    await frameBitmap.SetSourceAsync((await analyzer.GetImageStreamCallback()).AsRandomAccessStream());

                    VideoTrack videoTrack = new VideoTrack
                    {
                        Tag         = detectedObject.ObjectProperty,
                        CroppedFace = croppedContent,
                        DisplayText = detectedObject.ObjectProperty,
                        Duration    = (int)this.videoPlayer.NaturalDuration.TimeSpan.TotalSeconds,
                    };

                    videoTrack.Tapped += this.TimelineTapped;
                    this.detectedObjectsListView.Children.Insert(0, videoTrack);

                    this.FilterDetectedObjectTimeline();
                }

                // Update the timeline for this tag
                VideoTrack track = (VideoTrack)this.detectedObjectsListView.Children.FirstOrDefault(f => (string)((FrameworkElement)f).Tag == detectedObject.ObjectProperty);
                if (track != null)
                {
                    track.SetVideoFrameState(frameNumber, new Emotion { Neutral = 1 }, analyzer);

                    uint childIndex = (uint)this.detectedObjectsListView.Children.IndexOf(track);
                    if (childIndex > 5)
                    {
                        // Bring towards the top so it becomes visible
                        this.detectedObjectsListView.Children.Move(childIndex, 5);
                    }
                }
            }

            this.UpdateObjectDetectionFilters();
        }
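
        // Aggregates image-tag insights for a single video frame: tallies how often each tag appears
        // in the video and keeps a timeline track per tag up to date.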
        private async Task ProcessVisualFeaturesInsightsAsync(ImageAnalyzer analyzer, int frameNumber)
        {
            var tags = analyzer.AnalysisResult.Tags;

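            // Hide gender-related tags when age/gender display is turned off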
            if (!ShowAgeAndGender)
            {
                tags = tags.Where(t => !Util.ContainsGenderRelatedKeyword(t.Name)).ToList();
            }

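            // Tally each tag; the first sighting of a tag creates a timeline track that uses the
            // full frame as its thumbnail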
            foreach (var tag in tags)
            {
                if (this.tagsInVideo.ContainsKey(tag.Name))
                {
                    this.tagsInVideo[tag.Name]++;
                }
                else
                {
                    this.tagsInVideo[tag.Name] = 1;

                    BitmapImage frameBitmap = new BitmapImage();
                    await frameBitmap.SetSourceAsync((await analyzer.GetImageStreamCallback()).AsRandomAccessStream());

                    VideoTrack videoTrack = new VideoTrack
                    {
                        Tag         = tag.Name,
                        CroppedFace = frameBitmap,
                        DisplayText = tag.Name,
                        Duration    = (int)this.videoPlayer.NaturalDuration.TimeSpan.TotalSeconds,
                    };

                    videoTrack.Tapped += this.TimelineTapped;
                    this.tagsListView.Children.Insert(0, videoTrack);

                    this.FilterFeatureTimeline();
                }

                // Update the timeline for this tag
                VideoTrack track = (VideoTrack)this.tagsListView.Children.FirstOrDefault(f => (string)((FrameworkElement)f).Tag == tag.Name);
                if (track != null)
                {
                    track.SetVideoFrameState(frameNumber, new Emotion { Neutral = 1 });

                    uint childIndex = (uint)this.tagsListView.Children.IndexOf(track);
                    if (childIndex > 5)
                    {
                        // Bring towards the top so it becomes visible
                        this.tagsListView.Children.Move(childIndex, 5);
                    }
                }
            }

            this.UpdateTagFilters();
        }
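
        // Aggregates people insights for a single video frame: each unique face is keyed by its
        // persisted face id, unidentified faces are retried on later frames, and the people timeline
        // and demographics controls are kept up to date.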
        private async Task ProcessPeopleInsightsAsync(ImageAnalyzer analyzer, int frameNumber)
        {
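            // Known faces only get their counts updated (plus another identification attempt if still
            // unnamed); new faces are cataloged with a cropped thumbnail and a new timeline track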
            foreach (var item in analyzer.SimilarFaceMatches)
            {
                bool    demographicsChanged = false;
                Visitor personInVideo;
                Guid    persistedFaceId = item.SimilarPersistedFace.PersistedFaceId.GetValueOrDefault();
                if (this.peopleInVideo.TryGetValue(persistedFaceId, out personInVideo))
                {
                    personInVideo.Count++;

                    if (this.pendingIdentificationAttemptCount.ContainsKey(persistedFaceId))
                    {
                        // This is a face we haven't identified yet. Check how many times we have tried, and decide whether to try again or give up
                        if (this.pendingIdentificationAttemptCount[persistedFaceId] <= 5)
                        {
                            string personName = await GetDisplayTextForPersonAsync(analyzer, item);
                            if (string.IsNullOrEmpty(personName))
                            {
                                // Increment the times we have tried and failed to identify this person
                                this.pendingIdentificationAttemptCount[persistedFaceId]++;
                            }
                            else
                            {
                                // Bingo! Let's remove it from the list of pending identifications
                                this.pendingIdentificationAttemptCount.Remove(persistedFaceId);

                                VideoTrack existingTrack = (VideoTrack)this.peopleListView.Children.FirstOrDefault(f => (Guid)((FrameworkElement)f).Tag == persistedFaceId);
                                if (existingTrack != null)
                                {
                                    existingTrack.DisplayText = ShowAgeAndGender ?
                                                                string.Format("{0}, {1}", personName, Math.Floor(item.Face.FaceAttributes.Age.GetValueOrDefault())) :
                                                                personName;
                                }
                            }
                        }
                        else
                        {
                            // Give up
                            this.pendingIdentificationAttemptCount.Remove(persistedFaceId);
                        }
                    }
                }
                else
                {
                    // New person... let's catalog it.

                    // Crop the face, enlarging the rectangle so we frame it better
                    double heightScaleFactor = 1.8;
                    double widthScaleFactor  = 1.8;
                    var    biggerRectangle   = new Microsoft.Azure.CognitiveServices.Vision.Face.Models.FaceRectangle
                    {
                        Height = Math.Min((int)(item.Face.FaceRectangle.Height * heightScaleFactor), FrameRelayVideoEffect.LatestSoftwareBitmap.PixelHeight),
                        Width  = Math.Min((int)(item.Face.FaceRectangle.Width * widthScaleFactor), FrameRelayVideoEffect.LatestSoftwareBitmap.PixelWidth)
                    };
                    biggerRectangle.Left = Math.Max(0, item.Face.FaceRectangle.Left - (int)(item.Face.FaceRectangle.Width * ((widthScaleFactor - 1) / 2)));
                    biggerRectangle.Top  = Math.Max(0, item.Face.FaceRectangle.Top - (int)(item.Face.FaceRectangle.Height * ((heightScaleFactor - 1) / 1.4)));

                    var croppedImage = await Util.GetCroppedBitmapAsync(analyzer.GetImageStreamCallback, biggerRectangle.ToRect());

                    if (croppedImage == null || (biggerRectangle.Height == 0 && biggerRectangle.Width == 0))
                    {
                        // Couldn't get a shot of this person
                        continue;
                    }

                    demographicsChanged = true;

                    string personName = await GetDisplayTextForPersonAsync(analyzer, item);
                    if (string.IsNullOrEmpty(personName))
                    {
                        if (ShowAgeAndGender)
                        {
                            personName = item.Face.FaceAttributes.Gender?.ToString();
                        }

                        // Add the person to the list of pending identifications so we can try again on some future frames
                        this.pendingIdentificationAttemptCount.Add(persistedFaceId, 1);
                    }

                    personInVideo = new Visitor { UniqueId = persistedFaceId };
                    this.peopleInVideo.Add(persistedFaceId, personInVideo);
                    this.demographics.Visitors.Add(personInVideo);

                    // Update the demographics stats.
                    this.UpdateDemographics(item);

                    VideoTrack videoTrack = new VideoTrack
                    {
                        Tag         = persistedFaceId,
                        CroppedFace = croppedImage,
                        DisplayText = ShowAgeAndGender ? string.Format("{0}, {1}", personName, Math.Floor(item.Face.FaceAttributes.Age.GetValueOrDefault())) : personName,
                        Duration    = (int)this.videoPlayer.NaturalDuration.TimeSpan.TotalSeconds,
                    };

                    videoTrack.Tapped += this.TimelineTapped;

                    this.peopleListView.Children.Insert(0, videoTrack);
                }

                // Update the timeline for this person
                VideoTrack track = (VideoTrack)this.peopleListView.Children.FirstOrDefault(f => (Guid)((FrameworkElement)f).Tag == persistedFaceId);
                if (track != null)
                {
                    track.SetVideoFrameState(frameNumber, item.Face.FaceAttributes.Emotion);

                    uint childIndex = (uint)this.peopleListView.Children.IndexOf(track);
                    if (childIndex > 5)
                    {
                        // Bring it towards the top so it becomes visible
                        this.peopleListView.Children.Move(childIndex, 5);
                    }
                }

                if (demographicsChanged)
                {
                    this.ageGenderDistributionControl.UpdateData(this.demographics);
                }

                this.overallStatsControl.UpdateData(this.demographics);
            }
        }
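
        // Analyzes the frame currently shown by the video player: detects faces with attributes and
        // emotion, identifies them and matches them against previously seen faces, then updates the
        // people timeline and demographics.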
        private async Task ProcessCurrentVideoFrame()
        {
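            // Key frames by whole seconds of playback position so each second is analyzed at most once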
            int frameNumber = (int)this.videoPlayer.Position.TotalSeconds;

            if (this.processedFrames.Contains(frameNumber))
            {
                return;
            }

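            // Remember which video we started with so we can detect a source change while the async calls are in flight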
            Guid videoIdBeforeProcessing = this.currentVideoId;

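            // Wrap the pixels of the latest frame captured by the video effect in an ImageAnalyzer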
            var analyzer = new ImageAnalyzer(await Util.GetPixelBytesFromSoftwareBitmapAsync(FrameRelayVideoEffect.LatestSoftwareBitmap));

            DateTime start = DateTime.Now;

            // Compute Emotion, Age and Gender
            await Task.WhenAll(analyzer.DetectEmotionAsync(), analyzer.DetectFacesAsync(detectFaceAttributes: true));

            // Compute Face Identification and Unique Face Ids
            await Task.WhenAll(analyzer.IdentifyFacesAsync(), analyzer.FindSimilarPersistedFacesAsync());

            foreach (var item in analyzer.SimilarFaceMatches)
            {
                if (videoIdBeforeProcessing != this.currentVideoId)
                {
                    // Media source changed while we were processing. Make sure we are in a clean state again.
                    await this.ResetStateAsync();

                    break;
                }

                bool    demographicsChanged = false;
                Visitor personInVideo;
                if (this.peopleInVideo.TryGetValue(item.SimilarPersistedFace.PersistedFaceId, out personInVideo))
                {
                    personInVideo.Count++;

                    if (this.pendingIdentificationAttemptCount.ContainsKey(item.SimilarPersistedFace.PersistedFaceId))
                    {
                        // This is a face we haven't identified yet. Check how many times we have tried, and decide whether to try again or give up
                        if (this.pendingIdentificationAttemptCount[item.SimilarPersistedFace.PersistedFaceId] <= 5)
                        {
                            string personName = await GetDisplayTextForPersonAsync(analyzer, item);

                            if (string.IsNullOrEmpty(personName))
                            {
                                // Increment the times we have tried and failed to identify this person
                                this.pendingIdentificationAttemptCount[item.SimilarPersistedFace.PersistedFaceId]++;
                            }
                            else
                            {
                                // Bingo! Let's remove it from the list of pending identifications
                                this.pendingIdentificationAttemptCount.Remove(item.SimilarPersistedFace.PersistedFaceId);

                                VideoTrack existingTrack = (VideoTrack)this.peopleListView.Children.FirstOrDefault(f => (Guid)((FrameworkElement)f).Tag == item.SimilarPersistedFace.PersistedFaceId);
                                if (existingTrack != null)
                                {
                                    existingTrack.DisplayText = string.Format("{0}, {1}", personName, Math.Floor(item.Face.FaceAttributes.Age));
                                }
                            }
                        }
                        else
                        {
                            // Give up
                            this.pendingIdentificationAttemptCount.Remove(item.SimilarPersistedFace.PersistedFaceId);
                        }
                    }
                }
                else
                {
                    // New person... let's catalog it.

                    // Crop the face, enlarging the rectangle so we frame it better
                    double    heightScaleFactor = 1.8;
                    double    widthScaleFactor  = 1.8;
                    Rectangle biggerRectangle   = new Rectangle
                    {
                        Height = Math.Min((int)(item.Face.FaceRectangle.Height * heightScaleFactor), FrameRelayVideoEffect.LatestSoftwareBitmap.PixelHeight),
                        Width  = Math.Min((int)(item.Face.FaceRectangle.Width * widthScaleFactor), FrameRelayVideoEffect.LatestSoftwareBitmap.PixelWidth)
                    };
                    biggerRectangle.Left = Math.Max(0, item.Face.FaceRectangle.Left - (int)(item.Face.FaceRectangle.Width * ((widthScaleFactor - 1) / 2)));
                    biggerRectangle.Top  = Math.Max(0, item.Face.FaceRectangle.Top - (int)(item.Face.FaceRectangle.Height * ((heightScaleFactor - 1) / 1.4)));

                    var croppedImage = await Util.GetCroppedBitmapAsync(analyzer.GetImageStreamCallback, biggerRectangle);

                    if (croppedImage == null || (biggerRectangle.Height == 0 && biggerRectangle.Width == 0))
                    {
                        // Couldn't get a shot of this person
                        continue;
                    }

                    demographicsChanged = true;

                    string personName = await GetDisplayTextForPersonAsync(analyzer, item);

                    if (string.IsNullOrEmpty(personName))
                    {
                        personName = item.Face.FaceAttributes.Gender;

                        // Add the person to the list of pending identifications so we can try again on some future frames
                        this.pendingIdentificationAttemptCount.Add(item.SimilarPersistedFace.PersistedFaceId, 1);
                    }

                    personInVideo = new Visitor { UniqueId = item.SimilarPersistedFace.PersistedFaceId };
                    this.peopleInVideo.Add(item.SimilarPersistedFace.PersistedFaceId, personInVideo);
                    this.demographics.Visitors.Add(personInVideo);

                    // Update the demographics stats.
                    this.UpdateDemographics(item);

                    VideoTrack videoTrack = new VideoTrack
                    {
                        Tag         = item.SimilarPersistedFace.PersistedFaceId,
                        CroppedFace = croppedImage,
                        DisplayText = string.Format("{0}, {1}", personName, Math.Floor(item.Face.FaceAttributes.Age)),
                        Duration    = (int)this.videoPlayer.NaturalDuration.TimeSpan.TotalSeconds,
                    };

                    videoTrack.Tapped += this.TimelineTapped;

                    this.peopleListView.Children.Add(videoTrack);
                }

                // Update the timeline for this person
                VideoTrack track = (VideoTrack)this.peopleListView.Children.FirstOrDefault(f => (Guid)((FrameworkElement)f).Tag == item.SimilarPersistedFace.PersistedFaceId);
                if (track != null)
                {
                    Emotion matchingEmotion = CoreUtil.FindFaceClosestToRegion(analyzer.DetectedEmotion, item.Face.FaceRectangle);
                    if (matchingEmotion == null)
                    {
                        matchingEmotion = new Emotion { Scores = new Scores { Neutral = 1 } };
                    }

                    track.SetVideoFrameState(frameNumber, matchingEmotion.Scores);
                }

                if (demographicsChanged)
                {
                    this.ageGenderDistributionControl.UpdateData(this.demographics);
                }

                this.overallStatsControl.UpdateData(this.demographics);
            }

            debugText.Text = string.Format("Latency: {0:0}ms", (DateTime.Now - start).TotalMilliseconds);

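            // Mark this frame as processed so it is not analyzed again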
            this.processedFrames.Add(frameNumber);
        }