private async Task AddImageInsightsToViewModel(StorageFolder rootFolder, ImageInsights insights)
{
    // Load the image from file. DecodePixelHeight must be set before the source is
    // assigned, otherwise the decoder ignores it, so set it first.
    BitmapImage bitmapImage = new BitmapImage();
    bitmapImage.DecodePixelHeight = 360;
    StorageFile imageFile = await rootFolder.GetFileAsync(insights.ImageId);
    await bitmapImage.SetSourceAsync((await imageFile.OpenStreamForReadAsync()).AsRandomAccessStream());

    // Create the view model and add it to the result collections
    ImageInsightsViewModel insightsViewModel = new ImageInsightsViewModel(insights, bitmapImage);
    this.AllResults.Add(insightsViewModel);
    this.FilteredResults.Add(insightsViewModel);

    // Update the tag filters with the tags found in this image
    foreach (var tag in insights.VisionInsights.Tags)
    {
        TagFilterViewModel tvm = this.TagFilters.FirstOrDefault(t => t.Tag == tag);
        if (tvm == null)
        {
            tvm = new TagFilterViewModel(tag);
            this.TagFilters.Add(tvm);
        }
        tvm.Count++;
    }

    // Update the face filters, cropping a thumbnail for each face not seen before
    foreach (var faceInsights in insights.FaceInsights)
    {
        FaceFilterViewModel fvm = this.FaceFilters.FirstOrDefault(f => f.FaceId == faceInsights.UniqueFaceId);
        if (fvm == null)
        {
            // Reuse the file we already resolved above to crop the face thumbnail
            ImageSource croppedFace = await Util.GetCroppedBitmapAsync(
                imageFile.OpenStreamForReadAsync,
                new FaceRectangle
                {
                    Height = faceInsights.FaceRectangle.Height,
                    Width = faceInsights.FaceRectangle.Width,
                    Left = faceInsights.FaceRectangle.Left,
                    Top = faceInsights.FaceRectangle.Top
                });

            fvm = new FaceFilterViewModel(faceInsights.UniqueFaceId, croppedFace);
            this.FaceFilters.Add(fvm);
        }
        fvm.Count++;
    }

    // Update the emotion filters with the distinct top emotions in this image
    var distinctEmotions = insights.FaceInsights.Select(f => f.TopEmotion).Distinct();
    foreach (var emotion in distinctEmotions)
    {
        EmotionFilterViewModel evm = this.EmotionFilters.FirstOrDefault(f => f.Emotion == emotion);
        if (evm == null)
        {
            evm = new EmotionFilterViewModel(emotion);
            this.EmotionFilters.Add(evm);
        }
        evm.Count++;
    }
}
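// For reference, a minimal sketch of the filter view model shapes implied by the usage
// above. This is an assumption inferred from the property accesses (Tag, FaceId, Emotion,
// Count) and constructor calls; the real classes likely add change notification and a
// checked/selected state for the filter UI. The CroppedFace property name is hypothetical.
public class TagFilterViewModel
{
    public TagFilterViewModel(string tag) { this.Tag = tag; }
    public string Tag { get; }
    public int Count { get; set; }
}

public class FaceFilterViewModel
{
    public FaceFilterViewModel(Guid faceId, ImageSource croppedFace)
    {
        this.FaceId = faceId;
        this.CroppedFace = croppedFace;  // hypothetical property name for the thumbnail
    }
    public Guid FaceId { get; }
    public ImageSource CroppedFace { get; }
    public int Count { get; set; }
}

public class EmotionFilterViewModel
{
    public EmotionFilterViewModel(string emotion) { this.Emotion = emotion; }
    public string Emotion { get; }
    public int Count { get; set; }
}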
public static async Task<ImageInsights> ProcessImageAsync(Func<Task<Stream>> imageStream, string imageId)
{
    ImageAnalyzer analyzer = new ImageAnalyzer(imageStream);
    analyzer.ShowDialogOnFaceApiErrors = true;

    // Trigger the vision, face and emotion requests in parallel
    await Task.WhenAll(
        analyzer.AnalyzeImageAsync(null, visualFeatures: DefaultVisualFeatureTypes),
        analyzer.DetectFacesAsync(detectFaceAttributes: true));

    // Trigger a face match against previously seen faces
    await analyzer.FindSimilarPersistedFacesAsync();

    ImageInsights result = new ImageInsights { ImageId = imageId };

    // Assign the Computer Vision results
    result.VisionInsights = new VisionInsights
    {
        Caption = analyzer.AnalysisResult.Description?.Captions.FirstOrDefault()?.Text,
        Tags = analyzer.AnalysisResult.Tags != null
            ? analyzer.AnalysisResult.Tags.Select(t => t.Name).ToArray()
            : new string[0]
    };

    // Assign the Face API results, including the top-ranked emotion per face
    List<FaceInsights> faceInsightsList = new List<FaceInsights>();
    foreach (var face in analyzer.DetectedFaces)
    {
        FaceInsights faceInsights = new FaceInsights
        {
            FaceRectangle = face.FaceRectangle,
            Age = face.FaceAttributes.Age.GetValueOrDefault(),
            Gender = face.FaceAttributes.Gender?.ToString() ?? string.Empty,
            TopEmotion = Util.EmotionToRankedList(face.FaceAttributes.Emotion).First().Key
        };

        // If this face matched a previously seen face, carry over its unique id
        SimilarFaceMatch similarFaceMatch = analyzer.SimilarFaceMatches.FirstOrDefault(s => s.Face.FaceId == face.FaceId);
        if (similarFaceMatch != null)
        {
            faceInsights.UniqueFaceId = similarFaceMatch.SimilarPersistedFace.PersistedFaceId.GetValueOrDefault();
        }

        faceInsightsList.Add(faceInsights);
    }

    result.FaceInsights = faceInsightsList.ToArray();
    return result;
}
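// A minimal sketch of the data contract that ProcessImageAsync fills in, inferred from
// the property accesses above (an assumption; the real types in the sample may carry
// additional members or attributes for json serialization).
public class ImageInsights
{
    public string ImageId { get; set; }
    public VisionInsights VisionInsights { get; set; }
    public FaceInsights[] FaceInsights { get; set; }
}

public class VisionInsights
{
    public string Caption { get; set; }
    public string[] Tags { get; set; }
}

public class FaceInsights
{
    public Guid UniqueFaceId { get; set; }
    public FaceRectangle FaceRectangle { get; set; }
    public double Age { get; set; }
    public string Gender { get; set; }
    public string TopEmotion { get; set; }  // type assumed; could be an enum in the real sample
}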
private async Task ProcessImagesAsync(StorageFolder rootFolder, bool forceProcessing = false)
{
    this.progressRing.IsActive = true;
    this.landingMessage.Visibility = Visibility.Collapsed;
    this.filterTab.Visibility = Visibility.Visible;
    this.reprocessImagesButton.IsEnabled = true;

    this.FilteredResults.Clear();
    this.AllResults.Clear();
    this.TagFilters.Clear();
    this.EmotionFilters.Clear();
    this.FaceFilters.Clear();

    List<ImageInsights> insightsList = new List<ImageInsights>();

    if (!forceProcessing)
    {
        // See if we have pre-computed results and, if so, load them from the json file
        try
        {
            StorageFile insightsResultFile = (await rootFolder.TryGetItemAsync("ImageInsights.json")) as StorageFile;
            if (insightsResultFile != null)
            {
                using (StreamReader reader = new StreamReader(await insightsResultFile.OpenStreamForReadAsync()))
                {
                    insightsList = JsonConvert.DeserializeObject<List<ImageInsights>>(await reader.ReadToEndAsync());
                    foreach (var insights in insightsList)
                    {
                        await AddImageInsightsToViewModel(rootFolder, insights);
                    }
                }
            }
        }
        catch
        {
            // Ignore errors and fall through to compute everything again
        }
    }

    if (!insightsList.Any())
    {
        // Start with fresh face lists
        await FaceListManager.ResetFaceLists();

        // Enumerate the images in the folder and extract the insights
        QueryOptions fileQueryOptions = new QueryOptions(CommonFileQuery.DefaultQuery, new[] { ".png", ".jpg", ".bmp", ".jpeg", ".gif" });
        StorageFileQueryResult queryResult = rootFolder.CreateFileQueryWithOptions(fileQueryOptions);

        // Comparing IsChecked (a bool?) against true avoids a crash if it is ever null
        var queryFileList = this.limitProcessingToggleButton.IsChecked == true
            ? await queryResult.GetFilesAsync(0, 50)
            : await queryResult.GetFilesAsync();

        foreach (var item in queryFileList)
        {
            // Resize (if needed) to reduce network latency, storing the result in a temporary file
            StorageFile resizedFile = await ApplicationData.Current.TemporaryFolder.CreateFileAsync(
                "ImageCollectionInsights.jpg", CreationCollisionOption.GenerateUniqueName);
            var resizeTransform = await Util.ResizePhoto(await item.OpenStreamForReadAsync(), 720, resizedFile);

            // Send the file for processing
            ImageInsights insights = await ImageProcessor.ProcessImageAsync(resizedFile.OpenStreamForReadAsync, item.Name);

            // Delete the resized file
            await resizedFile.DeleteAsync();

            // Map all FaceInsights coordinates back to the original photo using the
            // scale factors between the original and resized images
            foreach (var faceInsight in insights.FaceInsights)
            {
                faceInsight.FaceRectangle.Left = (int)(faceInsight.FaceRectangle.Left * resizeTransform.Item1);
                faceInsight.FaceRectangle.Top = (int)(faceInsight.FaceRectangle.Top * resizeTransform.Item2);
                faceInsight.FaceRectangle.Width = (int)(faceInsight.FaceRectangle.Width * resizeTransform.Item1);
                faceInsight.FaceRectangle.Height = (int)(faceInsight.FaceRectangle.Height * resizeTransform.Item2);
            }

            insightsList.Add(insights);
            await AddImageInsightsToViewModel(rootFolder, insights);
        }

        // Cache the results as json so the next visit to this folder loads instantly
        StorageFile jsonFile = await rootFolder.CreateFileAsync("ImageInsights.json", CreationCollisionOption.ReplaceExisting);
        using (StreamWriter writer = new StreamWriter(await jsonFile.OpenStreamForWriteAsync()))
        {
            string jsonStr = JsonConvert.SerializeObject(insightsList, Formatting.Indented);
            await writer.WriteAsync(jsonStr);
        }
    }

    // Sort the tag filters by descending count, breaking ties alphabetically
    List<TagFilterViewModel> tagsGroupedByCountAndSorted = new List<TagFilterViewModel>();
    foreach (var group in this.TagFilters.GroupBy(t => t.Count).OrderByDescending(g => g.Key))
    {
        tagsGroupedByCountAndSorted.AddRange(group.OrderBy(t => t.Tag));
    }
    this.TagFilters.Clear();
    this.TagFilters.AddRange(tagsGroupedByCountAndSorted);

    // Sort the emotion and face filters by descending count
    var sortedEmotions = this.EmotionFilters.OrderByDescending(e => e.Count).ToArray();
    this.EmotionFilters.Clear();
    this.EmotionFilters.AddRange(sortedEmotions);

    var sortedFaces = this.FaceFilters.OrderByDescending(f => f.Count).ToArray();
    this.FaceFilters.Clear();
    this.FaceFilters.AddRange(sortedFaces);

    this.progressRing.IsActive = false;
}
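// A hypothetical call site (not part of the sample code above) showing how
// ProcessImagesAsync might be wired to a folder picker so the user can choose which
// image collection to analyze. The handler name is an assumption; the FolderPicker
// usage itself is standard UWP.
private async void OnOpenFolderClicked(object sender, RoutedEventArgs e)
{
    var picker = new Windows.Storage.Pickers.FolderPicker
    {
        SuggestedStartLocation = Windows.Storage.Pickers.PickerLocationId.PicturesLibrary
    };
    picker.FileTypeFilter.Add("*");  // required before calling PickSingleFolderAsync

    StorageFolder folder = await picker.PickSingleFolderAsync();
    if (folder != null)  // null means the user cancelled the dialog
    {
        await this.ProcessImagesAsync(folder);
    }
}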
public ImageInsightsViewModel(ImageInsights insights, ImageSource imageSource)
{
    this.Insights = insights;
    this.ImageSource = imageSource;
}
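// A hypothetical filtering pass (an assumption, not part of the sample): recompute
// FilteredResults from AllResults whenever the set of checked tag filters changes.
// It relies only on members that appear above: AllResults, FilteredResults, and
// Insights.VisionInsights.Tags on each view model.
private void ApplyTagFilters(IEnumerable<string> activeTags)
{
    this.FilteredResults.Clear();
    foreach (ImageInsightsViewModel item in this.AllResults)
    {
        // Keep images that carry every active tag; an empty filter set keeps everything
        if (activeTags.All(tag => item.Insights.VisionInsights.Tags.Contains(tag)))
        {
            this.FilteredResults.Add(item);
        }
    }
}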