public IEnumerable<int> PeekInternal(IFilter filter, int peekCount, CancellationToken token)
{
    this.isInProgress = true;

    var lastReportedProgress = 0;
    yield return lastReportedProgress;
    lastReportedProgress += 20;

    int[] lastPercents = new int[this.LogFiles.Count];

    // Merge all log files into a single stream ordered by item timestamp.
    var merged = HeapMerger.Merge(
        token,
        Comparer<FullCosmosDataItem>.Create((d1, d2) => d1.Item.Time.CompareTo(d2.Item.Time)),
        this.LogFiles.Cast<IEnumerable<FullCosmosDataItem>>().ToArray());

    // Register every file so its index in this.files matches its position in this.LogFiles.
    for (var i = 0; i < this.LogFiles.Count; i++)
    {
        var fileIndex = this.files.Put(this.LogFiles[i].FileName);
        this.FileMetaData.Put(new FileCompressMetaData());
        Debug.Assert(fileIndex == i, $"The file index {fileIndex} doesn't equal i {i}");
    }

    var count = 0;
    foreach (var item in merged)
    {
        if (token.IsCancellationRequested)
        {
            yield break;
        }

        // Stop once peekCount items have been examined (a negative peekCount means no limit).
        if (++count > peekCount && peekCount >= 0)
        {
            yield break;
        }

        // Stop as soon as the first item matching the filter has been added.
        if (filter.Match(item.Item.Item, item.Item.Template))
        {
            item.Item.Item.FileIndex = item.SourceIndex;
            this.AddItem(item.Item);
            yield break;
        }

        // Report progress in 20% steps, based on the average progress across all files.
        lastPercents[item.SourceIndex] = item.Item.Percent;
        var totalPercent = (int)lastPercents.Average();
        if (totalPercent < lastReportedProgress)
        {
            continue;
        }

        yield return lastReportedProgress;
        lastReportedProgress += 20;
    }
}
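// A minimal usage sketch, not part of the original source: PeekWithProgress and its
// onProgress callback are hypothetical and only illustrate how a caller is expected to
// drain the progress enumerable returned by PeekInternal (which yields 0, 20, 40, ...).
public void PeekWithProgress(IFilter filter, int peekCount, Action<int> onProgress, CancellationToken token)
{
    foreach (var progress in this.PeekInternal(filter, peekCount, token))
    {
        onProgress?.Invoke(progress);
    }
}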
private IEnumerable<int> LoadInternal(IFilter filter, CancellationToken token)
{
    var lastReportedProgress = 0;
    yield return lastReportedProgress;

    if (this.LogFiles.Count == 0)
    {
        yield return 100;
        yield break;
    }

    // One file is split into 5 progress steps, so n files give 5 * n steps.
    var reportInterval = Math.Max(1, 100 / (this.LogFiles.Count * 5));
    var firstReportCount = 100;
    int count = 0;
    lastReportedProgress += reportInterval;

    int[] lastPercents = new int[this.LogFiles.Count];

    // Merge all log files into a single stream ordered by item timestamp,
    // applying the filter per file (if any) before the merge.
    IEnumerable<MergedItem<FullCosmosDataItem>> merged;
    if (filter == null)
    {
        merged = HeapMerger.Merge(
            token,
            Comparer<FullCosmosDataItem>.Create((d1, d2) => d1.Item.Time.CompareTo(d2.Item.Time)),
            this.LogFiles.Cast<IEnumerable<FullCosmosDataItem>>().ToArray());
    }
    else
    {
        merged = HeapMerger.Merge(
            token,
            Comparer<FullCosmosDataItem>.Create((d1, d2) => d1.Item.Time.CompareTo(d2.Item.Time)),
            this.LogFiles.Select(f => f.TakeWhile(i => !token.IsCancellationRequested)
                .Where(i => filter.Match(i.Item, i.Template))).ToArray());
    }

    // Register every file so its index in this.files matches its position in this.LogFiles.
    for (var i = 0; i < this.LogFiles.Count; i++)
    {
        var fileIndex = this.files.Put(this.LogFiles[i].FileName);
        this.FileMetaData.Put(new FileCompressMetaData());
        Debug.Assert(fileIndex == i, $"The file index {fileIndex} doesn't equal i {i}");
    }

    // Each pass over the merged stream refreshes all underlying files and loads new internal items.
    foreach (var item in merged)
    {
        if (token.IsCancellationRequested)
        {
            yield break;
        }

        count++;
        item.Item.Item.FileIndex = item.SourceIndex;
        this.AddItem(item.Item);

        // Report progress in reportInterval steps, based on the average progress across all
        // files, and force one report when the count reaches firstReportCount.
        lastPercents[item.SourceIndex] = item.Item.Percent;
        var totalPercent = (int)lastPercents.Average();
        if (totalPercent < lastReportedProgress && count != firstReportCount)
        {
            continue;
        }

        yield return lastReportedProgress;
        lastReportedProgress += reportInterval;
    }

    yield return 100;
}
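// A minimal consumption sketch, not part of the original source: LoadInternal is lazy, so a
// caller can pull it on a worker thread and forward each yielded percentage (0..100) to an
// IProgress<int> for UI updates. LoadAsync is hypothetical and assumes System.Threading.Tasks
// is available in this file's usings.
public Task LoadAsync(IFilter filter, IProgress<int> progress, CancellationToken token)
{
    return Task.Run(() =>
    {
        foreach (var percent in this.LoadInternal(filter, token))
        {
            progress?.Report(percent);
        }
    }, token);
}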