/// <summary>
/// Sequentially computes <paramref name="Count"/> averages, one per
/// quantum step from <paramref name="Start"/>, using the supplied
/// time-stepping and aggregation callbacks.
/// </summary>
private async Task<IEnumerable<Measurement>> GetManyAveragesAsync(
    DateTime Start, int Count,
    Func<DateTime, int, DateTime> AddQuanta,
    Func<DateTime, Task<Measurement>> GetAverageAsync)
{
    var averages = new Measurement[Count];
    for (int step = 0; step < Count; step++)
    {
        averages[step] = await GetAverageAsync(AddQuanta(Start, step));
    }
    return averages;
}
/// <summary>
/// Gets the day averages for the given days.
/// Days already cached in <c>dayAverages</c> are returned directly;
/// contiguous runs of missing days ("regions") have their underlying
/// hour averages bulk-computed first, after which each missing day's
/// average is requested individually.
/// </summary>
private async Task<IEnumerable<Measurement>> GetRealDayAveragesAsync(DateTime StartDay, int Count)
{
    if (dayAverages == null)
        await FetchDayAveragesAsync();
    // We want to compute _only_ those averages which
    // are not present yet in the cache, but we also
    // want to do that in parallel.
    var results = new Measurement[Count];
    // Length of the current run of consecutive cache misses.
    int regionSize = 0;
    for (int i = 0; i < Count; i++)
    {
        var key = StartDay.AddDays(i);
        if (dayAverages.TryGetValue(key, out results[i]))
        {
            // Cache hit: flush the run of misses that ended just
            // before this index, if there is one.
            if (regionSize > 0)
            {
                if (hourAverages == null)
                    await FetchHourAveragesAsync();
                int regionStart = i - regionSize;
                var regionStartDay = StartDay.AddDays(regionStart);
                // Bulk-compute 24 hour averages per missing day so the
                // per-day calls below can presumably be served from the
                // hour-average cache — TODO confirm GetRealDayAverageAsync
                // reads from that cache.
                await ComputeHourAveragesAsync(
                    regionStartDay, 24 * regionSize);
                for (int j = regionStart; j < i; j++)
                {
                    results[j] = await GetRealDayAverageAsync(StartDay.AddDays(j));
                }
                regionSize = 0;
            }
        }
        else
        {
            // This item is not present in the cache.
            // Don't compute it just yet, though.
            // We'd much rather group the aggregations
            // together, and do them in parallel.
            regionSize++;
        }
    }
    // Flush a trailing run of misses that reaches the end of the
    // requested range (same logic as the in-loop flush above).
    if (regionSize > 0)
    {
        if (hourAverages == null)
            await FetchHourAveragesAsync();
        int regionStart = Count - regionSize;
        var regionStartDay = StartDay.AddDays(regionStart);
        await ComputeHourAveragesAsync(
            regionStartDay, 24 * regionSize);
        for (int j = regionStart; j < Count; j++)
        {
            results[j] = await GetRealDayAverageAsync(StartDay.AddDays(j));
        }
    }
    return results;
}
/// <summary>
/// Gets the hour averages for a sizeable number of hours.
/// Hours already cached in <c>hourAverages</c> are returned directly;
/// contiguous runs of cache misses are grouped into regions and
/// bulk-computed (in parallel) via ComputeHourAveragesAsync instead
/// of being aggregated one hour at a time.
/// </summary>
private async Task<IEnumerable<Measurement>> GetRealHourAveragesAsync(DateTime Start, int Count)
{
    if (hourAverages == null)
        await FetchHourAveragesAsync();

    var results = new Measurement[Count];
    // Length of the current run of consecutive cache misses.
    int regionSize = 0;

    // Iterate one step past the end so a trailing run of misses is
    // flushed by the same code path as an interior run (the original
    // duplicated the flush logic after the loop).
    for (int i = 0; i <= Count; i++)
    {
        bool hit = i < Count && hourAverages.TryGetValue(Start.AddHours(i), out results[i]);
        if (i < Count && !hit)
        {
            // This item is not present in the cache.
            // Don't compute it just yet, though: grouping adjacent
            // misses lets us aggregate them together, in parallel.
            regionSize++;
            continue;
        }

        // Cache hit (or end of range): flush the pending run of misses.
        if (regionSize > 0)
        {
            int regionStart = i - regionSize;
            await ComputeHourAveragesAsync(
                Start, regionStart, regionSize, results);
            regionSize = 0;
        }
    }
    return results;
}
/// <summary>
/// Computes hour averages for a sizeable number of hours and
/// returns them as a newly allocated array.
/// </summary>
private async Task<Measurement[]> ComputeHourAveragesAsync(DateTime Start, int Count)
{
    var buffer = new Measurement[Count];
    await ComputeHourAveragesAsync(Start, 0, Count, buffer);
    return buffer;
}
/// <summary>
/// Computes hour averages for a sizeable number of hours.
/// Results are stored in the given array, starting at
/// <paramref name="Offset"/>; slot <c>Offset + i</c> holds the
/// average for the hour <c>Start + (Offset + i)</c> hours.
/// NOTE(review): assumes <c>hourAverages</c> was already fetched
/// by the caller — confirm against all call sites.
/// </summary>
private async Task ComputeHourAveragesAsync(DateTime Start, int Offset, int Count, Measurement[] Target)
{
    if (Count < ParallelComputeSize)
    {
        // Don't even try to parallelize if it's not worth the effort.
        for (int i = 0; i < Count; i++)
        {
            // BUGFIX: the hour must include Offset to match the slot
            // Target[i + Offset]. The original used Start.AddHours(i),
            // which computed the wrong hours whenever Offset != 0
            // (every other path here and in ComputeInCacheHourAverages
            // uses i + Offset).
            Target[i + Offset] = await GetRealHourAverageAsync(Start.AddHours(i + Offset));
        }
        return;
    }

    // Since we'll be aggregating lots of data, we'll distribute the
    // workload across multiple threads: prefetch one PrefetchSize
    // window of raw measurements at a time, then aggregate that
    // window in parallel.
    int rem = Count % PrefetchSize;
    int iters = Count / PrefetchSize;
    for (int i = 0; i < iters; i++)
    {
        int startIndex = Offset + i * PrefetchSize;
        await FetchMeasurementsAsync(Start.AddHours(startIndex));
        await ComputeInCacheHourAveragesAsync(
            Start, startIndex, PrefetchSize, Target);
    }
    if (rem > 0)
    {
        // Leftover partial window at the end of the range.
        int startIndex = Offset + Count - rem;
        await FetchMeasurementsAsync(Start.AddHours(startIndex));
        await ComputeInCacheHourAveragesAsync(
            Start, startIndex, rem, Target);
    }

    // Publish the freshly computed averages to the hour-averages
    // cache. This runs on the current thread after all workers have
    // completed, so the dictionary is not mutated concurrently.
    for (int i = 0; i < Count; i++)
    {
        var item = Target[i + Offset];
        hourAverages[item.Time] = item;
    }
}
/// <summary>
/// Splits the requested hour range into chunks of at most
/// ParallelComputeSize hours and aggregates each chunk on its own
/// thread-pool worker via ComputeInCacheHourAverages.
/// </summary>
private Task ComputeInCacheHourAveragesAsync(
    DateTime Start, int Offset, int Count, Measurement[] Target)
{
    // Ceiling division: one worker per (possibly partial) chunk.
    int chunkCount = (Count + ParallelComputeSize - 1) / ParallelComputeSize;
    var workers = new Task[chunkCount];
    for (int c = 0; c < chunkCount; c++)
    {
        // Per-iteration locals so each lambda captures its own values.
        int chunkStart = Offset + c * ParallelComputeSize;
        int left = Count - c * ParallelComputeSize;
        int chunkSize = left < ParallelComputeSize ? left : ParallelComputeSize;
        workers[c] = Task.Run(() => ComputeInCacheHourAverages(
            Start, chunkStart, chunkSize, Target));
    }
    return Task.WhenAll(workers);
}
/// <summary>
/// Aggregates the cached raw measurements in <c>hourData</c> into
/// hour averages for <paramref name="Count"/> consecutive hours,
/// storing each result at the matching index of
/// <paramref name="Target"/> (slot <c>Offset + i</c> holds the hour
/// <c>Start + (Offset + i)</c> hours).
/// </summary>
private void ComputeInCacheHourAverages(
    DateTime Start, int Offset, int Count, Measurement[] Target)
{
    int end = Offset + Count;
    for (int idx = Offset; idx < end; idx++)
    {
        var hour = Start.AddHours(idx);
        Target[idx] = MeasurementAggregation.Aggregate(hourData[hour], SensorId, hour);
    }
}