/// <summary>
/// Builds one <see cref="ITimeSeriesAggregation"/> instance per requested aggregation type,
/// pairing each with its (possibly null) name from <c>_names</c>.
/// </summary>
/// <returns>An array of aggregation states, parallel to <c>_types</c>.</returns>
private ITimeSeriesAggregation[] TimeSeriesAggregationFactory()
{
    var aggregations = new ITimeSeriesAggregation[_types.Length];

    for (var index = 0; index < _types.Length; index++)
    {
        var aggregationType = _types[index];
        var aggregationName = _names?[index];

        if (aggregationType == AggregationType.Percentile)
        {
            // Percentile is the only type that requires an extra argument.
            Debug.Assert(_percentile.HasValue, $"Invalid {nameof(AggregationType.Percentile)} aggregation method. 'percentile' argument has no value");
        }

        aggregations[index] = aggregationType switch
        {
            AggregationType.Average => (ITimeSeriesAggregation)new AverageAggregation(aggregationName),
            AggregationType.Percentile => new PercentileAggregation(aggregationName, _percentile.Value),
            AggregationType.Slope => new SlopeAggregation(aggregationName),
            AggregationType.StandardDeviation => new StandardDeviationAggregation(aggregationName),
            _ => new TimeSeriesAggregation(aggregationType, aggregationName)
        };
    }

    return aggregations;
}
/// <summary>
/// Streams the series through <paramref name="reader"/> and rolls values up into one
/// <see cref="SingleResult"/> per time range defined by <paramref name="rangeSpec"/>.
/// Whole segments that fit inside the current range are aggregated in bulk; segments that
/// cross a range boundary (and raw individual values) are aggregated value by value.
/// </summary>
/// <param name="reader">Source of segments and/or individual values.</param>
/// <param name="rangeSpec">Defines the aggregation ranges; advanced in place as timestamps move forward.</param>
/// <param name="mode">Aggregation mode; we always aggregate here by Min, Max, First, Last, Sum, Count, Mean.</param>
/// <returns>One rolled-up result per non-empty range, in timestamp order.</returns>
public static List<SingleResult> GetAggregatedValues(TimeSeriesReader reader, RangeGroup rangeSpec, AggregationMode mode)
{
    var aggStates = new TimeSeriesAggregation(mode);
    var results = new List<SingleResult>();

    foreach (var it in reader.SegmentsOrValues())
    {
        if (it.IndividualValues != null)
        {
            AggregateIndividualItems(it.IndividualValues);
        }
        else
        {
            // We might need to close the old aggregation range and start a new one.
            MaybeMoveToNextRange(it.Segment.Start);

            // Now we need to see if we can consume the whole segment, or
            // if the range it covers needs to be broken up into multiple ranges.
            // For example, if the segment covers 3 days, but we group by 1 hour,
            // we still have to deal with the individual values.
            if (it.Segment.End > rangeSpec.End)
            {
                AggregateIndividualItems(it.Segment.Values);
            }
            else
            {
                var span = it.Segment.Summary.SegmentValues.Span;
                aggStates.Segment(span);
            }
        }
    }

    // Emit whatever is left in the final (partial) range.
    FlushCurrentRange();

    return results;

    // Emits the accumulated state for the current range as a single rolled-up result,
    // if anything was aggregated. Shared by the end-of-stream path and range rollover
    // (previously duplicated verbatim in both places).
    void FlushCurrentRange()
    {
        if (aggStates.Any == false)
            return;

        var result = new SingleResult
        {
            Timestamp = rangeSpec.Start,
            Values = new Memory<double>(aggStates.Values.ToArray()),
            Status = TimeSeriesValuesSegment.Live,
            Type = SingleResultType.RolledUp
            // TODO: Tag = ""
        };
        TimeSeriesStorage.AssertNoNanValue(result);
        results.Add(result);
    }

    // Closes the current range and opens the one containing `ts`, if `ts` falls outside it.
    void MaybeMoveToNextRange(DateTime ts)
    {
        if (rangeSpec.WithinRange(ts))
            return;

        FlushCurrentRange();
        rangeSpec.MoveToNextRange(ts);
        aggStates.Init();
    }

    // Feeds live values one at a time into the aggregation state, rolling the
    // range forward as timestamps advance. Dead (deleted) values are skipped.
    void AggregateIndividualItems(IEnumerable<SingleResult> items)
    {
        foreach (var cur in items)
        {
            if (cur.Status == TimeSeriesValuesSegment.Dead)
                continue;

            MaybeMoveToNextRange(cur.Timestamp);
            aggStates.Step(cur.Values.Span);
        }
    }
}