/// <summary>
/// Add all samples from this query response, combining overlapping time buckets as they are encountered.
/// This method IS threadsafe against itself.
/// </summary>
/// <param name="response">Query response from a single machine to fold into the aggregate.</param>
public void AddMachineResponse(CounterQueryResponse response)
{
    if (response == null)
    {
        throw new ArgumentNullException("response");
    }

    if (response.RequestDetails != null)
    {
        lock (this.requestDetails)
        {
            this.requestDetails.AddRange(response.RequestDetails);
        }
    }

    if (response.Samples == null)
    {
        return;
    }

    foreach (var sample in response.Samples)
    {
        var baseSample = sample;
        var hashKey = this.dimensionSet.CreateKey(sample.Dimensions);
        var sampleTimeRange = CreateTimeRange(baseSample);
        var rangesToRemove = new List<TimeRange>();
        SampleCombiner combiner = null;
        SortedList<TimeRange, SampleCombiner> aggregatedBuckets;

        // Grab the appropriate bucket list for this dimension key, creating it if needed.
        lock (this.dataDictionary)
        {
            if (!this.dataDictionary.TryGetValue(hashKey, out aggregatedBuckets))
            {
                aggregatedBuckets = new SortedList<TimeRange, SampleCombiner>();
                this.dataDictionary.Add(hashKey, aggregatedBuckets);
            }
        }

        lock (aggregatedBuckets)
        {
            // The buckets are ordered by start time, so it is safe to merge and keep
            // walking forward; no merge can ever require backwards reprocessing.
            foreach (var bucket in aggregatedBuckets)
            {
                var existingRange = bucket.Key;

                // Did we get past the end of the range we are interested in?
                if (existingRange.Start > sampleTimeRange.End)
                {
                    break;
                }

                if (existingRange.IntersectsWith(sampleTimeRange))
                {
                    sampleTimeRange = TimeRange.Merge(sampleTimeRange, existingRange);
                    rangesToRemove.Add(bucket.Key);

                    if (combiner == null)
                    {
                        // First merge: fold the incoming sample into the existing bucket's combiner.
                        combiner = bucket.Value;
                        combiner.AddSample(sample);
                        combiner.MachineCount += SampleCombiner.ExtractMachineCount(sample);
                    }
                    else
                    {
                        // N-merge (N > 1): the sample is already accounted for in the combiner,
                        // so only the existing bucket's values need merging.
                        combiner.Merge(bucket.Value);
                    }
                }
            }

            // If there was no merge, create a new bucket holding just this sample.
            if (combiner == null)
            {
                combiner = new SampleCombiner(sample)
                           {
                               MachineCount = SampleCombiner.ExtractMachineCount(sample)
                           };
            }

            // Remove the merged items and add the new item covering the combined range.
            foreach (var range in rangesToRemove)
            {
                aggregatedBuckets.Remove(range);
            }
            aggregatedBuckets.Add(sampleTimeRange, combiner);
        }
    }
}
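For context, a minimal usage sketch of the merge path, written in the style of the tests below and reusing their CreateResponse helper and DefaultDimensions constant; the expected merge outcome follows from the algorithm above rather than from documented behavior:

// Sketch only: two machines report hit counts over overlapping ten-minute windows.
var aggregator = new CounterAggregator(DefaultDimensions);
var start = DateTime.UtcNow;

// Machine A covers [start, start + 10m); machine B covers [start + 5m, start + 15m).
aggregator.AddMachineResponse(CreateResponse(start, 10, 0, 1));
aggregator.AddMachineResponse(CreateResponse(start.AddMinutes(5), 10, 0, 1));

// The two intersecting buckets should merge into one [start, start + 15m) bucket,
// leaving a single combined sample.
Assert.AreEqual(1, aggregator.Samples.Count());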
public void CounterAggregatorCalculatesPercentileAfterAggregation()
{
    var aggregator = new CounterAggregator(DefaultDimensions);
    var response = new CounterQueryResponse
                   {
                       HttpResponseCode = 200,
                       Samples = new List<DataSample>
                                 {
                                     new DataSample
                                     {
                                         Name = "bob",
                                         StartTime = DateTime.Now.ToMillisecondTimestamp(),
                                         EndTime = DateTime.Now.ToMillisecondTimestamp(),
                                         Dimensions = new Dictionary<string, string> {{AnyDimensionName, "tacos"}},
                                         SampleType = DataSampleType.Histogram,
                                         Histogram = new Dictionary<long, uint>
                                                     {
                                                         {1, 1}, {2, 1}, {3, 1}, {4, 1}, {5, 1},
                                                         {6, 1}, {7, 1}, {8, 1}, {9, 1}, {10, 1}
                                                     }
                                     }
                                 }
                   };

    // By default, we do not apply percentile filtering; the aggregated sample stays a histogram.
    aggregator.AddMachineResponse(response);
    var defaultValue = aggregator.Samples.First();
    Assert.AreEqual(DataSampleType.Histogram, defaultValue.SampleType);

    // Now that the client asked for filtering, we calculate the 99.999th percentile
    // (which should be the maximum value from the histogram above).
    aggregator.ApplyPercentileCalculationAggregation(new Dictionary<string, string> {{"Percentile", "99.999"}});
    var aggregatedValue = aggregator.Samples.First();
    Assert.AreEqual(DataSampleType.Percentile, aggregatedValue.SampleType);
    Assert.AreEqual(10, aggregatedValue.PercentileValue);
}
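The expected value of 10 follows from nearest-rank percentile math over the histogram (values 1 through 10, one hit each). A hypothetical helper illustrating that calculation; this is an assumption about the arithmetic, not the library's actual percentile implementation, which may interpolate differently:

// Nearest-rank percentile sketch: for ten total hits, the 99.999th percentile's
// rank is ceil(0.99999 * 10) = 10, which lands on the largest recorded value.
static long NearestRankPercentile(IDictionary<long, uint> histogram, double percentile)
{
    long totalHits = histogram.Values.Sum(v => (long)v);
    long rank = (long)Math.Ceiling(percentile / 100.0 * totalHits);

    long seen = 0;
    foreach (var pair in histogram.OrderBy(p => p.Key))
    {
        seen += pair.Value;
        if (seen >= rank)
        {
            return pair.Key;
        }
    }

    return histogram.Keys.Max();
}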
// Builds a response containing numBuckets HitCount samples per dimension value,
// with each bucket spanning bucketTimeInMinutes and consecutive bucket starts
// spaced deltaBetweenBucketStarts minutes apart.
private CounterQueryResponse CreateResponse(DateTime startTime, int bucketTimeInMinutes,
                                            int deltaBetweenBucketStarts, int numBuckets,
                                            int uniqueDimensions = 1, string dimensionValue = null)
{
    var response = new CounterQueryResponse {Samples = new List<DataSample>()};

    var bucketStart = startTime;
    for (int dim = 0; dim < uniqueDimensions; dim++)
    {
        for (int i = 0; i < numBuckets; i++)
        {
            response.Samples.Add(
                new DataSample
                {
                    HitCount = 1,
                    Dimensions = new Dictionary<string, string> {{AnyDimensionName, dimensionValue ?? dim.ToString()}},
                    SampleType = DataSampleType.HitCount,
                    StartTime = bucketStart.ToMillisecondTimestamp(),
                    EndTime = bucketStart.AddMinutes(bucketTimeInMinutes).ToMillisecondTimestamp()
                });
            bucketStart = bucketStart.AddMinutes(deltaBetweenBucketStarts);
        }
    }

    return response;
}
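For illustration, one way the helper pairs with AddMachineResponse; the parameter values here are hypothetical, chosen so the bucket spans overlap:

// Bucket starts are 5 minutes apart but each bucket spans 10 minutes, so within a
// single response the buckets overlap and exercise the merge path shown above.
var aggregator = new CounterAggregator(DefaultDimensions);
aggregator.AddMachineResponse(
    CreateResponse(DateTime.UtcNow, bucketTimeInMinutes: 10, deltaBetweenBucketStarts: 5, numBuckets: 2));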
public void CounterAggregatorRejectsPerMachinePercentiles()
{
    var aggregator = new CounterAggregator(DefaultDimensions);
    var response = new CounterQueryResponse
                   {
                       HttpResponseCode = 200,
                       Samples = new List<DataSample>
                                 {
                                     new DataSample
                                     {
                                         Name = "bob",
                                         StartTime = DateTime.Now.ToMillisecondTimestamp(),
                                         EndTime = DateTime.Now.ToMillisecondTimestamp(),
                                         Dimensions = new Dictionary<string, string> {{AnyDimensionName, "tacos"}},
                                         SampleType = DataSampleType.Percentile,
                                         Percentile = 40,
                                         PercentileValue = 11
                                     }
                                 }
                   };

    Assert.Throws<ArgumentException>(() => aggregator.AddMachineResponse(response));
}