/// <summary>
/// Safely write a value to a HistogramCounter using the current time.
/// If the counter is null no operation will be performed.
/// </summary>
/// <param name="counter">Counter to write to.</param>
/// <param name="value">Value to write.</param>
/// <param name="dims">Dimensions to use for the written value.</param>
public static void SafeAddValue(this HistogramCounter counter, long value, DimensionSpecification dims)
{
    if (counter != null)
    {
        counter.AddValue(value, dims, DateTime.Now);
    }
}
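// Usage sketch (illustrative only, not part of the library): the extension lets call sites skip the
// null check when a counter may not have been configured. The method name, counter parameter, and the
// "operation" dimension value below are hypothetical.
public static void RecordQueryLatency(HistogramCounter latencyCounter, long elapsedMilliseconds)
{
    var dims = new DimensionSpecification();
    dims.Add("operation", "query");

    // Extension methods may be invoked on a null receiver, so a null counter is simply a no-op here.
    latencyCounter.SafeAddValue(elapsedMilliseconds, dims);
}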
public IEnumerable<DataSample> QueryData(DimensionSpecification filterDims, QuerySpecification querySpec)
{
    if (filterDims == null)
    {
        throw new ArgumentNullException("filterDims");
    }

    Events.Write.BeginQueryData(this, filterDims, querySpec);

    // EndQueryData has to be called in the data enumerators because the enumeration does not occur
    // until iteration begins on the IEnumerable we return.
    if (querySpec.QueryType != QueryType.Normal && typeof(TInternal) != typeof(InternalHistogram))
    {
        throw new NotSupportedException("Cannot get percentiles for non-histograms.");
    }

    switch (querySpec.QueryType)
    {
    case QueryType.Normal:
    {
        return querySpec.Combine
                   ? this.PopulateCombinedDataSamples(filterDims, PopulateNormalSample, querySpec)
                   : this.PopulateBucketedDataSamples(filterDims, PopulateNormalSample, querySpec);
    }

    case QueryType.Percentile:
    {
        CheckPercentileValue(querySpec.Percentile);
        return querySpec.Combine
                   ? this.PopulateCombinedDataSamples(filterDims, PopulatePercentileSample, querySpec)
                   : this.PopulateBucketedDataSamples(filterDims, PopulatePercentileSample, querySpec);
    }

    case QueryType.Average:
    {
        return querySpec.Combine
                   ? this.PopulateCombinedDataSamples(filterDims, PopulateAverageSample, querySpec)
                   : this.PopulateBucketedDataSamples(filterDims, PopulateAverageSample, querySpec);
    }

    case QueryType.Maximum:
    {
        return querySpec.Combine
                   ? this.PopulateCombinedDataSamples(filterDims, PopulateMaximumSample, querySpec)
                   : this.PopulateBucketedDataSamples(filterDims, PopulateMaximumSample, querySpec);
    }

    case QueryType.Minimum:
    {
        return querySpec.Combine
                   ? this.PopulateCombinedDataSamples(filterDims, PopulateMinimumSample, querySpec)
                   : this.PopulateBucketedDataSamples(filterDims, PopulateMinimumSample, querySpec);
    }

    default:
        throw new NotSupportedException("Unknown query type " + querySpec.QueryType);
    }
}
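// Sketch (illustrative only): driving QueryData directly with a QuerySpecification. The property
// assignments below mirror those made in ExtractQuerySpec further down; the dataSet parameter is
// hypothetical and assumed to wrap histogram data, since percentile queries are rejected for
// non-histogram types.
private static IEnumerable<DataSample> QueryNinetyNinthPercentile(DataSet<InternalHistogram> dataSet)
{
    var spec = new QuerySpecification
               {
                   QueryType = QueryType.Percentile,
                   Percentile = 99.0,
                   Combine = true,
               };

    // An empty filter specification does not restrict any dimension values.
    return dataSet.QueryData(new DimensionSpecification(), spec);
}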
private IEnumerable<DataSample> PopulateBucketedDataSamples(DimensionSpecification filterDims,
                                                            Action<DataSample, TInternal, QuerySpecification> sampleAction,
                                                            QuerySpecification querySpec)
{
    using (SharedLock.OpenShared(this.dataLock))
    {
        var bucketQuery = new BucketQuery(this, filterDims);
        foreach (var bucket in bucketQuery)
        {
            foreach (var match in (querySpec.IsCrossQuery
                                       ? bucket.GetMatchesSplitByDimension(filterDims, querySpec.CrossQueryDimension)
                                       : bucket.GetMatches(filterDims)))
            {
                if (match.DataCount == 0)
                {
                    continue;
                }

                var sample = new DataSample
                             {
                                 Name = this.Name,
                                 Dimensions = match.DimensionValues.Data,
                                 StartTime = bucket.StartTime.ToMillisecondTimestamp(),
                                 EndTime = bucket.EndTime.ToMillisecondTimestamp(),
                             };

                sampleAction(sample, match.Data, querySpec);
                yield return sample;
            }
        }
    }

    Events.Write.EndQueryData(this);
}
/// <summary>
/// Safely increment a HitCounter by the given amount using the current time.
/// If the counter is null no operation will be performed.
/// </summary>
/// <param name="counter">Counter to increment.</param>
/// <param name="amount">Amount to increment by.</param>
/// <param name="dims">Dimensions to use for incrementing.</param>
public static void SafeIncrement(this HitCounter counter, long amount, DimensionSpecification dims)
{
    if (counter != null)
    {
        counter.Increment(amount, dims, DateTime.Now);
    }
}
/// <summary>
/// Safely increment a HitCounter by one using the given timestamp.
/// If the counter is null no operation will be performed.
/// </summary>
/// <param name="counter">Counter to increment.</param>
/// <param name="dims">Dimensions to use for incrementing.</param>
/// <param name="timestamp">Timestamp for the written value.</param>
public static void SafeIncrement(this HitCounter counter, DimensionSpecification dims, DateTime timestamp)
{
    if (counter != null)
    {
        counter.Increment(1, dims, timestamp);
    }
}
/// <summary>
/// Get all values matching the provided dimensions.
/// </summary>
/// <param name="dims">Dimensions to filter with. Not all dimensions are required.</param>
/// <returns>An enumeration of resulting matches. The enumeration will always be one item long.</returns>
public IEnumerable<MatchResult> GetMatches(DimensionSpecification dims)
{
    this.Pin();
    try
    {
        var key = this.DimensionSet.CreateKey(dims);

        // If we have no data simply provide an empty match.
        if (this.data == null || this.data.Count == 0)
        {
            return new[] { new MatchResult(dims, string.Empty) { Data = null } };
        }

        var result = new MatchResult(dims, string.Empty);
        foreach (var kvp in this.data.GetMatchingPairs(key))
        {
            result.UpdateData(kvp.Value);
        }

        return new[] { result };
    }
    finally
    {
        this.Unpin();
    }
}
internal MatchResult(DimensionSpecification dims, string splitValue)
{
    this.DimensionValues = new DimensionSpecification(dims);
    this.SplitDimensionValue = splitValue;
    this.Data = null;
    this.DataCount = 0;
}
/// <summary>
/// Get all values for a particular dimension.
/// </summary>
/// <param name="dimensionName">Name of the dimension.</param>
/// <param name="filterDims">Filter dimensions.</param>
/// <returns>An enumeration of values for the dimension (may be empty).</returns>
public IEnumerable<string> GetDimensionValues(string dimensionName, DimensionSpecification filterDims)
{
    if (filterDims == null)
    {
        throw new ArgumentNullException("filterDims");
    }

    return this.DataSet.GetDimensionValues(dimensionName, filterDims);
}
private void GetTimesFromDimensions(DimensionSpecification dims, out DateTime start, out DateTime end)
{
    start = DateTime.MinValue;
    end = DateTime.MaxValue;
    var userSpecifiedStartTime = false;
    var userSpecifiedEndTime = false;

    string time;
    if (dims.TryGetValue(ReservedDimensions.StartTimeDimension, out time))
    {
        start = DateTime.Parse(time).ToUniversalTime();
        userSpecifiedStartTime = true;
    }
    else if (this.data.Count > 0)
    {
        // Find the oldest data we have loaded.
        DateTime earliest = this.data.Values[0].StartTime;
        for (var i = 0; i < this.data.Count; ++i)
        {
            var bucket = this.data.Values[i];
            if (!bucket.Loaded)
            {
                break;
            }

            earliest = bucket.StartTime;
        }

        start = earliest;
    }

    if (dims.TryGetValue(ReservedDimensions.EndTimeDimension, out time))
    {
        end = DateTime.Parse(time).ToUniversalTime();
        userSpecifiedEndTime = true;
    }
    else if (this.data.Count > 0)
    {
        end = this.data.Values[0].EndTime;
    }

    if (start >= end)
    {
        // If the user asked for one half of the time range [start -> inf] or [inf -> end] we will not
        // penalize them with an argument exception if we don't have that time bucket.
        if (!userSpecifiedStartTime || !userSpecifiedEndTime)
        {
            // Guarantee there will be no results.
            start = DateTime.MaxValue;
            end = DateTime.MaxValue;
            return;
        }

        throw new ArgumentOutOfRangeException("start", "Start time is greater than or equal to end time.");
    }
}
internal void BeginQueryData(IDataSet dataSet, DimensionSpecification filterDimensions, QuerySpecification querySpec)
{
    if (this.IsEnabled(EventLevel.Verbose, EventKeywords.None))
    {
        this.BeginQueryData(dataSet.Name,
                            string.Join("; ", from pair in filterDimensions select pair.Key + '=' + pair.Value),
                            querySpec.ToString());
    }
}
public void AddValue(DimensionSpecification dimensions, long value)
{
    // We use stackalloc here because this method is used extremely frequently, and this saves a tremendous
    // amount of overhead in terms of short-lived garbage data.
    uint* keyData = stackalloc uint[this.DimensionSet.dimensions.Length];
    this.DimensionSet.PopulateKeyArray(dimensions, keyData);

    this.AddValue(keyData, value);
}
internal static DimensionSpecification ExtractQuerySpec(DimensionSpecification queryParameters,
                                                        out QuerySpecification querySpec)
{
    var filterDimensions = new DimensionSpecification();
    querySpec = new QuerySpecification();

    foreach (var param in queryParameters)
    {
        if (string.Equals(param.Key, ReservedDimensions.DimensionDimension, StringComparison.OrdinalIgnoreCase))
        {
            querySpec.CrossQueryDimension = param.Value;

            // Ensure that the split dimension is not also sent as a filter dimension.
            if (queryParameters.ContainsKey(param.Value))
            {
                return null;
            }
        }
        else if (string.Equals(param.Key, ReservedDimensions.AggregateSamplesDimension, StringComparison.OrdinalIgnoreCase))
        {
            querySpec.Combine = Convert.ToBoolean(param.Value);
        }
        else if (string.Equals(param.Key, ReservedDimensions.PercentileDimension, StringComparison.OrdinalIgnoreCase))
        {
            if (string.Equals(param.Value, ReservedDimensions.PercentileDimensionValueForAverage, StringComparison.OrdinalIgnoreCase))
            {
                querySpec.QueryType = QueryType.Average;
            }
            else if (string.Equals(param.Value, ReservedDimensions.PercentileDimensionValueForMaximum, StringComparison.OrdinalIgnoreCase))
            {
                querySpec.QueryType = QueryType.Maximum;
            }
            else if (string.Equals(param.Value, ReservedDimensions.PercentileDimensionValueForMinimum, StringComparison.OrdinalIgnoreCase))
            {
                querySpec.QueryType = QueryType.Minimum;
            }
            else
            {
                querySpec.QueryType = QueryType.Percentile;
                querySpec.Percentile = double.Parse(param.Value);
            }
        }
        else
        {
            filterDimensions.Add(param.Key, param.Value);
        }
    }

    return filterDimensions;
}
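// Sketch (illustrative only) of how query parameters are partitioned by ExtractQuerySpec. The reserved
// keys are referenced through the ReservedDimensions constants because their literal string values are
// not shown in this file; "datacenter" is a hypothetical user dimension.
internal static void ExtractQuerySpecExample()
{
    var queryParameters = new DimensionSpecification();
    queryParameters.Add(ReservedDimensions.PercentileDimension, "99.9");       // becomes QueryType.Percentile, Percentile = 99.9
    queryParameters.Add(ReservedDimensions.AggregateSamplesDimension, "true"); // becomes Combine = true
    queryParameters.Add("datacenter", "west");                                 // anything non-reserved is a filter dimension

    QuerySpecification querySpec;
    DimensionSpecification filterDimensions = ExtractQuerySpec(queryParameters, out querySpec);

    // filterDimensions now contains only the "datacenter" entry. A null return would indicate that
    // the split dimension was also supplied as a filter dimension.
}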
private DimensionSpecification ProcessQueryParameters(DimensionSpecification queryParameters,
                                                      out QuerySpecification querySpec)
{
    var filterDimensions = ExtractQuerySpec(queryParameters, out querySpec);

    if (querySpec.QueryType == QueryType.Average && !this.DataSet.SupportsAverageQuery)
    {
        querySpec.QueryType = QueryType.Normal;
    }
    else if (querySpec.QueryType == QueryType.Percentile && !this.DataSet.SupportsPercentileQuery)
    {
        querySpec.QueryType = QueryType.Normal;
    }

    return filterDimensions;
}
public DimensionSpecification KeyToDimensionSpecification(Key key)
{
    if (key == null)
    {
        throw new ArgumentNullException("key");
    }

    var dimensionSpec = new DimensionSpecification();
    for (var i = 0; i < this.dimensions.Length; ++i)
    {
        dimensionSpec.Add(this.dimensions[i].Name, this.dimensions[i].IndexToString(key[i]));
    }

    return dimensionSpec;
}
/// <summary>
/// Get all values matching the provided dimensions, split by the given dimension key (e.g. cross query).
/// </summary>
/// <param name="dims">Dimensions to filter with. Not all dimensions are required.</param>
/// <param name="splitDimensionKey">The dimension to use for split/cross querying.</param>
/// <returns>An enumeration of resulting matches.</returns>
public IEnumerable<MatchResult> GetMatchesSplitByDimension(DimensionSpecification dims, string splitDimensionKey)
{
    this.Pin();
    try
    {
        var offset = this.DimensionSet.GetOffsetOfDimension(splitDimensionKey);
        if (offset < 0)
        {
            throw new KeyNotFoundException(splitDimensionKey);
        }

        // Filter on the provided dimensions while treating the split-by dimension as a wildcard (retrieve all).
        var splitDimension = this.DimensionSet.dimensions[offset];
        var key = this.DimensionSet.CreateKey(dims);
        key.Values[offset] = Key.WildcardDimensionValue;
        var allMatches = this.data.GetMatchingPairs(key);

        // Now make a single pass over the tree to sort the results by the split-key dimension.
        var splitByDimensionValues = new Dictionary<uint, MatchResult>();

        // Note: this forcibly enumerates the entire key tree (as opposed to lazily enumerating as needed by
        // the caller of this API). Too bad. We need to sort into buckets in one pass versus making N
        // bucketing passes.
        foreach (var match in allMatches)
        {
            MatchResult result;
            var splitByKeyValue = match.Key.Values[offset];
            if (!splitByDimensionValues.TryGetValue(splitByKeyValue, out result))
            {
                var splitKey = splitDimension.IndexToString(splitByKeyValue);
                result = new MatchResult(dims, splitKey);
                result.DimensionValues[splitDimensionKey] = splitKey;
                splitByDimensionValues.Add(splitByKeyValue, result);
            }

            result.UpdateData(match.Value);
        }

        return splitByDimensionValues.Values;
    }
    finally
    {
        this.Unpin();
    }
}
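// Consumption sketch (illustrative only): each MatchResult from a split query corresponds to one distinct
// value of the split dimension, with that value echoed back both in SplitDimensionValue and in
// DimensionValues. The method name is hypothetical; "datacenter" stands in for any split dimension.
private static void PrintPerDimensionCounts(IEnumerable<MatchResult> splitMatches)
{
    foreach (var match in splitMatches)
    {
        // e.g. "west: 1289 samples" for results produced by GetMatchesSplitByDimension(dims, "datacenter").
        Console.WriteLine("{0}: {1} samples", match.SplitDimensionValue, match.DataCount);
    }
}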
public void AddValue(DimensionSpecification dims, long value)
{
    if (this.Sealed)
    {
        throw new InvalidOperationException("Attempt to write to sealed bucket.");
    }

    this.Pin();
    try
    {
        this.data.AddValue(dims, value);
    }
    finally
    {
        this.Unpin();
    }
}
/// <summary>
/// Execute a query against the counter data.
/// </summary>
/// <param name="queryParameters">Query parameters.</param>
/// <returns>An enumeration of <see cref="DataSample"/>s matching the query parameters.</returns>
public IEnumerable<DataSample> Query(DimensionSpecification queryParameters)
{
    if (queryParameters == null)
    {
        throw new ArgumentNullException("queryParameters");
    }

    QuerySpecification querySpec;
    var filterDims = this.ProcessQueryParameters(queryParameters, out querySpec);
    if (filterDims == null)
    {
        return null;
    }

    return this.DataSet.QueryData(filterDims, querySpec);
}
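// Usage sketch (illustrative only): querying a counter through the Query method above. The hitCounter
// parameter is hypothetical and assumed to expose Query (e.g. via a shared counter base class); the
// "datacenter" dimension and the timestamp strings are likewise illustrative. Reserved keys are
// referenced through the ReservedDimensions constants since their literal values are not shown here.
private static void DumpRecentSamples(HitCounter hitCounter)
{
    var queryParameters = new DimensionSpecification();
    queryParameters.Add("datacenter", "west");
    queryParameters.Add(ReservedDimensions.StartTimeDimension, "2014-07-01T00:00:00Z");
    queryParameters.Add(ReservedDimensions.EndTimeDimension, "2014-07-02T00:00:00Z");

    var samples = hitCounter.Query(queryParameters);
    if (samples == null)
    {
        return; // Malformed parameters (e.g. the split dimension was also supplied as a filter).
    }

    foreach (var sample in samples)
    {
        Console.WriteLine("{0} [{1} - {2}]", sample.Name, sample.StartTime, sample.EndTime);
    }
}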
public void AddValue(long value, DimensionSpecification dims, DateTime timestamp)
{
    timestamp = timestamp.ToUniversalTime();

    DataBucket<TInternal> bucket;
    using (SharedLock.OpenExclusive(this.dataLock))
    {
        bucket = this.GetOrCreateDataBucket(timestamp, true);
        if (bucket == null)
        {
            Events.Write.RejectedAttemptToWriteAncientData(this.Name, this.earliestUnsealedBucketTime, timestamp);
            return;
        }
    }

    bucket.AddValue(dims, value);
}
private void WriteRoundedSizeValue(long value, DimensionSpecification dims, DateTime timestamp)
{
    if (value < 0)
    {
        value = 0;
    }

    if (value > this.roundingFactor)
    {
        long remainder = value % this.roundingFactor;
        if (remainder >= (this.roundingFactor / 2))
        {
            remainder -= this.roundingFactor; // negative remainder rolls the value up to the next multiple
        }

        value = value - remainder;
    }

    this.histogram.AddValue(value, dims, timestamp);
}
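// Worked sketch (illustrative only) of the size rounding above, using a hypothetical roundingFactor of
// 100: values above the factor are rounded to the nearest multiple of it, and smaller values are written
// unchanged. This helper exists purely to show the arithmetic; it is not part of the library.
private static long RoundSizeForExample(long value, long roundingFactor)
{
    if (value < 0)
    {
        return 0;
    }

    if (value <= roundingFactor)
    {
        return value; // e.g. 80 stays 80
    }

    long remainder = value % roundingFactor;
    return remainder >= roundingFactor / 2
               ? value + (roundingFactor - remainder) // e.g. 3570 -> 3600
               : value - remainder;                   // e.g. 3540 -> 3500
}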
public IEnumerable<string> GetDimensionValues(string dimensionName, DimensionSpecification filterDims)
{
    if (ReservedDimensions.StartTimeDimension.Equals(dimensionName, StringComparison.OrdinalIgnoreCase))
    {
        return this.GetTimestampValues(b => b.StartTime);
    }

    if (ReservedDimensions.EndTimeDimension.Equals(dimensionName, StringComparison.OrdinalIgnoreCase))
    {
        return this.GetTimestampValues(b => b.EndTime);
    }

    var values = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
    if (!this.HaveDimension(dimensionName))
    {
        throw new KeyNotFoundException(dimensionName);
    }

    // If all dimensions are provided we'll end up just adding the single value back for the given dimension
    // and pushing that out -- is this a neat hack to test if a dimension value exists, or is it ridiculous?
    // Going with ridiculous today.
    int matched = (from dim in filterDims.Keys where this.HaveDimension(dim) select dim).Count();
    if (matched == this.DimensionSet.dimensions.Length)
    {
        throw new ArgumentException("All known dimensions were supplied in filter.", "filterDims");
    }

    using (SharedLock.OpenShared(this.dataLock))
    {
        var bucketQuery = new BucketQuery(this, filterDims);
        foreach (var bucket in bucketQuery)
        {
            foreach (var value in bucket.GetDimensionValues(dimensionName, filterDims))
            {
                values.Add(value);
            }
        }
    }

    return values;
}
public BucketQuery(DataSet<TInternal> dataSet, DimensionSpecification filterDimensions)
{
    this.dataSet = dataSet;

    DateTime start, end;
    dataSet.GetTimesFromDimensions(filterDimensions, out start, out end);

    foreach (var bucket in dataSet.data.Values)
    {
        // Only include buckets which fall entirely within the requested time range.
        if (bucket.StartTicks < start.Ticks || bucket.EndTicks > end.Ticks)
        {
            continue;
        }

        this.buckets.Add(new BucketPair { Bucket = bucket, WasLoaded = bucket.Loaded });
    }
}
private void WriteRoundedSignificantDigitsValue(long value, DimensionSpecification dims, DateTime timestamp)
{
    if (this.roundingFactor > 0 && value > this.minimumRoundingValue)
    {
        // Strip (and round away) trailing digits until the value fits within the minimum rounding value,
        // then restore the stripped magnitude.
        long factor = 0;
        while (value > this.minimumRoundingValue)
        {
            factor += 1;
            var lastDigit = value % 10;
            if (lastDigit >= 5)
            {
                value += (10 - lastDigit);
            }

            value /= 10;
        }

        value *= (long)Math.Pow(10, factor);
    }

    this.histogram.AddValue(value, dims, timestamp);
}
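// Worked example (illustrative only) of the significant-digit rounding above, assuming a counter whose
// minimumRoundingValue is 99, i.e. roughly two significant digits are preserved (the concrete
// configuration values are hypothetical):
//
//   12,345 -> 1,235 -> 124 -> 12  (three digits stripped), so the stored value is 12 * 10^3 = 12,000
//      987 ->    99                (one digit stripped, 7 rounds up), so the stored value is 99 * 10 = 990
//       42 -> not above minimumRoundingValue, stored unchanged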
/// <summary>
/// Provides all matching values for a named dimension. If the dimension is unknown no data will be returned.
/// </summary>
/// <param name="dimensionName">The dimension.</param>
/// <param name="filterDims">Dimensions to filter by.</param>
/// <returns>An enumeration of all known values. Values may be repeated.</returns>
public IEnumerable<string> GetDimensionValues(string dimensionName, DimensionSpecification filterDims)
{
    this.Pin();
    try
    {
        var offset = this.DimensionSet.GetOffsetOfDimension(dimensionName);
        if (offset < 0)
        {
            yield break;
        }

        Key filter = this.DimensionSet.CreateKey(filterDims);
        foreach (var kvp in this.data.GetMatchingPairs(filter))
        {
            yield return this.DimensionSet.GetDimensionValueAtOffset(kvp.Key, offset);
        }
    }
    finally
    {
        this.Unpin();
    }
}
public void AddValue(long value, DimensionSpecification dims, DateTime timestamp)
{
    this.writeValue(value, dims, timestamp);
}
private IEnumerable<DataSample> PopulateCombinedDataSamples(DimensionSpecification filterDims,
                                                            Action<DataSample, TInternal, QuerySpecification> sampleAction,
                                                            QuerySpecification querySpec)
{
    var combinedData = new Dictionary<string, CombinedSample>();
    long start = long.MaxValue;
    long end = long.MinValue;

    using (SharedLock.OpenShared(this.dataLock))
    {
        var bucketQuery = new BucketQuery(this, filterDims);
        foreach (var bucket in bucketQuery)
        {
            foreach (var match in (querySpec.IsCrossQuery
                                       ? bucket.GetMatchesSplitByDimension(filterDims, querySpec.CrossQueryDimension)
                                       : bucket.GetMatches(filterDims)))
            {
                if (match.DataCount == 0)
                {
                    continue;
                }

                CombinedSample value;
                if (!combinedData.TryGetValue(match.SplitDimensionValue, out value))
                {
                    value = new CombinedSample
                            {
                                Data = match.Data,
                                Dimensions = match.DimensionValues,
                            };
                    combinedData[match.SplitDimensionValue] = value;
                }
                else
                {
                    value.Data.MergeFrom(match.Data);
                }

                if (bucket.StartTicks < start)
                {
                    start = bucket.StartTicks;
                }

                if (bucket.EndTicks > end)
                {
                    end = bucket.EndTicks;
                }
            }
        }
    }

    foreach (var value in combinedData.Values)
    {
        var sample = new DataSample
                     {
                         Name = this.Name,
                         StartTime = new DateTime(start, DateTimeKind.Utc).ToMillisecondTimestamp(),
                         EndTime = new DateTime(end, DateTimeKind.Utc).ToMillisecondTimestamp(),
                         Dimensions = value.Dimensions.Data,
                     };

        sampleAction(sample, value.Data, querySpec);
        yield return sample;
    }

    Events.Write.EndQueryData(this);
}
private void WriteUnroundedValue(long value, DimensionSpecification dims, DateTime timestamp)
{
    this.histogram.AddValue(value, dims, timestamp);
}
/// <summary>
/// Increment the counter by one for the provided dimensions using the current time for the data point.
/// </summary>
/// <param name="dims">Full set of dimension values for the counter.</param>
public void Increment(DimensionSpecification dims)
{
    this.Increment(dims, DateTime.Now);
}
/// <summary>
/// Increment the counter by one for the provided dimensions.
/// </summary>
/// <param name="dims">Full set of dimension values for the counter.</param>
/// <param name="timestamp">Timestamp to use for the data point.</param>
public void Increment(DimensionSpecification dims, DateTime timestamp)
{
    this.Increment(1, dims, timestamp);
}
/// <summary>
/// Increment the counter for the provided dimensions using the current time for the data point.
/// </summary>
/// <param name="amount">Amount to increment the counter by.</param>
/// <param name="dims">Full set of dimension values for the counter.</param>
public void Increment(long amount, DimensionSpecification dims)
{
    this.Increment(amount, dims, DateTime.Now);
}
/// <summary>
/// Increment the counter for the provided dimensions.
/// </summary>
/// <param name="amount">Amount to increment the counter by.</param>
/// <param name="dims">Full set of dimension values for the counter.</param>
/// <param name="timestamp">Timestamp to use for the data point.</param>
public void Increment(long amount, DimensionSpecification dims, DateTime timestamp)
{
    this.hitCounter.AddValue(amount, dims, timestamp);
}
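// Usage sketch (illustrative only): recording hits against a HitCounter via the Increment overloads
// above. The method name, counter parameter, and the dimension names/values are hypothetical.
private static void CountRequest(HitCounter requestCounter, bool succeeded)
{
    var dims = new DimensionSpecification();
    dims.Add("datacenter", "west");
    dims.Add("status", succeeded ? "success" : "failure");

    // Adds one hit at the current time; other overloads accept an explicit amount and/or timestamp.
    requestCounter.Increment(dims);
}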