// Creates a three-dimension set, a clean scratch directory, and a dimension
// specification mapping every dimension name to the shared test value.
public void SetUp()
{
    this.dimensionSet = new DimensionSet(new HashSet<Dimension>
    {
        new Dimension("foo"),
        new Dimension("bar"),
        new Dimension("baz"),
    });

    dimensions = new DimensionSpecification();

    // Always start from an empty storage directory.
    this.storagePath = Path.Combine(Environment.CurrentDirectory, "msTemp");
    if (Directory.Exists(this.storagePath))
    {
        Directory.Delete(this.storagePath, true);
    }
    Directory.CreateDirectory(this.storagePath);

    foreach (var dimension in this.dimensionSet.Dimensions)
    {
        dimensions[dimension.Name] = DimValue;
    }

    this.properties = new MockSharedDataSetProperties();
}
/// <summary>
/// A simple Lambda handler that upper-cases its input while emitting sample
/// EMF metrics and properties for the "EMFLambda" namespace.
/// </summary>
/// <param name="input">Input string; may be null.</param>
/// <param name="context">Lambda execution context (unused).</param>
/// <returns>The input upper-cased, or null when the input is null.</returns>
public string FunctionHandler(string input, ILambdaContext context)
{
    // NOTE(review): envProvider is constructed but never used afterwards --
    // confirm whether the EnvironmentProvider constructor has required side
    // effects before removing it.
    var envProvider = new EnvironmentProvider(EnvironmentConfigurationProvider.Config, new ResourceFetcher());

    var logger = new MetricsLogger();

    var dimensionSet = new DimensionSet();
    dimensionSet.AddDimension("Service", "Aggregator");
    dimensionSet.AddDimension("Region", "us-west-2");
    logger.PutDimensions(dimensionSet);
    logger.SetNamespace("EMFLambda");

    // Three latency samples plus their count.
    logger.PutMetric("ProcessingLatency", 101, Unit.MILLISECONDS);
    logger.PutMetric("ProcessingLatency", 100, Unit.MILLISECONDS);
    logger.PutMetric("ProcessingLatency", 99, Unit.MILLISECONDS);
    logger.PutMetric("Count", 3, Unit.COUNT);

    logger.PutProperty("AccountId", "123456789");
    logger.PutProperty("RequestId", "422b1569-16f6-4a03-b8f0-fe3fd9b100f8");
    logger.PutProperty("DeviceId", "61270781-c6ac-46f1-baf7-22c808af8162");

    var payLoad = new Dictionary<string, object>
    {
        { "sampleTime", 123456789 },
        { "temperature", 273.0 },
        { "pressure", 101.3 }
    };
    logger.PutProperty("Payload", payLoad);

    logger.Flush();
    return input?.ToUpper();
}
// Test fixture setup: three named dimensions, a freshly recreated temp
// directory, and one dimension-spec entry per dimension.
public void SetUp()
{
    var dims = new HashSet<Dimension>
    {
        new Dimension("foo"),
        new Dimension("bar"),
        new Dimension("baz"),
    };
    this.dimensionSet = new DimensionSet(dims);

    dimensions = new DimensionSpecification();

    this.storagePath = Path.Combine(Environment.CurrentDirectory, "msTemp");

    // Recreate the scratch directory from scratch on every run.
    if (Directory.Exists(this.storagePath))
    {
        Directory.Delete(this.storagePath, true);
    }
    Directory.CreateDirectory(this.storagePath);

    foreach (var d in this.dimensionSet.Dimensions)
    {
        dimensions[d.Name] = DimValue;
    }

    this.properties = new MockSharedDataSetProperties();
}
// Configures compaction, writes a few hit-counter samples across three
// dimension values, then forces a seal with a far-future timestamp.
public override void Setup()
{
    this.dataManager.CompactionConfiguration = new DataCompactionConfiguration(new[]
    {
        new DataIntervalConfiguration(TimeSpan.FromMinutes(5), TimeSpan.MaxValue),
    });
    this.dataManager.MaximumDataAge = TimeSpan.Zero; // Don't want data getting deleted here.

    // Make a counter and write some stuff.
    this.dimensionSet = new DimensionSet(new HashSet<Dimension> { new Dimension(AnyDimension), });
    var counter = this.dataManager.CreateHitCounter(AnyCounter, this.dimensionSet).Result;

    var dimensionValues = new DimensionSpecification();
    var now = DateTime.Now;
    dimensionValues[AnyDimension] = "a";
    counter.Increment(dimensionValues, now);
    dimensionValues[AnyDimension] = "b";
    counter.Increment(2, dimensionValues, now);
    dimensionValues[AnyDimension] = "c";
    counter.Increment(dimensionValues, now);

    // We need to force a seal by setting some value waaaay in the future.
    counter.Increment(dimensionValues, now.AddYears(10));

    this.memoryStreamManager = new RecyclableMemoryStreamManager(1 << 17, 1 << 20, 1 << 24);
}
// Builds an empty, writable test buffer sized exactly for the requested key count.
private static TestBufferedKeyedData GetTestData(int keyCount, DimensionSet dimensionSet)
{
    var buffer = new byte[TestBufferedKeyedData.GetBufferSizeForKeyCount(keyCount, dimensionSet)];
    return new TestBufferedKeyedData(buffer, 0, 0, dimensionSet, true);
}
/// <summary>
/// Run a distributed (tiered) query based on the parameters in the original request. Split the sources into N blocks (N is determined
/// by the MaxFanout setting). Client will select a (random) leader for each block and send the request downstream. Client will merge all responses
/// and in the case of failure will include diagnostics for each server in the chunk.
/// </summary>
/// <param name="counterName">Name of the counter to query.</param>
/// <param name="request">Base request to be distributed into blocks.</param>
/// <param name="queryParameters">Query parameters for the counter.</param>
/// <param name="dimensionSet">DimensionSet used to quickly merge per-sample query dimensions.</param>
/// <returns>Aggregated response optionally returning per-source diagnostics.</returns>
public async Task<CounterQueryResponse> CounterQuery(string counterName, TieredRequest request,
                                                     IDictionary<string, string> queryParameters,
                                                     DimensionSet dimensionSet)
{
    // nameof keeps the parameter names in the exceptions refactor-safe
    // (previously hard-coded strings).
    if (string.IsNullOrEmpty(counterName))
    {
        throw new ArgumentException("No counter name specified", nameof(counterName));
    }
    if (request == null)
    {
        throw new ArgumentNullException(nameof(request));
    }
    if (dimensionSet == null)
    {
        throw new ArgumentNullException(nameof(dimensionSet));
    }

    var counterAggregator = new CounterAggregator(dimensionSet);

    // if the client requested a percentile and there are multiple sources we let
    // the aggregator apply that filtering after data collection
    if (HasSources(request))
    {
        queryParameters = counterAggregator.ApplyPercentileCalculationAggregation(queryParameters);
    }

    var command = Protocol.BuildCounterRequestCommand(RestCommands.CounterQueryCommand, counterName, queryParameters);
    return await this.Execute(request, command,
                              counterAggregator.AddMachineResponse,
                              () => counterAggregator.GetResponse(ShouldMergeTimeBuckets(queryParameters)));
}
// Publishes a fixed sample set of EMF metrics and properties, then flushes.
private static void EmitMetrics(ILogger logger, IMetricsLogger metrics)
{
    var dimensionSet = new DimensionSet();
    dimensionSet.AddDimension("Service", "Aggregator");
    dimensionSet.AddDimension("Region", "us-west-2");
    metrics.SetDimensions(dimensionSet);

    // Three latency samples plus their count.
    metrics.PutMetric("ProcessingLatency", 101, Unit.MILLISECONDS);
    metrics.PutMetric("ProcessingLatency", 100, Unit.MILLISECONDS);
    metrics.PutMetric("ProcessingLatency", 99, Unit.MILLISECONDS);
    metrics.PutMetric("Count", 3, Unit.COUNT);

    metrics.PutProperty("AccountId", "123456789");
    metrics.PutProperty("RequestId", "422b1569-16f6-4a03-b8f0-fe3fd9b100f8");
    metrics.PutProperty("DeviceId", "61270781-c6ac-46f1-baf7-22c808af8162");

    var payLoad = new Dictionary<string, object>
    {
        { "sampleTime", 123456789 },
        { "temperature", 273.0 },
        { "pressure", 101.3 }
    };
    metrics.PutProperty("Payload", payLoad);

    logger.LogInformation("Flushing");
    metrics.Flush();
}
// A filter built from a dimension set with fewer OR more dimensions than the
// key's must never match the key.
public void MatchIsFalseIfFilterHasMoreOrLessValuesThanKey()
{
    const string anyDim1 = "d1";
    const string anyDim2 = "d2";
    const string anyDim3 = "d3";

    var dimensionSet = new DimensionSet(new HashSet<Dimension>
    {
        new Dimension(anyDim1),
        new Dimension(anyDim2),
        new Dimension(anyDim3),
    });
    var keyValueSet = new DimensionSpecification
    {
        { anyDim1, "val1" },
        { anyDim2, "val2" },
        { anyDim3, "val3" }
    };

    bool allDimensionsProvided;
    Key key = dimensionSet.CreateKey(keyValueSet, out allDimensionsProvided);

    // Filter with one dimension fewer.
    var smallerDimensionSet = new DimensionSet(new HashSet<Dimension>
    {
        new Dimension(anyDim1),
        new Dimension(anyDim2)
    });
    Key smallerFilter = smallerDimensionSet.CreateKey(keyValueSet, out allDimensionsProvided);
    Assert.IsFalse(smallerFilter.Matches(key));

    // Filter with one dimension more.
    var largerDimensionSet = new DimensionSet(
        new HashSet<Dimension>(dimensionSet.Dimensions) { new Dimension("anotherDim") });
    Key largerFilter = largerDimensionSet.CreateKey(keyValueSet, out allDimensionsProvided);
    Assert.IsFalse(largerFilter.Matches(key));
}
// Converting a buffer to an equal-sized (copy-constructed) dimension set must
// preserve every key: each dimension's stored string value survives unchanged.
public void CanConvertBufferWithSameSizeSet()
{
    const int elementsPerDepth = 100;
    using (var set = this.Generate(new [] { elementsPerDepth, elementsPerDepth, elementsPerDepth }, true))
    {
        var originalDimSet = set.DimensionSet;
        // An equivalent but distinct DimensionSet instance.
        var newDimSet = new DimensionSet(originalDimSet);
        // Snapshot all keys before conversion so we can compare afterwards.
        var originalKeys = new List <Key>(set.Select(kvp => kvp.Key.Clone() as Key));
        set.Convert(newDimSet);
        Assert.AreSame(newDimSet, set.DimensionSet);
        int currentKey = 0;
        foreach (var kvp in set)
        {
            for (var i = 0; i < newDimSet.dimensions.Length; ++i)
            {
                var newDimension = newDimSet.dimensions[i];
                // Match by dimension identity; ordering may differ between sets.
                var originalDimension = (from d in originalDimSet.dimensions where d.Equals(newDimension) select d).First();
                var originalDimIndex = Array.IndexOf(originalDimSet.dimensions, originalDimension);
                var originalKey = originalKeys[currentKey];
                // The decoded string for this dimension must be unchanged.
                Assert.AreEqual(originalDimension.IndexToString(originalKey[originalDimIndex]),
                                newDimension.IndexToString(kvp.Key[i]));
            }
            ++currentKey;
        }
        // Every original key must have been enumerated exactly once.
        Assert.AreEqual(originalKeys.Count, currentKey);
    }
}
// Sets compaction to a single 5-minute interval, writes three dimension
// values into a fresh hit counter, and seals the bucket with a far-future hit.
public override void Setup()
{
    var intervals = new[]
    {
        new DataIntervalConfiguration(TimeSpan.FromMinutes(5), TimeSpan.MaxValue),
    };
    this.dataManager.CompactionConfiguration = new DataCompactionConfiguration(intervals);
    this.dataManager.MaximumDataAge = TimeSpan.Zero; // Don't want data getting deleted here.

    // Make a counter and write some stuff.
    this.dimensionSet = new DimensionSet(new HashSet<Dimension> { new Dimension(AnyDimension), });
    var counter = this.dataManager.CreateHitCounter(AnyCounter, this.dimensionSet).Result;

    var dims = new DimensionSpecification();
    var timestamp = DateTime.Now;
    foreach (var value in new[] { "a", "b", "c" })
    {
        dims[AnyDimension] = value;
        if (value == "b")
        {
            counter.Increment(2, dims, timestamp);
        }
        else
        {
            counter.Increment(dims, timestamp);
        }
    }

    // We need to force a seal by setting some value waaaay in the future.
    counter.Increment(dims, timestamp.AddYears(10));

    this.memoryStreamManager = new RecyclableMemoryStreamManager(1 << 17, 1 << 20, 1 << 24);
}
// Creates a writable TestBufferedKeyedData backed by a zeroed buffer large
// enough to hold keyCount keys for the given dimension set.
private static TestBufferedKeyedData GetTestData(int keyCount, DimensionSet dimensionSet)
{
    var requiredSize = TestBufferedKeyedData.GetBufferSizeForKeyCount(keyCount, dimensionSet);
    return new TestBufferedKeyedData(new byte[requiredSize], 0, 0, dimensionSet, true);
}
/// <summary>
/// Creates an aggregator that merges responses using the given dimension set.
/// </summary>
/// <param name="dimensionSet">Dimension set used for merging; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="dimensionSet"/> is null.</exception>
public CounterAggregator(DimensionSet dimensionSet)
{
    if (dimensionSet == null)
    {
        // nameof is refactor-safe (was a hard-coded "dimensionSet" string).
        throw new ArgumentNullException(nameof(dimensionSet));
    }

    this.dimensionSet = dimensionSet;
}
/// <summary>
/// Initializes the aggregator with the dimension set used to merge samples.
/// </summary>
/// <param name="dimensionSet">Required dimension set.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="dimensionSet"/> is null.</exception>
public CounterAggregator(DimensionSet dimensionSet)
{
    if (dimensionSet == null)
    {
        // Use nameof instead of a literal so renames stay consistent.
        throw new ArgumentNullException(nameof(dimensionSet));
    }

    this.dimensionSet = dimensionSet;
}
// Fills the histogram data set with random samples across BucketCount time
// buckets and a DimensionOneCount x DimensionTwoCount dimension grid,
// recording every written value in histogramKeys and a per-value total count
// in totalCountBySample for later verification.
private void WriteComplexSampleData()
{
    var rng = new Random();
    histogramKeys = new int[BucketCount][][];
    totalCountBySample = new Dictionary <long, uint>();
    this.dimensionSet = new DimensionSet(new HashSet <Dimension>
    {
        new Dimension("one"),
        new Dimension("two"),
    });
    // One-minute buckets; TimeSpan.MaxValue keeps everything in that interval.
    this.sharedProperties.CompactionConfiguration = new DataCompactionConfiguration(new[]
    {
        new DataIntervalConfiguration(TimeSpan.FromMinutes(1), TimeSpan.MaxValue),
    });
    this.histogramDataSet = new DataSet <InternalHistogram>("/AggTest", null, this.dimensionSet, this.sharedProperties);
    var dimSpec = new DimensionSpecification();
    var startTime = new DateTime(2014, 7, 4, 0, 0, 0, DateTimeKind.Utc); // no DateTimeKind.MERKUH? Sigh.
    for (var i = 0; i < BucketCount; ++i)
    {
        histogramKeys[i] = new int[DimensionOneCount][];
        // Each bucket starts one default-bucket-width after the previous one.
        var ts = startTime + TimeSpan.FromTicks(i * this.sharedProperties.CompactionConfiguration.DefaultBucketTicks);
        for (var d1 = 0; d1 < DimensionOneCount; ++d1)
        {
            histogramKeys[i][d1] = new int[DimensionTwoCount];
            dimSpec["one"] = d1.ToString();
            for (var d2 = 0; d2 < DimensionTwoCount; ++d2)
            {
                dimSpec["two"] = d2.ToString();
                var val = rng.Next(MaxValue);
                this.histogramDataSet.AddValue(val, dimSpec, ts);
                histogramKeys[i][d1][d2] = val;
                // Track how many times each random value was written overall.
                if (!totalCountBySample.ContainsKey(val))
                {
                    totalCountBySample[val] = 0;
                }
                totalCountBySample[val]++;
            }
        }
    }
    this.histogramDataSet.Flush();
}
// Lazily creates the shared stream manager, then builds a fresh two-dimension
// set and a keyed data store over it.
public void SetUp()
{
    this.memoryStreamManager = this.memoryStreamManager
                               ?? new RecyclableMemoryStreamManager(1 << 17, 1 << 20, 1 << 24);

    this.dimensionSet = DimensionSetTests.CreateDimensionSet(2);
    this.dataStore = new TestKeyedDataStore(this.dimensionSet, this.memoryStreamManager);
}
// Decodes each binary representation into a double using the domain
// definition of its (1-based) dimension and the function's precision.
public override DimensionSet<double> Convert(
    DimensionSet<BinaryRepresentation> source, DimensionalFunction function)
{
    var decoded = source.Select(
        (encoded, dimension) => BinaryHelper.DecodeBinary(
            encoded,
            function.GetDomain().GetDefinitionForDimension(dimension + 1),
            function.Precision));

    return new DimensionSet<double>(decoded);
}
// Initializes application settings rooted at the given path with empty
// connection and dimension collections.
public AppSettings(string apppath)
{
    _AppPath = apppath;

    _connections = new List<CDBConnection>();

    // -1 signals "precision not configured yet".
    _precision = -1;
    _precisionDecimals = -1;

    _dimensions = new DimensionSet();
    _dim = new DimensionSet();
}
// Per-test setup: reuse (or create) the recyclable stream manager and build a
// new data store over a two-dimension set.
public void SetUp()
{
    if (this.memoryStreamManager == null)
    {
        // 128KiB blocks, 1MiB large-buffer multiple, 16MiB maximum buffer.
        this.memoryStreamManager = new RecyclableMemoryStreamManager(1 << 17, 1 << 20, 1 << 24);
    }

    this.dimensionSet = DimensionSetTests.CreateDimensionSet(2);
    this.dataStore = new TestKeyedDataStore(this.dimensionSet, this.memoryStreamManager);
}
// Prepares an empty dimension set and three consecutive bucket timestamps
// spaced by the default compaction interval.
public void SetUp()
{
    this.dimensionSet = new DimensionSet(new HashSet<Dimension>());
    this.dimensionSpec = new DimensionSpecification();
    this.sharedProperties = new MockSharedDataSetProperties();

    this.bucketSpan = this.sharedProperties.CompactionConfiguration.Default.Interval;
    this.firstBucketTimestamp = DateTime.UtcNow;
    this.secondBucketTimestamp = this.firstBucketTimestamp + this.bucketSpan;
    this.thirdBucketTimestamp = this.secondBucketTimestamp + this.bucketSpan;
}
// Disposes any existing bucket and replaces it with a new time-based bucket
// (defaulting to a copy of the two-dimension set).
private void RecreateDataBucket(DimensionSet set = null)
{
    this.bucket?.Dispose();

    this.bucket = new DataBucket<InternalHitCount>(
        new DimensionSet(set ?? this.twoDimensionSet),
        this.timestamp,
        DefaultBucketTimeSpanTicks,
        this.currentDirectory,
        this.properties.MemoryStreamManager);
}
// Fixture setup: empty dimension set/spec plus first/second/third bucket
// timestamps one default compaction interval apart.
public void SetUp()
{
    this.dimensionSet = new DimensionSet(new HashSet<Dimension>());
    this.dimensionSpec = new DimensionSpecification();
    this.sharedProperties = new MockSharedDataSetProperties();
    this.bucketSpan = this.sharedProperties.CompactionConfiguration.Default.Interval;

    var start = DateTime.UtcNow;
    this.firstBucketTimestamp = start;
    this.secondBucketTimestamp = start + this.bucketSpan;
    this.thirdBucketTimestamp = start + this.bucketSpan + this.bucketSpan;
}
// Canary entry point: once a second, emits process/GC metrics through the
// CloudWatch Agent endpoint under the "Canary" namespace. Runs forever.
static void Main(string[] args)
{
    var init = true;
    var configuration = new Configuration
    {
        LogGroupName = "/Canary/Dotnet/CloudWatchAgent/Metrics",
        EnvironmentOverride = Environments.ECS,
        AgentEndPoint = "tcp://127.0.0.1:25888"
    };
    var loggerFactory = LoggerFactory.Create(builder => builder
        .SetMinimumLevel(LogLevel.Information)
        .AddConsole());

    EnvironmentConfigurationProvider.Config = configuration;

    // get the assembly version (this does not reflect NuGet pre-releases)
    var packageVersion = GetPackageVersion();

    while (true)
    {
        // Dispose flushes the accumulated metrics each iteration.
        using (var logger = new MetricsLogger(loggerFactory))
        {
            logger.SetNamespace("Canary");
            var dimensionSet = new DimensionSet();
            dimensionSet.AddDimension("Runtime", "Dotnet");
            dimensionSet.AddDimension("Platform", "ECS");
            dimensionSet.AddDimension("Agent", "CloudWatchAgent");
            dimensionSet.AddDimension("Version", packageVersion);
            logger.SetDimensions(dimensionSet);
            using (var currentProcess = System.Diagnostics.Process.GetCurrentProcess())
            {
                // https://github.com/dotnet/corefx/blob/3633ea2c6bf9d52029681efeedd84fd7a06eb6ba/src/System.Diagnostics.Process/src/System/Diagnostics/ProcessManager.Linux.cs#L137
                logger.PutMetric("Memory.RSS", currentProcess.WorkingSet64, Unit.BYTES);
            }
            logger.PutMetric("Invoke", 1, Unit.NONE);

            // "Init" is reported exactly once, on the first iteration.
            if (init)
            {
                init = false;
                logger.PutMetric("Init", 1, Unit.NONE);
            }

            logger.PutMetric("Memory.HeapUsed", GC.GetTotalMemory(false), Unit.BYTES);
        }
        Thread.Sleep(1_000);
    }
}
// Creates the stream manager, a single-dimension keyed data store, and seeds
// it with one merged value.
public void SetUp()
{
    this.stream = new MemoryStream();
    this.streamManager = new RecyclableMemoryStreamManager(1 << 17, 2, 1 << 24);
    this.dimensions = new DimensionSet(new HashSet<Dimension>(new[] { new Dimension(AnyDimension) }));
    this.data = new KeyedDataStore<InternalHitCount>(this.dimensions, this.streamManager);

    // The original built a local InternalHitCount and called AddValue on it,
    // but never used it -- dead code, removed.
    this.data.AddValue(new DimensionSpecification { { AnyDimension, AnyDimensionValue } }, AnyDataValue);
    this.data.Merge();
}
// Verifies that dimensions merged via AddRange serialize as a single
// dimension-set array in the EMF JSON.
public void MetricDirective_AddDimensions_AddRange_Returns_ValidJson()
{
    var metricDirective = new MetricDirective();

    var mergedDimensions = new DimensionSet("Region", "US-West-2");
    mergedDimensions.AddRange(new DimensionSet("Instance", "instance-1"));
    metricDirective.CustomDimensionSets.Add(mergedDimensions);

    var jsonString = JsonConvert.SerializeObject(metricDirective);

    Assert.Equal("{\"Namespace\":\"aws-embedded-metrics\",\"Metrics\":[],\"Dimensions\":[[\"Region\",\"Instance\"]]}", jsonString);
}
// Builds a header describing a persisted data blob: its name, UTC time span,
// payload type, contributing sources, dimension set, and record count.
public PersistedDataHeader(string name, DateTime start, DateTime end, PersistedDataType dataType,
                           IEnumerable<PersistedDataSource> sources, DimensionSet dimensionSet, uint dataCount)
{
    this.Name = name;
    // Both endpoints are stored with a zero (UTC) offset.
    this.StartTime = new DateTimeOffset(start, TimeSpan.Zero);
    this.EndTime = new DateTimeOffset(end, TimeSpan.Zero);
    this.DataType = dataType;
    // Snapshot the enumerable so later mutation by the caller has no effect.
    this.Sources = new List<PersistedDataSource>(sources);
    this.DimensionSet = dimensionSet;
    this.DataCount = dataCount;
}
// Replaces the current bucket with a file-backed bucket reading the same file.
// Requires an existing bucket (we need its filename).
private void RecreateFileBackedDataBucket(DimensionSet set = null)
{
    // Capture the filename BEFORE disposing: the original read
    // this.bucket.Filename after Dispose(), a use-after-dispose.
    var filename = this.bucket.Filename;
    this.bucket.Dispose();

    this.bucket = new DataBucket<InternalHitCount>(
        new DimensionSet(set ?? this.twoDimensionSet),
        filename,
        this.properties.MemoryStreamManager,
        null);
}
// Converting to a superset dimension set must keep every original dimension
// value intact and fill the added dimension with the wildcard value.
public void CanConvertBufferToLargerDimensionSet()
{
    const int elementsPerDepth = 100;
    using (var set = this.Generate(new [] { elementsPerDepth, elementsPerDepth, elementsPerDepth }, true))
    {
        var originalDimSet = set.DimensionSet;
        var fourthDimension = new Dimension("4");
        // Same three dimensions plus one brand-new dimension.
        var newDimSet = new DimensionSet(new HashSet <Dimension>
        {
            originalDimSet.dimensions[0],
            originalDimSet.dimensions[1],
            originalDimSet.dimensions[2],
            fourthDimension,
        });
        var originalKeys = new List <Key>(set.Select(kvp => kvp.Key.Clone() as Key));
        set.Convert(newDimSet);
        Assert.AreSame(newDimSet, set.DimensionSet);
        int currentKey = 0;
        var fourthDimensionIndex = Array.IndexOf(newDimSet.dimensions, fourthDimension);
        foreach (var kvp in set)
        {
            for (var i = 0; i < newDimSet.dimensions.Length; ++i)
            {
                var originalKey = originalKeys[currentKey];
                if (i == fourthDimensionIndex)
                {
                    // The new dimension has no data; its slot must be the wildcard.
                    Assert.AreEqual(Key.WildcardDimensionValue, kvp.Key[i]);
                }
                else
                {
                    // Pre-existing dimensions keep their original string values.
                    var newDimension = newDimSet.dimensions[i];
                    var originalDimension = (from d in originalDimSet.dimensions where d.Equals(newDimension) select d).First();
                    var originalDimIndex = Array.IndexOf(originalDimSet.dimensions, originalDimension);
                    Assert.AreEqual(originalDimension.IndexToString(originalKey[originalDimIndex]),
                                    newDimension.IndexToString(kvp.Key[i]));
                }
            }
            ++currentKey;
        }
        Assert.AreEqual(originalKeys.Count, currentKey);
        // The added dimension should never have accumulated any values.
        Assert.AreEqual(0, fourthDimension.Values.Count());
    }
}
// Writes 1000 hits across a 10x10x10 dimension grid and checks that each
// added filter dimension narrows the aggregated hit count by a factor of ten.
public async Task GetBucketedDataCombinesDataAcrossDimensionValuesIfFilterValueIsNotProvided()
{
    var anyTimestamp = DateTime.Now;
    const string FirstDimension = "1st";
    const string SecondDimension = "2nd";
    const string ThirdDimension = "3rd";
    var dimensions = new DimensionSet(new HashSet <Dimension>
    {
        new Dimension(FirstDimension),
        new Dimension(SecondDimension),
        new Dimension(ThirdDimension)
    });
    var counter = await this.dataManager.CreateHitCounter(AnyCounterName, dimensions);
    var dimValues = new DimensionSpecification();
    // 10 values per dimension -> 1000 total increments.
    for (int i = 0; i < 10; ++i)
    {
        dimValues[FirstDimension] = i.ToString();
        for (int j = 0; j < 10; ++j)
        {
            dimValues[SecondDimension] = j.ToString();
            for (int k = 0; k < 10; ++k)
            {
                dimValues[ThirdDimension] = k.ToString();
                counter.Increment(dimValues, anyTimestamp);
            }
        }
    }
    // No filter: everything is combined into a single 1000-hit sample.
    var sample = this.BucketedDataQuery(counter, new DimensionSpecification()).First();
    Assert.AreEqual(DataSampleType.HitCount, sample.SampleType);
    Assert.AreEqual((ulong)1000, sample.HitCount);
    dimValues.Clear();
    dimValues[FirstDimension] = "8"; // take one tenth of things by filter.
    sample = this.BucketedDataQuery(counter, dimValues).First();
    Assert.AreEqual((ulong)100, sample.HitCount);
    dimValues[SecondDimension] = "6";
    sample = this.BucketedDataQuery(counter, dimValues).First();
    Assert.AreEqual((ulong)10, sample.HitCount);
    dimValues[ThirdDimension] = "7";
    sample = this.BucketedDataQuery(counter, dimValues).First();
    Assert.AreEqual((ulong)1, sample.HitCount);
}
/// <summary>
/// Adds default dimensions and properties from the specified environment into the specified metrics context.
/// </summary>
/// <param name="context">the context to configure with environment information</param>
private void ConfigureContextForEnvironment(MetricsContext context)
{
    // Respect any defaults the caller has already configured.
    if (context.HasDefaultDimensions)
    {
        return;
    }

    var defaults = new DimensionSet();
    defaults.AddDimension("ServiceName", _environment.Name);
    defaults.AddDimension("ServiceType", _environment.Type);
    context.DefaultDimensions = defaults;

    _environment.ConfigureContext(context);
}
// Restores invariants after deserialization: copies persisted fields into
// their counterparts and ensures collection fields are non-null.
void OnDeserialized(StreamingContext context)
{
    // NOTE(review): _conn/_connection and _dim/_dimensions appear to be
    // paired legacy/current fields kept in sync for serialization
    // compatibility -- confirm against the declaring class.
    _conn = _connection;
    _precisionDecimals = _precision;
    if (_connections == null)
    {
        _connections = new List <CDBConnection>();
    }
    if (_dimensions == null)
    {
        _dimensions = new DimensionSet();
    }
    _dim = _dimensions;
}
// Returns a neighbour of the subject: one randomly chosen dimension with one
// randomly chosen bit flipped in its binary representation.
private DimensionSet<BinaryRepresentation> GetNeighbour(DimensionSet<BinaryRepresentation> subject)
{
    // One RNG instance: two time-seeded Random objects created back-to-back
    // can yield correlated sequences.
    var rng = new Random();

    // Random.Next's upper bound is exclusive; the original Next(0, Count() - 1)
    // could never select the last dimension.
    var randomDimension = rng.Next(0, subject.Count());

    var alteredRepresentation = new StringBuilder(subject.ElementAt(randomDimension).AsString());
    var randomBit = rng.Next(0, alteredRepresentation.Length);
    alteredRepresentation[randomBit] = alteredRepresentation[randomBit] == '0' ? '1' : '0';

    var neighbour = subject.ToList();
    neighbour[randomDimension] = BinaryRepresentation.Create(alteredRepresentation.ToString());
    return new DimensionSet<BinaryRepresentation>(neighbour);
}
// Builds the backing stream/manager, a one-dimension keyed data store, and
// merges a single seeded value into it.
public void SetUp()
{
    this.stream = new MemoryStream();
    this.streamManager = new RecyclableMemoryStreamManager(1 << 17, 2, 1 << 24);
    this.dimensions = new DimensionSet(new HashSet <Dimension>(new[] { new Dimension(AnyDimension) }));
    this.data = new KeyedDataStore <InternalHitCount>(this.dimensions, this.streamManager);

    // Removed a dead local: an InternalHitCount was created and AddValue'd but
    // never passed to anything.
    this.data.AddValue(new DimensionSpecification { { AnyDimension, AnyDimensionValue } }, AnyDataValue);
    this.data.Merge();
}
// CreateKey must report allDimensionsProvided == false when the specification
// covers only a subset of the set's dimensions.
public void CreateKeySetsAllDimensionsProvidedFalseIfSomeDimensionsAreNotProvided()
{
    const string anyDim1 = "d1";
    const string anyDim2 = "d2";

    var dimensionSet = new DimensionSet(new HashSet<Dimension>
    {
        new Dimension(anyDim1),
        new Dimension(anyDim2)
    });

    // Value supplied for only one of the two dimensions.
    var partialDimensionValues = new DimensionSpecification { { anyDim1, "anyVal1" } };

    bool allDimensionsProvided;
    dimensionSet.CreateKey(partialDimensionValues, out allDimensionsProvided);

    Assert.AreEqual(false, allDimensionsProvided);
}
// CreateKey must report allDimensionsProvided == true when the specification
// covers every dimension of the set.
public void CreateKeySetsAllDimensionsProvidedTrueIfAllDimensionsAreInDictionary()
{
    const string anyDim1 = "d1";
    const string anyDim2 = "d2";

    var dimensionSet = new DimensionSet(new HashSet<Dimension>
    {
        new Dimension(anyDim1),
        new Dimension(anyDim2)
    });

    var allDimensionValues = new DimensionSpecification
    {
        { anyDim1, "anyVal1" },
        { anyDim2, "anyVal2" }
    };

    bool allDimensionsProvided;
    dimensionSet.CreateKey(allDimensionValues, out allDimensionsProvided);

    Assert.AreEqual(true, allDimensionsProvided);
}
/// <summary>
/// Execute a counter query against a single server.
/// </summary>
/// <param name="counterName">Name of the counter to query.</param>
/// <param name="server">Server to query.</param>
/// <param name="queryParameters">Optional query parameters for the counter.</param>
/// <returns>single response aggregated from all servers</returns>
public async Task<CounterQueryResponse> CounterQuery(string counterName, ServerInfo server,
                                                     IDictionary<string, string> queryParameters = null)
{
    if (server == null)
    {
        throw new ArgumentNullException("server");
    }

    // Wrap the single server as a one-source tiered request and delegate to
    // the fan-out overload.
    var singleSourceRequest = new TieredRequest
    {
        Sources = new List<ServerInfo> { server },
    };

    var dimensionSet = DimensionSet.FromQueryParameters(queryParameters);
    return await this.CounterQuery(counterName, singleSourceRequest, queryParameters, dimensionSet);
}
// A filter whose value for any one dimension differs from the key's value
// (whether longer, same length, or shorter) must not match the key.
public void MatchIsFalseIfValuesDiffer()
{
    const string anyDim1 = "d1";
    const string anyDim2 = "d2";
    const string anyDim3 = "d3";

    var dimensionSet = new DimensionSet(new HashSet<Dimension>
    {
        new Dimension(anyDim1),
        new Dimension(anyDim2),
        new Dimension(anyDim3),
    });
    var keyValueSet = new DimensionSpecification
    {
        { anyDim1, "val1" },
        { anyDim2, "val2" },
        { anyDim3, "val3" }
    };

    bool allDimensionsProvided;
    Key key = dimensionSet.CreateKey(keyValueSet, out allDimensionsProvided);

    // Substitutes: longer than the original value, same length, and shorter.
    foreach (var substitute in new[] { "valAnyOther", "valX", "v" })
    {
        foreach (var d in dimensionSet.Dimensions)
        {
            var filterValues = new DimensionSpecification(keyValueSet);
            filterValues[d.Name] = substitute;
            Key filter = dimensionSet.CreateKey(filterValues, out allDimensionsProvided);
            Assert.IsFalse(filter.Matches(key));
        }
    }
}
// A key must always match itself.
public void MatchIsTrueIfKeyAndFilterAreSame()
{
    const string anyDim1 = "d1";
    const string anyDim2 = "d2";
    const string anyDim3 = "d3";

    var dimensionSet = new DimensionSet(new HashSet<Dimension>
    {
        new Dimension(anyDim1),
        new Dimension(anyDim2),
        new Dimension(anyDim3),
    });
    var keyValueSet = new DimensionSpecification
    {
        { anyDim1, "val1" },
        { anyDim2, "val2" },
        { anyDim3, "val3" }
    };

    bool allDimensionsProvided;
    Key key = dimensionSet.CreateKey(keyValueSet, out allDimensionsProvided);

    Assert.IsTrue(key.Matches(key));
}
// Round-trips a dimension set through JSON and checks equality plus exact
// preservation of each dimension's AllowedValues (including null).
public void CanSerializeAndDeserializeDimensionSets()
{
    var set = new DimensionSet(new HashSet <Dimension>
    {
        new Dimension("one"),
        // One dimension carries an explicit allowed-values list.
        new Dimension("two", new HashSet <string> { "2", "ii" }),
        new Dimension("three"),
    });
    string json;
    var serializer = new JsonSerializer();
    using (var writer = new StringWriter())
    {
        serializer.Serialize(writer, set);
        json = writer.ToString();
    }
    using (var reader = new StringReader(json))
    using (var jsonReader = new JsonTextReader(reader))
    {
        var readSet = serializer.Deserialize <DimensionSet>(jsonReader);
        Assert.IsTrue(readSet.Equals(set));
        foreach (var dim in set.Dimensions)
        {
            var readDim = readSet.Dimensions.First(d => d.Name.Equals(dim.Name));
            if (dim.AllowedValues != null)
            {
                // Same count and same membership, order-independent.
                Assert.AreEqual(dim.AllowedValues.Count, readDim.AllowedValues.Count);
                foreach (var v in dim.AllowedValues)
                {
                    Assert.IsTrue(readDim.AllowedValues.Contains(v));
                }
            }
            else
            {
                // No allowed-values list must deserialize back to null, not empty.
                Assert.IsNull(readDim.AllowedValues);
            }
        }
    }
}
// Builds one- and two-dimension sets and a data bucket; if a stale bucket
// file exists on disk, deletes it and recreates the bucket cleanly.
public void SetUp()
{
    this.properties = new MockSharedDataSetProperties();
    this.oneDimensionSet = new DimensionSet(new HashSet<Dimension>
    {
        new Dimension("one"),
    });
    this.twoDimensionSet = new DimensionSet(new HashSet<Dimension>
    {
        new Dimension("one"),
        new Dimension("two")
    });
    this.timestamp = DateTime.UtcNow;
    this.currentDirectory = Directory.GetCurrentDirectory();

    this.RecreateDataBucket();
    if (File.Exists(this.bucket.Filename))
    {
        // A leftover file from a previous run would pollute this test.
        File.Delete(this.bucket.Filename);
        this.RecreateDataBucket();
    }
}
/// <summary>
/// Ctor. Builds a per-destination-dimension offset map into the source set so
/// keys can be converted between the two dimension orderings.
/// </summary>
/// <param name="source">Source dimension set.</param>
/// <param name="destination">Destination dimension set.</param>
public KeyConverter(DimensionSet source, DimensionSet destination)
{
    // Identical instances need no conversion at all.
    if (object.ReferenceEquals(source, destination))
    {
        this.skipConversion = true;
    }
    this.source = source;
    this.destination = destination;
    // offsetMap[d] = index of destination dimension d within the source set,
    // or UnmappedOffset when the source has no matching dimension.
    this.offsetMap = new int[destination.dimensions.Length];
    for (int d = 0; d < destination.dimensions.Length; ++d)
    {
        this.offsetMap[d] = UnmappedOffset;
        for (int s = 0; s < source.dimensions.Length; ++s)
        {
            if (destination.dimensions[d].Equals(source.dimensions[s]))
            {
                this.offsetMap[d] = s;
                break;
            }
        }
    }
}
// For dimension sets of varying sizes and value counts: serializes the set,
// verifies a full-length read round-trips exactly, then verifies that EVERY
// truncated length throws PersistedDataException rather than misreading.
public unsafe void TruncatedWrittenDataThrowsPersistedDataException()
{
    const int maxDims = 5;
    const int maxValuesPerDim = 5;
    var dimHashSet = new HashSet<Dimension>();
    for (var i = 0; i < maxDims; ++i)
    {
        // i dimensions named "0".."i-1".
        dimHashSet.Clear();
        for (var di = 0; di < i; ++di)
        {
            dimHashSet.Add(new Dimension(di.ToString()));
        }
        for (var v = 0; v < maxValuesPerDim; ++v)
        {
            // Register v values on every dimension before serializing.
            for (var dv = 0; dv < v; ++dv)
            {
                foreach (var dim in dimHashSet)
                {
                    dim.StringToIndex(dv.ToString());
                }
            }
            byte[] data;
            var dimSet = new DimensionSet(dimHashSet);
            using (var ms = new MemoryStream())
            {
                dimSet.Write(new BufferWriter(ms));
                data = ms.GetBuffer();
                var dataLength = ms.Length;
                fixed (byte* buffer = data)
                {
                    // Full-length read must round-trip the set and its values.
                    var readDS = new DimensionSet(new BufferReader(buffer, dataLength));
                    Assert.IsTrue(dimSet.Equals(readDS));
                    foreach (var dim in dimSet.dimensions)
                    {
                        var readDim = readDS.Dimensions.First(d => d.Name.Equals(dim.Name));
                        foreach (var val in dim.Values)
                        {
                            Assert.IsTrue(readDim.Values.Contains(val));
                        }
                    }
                    // Every strictly shorter length must be rejected.
                    for (var badLength = dataLength - 1; badLength > 0; --badLength)
                    {
                        try
                        {
                            new DimensionSet(new BufferReader(buffer, badLength));
                            Assert.Fail(); // Can't use assert.throws because of the pointer usage here.
                        }
                        catch (PersistedDataException) { }
                    }
                }
            }
        }
    }
}
// Increments a hit counter once per cell of a 10x10x10 dimension grid, then
// verifies that an empty filter aggregates all 1000 hits and that each added
// filter dimension divides the aggregate by ten.
public async Task GetBucketedDataCombinesDataAcrossDimensionValuesIfFilterValueIsNotProvided()
{
    var anyTimestamp = DateTime.Now;
    const string FirstDimension = "1st";
    const string SecondDimension = "2nd";
    const string ThirdDimension = "3rd";
    var dimensions = new DimensionSet(new HashSet<Dimension>
    {
        new Dimension(FirstDimension),
        new Dimension(SecondDimension),
        new Dimension(ThirdDimension)
    });
    var counter = await this.dataManager.CreateHitCounter(AnyCounterName, dimensions);
    var dimValues = new DimensionSpecification();
    for (int i = 0; i < 10; ++i)
    {
        dimValues[FirstDimension] = i.ToString();
        for (int j = 0; j < 10; ++j)
        {
            dimValues[SecondDimension] = j.ToString();
            for (int k = 0; k < 10; ++k)
            {
                dimValues[ThirdDimension] = k.ToString();
                counter.Increment(dimValues, anyTimestamp);
            }
        }
    }
    // Unfiltered query sees the combined total.
    var sample = this.BucketedDataQuery(counter, new DimensionSpecification()).First();
    Assert.AreEqual(DataSampleType.HitCount, sample.SampleType);
    Assert.AreEqual((ulong)1000, sample.HitCount);
    dimValues.Clear();
    dimValues[FirstDimension] = "8"; // take one tenth of things by filter.
    sample = this.BucketedDataQuery(counter, dimValues).First();
    Assert.AreEqual((ulong)100, sample.HitCount);
    dimValues[SecondDimension] = "6";
    sample = this.BucketedDataQuery(counter, dimValues).First();
    Assert.AreEqual((ulong)10, sample.HitCount);
    dimValues[ThirdDimension] = "7";
    sample = this.BucketedDataQuery(counter, dimValues).First();
    Assert.AreEqual((ulong)1, sample.HitCount);
}
// Swaps in a fresh time-based data bucket, disposing the previous one first.
// Uses a copy of twoDimensionSet unless an explicit set is supplied.
private void RecreateDataBucket(DimensionSet set = null)
{
    if (this.bucket != null)
    {
        this.bucket.Dispose();
    }

    var bucketDimensions = new DimensionSet(set ?? this.twoDimensionSet);
    this.bucket = new DataBucket<InternalHitCount>(bucketDimensions,
                                                   this.timestamp,
                                                   DefaultBucketTimeSpanTicks,
                                                   this.currentDirectory,
                                                   this.properties.MemoryStreamManager);
}
// Changing the value of any single dimension -- to a longer, equal-length, or
// shorter string -- must break the filter/key match.
public void MatchIsFalseIfValuesDiffer()
{
    const string anyDim1 = "d1";
    const string anyDim2 = "d2";
    const string anyDim3 = "d3";

    var dimensionSet = new DimensionSet(new HashSet<Dimension>
    {
        new Dimension(anyDim1),
        new Dimension(anyDim2),
        new Dimension(anyDim3),
    });
    var keyValueSet = new DimensionSpecification
    {
        { anyDim1, "val1" },
        { anyDim2, "val2" },
        { anyDim3, "val3" }
    };

    bool allDimensionsProvided;
    Key key = dimensionSet.CreateKey(keyValueSet, out allDimensionsProvided);

    // Longer values
    foreach (var dimension in dimensionSet.Dimensions)
    {
        var filterValues = new DimensionSpecification(keyValueSet);
        filterValues[dimension.Name] = "valAnyOther";
        Assert.IsFalse(dimensionSet.CreateKey(filterValues, out allDimensionsProvided).Matches(key));
    }

    // Same length
    foreach (var dimension in dimensionSet.Dimensions)
    {
        var filterValues = new DimensionSpecification(keyValueSet);
        filterValues[dimension.Name] = "valX";
        Assert.IsFalse(dimensionSet.CreateKey(filterValues, out allDimensionsProvided).Matches(key));
    }

    // Shorter length
    foreach (var dimension in dimensionSet.Dimensions)
    {
        var filterValues = new DimensionSpecification(keyValueSet);
        filterValues[dimension.Name] = "v";
        Assert.IsFalse(dimensionSet.CreateKey(filterValues, out allDimensionsProvided).Matches(key));
    }
}
// JSON round-trip: the deserialized set must equal the original, and each
// dimension's AllowedValues list must survive exactly (null stays null).
public void CanSerializeAndDeserializeDimensionSets()
{
    var set = new DimensionSet(new HashSet<Dimension>
    {
        new Dimension("one"),
        // This dimension restricts its values; the others do not.
        new Dimension("two", new HashSet<string> {"2", "ii"}),
        new Dimension("three"),
    });
    string json;
    var serializer = new JsonSerializer();
    using (var writer = new StringWriter())
    {
        serializer.Serialize(writer, set);
        json = writer.ToString();
    }
    using (var reader = new StringReader(json))
    using (var jsonReader = new JsonTextReader(reader))
    {
        var readSet = serializer.Deserialize<DimensionSet>(jsonReader);
        Assert.IsTrue(readSet.Equals(set));
        foreach (var dim in set.Dimensions)
        {
            var readDim = readSet.Dimensions.First(d => d.Name.Equals(dim.Name));
            if (dim.AllowedValues != null)
            {
                // Membership comparison; ordering is not significant.
                Assert.AreEqual(dim.AllowedValues.Count, readDim.AllowedValues.Count);
                foreach (var v in dim.AllowedValues)
                {
                    Assert.IsTrue(readDim.AllowedValues.Contains(v));
                }
            }
            else
            {
                // Unrestricted dimensions must come back unrestricted.
                Assert.IsNull(readDim.AllowedValues);
            }
        }
    }
}
public void CanConvertBufferToLargerDimensionSet()
{
    // Converting data to a superset dimension set should keep all existing
    // key values and fill the added dimension with the wildcard value.
    const int elementsPerDepth = 100;
    using (var set = this.Generate(new[] {elementsPerDepth, elementsPerDepth, elementsPerDepth}, true))
    {
        var originalDimSet = set.DimensionSet;
        var fourthDimension = new Dimension("4");
        var newDimSet = new DimensionSet(new HashSet<Dimension>
        {
            originalDimSet.dimensions[0],
            originalDimSet.dimensions[1],
            originalDimSet.dimensions[2],
            fourthDimension,
        });

        // Snapshot keys prior to conversion for comparison afterwards.
        var originalKeys = new List<Key>(set.Select(kvp => kvp.Key.Clone() as Key));
        set.Convert(newDimSet);
        Assert.AreSame(newDimSet, set.DimensionSet);

        int keyIndex = 0;
        var fourthDimensionIndex = Array.IndexOf(newDimSet.dimensions, fourthDimension);
        foreach (var kvp in set)
        {
            var originalKey = originalKeys[keyIndex];
            for (var i = 0; i < newDimSet.dimensions.Length; ++i)
            {
                if (i == fourthDimensionIndex)
                {
                    // The added dimension had no data so it must be wildcarded.
                    Assert.AreEqual(Key.WildcardDimensionValue, kvp.Key[i]);
                }
                else
                {
                    var newDimension = newDimSet.dimensions[i];
                    var originalDimension =
                        originalDimSet.dimensions.Where(d => d.Equals(newDimension)).First();
                    var originalDimIndex = Array.IndexOf(originalDimSet.dimensions, originalDimension);
                    Assert.AreEqual(originalDimension.IndexToString(originalKey[originalDimIndex]),
                                    newDimension.IndexToString(kvp.Key[i]));
                }
            }
            ++keyIndex;
        }

        Assert.AreEqual(originalKeys.Count, keyIndex);
        Assert.AreEqual(0, fourthDimension.Values.Count());
    }
}
public void CanConvertBufferToSmallerDimensionSet()
{
    // Converting data to a subset dimension set should preserve the values
    // of every dimension that survives the conversion.
    const int elementsPerDepth = 100;
    using (var set = this.Generate(new[] {elementsPerDepth, elementsPerDepth, elementsPerDepth}, true))
    {
        var originalDimSet = set.DimensionSet;
        var newDimSet = new DimensionSet(new HashSet<Dimension>
        {
            originalDimSet.dimensions[0],
            originalDimSet.dimensions[1],
        });

        // Snapshot keys prior to conversion for comparison afterwards.
        var originalKeys = new List<Key>(set.Select(kvp => kvp.Key.Clone() as Key));
        set.Convert(newDimSet);
        Assert.AreSame(newDimSet, set.DimensionSet);

        int keyIndex = 0;
        foreach (var kvp in set)
        {
            var originalKey = originalKeys[keyIndex];
            for (var i = 0; i < newDimSet.dimensions.Length; ++i)
            {
                var newDimension = newDimSet.dimensions[i];
                var originalDimension =
                    originalDimSet.dimensions.FirstOrDefault(d => d.Equals(newDimension));
                if (originalDimension != null)
                {
                    var originalDimIndex = Array.IndexOf(originalDimSet.dimensions, originalDimension);
                    Assert.AreEqual(originalDimension.IndexToString(originalKey[originalDimIndex]),
                                    newDimension.IndexToString(kvp.Key[i]));
                }
            }
            ++keyIndex;
        }

        Assert.AreEqual(originalKeys.Count, keyIndex);
    }
}
public void DataMayOnlyBeConvertedOnce()
{
    // Once sealed data has been converted to a new dimension set, any
    // further conversion attempt (even back to the original set, or to the
    // same set again) must throw NotSupportedException.
    var dimSet1 = new DimensionSet(new HashSet<Dimension> {new Dimension("one")});
    var dimSet2 = new DimensionSet(new HashSet<Dimension> {new Dimension("one")});

    var conversions = new[]
    {
        new[] {dimSet1, dimSet1},
        new[] {dimSet1, dimSet2},
        new[] {dimSet2, dimSet1},
    };

    foreach (var pair in conversions)
    {
        var initialSet = pair[0];
        var convertTo = pair[1];
        using (var data = GetTestData(1, initialSet))
        {
            data.Seal();
            data.Convert(convertTo);

            Assert.Throws<NotSupportedException>(() => data.Convert(initialSet));
            Assert.Throws<NotSupportedException>(() => data.Convert(convertTo));
        }
    }
}
public async Task BatchQueryMultipleQueriesForSameCounterWithDifferentParametersWorksFine()
{
    // Fill up the taco truck
    var dimensionSet = new DimensionSet(new HashSet<Dimension> {new Dimension("Filling")});
    var counter = await this.dataManager.CreateHitCounter("/Tacos", dimensionSet);

    var chickenTacos = new DimensionSpecification {{"Filling", "Chicken"}};
    var beefTacos = new DimensionSpecification {{"Filling", "Beef"}};
    var veggieTacos = new DimensionSpecification {{"Filling", "TOFU"}};
    var baconTacos = new DimensionSpecification {{"Filling", "bacon"}};

    counter.Increment(100, chickenTacos);
    counter.Increment(200, beefTacos);
    counter.Increment(300, veggieTacos);
    this.dataManager.Flush();

    // One unfiltered query plus two filtered queries against the same counter.
    var batchRequest = new BatchQueryRequest();
    batchRequest.Queries.Add(new BatchCounterQuery {CounterName = "/Tacos", UserContext = "TotalTacos"});
    batchRequest.Queries.Add(new BatchCounterQuery
    {
        CounterName = "/Tacos",
        UserContext = "CluckCluck",
        QueryParameters = chickenTacos.Data
    });
    batchRequest.Queries.Add(new BatchCounterQuery
    {
        CounterName = "/Tacos",
        UserContext = "BACON!",
        QueryParameters = baconTacos.Data
    });

    var httpResponse = await this.httpClient.PostAsync(
        TestUtils.GetUri(this.server, RestCommands.BatchQueryCommand, string.Empty),
        GetRequestPayload(batchRequest));
    Assert.AreEqual(HttpStatusCode.OK, httpResponse.StatusCode);

    using (var bondStream = new ReaderStream(await httpResponse.Content.ReadAsStreamAsync()))
    {
        var bondReader = bondStream.CreateBondedCompactBinaryReader<BatchQueryResponse>();
        var batchResponse = bondReader.Deserialize();
        Assert.AreEqual(3, batchResponse.Responses.Count);

        // unfiltered should have 100+200+300 hitcount
        VerifyHitCounter(batchResponse, "TotalTacos", 600);
        //only 100 chicken tacos
        VerifyHitCounter(batchResponse, "CluckCluck", 100);
        //sadly, there is no bacon...
        VerifyHitCounter(batchResponse, "BACON!", -1);
    }
}
public void MatchIsTrueIfSomeSameValuesAreProvidedInFilter()
{
    // A filter that specifies only a subset of the dimensions — with values
    // that agree with the key — still matches the full key.
    const string anyDim1 = "d1";
    const string anyDim2 = "d2";
    const string anyDim3 = "d3";

    var dimensionSet = new DimensionSet(new HashSet<Dimension>
    {
        new Dimension(anyDim1),
        new Dimension(anyDim2),
        new Dimension(anyDim3),
    });

    var keyValueSet = new DimensionSpecification
    {
        {anyDim1, "val1"}, {anyDim2, "val2"}, {anyDim3, "val3"}
    };

    bool allDimensionsProvided;
    Key fullKey = dimensionSet.CreateKey(keyValueSet, out allDimensionsProvided);

    // Filter on the middle dimension only.
    var partialValues = new DimensionSpecification {{anyDim2, "val2"}};
    Key filter = dimensionSet.CreateKey(partialValues, out allDimensionsProvided);

    Assert.IsTrue(filter.Matches(fullKey));
}
// FIX: this test was declared 'async void'. Test frameworks cannot await an
// async void method, so assertion failures or exceptions thrown after the
// first await are unobservable (the test may pass vacuously or crash the
// runner). Declared as 'async Task' like the other async tests in this file.
public async Task BatchQueryAggregatesFanoutCorrectly()
{
    const string tacoTruck = "/Tacos";
    const string competingBurritoTruck = "/Burritos";

    var dimensionSet = new DimensionSet(new HashSet<Dimension>());
    var counter = await this.dataManager.CreateHitCounter(tacoTruck, dimensionSet);

    // locally store dim1
    counter.Increment(100, new DimensionSpecification());
    this.dataManager.Flush();

    // Two queries, fanned out to two additional sources.
    var data = new BatchQueryRequest();
    data.Queries.Add(new BatchCounterQuery { CounterName = tacoTruck, UserContext = tacoTruck });
    data.Queries.Add(new BatchCounterQuery { CounterName = competingBurritoTruck, UserContext = competingBurritoTruck });
    data.Sources.Add(new ServerInfo {Hostname = "a", Port = 42});
    data.Sources.Add(new ServerInfo {Hostname = "b", Port = 42});

    var sampleStart = DateTime.Now;
    var sampleEnd = sampleStart.AddHours(1);

    // remotely return 100 for dim2 only
    DistributedQueryClient.RequesterFactory = new MockHttpRequesterFactory(message =>
    {
        var batchResponse = new BatchQueryResponse();
        batchResponse.RequestDetails.Add(new RequestDetails
        {
            Server = new ServerInfo { Hostname = "bob", Port = 42 },
            HttpResponseCode = 200
        });

        var counterResponse = new CounterQueryResponse
        {
            HttpResponseCode = 200,
            UserContext = competingBurritoTruck,
            Samples = new List<DataSample>
            {
                new DataSample
                {
                    HitCount = 100,
                    Dimensions = new Dictionary<string, string>(),
                    SampleType = DataSampleType.HitCount,
                    StartTime = sampleStart.ToMillisecondTimestamp(),
                    EndTime = sampleEnd.ToMillisecondTimestamp()
                }
            }
        };
        batchResponse.Responses.Add(counterResponse);
        return MockDataFactory.CreateResponse(batchResponse);
    });

    var response = await this.httpClient.PostAsync(
        TestUtils.GetUri(this.server, RestCommands.BatchQueryCommand, string.Empty),
        GetRequestPayload(data));
    Assert.AreEqual(HttpStatusCode.OK, response.StatusCode);

    var responseData = await MockDataFactory.ReadResponseData<BatchQueryResponse>(response);
    Assert.IsNotNull(responseData);

    // Local server + two mocked remotes = 3 request detail entries.
    Assert.AreEqual(3, responseData.RequestDetails.Count);
    Assert.IsTrue(responseData.RequestDetails.All(x => x.HttpResponseCode == 200));
    Assert.AreEqual(2, responseData.Responses.Count);

    // Tacos: local-only hit count of 100. Burritos: 100 from each of the
    // two mocked remotes, aggregated to 200.
    Assert.AreEqual(1, responseData.Responses.Count(x => x.UserContext.Equals(tacoTruck) && x.Samples[0].HitCount == 100));
    Assert.AreEqual(1, responseData.Responses.Count(x => x.UserContext.Equals(competingBurritoTruck) && x.Samples[0].HitCount == 200));
}
public void CreateKeySetsAllDimensionsProvidedTrueIfAllDimensionsAreInDictionary()
{
    // When the specification supplies a value for every dimension in the
    // set, CreateKey must report allDimensionsProvided = true.
    const string anyDim1 = "d1";
    const string anyDim2 = "d2";

    var dimensionSet = new DimensionSet(new HashSet<Dimension>
    {
        new Dimension(anyDim1),
        new Dimension(anyDim2),
    });

    var allDimensionValues = new DimensionSpecification
    {
        {anyDim1, "anyVal1"},
        {anyDim2, "anyVal2"},
    };

    bool allDimensionsProvided;
    dimensionSet.CreateKey(allDimensionValues, out allDimensionsProvided);

    Assert.AreEqual(true, allDimensionsProvided);
}
public void ConstructorHasDimensionsOrderedByDistinctValueCount()
{
    // Seed three dimensions with 3, 2 and 1 distinct values respectively;
    // the constructor should order them by descending distinct-value count.
    var bigDimension = new Dimension("big");
    foreach (var v in new[] {"1", "2", "3"})
    {
        bigDimension.StringToIndex(v);
    }

    var mediumDimension = new Dimension("medium");
    foreach (var v in new[] {"1", "2"})
    {
        mediumDimension.StringToIndex(v);
    }

    var smallDimension = new Dimension("small");
    smallDimension.StringToIndex("1");

    var set = new DimensionSet(new HashSet<Dimension> {smallDimension, mediumDimension, bigDimension});
    Assert.AreSame(set.dimensions[0], bigDimension);
    Assert.AreSame(set.dimensions[1], mediumDimension);
    Assert.AreSame(set.dimensions[2], smallDimension);

    // Now make 'small' actually the largest.
    foreach (var v in new[] {"2", "3", "4"})
    {
        smallDimension.StringToIndex(v);
    }

    // Copy-constructed set re-sorts by the updated counts. (AreEqual rather
    // than AreSame here, mirroring the different construction path.)
    var newSet = new DimensionSet(set);
    Assert.AreEqual(newSet.dimensions[0], smallDimension);
    Assert.AreEqual(newSet.dimensions[1], bigDimension);
    Assert.AreEqual(newSet.dimensions[2], mediumDimension);
}
// Hammers a single bucket from multiple threads with a mix of reads
// (GetMatches), writes (AddValue) and ReleaseData calls to shake out races
// between releasing and concurrent access.
public void SealAndReleaseAreThreadSafe()
{
    var filterableDimension = new DimensionSet(new HashSet<Dimension> { new Dimension("thing") });
    using (var filterableBucket =
        new DataBucket<InternalHitCount>(filterableDimension, this.timestamp,
                                         DefaultBucketTimeSpanTicks,
                                         this.currentDirectory,
                                         // FIX: was bare 'properties.' — every sibling test
                                         // qualifies the field as 'this.properties'.
                                         this.properties.MemoryStreamManager))
    {
        var allDims = new DimensionSpecification {{"thing", "thing"}};

        // Seed some data in parallel first.
        Parallel.For(0, 10, (i) => filterableBucket.AddValue(allDims, i));

        // Interleave reads, writes and releases across threads.
        Parallel.For(0, 100, (i) =>
        {
            switch (i % 3)
            {
            case 0:
                foreach (var item in filterableBucket.GetMatches(allDims))
                {
                    Assert.IsNotNull(item);
                }
                break;

            case 1:
                filterableBucket.AddValue(allDims, 11);
                break;

            case 2:
                filterableBucket.ReleaseData();
                break;
            }
        });
    }
}
public void MatchIsFalseIfFilterHasMoreOrLessValuesThanKey()
{
    // A filter built from a dimension set of a different size than the
    // key's can never match, even when all shared values agree.
    const string anyDim1 = "d1";
    const string anyDim2 = "d2";
    const string anyDim3 = "d3";

    var dimensionSet = new DimensionSet(new HashSet<Dimension>
    {
        new Dimension(anyDim1),
        new Dimension(anyDim2),
        new Dimension(anyDim3),
    });

    var keyValueSet = new DimensionSpecification
    {
        {anyDim1, "val1"}, {anyDim2, "val2"}, {anyDim3, "val3"}
    };

    bool allDimensionsProvided;
    Key key = dimensionSet.CreateKey(keyValueSet, out allDimensionsProvided);

    // Fewer dimensions than the key.
    var smallerDimensionSet = new DimensionSet(
        new HashSet<Dimension> {new Dimension(anyDim1), new Dimension(anyDim2)});
    Key smallerFilter = smallerDimensionSet.CreateKey(keyValueSet, out allDimensionsProvided);
    Assert.IsFalse(smallerFilter.Matches(key));

    // More dimensions than the key.
    var largerDimensionSet = new DimensionSet(
        new HashSet<Dimension>(dimensionSet.Dimensions) {new Dimension("anotherDim")});
    Key largerFilter = largerDimensionSet.CreateKey(keyValueSet, out allDimensionsProvided);
    Assert.IsFalse(largerFilter.Matches(key));
}
// Recreates this.bucket from its backing file, optionally with a new
// dimension set (defaults to a copy of this.twoDimensionSet).
//
// FIX: the previous version null-guarded the Dispose call but then
// unconditionally read this.bucket.Filename — so the guard was dead (a null
// bucket would still throw) AND the Filename was read from an already
// disposed instance. Capture the filename before disposing instead.
private void RecreateFileBackedDataBucket(DimensionSet set = null)
{
    var filename = this.bucket.Filename;
    this.bucket.Dispose();

    this.bucket = new DataBucket<InternalHitCount>(
        new DimensionSet(set ?? this.twoDimensionSet),
        filename,
        this.properties.MemoryStreamManager,
        null);
}
public void CreateKeySetsAllDimensionsProvidedFalseIfSomeDimensionsAreNotProvided()
{
    // When the specification omits one of the dimensions, CreateKey must
    // report allDimensionsProvided = false.
    const string anyDim1 = "d1";
    const string anyDim2 = "d2";

    var dimensionSet = new DimensionSet(new HashSet<Dimension>
    {
        new Dimension(anyDim1),
        new Dimension(anyDim2),
    });

    // Supply a value for only the first of the two dimensions.
    var partialDimensionValues = new DimensionSpecification {{anyDim1, "anyVal1"}};

    bool allDimensionsProvided;
    dimensionSet.CreateKey(partialDimensionValues, out allDimensionsProvided);

    Assert.AreEqual(false, allDimensionsProvided);
}
public async Task GetBucketedDataSplitsDataByDimension()
{
    var anyTimestamp = DateTime.Now;
    const string FirstDimension = "1st";
    const string SecondDimension = "2nd";
    const string ThirdDimension = "3rd";

    var dimensions = new DimensionSet(new HashSet<Dimension>
    {
        new Dimension(FirstDimension),
        new Dimension(SecondDimension),
        new Dimension(ThirdDimension)
    });
    var counter = await this.dataManager.CreateHitCounter(AnyCounterName, dimensions);

    // Write one hit for every combination in a 10x10x10 value grid.
    var dimValues = new DimensionSpecification();
    for (int i = 0; i < 10; ++i)
    {
        dimValues[FirstDimension] = i.ToString();
        for (int j = 0; j < 10; ++j)
        {
            dimValues[SecondDimension] = j.ToString();
            for (int k = 0; k < 10; ++k)
            {
                dimValues[ThirdDimension] = k.ToString();
                counter.Increment(dimValues, anyTimestamp);
            }
        }
    }

    // Slice without any other filtering.
    dimValues.Clear();
    dimValues[ReservedDimensions.DimensionDimension] = FirstDimension;
    var samples = this.BucketedDataQuery(counter, dimValues, 10);

    // Each of the 10 first-dimension values appears exactly once, each
    // accounting for 100 of the 1000 total hits.
    var seenValues = new HashSet<string>();
    foreach (var sample in samples)
    {
        Assert.AreEqual((ulong)100, sample.HitCount);
        Assert.AreEqual(1, sample.Dimensions.Count);
        Assert.IsTrue(sample.Dimensions.ContainsKey(FirstDimension));
        Assert.IsFalse(seenValues.Contains(sample.Dimensions[FirstDimension]));
        seenValues.Add(sample.Dimensions[FirstDimension]);
    }
    Assert.AreEqual(10, seenValues.Count);

    // Now slice by one dimension and split by another.
    dimValues[FirstDimension] = "4";
    dimValues[ReservedDimensions.DimensionDimension] = SecondDimension;
    samples = this.BucketedDataQuery(counter, dimValues, 10);

    // With the first dimension pinned to "4", each second-dimension value
    // accounts for 10 hits and appears exactly once.
    seenValues.Clear();
    foreach (var sample in samples)
    {
        Assert.AreEqual((ulong)10, sample.HitCount);
        Assert.AreEqual(2, sample.Dimensions.Count);
        Assert.AreEqual("4", sample.Dimensions[FirstDimension]);
        Assert.IsTrue(sample.Dimensions.ContainsKey(SecondDimension));
        Assert.IsFalse(seenValues.Contains(sample.Dimensions[SecondDimension]));
        seenValues.Add(sample.Dimensions[SecondDimension]);
    }
    Assert.AreEqual(10, seenValues.Count);
}
public void SplitByDimensionWithFiltersWorksProperly()
{
    var filterableDimension = new DimensionSet(
        new HashSet<Dimension> {new Dimension("thing"), new Dimension("meat")});
    using (var filterableBucket =
        new DataBucket<InternalHitCount>(filterableDimension, this.timestamp,
                                         DefaultBucketTimeSpanTicks,
                                         this.currentDirectory,
                                         this.properties.MemoryStreamManager))
    {
        // Two entries: (thingOne, bacon) and (thingTwo, pepperoni).
        var queryDimensions = new DimensionSpecification();
        queryDimensions["thing"] = "thingOne";
        queryDimensions["meat"] = "bacon";
        filterableBucket.AddValue(queryDimensions, 100);

        queryDimensions["thing"] = "thingTwo";
        queryDimensions["meat"] = "pepperoni";
        filterableBucket.AddValue(queryDimensions, 200);

        filterableBucket.Seal();

        // thingOne and thingTwo will match with no filter
        var unfiltered = filterableBucket.GetMatchesSplitByDimension(new DimensionSpecification(), "thing");
        Assert.AreEqual(2, unfiltered.Sum(match => match.DataCount));

        // only thingOne matches bacon
        var bestMatchFilter = new DimensionSpecification {{"meat", "bacon"}};
        var filtered = filterableBucket.GetMatchesSplitByDimension(bestMatchFilter, "thing");
        Assert.AreEqual(1, filtered.Sum(match => match.DataCount));
    }
}
/// <summary>
/// Creates a typed <see cref="IPersistedDataAggregator"/> appropriate for the
/// given persisted data type.
/// </summary>
/// <param name="dataType">Type of persisted data to aggregate.</param>
/// <param name="name">Name of the data being aggregated.</param>
/// <param name="dimensionSet">Dimension set for the aggregated data.</param>
/// <param name="sources">Source hostnames to aggregate from.</param>
/// <param name="startTime">Start of the aggregation window.</param>
/// <param name="endTime">End of the aggregation window.</param>
/// <param name="streamManager">Memory stream manager for buffer reuse.</param>
/// <returns>An aggregator specialized for <paramref name="dataType"/>.</returns>
/// <exception cref="ArgumentException">
/// Thrown if <paramref name="dataType"/> is not a supported type.
/// </exception>
public static IPersistedDataAggregator CreateAggregatorForSampleType(
    MetricSystem.PersistedDataType dataType,
    string name,
    DimensionSet dimensionSet,
    IEnumerable<string> sources,
    DateTime startTime,
    DateTime endTime,
    RecyclableMemoryStreamManager streamManager)
{
    switch (dataType)
    {
    case MetricSystem.PersistedDataType.HitCount:
        return new PersistedDataAggregator<InternalHitCount>(
            name, dimensionSet, sources, startTime, endTime, streamManager);

    case MetricSystem.PersistedDataType.VariableEncodedHistogram:
        return new PersistedDataAggregator<InternalHistogram>(
            name, dimensionSet, sources, startTime, endTime, streamManager);

    default:
        // FIX: use nameof instead of the "dataType" string literal so the
        // exception's parameter name stays correct under rename refactoring.
        throw new ArgumentException("Invalid data type", nameof(dataType));
    }
}