/// <summary>
/// Verifies that combining two sealed 5-minute buckets into a single 10-minute
/// bucket preserves per-bucket dimension counts and sums the hit counts for
/// dimensions written to both source buckets (867 + 867).
/// </summary>
public void CombinedBucketsAreMergedCorrectly()
{
    var ts1 = new DateTime(2014, 05, 10, 0, 0, 0);
    var ts2 = new DateTime(2014, 05, 10, 0, 5, 0);

    var sharedDimensions = new DimensionSpecification { { "one", "a1" }, { "two", "a2" } };
    var bucket1Dimensions = new DimensionSpecification { { "one", "b1" }, { "two", "b2" } };
    var bucket2Dimensions = new DimensionSpecification { { "one", "c1" }, { "two", "c2" } };

    // 'using' blocks replace the original trailing Dispose() calls, which were
    // skipped whenever an assertion below threw, leaking the buckets.
    using (var bucket1 = new DataBucket<InternalHitCount>(new DimensionSet(this.twoDimensionSet),
                                                          ts1, TimeSpan.FromMinutes(5).Ticks,
                                                          null, this.properties.MemoryStreamManager))
    using (var bucket2 = new DataBucket<InternalHitCount>(new DimensionSet(this.twoDimensionSet),
                                                          ts2, TimeSpan.FromMinutes(5).Ticks,
                                                          null, this.properties.MemoryStreamManager))
    {
        bucket1.AddValue(sharedDimensions, 867);
        bucket1.AddValue(bucket1Dimensions, 5309);

        bucket2.AddValue(sharedDimensions, 867);
        bucket2.AddValue(bucket2Dimensions, 42);

        // Buckets must be sealed before they can be merged into a combined bucket.
        bucket1.Seal();
        bucket2.Seal();

        using (var bucket3 = new DataBucket<InternalHitCount>(new[] { bucket1, bucket2 },
                                                              new DimensionSet(this.twoDimensionSet),
                                                              ts1, TimeSpan.FromMinutes(10).Ticks,
                                                              null, this.properties.MemoryStreamManager))
        {
            // Values unique to one source bucket pass through unchanged.
            var match = bucket3.GetMatches(bucket1Dimensions).First().Data;
            Assert.AreEqual((ulong)5309, match.HitCount);

            match = bucket3.GetMatches(bucket2Dimensions).First().Data;
            Assert.AreEqual((ulong)42, match.HitCount);

            // Values present in both source buckets are summed.
            match = bucket3.GetMatches(sharedDimensions).First().Data;
            Assert.AreEqual((ulong)867 * 2, match.HitCount);
        }
    }
}
/// <summary>
/// Verifies that compacting several sealed one-minute histogram buckets into a
/// single one-hour bucket yields a combined sample count equal to the total
/// number of samples written across all source buckets.
/// </summary>
public void HistogramCompactionIsCorrect()
{
    const int numBuckets = 5;
    const int samplesPerBucket = 100;

    var buckets = new DataBucket<InternalHistogram>[numBuckets];
    try
    {
        for (int i = 0; i < numBuckets; i++)
        {
            var newBucket = new DataBucket<InternalHistogram>(this.dimensionSet, DateTime.Now,
                                                              TimeSpan.FromMinutes(1).Ticks,
                                                              this.storagePath,
                                                              this.properties.MemoryStreamManager);
            buckets[i] = newBucket;

            for (int sample = 0; sample < samplesPerBucket; sample++)
            {
                newBucket.AddValue(this.dimensions, sample);
            }

            // Source buckets must be sealed before compaction.
            newBucket.Seal();
        }

        // 'using' ensures disposal even if the assertions throw.
        using (var compactedBucket = new DataBucket<InternalHistogram>(buckets, this.dimensionSet,
                                                                       DateTime.Now,
                                                                       TimeSpan.FromMinutes(60).Ticks,
                                                                       this.storagePath,
                                                                       this.properties.MemoryStreamManager))
        {
            var data = compactedBucket.GetMatches(this.dimensions);
            var singleVal = data.First().Data;
            Assert.IsNotNull(singleVal);
            Assert.AreEqual(numBuckets * samplesPerBucket, (int)singleVal.SampleCount);
        }
    }
    finally
    {
        // The original test never disposed the source buckets, leaking them.
        // Null check guards against a construction failure partway through.
        foreach (var bucket in buckets)
        {
            if (bucket != null)
            {
                bucket.Dispose();
            }
        }
    }
}
/// <summary>
/// Exercises a single bucket with concurrent queries, writes, and data
/// releases to check that these operations do not corrupt state or throw
/// when interleaved across threads.
/// </summary>
public void SealAndReleaseAreThreadSafe()
{
    var filterableDimension = new DimensionSet(new HashSet<Dimension> { new Dimension("thing") });
    using (var filterableBucket = new DataBucket<InternalHitCount>(filterableDimension,
                                                                   this.timestamp,
                                                                   DefaultBucketTimeSpanTicks,
                                                                   this.currentDirectory,
                                                                   properties.MemoryStreamManager))
    {
        var allDims = new DimensionSpecification { { "thing", "thing" } };

        // Seed the bucket with some initial values from multiple threads.
        Parallel.For(0, 10, i => filterableBucket.AddValue(allDims, i));

        // Interleave reads, writes, and releases concurrently; one third of
        // the iterations perform each kind of operation.
        Parallel.For(0, 100, i =>
        {
            var operation = i % 3;
            if (operation == 0)
            {
                foreach (var item in filterableBucket.GetMatches(allDims))
                {
                    Assert.IsNotNull(item);
                }
            }
            else if (operation == 1)
            {
                filterableBucket.AddValue(allDims, 11);
            }
            else
            {
                filterableBucket.ReleaseData();
            }
        });
    }
}
/// <summary>
/// Verifies that compacting several sealed one-minute histogram buckets into a
/// single one-hour bucket yields a combined sample count equal to the total
/// number of samples written across all source buckets.
/// </summary>
public void HistogramCompactionIsCorrect()
{
    const int numBuckets = 5;
    const int samplesPerBucket = 100;

    var buckets = new DataBucket<InternalHistogram>[numBuckets];
    try
    {
        for (int i = 0; i < numBuckets; i++)
        {
            var newBucket = new DataBucket<InternalHistogram>(this.dimensionSet, DateTime.Now,
                                                              TimeSpan.FromMinutes(1).Ticks,
                                                              this.storagePath,
                                                              this.properties.MemoryStreamManager);
            buckets[i] = newBucket;

            for (int sample = 0; sample < samplesPerBucket; sample++)
            {
                newBucket.AddValue(this.dimensions, sample);
            }

            // Source buckets must be sealed before compaction.
            newBucket.Seal();
        }

        // 'using' ensures disposal even if the assertions throw.
        using (var compactedBucket = new DataBucket<InternalHistogram>(buckets, this.dimensionSet,
                                                                       DateTime.Now,
                                                                       TimeSpan.FromMinutes(60).Ticks,
                                                                       this.storagePath,
                                                                       this.properties.MemoryStreamManager))
        {
            var data = compactedBucket.GetMatches(this.dimensions);
            var singleVal = data.First().Data;
            Assert.IsNotNull(singleVal);
            Assert.AreEqual(numBuckets * samplesPerBucket, (int)singleVal.SampleCount);
        }
    }
    finally
    {
        // The original test never disposed the source buckets, leaking them.
        // Null check guards against a construction failure partway through.
        foreach (var bucket in buckets)
        {
            if (bucket != null)
            {
                bucket.Dispose();
            }
        }
    }
}
/// <summary>
/// Verifies that combining two sealed 5-minute buckets into a single 10-minute
/// bucket preserves per-bucket dimension counts and sums the hit counts for
/// dimensions written to both source buckets (867 + 867).
/// </summary>
public void CombinedBucketsAreMergedCorrectly()
{
    var ts1 = new DateTime(2014, 05, 10, 0, 0, 0);
    var ts2 = new DateTime(2014, 05, 10, 0, 5, 0);

    var sharedDimensions = new DimensionSpecification { { "one", "a1" }, { "two", "a2" } };
    var bucket1Dimensions = new DimensionSpecification { { "one", "b1" }, { "two", "b2" } };
    var bucket2Dimensions = new DimensionSpecification { { "one", "c1" }, { "two", "c2" } };

    // 'using' blocks replace the original trailing Dispose() calls, which were
    // skipped whenever an assertion below threw, leaking the buckets.
    using (var bucket1 = new DataBucket<InternalHitCount>(new DimensionSet(this.twoDimensionSet),
                                                          ts1, TimeSpan.FromMinutes(5).Ticks,
                                                          null, this.properties.MemoryStreamManager))
    using (var bucket2 = new DataBucket<InternalHitCount>(new DimensionSet(this.twoDimensionSet),
                                                          ts2, TimeSpan.FromMinutes(5).Ticks,
                                                          null, this.properties.MemoryStreamManager))
    {
        bucket1.AddValue(sharedDimensions, 867);
        bucket1.AddValue(bucket1Dimensions, 5309);

        bucket2.AddValue(sharedDimensions, 867);
        bucket2.AddValue(bucket2Dimensions, 42);

        // Buckets must be sealed before they can be merged into a combined bucket.
        bucket1.Seal();
        bucket2.Seal();

        using (var bucket3 = new DataBucket<InternalHitCount>(new[] { bucket1, bucket2 },
                                                              new DimensionSet(this.twoDimensionSet),
                                                              ts1, TimeSpan.FromMinutes(10).Ticks,
                                                              null, this.properties.MemoryStreamManager))
        {
            // Values unique to one source bucket pass through unchanged.
            var match = bucket3.GetMatches(bucket1Dimensions).First().Data;
            Assert.AreEqual((ulong)5309, match.HitCount);

            match = bucket3.GetMatches(bucket2Dimensions).First().Data;
            Assert.AreEqual((ulong)42, match.HitCount);

            // Values present in both source buckets are summed.
            match = bucket3.GetMatches(sharedDimensions).First().Data;
            Assert.AreEqual((ulong)867 * 2, match.HitCount);
        }
    }
}
/// <summary>
/// Exercises a single bucket with concurrent queries, writes, and data
/// releases to check that these operations do not corrupt state or throw
/// when interleaved across threads.
/// </summary>
public void SealAndReleaseAreThreadSafe()
{
    var filterableDimension = new DimensionSet(new HashSet<Dimension> { new Dimension("thing") });
    using (var filterableBucket = new DataBucket<InternalHitCount>(filterableDimension,
                                                                   this.timestamp,
                                                                   DefaultBucketTimeSpanTicks,
                                                                   this.currentDirectory,
                                                                   properties.MemoryStreamManager))
    {
        var allDims = new DimensionSpecification { { "thing", "thing" } };

        // Seed the bucket with some initial values from multiple threads.
        Parallel.For(0, 10, i => filterableBucket.AddValue(allDims, i));

        // Interleave reads, writes, and releases concurrently; one third of
        // the iterations perform each kind of operation.
        Parallel.For(0, 100, i =>
        {
            var operation = i % 3;
            if (operation == 0)
            {
                foreach (var item in filterableBucket.GetMatches(allDims))
                {
                    Assert.IsNotNull(item);
                }
            }
            else if (operation == 1)
            {
                filterableBucket.AddValue(allDims, 11);
            }
            else
            {
                filterableBucket.ReleaseData();
            }
        });
    }
}