/// <summary>
/// Verifies that <see cref="BucketCounter.GetHashMask"/> returns the expected
/// bit mask for a range of requested bit widths. The 31- and 32-bit cases both
/// expect <c>int.MaxValue</c>, i.e. the mask saturates at 31 significant bits.
/// </summary>
public void TestGetHashMask() {
  Assert.Equal(1u, BucketCounter.GetHashMask(1));
  Assert.Equal(3u, BucketCounter.GetHashMask(2));

  // Mask is capped at 31 bits: 31 and 32 both yield int.MaxValue.
  uint capped = (uint)int.MaxValue;
  Assert.Equal(capped, BucketCounter.GetHashMask(31));
  Assert.Equal(capped, BucketCounter.GetHashMask(32));
}
/// <summary>
/// Verifies that every single-byte input (0..255) hashes to a bucket index
/// strictly below <see cref="BucketCounter.RowCount"/> for a 2-bit counter.
/// </summary>
public void TestGetHashes() {
  var counter = new BucketCounter(2);
  for (var value = 0; value <= 255; value++) {
    var bucket = counter.GetHash(new[] { (byte)value });
    Assert.True(bucket < counter.RowCount);
  }
}
/// <summary>
/// Verifies that all 256 single-byte inputs hash to distinct buckets when the
/// counter is sized with 16 bits. For SHA-1, 16 bits of the hash are required
/// to uniquely distribute 256 values.
/// </summary>
public void TestHashUniqueness() {
  var counter = new BucketCounter(16);
  var seen = new HashSet<int>();
  for (var value = 0; value <= 255; value++) {
    var bucket = counter.GetHash(new[] { (byte)value });

    // A collision means two distinct bytes mapped to the same bucket.
    Assert.DoesNotContain(bucket, seen);
    seen.Add(bucket);
  }
}
/// <summary>
/// Verifies that <see cref="BucketCounter.Get"/> is stable across repeated
/// reads and that each <see cref="BucketCounter.Increment"/> raises the
/// bucket's count by exactly one, for every single-byte input.
/// </summary>
public void TestHashIncrements() {
  var counter = new BucketCounter(8);
  for (int value = 0; value <= 255; value++) {
    var bucket = counter.GetHash(new[] { (byte)value });
    var before = counter.Get(bucket);

    // Reading twice must not disturb the count.
    Assert.Equal(before, counter.Get(bucket));

    counter.Increment(bucket);
    Assert.Equal(before + 1, counter.Get(bucket));

    counter.Increment(bucket);
    Assert.Equal(before + 2, counter.Get(bucket));
  }
}
/// <summary>
/// Times repeated <see cref="BucketCounter.Increment"/> calls: for each of
/// <paramref name="iterations"/> rounds a fresh counter is built and every
/// one of <paramref name="uniques"/> keys is incremented three times.
/// </summary>
/// <param name="bits">Counter width passed to the <see cref="BucketCounter"/> constructor.</param>
/// <param name="useStarterDictionary">Forwarded to the counter's constructor.</param>
/// <param name="uniques">Number of distinct keys incremented per round.</param>
/// <param name="iterations">Number of rounds to run.</param>
/// <returns>Total elapsed time in stopwatch ticks.</returns>
long BenchmarkIncrements(int bits, bool useStarterDictionary, int uniques, int iterations) {
  var stopwatch = Stopwatch.StartNew();
  for (var round = 0; round < iterations; round++) {
    var counter = new BucketCounter(bits, useStarterDictionary);
    for (var key = 0; key < uniques; key++) {
      // Three increments per key per round.
      counter.Increment(key);
      counter.Increment(key);
      counter.Increment(key);
    }
  }
  stopwatch.Stop();
  return stopwatch.ElapsedTicks;
}
/// <summary>
/// Verifies that counts survive the counter's transition from its starter
/// dictionary to its full backing store: enough distinct keys are incremented
/// to exceed <c>MaxDictionarySize</c>, then every previously recorded count
/// is checked for exactness.
/// </summary>
public void TestTransition() {
  var counter = new BucketCounter(21, true, 512);

  // Seed keys 1..3 with counts equal to their key value.
  counter.Increment(1);
  counter.Increment(2);
  counter.Increment(2);
  counter.Increment(3);
  counter.Increment(3);
  counter.Increment(3);

  // Push past the dictionary capacity to force the transition.
  for (var key = 4; key < counter.MaxDictionarySize + 2; key++) {
    counter.Increment(key);
  }

  Assert.Equal(1, counter.Get(1));
  Assert.Equal(2, counter.Get(2));
  Assert.Equal(3, counter.Get(3));
  for (var key = 4; key < counter.MaxDictionarySize + 2; key++) {
    Assert.Equal(1, counter.Get(key));
  }
}
/// <summary>
/// Initializes a new instance of the <see cref="BucketTimer"/> class by
/// using the given configuration.
/// </summary>
/// <param name="builder">
/// The builder carrying the timer's configuration: config, context, time
/// unit, measure unit and bucket boundaries.
/// </param>
public BucketTimer(Builder builder)
  : base(builder.Config, builder.Context) {
  unit_ = builder.TimeUnit;
  measure_unit_ = builder.MeasureUnit;

  MetricContext context = builder.Context;

  // Every child metric inherits the measure unit as a tag.
  MetricConfig config = builder
    .Config
    .WithAdditionalTag("unit", measure_unit_.Name());

  count_ = new BucketCounter(config.WithAdditionalTag(kStatistic, kCount), context);
  max_ = new StepMaxGauge(config.WithAdditionalTag(kStatistic, kMax));
  min_ = new StepMinGauge(config.WithAdditionalTag(kStatistic, kMin));

  // We should not convert the value of the total time, since it is already
  // a time: the rate reported will be the percentage of time that was spent
  // within the defined reporting interval.
  MetricConfig time_config = config
    .WithAdditionalTag(kStatistic, kTotal)
    .WithAdditionalTag("unit", "nounit");
  total_time_ = new BucketCounter(time_config, TimeUnit.Ticks, context);

  overflow_count_ = new BucketCounter(
    config
      .WithAdditionalTag(kStatistic, kCount)
      .WithAdditionalTag(kBucket, "bucket=overflow"),
    context);

  buckets_ = builder.Buckets;

  // Zero-pad each bucket's name to the width of the largest bucket so that
  // a lexicographic sort of the bucket tags matches numeric order.
  int num_digits = buckets_[buckets_.Length - 1].ToString().Length;
  string padding = "".PadLeft(num_digits, '0');
  string label = unit_.Abbreviation();

  bucket_count_ = new BucketCounter[buckets_.Length];
  for (int i = 0; i < buckets_.Length; i++) {
    bucket_count_[i] = new BucketCounter(
      config
        .WithAdditionalTag(kStatistic, kCount)
        .WithAdditionalTag(kBucket,
          "bucket={0}{1}".Fmt(buckets_[i].ToString(padding), label)),
      context);
  }

  // Min/max gauges report in the measure unit; wrap them in a converter.
  Func<double, double> convert = m => ConvertToUnit(m, measure_unit_);
  var metrics = new List<IMetric> {
    total_time_,
    new StepMeasureTransformer(min_, convert),
    new StepMeasureTransformer(max_, convert),
    overflow_count_,
    count_
  };
  metrics.AddRange(bucket_count_);
  metrics_ = new ReadOnlyCollection<IMetric>(metrics);
}