public void can_calculate_mean_rate()
{
    // A single mark observed over one elapsed second yields a mean rate of 1/sec.
    _meter.Mark();
    _clock.Advance(TimeUnit.Seconds, 1);
    _meter.Value.MeanRate.Should().Be(1);

    // After a second elapsed second with no further marks, the mean halves.
    _clock.Advance(TimeUnit.Seconds, 1);
    _meter.Value.MeanRate.Should().Be(0.5);
}
public void Can_get_apdex_value()
{
    // Track a single 3-second action; with the default apdex threshold this
    // counts as a frustrating sample, which the provider should report.
    const string metricName = "DefaultMetricValuesProviderTests_apdex";
    var apdexOptions = new ApdexOptions
    {
        Name = metricName,
        Context = Context
    };

    _measure.Apdex.Track(apdexOptions, () => _clock.Advance(TimeUnit.Seconds, 3));

    _provider.GetApdexValue(Context, metricName).Frustrating.Should().Be(1);
}
public void EDR_longPeriodsOfInactivityShouldNotCorruptSamplingState()
{
    // Exponentially decaying reservoir with a sample size of 10 and a very
    // small alpha (0.015), so a long idle gap forces a rescale of priorities.
    var reservoir = new DefaultForwardDecayingReservoir(
        10,
        0.015,
        0.0,
        _clock,
        _scheduler);

    // add 1000 values at a rate of 10 values/second
    for (var i = 0; i < 1000; i++)
    {
        reservoir.Update(1000 + i);
        _clock.Advance(TimeUnit.Milliseconds, 100);
    }

    reservoir.GetSnapshot().Size.Should().Be(10);
    reservoir.GetSnapshot().Values.Should().OnlyContain(v => v >= 1000 && v < 2000);

    // wait for 15 hours and add another value.
    // this should trigger a rescale. Note that the number of samples will be reduced to 2
    // because of the very small scaling factor that will make all existing priorities equal to
    // zero after rescale.
    _clock.Advance(TimeUnit.Hours, 15);
    reservoir.Update(2000);
    var snapshot = reservoir.GetSnapshot();
    snapshot.Size.Should().Be(2);
    snapshot.Values.Should().OnlyContain(v => v >= 1000 && v < 3000);
    snapshot.Sum.Should().Be(snapshot.Values.Sum());

    // add 1000 values at a rate of 10 values/second
    for (var i = 0; i < 1000; i++)
    {
        reservoir.Update(3000 + i);
        _clock.Advance(TimeUnit.Milliseconds, 100);
    }

    var finalSnapshot = reservoir.GetSnapshot();
    finalSnapshot.Size.Should().Be(10);
    // BUGFIX: previously re-asserted the stale mid-test `snapshot`; the final
    // snapshot's sum must match its own values.
    finalSnapshot.Sum.Should().Be(finalSnapshot.Values.Sum());
    finalSnapshot.Values.Skip(1).Should().OnlyContain(v => v >= 3000 && v < 4000);
}
public void Can_reset()
{
    // Record one satisfied (100 ms) sample via a timing context.
    using (_apdex.NewContext())
    {
        _clock.Advance(TimeUnit.Milliseconds, 100);
    }

    // Sanity-check the pre-reset state: exactly one satisfied sample.
    _apdex.Value.Score.Should().NotBe(0);
    _apdex.Value.SampleSize.Should().Be(1);
    _apdex.Value.Satisfied.Should().Be(1);
    _apdex.Value.Tolerating.Should().Be(0);
    _apdex.Value.Frustrating.Should().Be(0);

    _apdex.Reset();

    // Every counter and the score must return to zero after a reset.
    _apdex.Value.Score.Should().Be(0);
    _apdex.Value.SampleSize.Should().Be(0);
    _apdex.Value.Satisfied.Should().Be(0);
    _apdex.Value.Tolerating.Should().Be(0);
    _apdex.Value.Frustrating.Should().Be(0);
}
public void Can_calculate_the_hit_ratio_as_a_guage()
{
    // Meter counts cache hits; timer measures every query.
    var hitMeter = new DefaultMeterMetric(_clock, _schedular);
    var timer = new DefaultTimerMetric(new DefaultAlgorithmRReservoir(1028), _clock);

    // Simulate 1000 queries of 100 ms each, where every other query is a cache hit.
    foreach (var queryIndex in Enumerable.Range(0, 1000))
    {
        using (timer.NewContext())
        {
            _clock.Advance(TimeUnit.Milliseconds, 100);
        }

        var isCacheHit = queryIndex % 2 == 0;
        if (isCacheHit)
        {
            hitMeter.Mark();
        }
    }

    // The hit-ratio gauge (hits vs. total queries, one-minute rate) must be positive.
    var hitRatio = new HitRatioGauge(hitMeter, timer, value => value.OneMinuteRate);
    hitRatio.Value.Should().BeGreaterThan(0.0);
}
public void Can_reset()
{
    // Record a single 100 ms timing so both the rate and histogram have data.
    using (_timer.NewContext())
    {
        _clock.Advance(TimeUnit.Milliseconds, 100);
    }

    _timer.Value.Rate.Count.Should().NotBe(0);
    _timer.Value.Histogram.Count.Should().NotBe(0);

    _timer.Reset();

    // Resetting clears both the meter and the histogram counts.
    _timer.Value.Rate.Count.Should().Be(0);
    _timer.Value.Histogram.Count.Should().Be(0);
}