public void EDR_QuantiliesShouldBeBasedOnWeights()
{
    // Verifies that quantiles reflect exponential-decay *weights*, not raw
    // sample counts: newer samples must dominate the distribution.
    var reservoir = new DefaultForwardDecayingReservoir(
        Constants.ReservoirSampling.DefaultSampleSize,
        Constants.ReservoirSampling.DefaultExponentialDecayFactor,
        _clock,
        _scheduler);

    for (var i = 0; i < 40; i++)
    {
        reservoir.Update(177);
    }

    _clock.Advance(TimeUnit.Seconds, 120);

    for (var i = 0; i < 10; i++)
    {
        reservoir.Update(9999);
    }

    // Take one snapshot so every assertion observes the same reservoir state
    // (the original took three separate snapshots).
    var snapshot = reservoir.GetSnapshot();

    snapshot.Size.Should().Be(50);

    // the first added 40 items (177) have weights 1
    // the next added 10 items (9999) have weights ~6
    // so, it's 40 vs 60 distribution, not 40 vs 10
    snapshot.Median.Should().Be(9999);
    snapshot.Percentile75.Should().Be(9999);
}
public void EDR_NonzeroMinimumSampleWeightEvictsSamplesDuringRescale()
{
    // Verifies that samples whose rescaled weight falls below the configured
    // minimum (0.1 here) are evicted when the reservoir rescales.
    var reservoir = new DefaultForwardDecayingReservoir(
        10,
        AppMetricsReservoirSamplingConstants.DefaultExponentialDecayFactor,
        0.1,
        _clock,
        new TestReservoirRescaleScheduler(_clock, TimeSpan.FromSeconds(200)));

    // First set of values will have the weight equal to 1
    for (var i = 0; i < 5; i++)
    {
        reservoir.Update(100);
    }

    _clock.Advance(TimeUnit.Seconds, 120);

    // The second set of values will have weights equal to about 6
    for (var i = 0; i < 5; i++)
    {
        reservoir.Update(1000);
    }

    // Snapshot once before the rescale; both value sets are still present.
    var beforeRescale = reservoir.GetSnapshot();
    beforeRescale.Size.Should().Be(10);

    // Trigger rescale
    _clock.Advance(TimeUnit.Seconds, 120);

    // New weights for the two value sets will be about 0.03 and 0.16 respectively,
    // so a minimum weight of 0.1 should have eliminated the first set.
    // Snapshot once after the rescale so both assertions see the same state
    // (the original took two separate snapshots here).
    var afterRescale = reservoir.GetSnapshot();
    afterRescale.Size.Should().Be(5);
    afterRescale.Values.Should().OnlyContain(v => v == 1000);
}
public void EDR_RecordsUserValue()
{
    // The reservoir should track which user-supplied tag accompanied the
    // smallest and largest recorded values.
    var reservoir = new DefaultForwardDecayingReservoir(
        Constants.ReservoirSampling.DefaultSampleSize,
        Constants.ReservoirSampling.DefaultExponentialDecayFactor,
        _clock,
        _scheduler);

    // Record the larger value first to make sure ordering of updates
    // does not matter for min/max tracking.
    reservoir.Update(2L, "B");
    reservoir.Update(1L, "A");

    reservoir.GetSnapshot().MaxUserValue.Should().Be("B");
    reservoir.GetSnapshot().MinUserValue.Should().Be("A");
}
public void EDR_SpotLift()
{
    // After a long steady regime, a short burst of larger values should
    // quickly dominate the quantiles thanks to exponential decay.
    var reservoir = new DefaultForwardDecayingReservoir(
        Constants.ReservoirSampling.DefaultSampleSize,
        Constants.ReservoirSampling.DefaultExponentialDecayFactor,
        _clock,
        _scheduler);

    const int valuesRatePerMinute = 10;
    var valuesIntervalMillis = (int)(TimeUnit.Minutes.ToMilliseconds(1) / valuesRatePerMinute);

    // mode 1: steady regime for 120 minutes
    var steadySampleCount = 120 * valuesRatePerMinute;
    for (var sample = 0; sample < steadySampleCount; sample++)
    {
        reservoir.Update(177);
        _clock.Advance(TimeUnit.Milliseconds, valuesIntervalMillis);
    }

    // switching to mode 2: 10 minutes more with the same rate, but larger value
    var liftedSampleCount = 10 * valuesRatePerMinute;
    for (var sample = 0; sample < liftedSampleCount; sample++)
    {
        reservoir.Update(9999);
        _clock.Advance(TimeUnit.Milliseconds, valuesIntervalMillis);
    }

    // expect that quantiles should be more about mode 2 after 10 minutes
    reservoir.GetSnapshot().Median.Should().Be(9999);
}
public void EDR_longPeriodsOfInactivityShouldNotCorruptSamplingState()
{
    // Verifies that a very long idle gap (15 hours) triggers a rescale that
    // collapses near-zero-priority samples without corrupting the reservoir.
    var reservoir = new DefaultForwardDecayingReservoir(
        10,
        0.015,
        0.0,
        _clock,
        _scheduler);

    // add 1000 values at a rate of 10 values/second
    for (var i = 0; i < 1000; i++)
    {
        reservoir.Update(1000 + i);
        _clock.Advance(TimeUnit.Milliseconds, 100);
    }

    reservoir.GetSnapshot().Size.Should().Be(10);
    reservoir.GetSnapshot().Values.Should().OnlyContain(v => v >= 1000 && v < 2000);

    // wait for 15 hours and add another value.
    // this should trigger a rescale. Note that the number of samples will be reduced to 2
    // because of the very small scaling factor that will make all existing priorities equal to
    // zero after rescale.
    _clock.Advance(TimeUnit.Hours, 15);
    reservoir.Update(2000);

    var snapshot = reservoir.GetSnapshot();
    snapshot.Size.Should().Be(2);
    snapshot.Values.Should().OnlyContain(v => v >= 1000 && v < 3000);
    snapshot.Sum.Should().Be(snapshot.Values.Sum());

    // add 1000 values at a rate of 10 values/second
    for (var i = 0; i < 1000; i++)
    {
        reservoir.Update(3000 + i);
        _clock.Advance(TimeUnit.Milliseconds, 100);
    }

    var finalSnapshot = reservoir.GetSnapshot();
    finalSnapshot.Size.Should().Be(10);

    // Fixed: this previously re-asserted the stale `snapshot` (a copy-paste
    // slip), leaving the final snapshot's sum invariant unchecked.
    finalSnapshot.Sum.Should().Be(finalSnapshot.Values.Sum());
    finalSnapshot.Values.Skip(1).Should().OnlyContain(v => v >= 3000 && v < 4000);
}
public void ExponentialDecayingReservoir()
{
    // Feed the shared sample fixture through a default-configured reservoir
    // and check the snapshot with the shared assertion helper.
    var reservoir = new DefaultForwardDecayingReservoir(
        Constants.ReservoirSampling.DefaultSampleSize,
        Constants.ReservoirSampling.DefaultExponentialDecayFactor);

    foreach (var sample in _samples)
    {
        reservoir.Update(sample);
    }

    AssertValues(reservoir.GetSnapshot());
}
public void EDR_HeavilyBiasedReservoirOf100OutOf1000Elements()
{
    var reservoir = new DefaultForwardDecayingReservoir(1000, 0.01);

    // Load fewer samples than the reservoir's capacity; nothing should be
    // evicted, so every recorded value must survive into the snapshot.
    for (var value = 0; value < 100; value++)
    {
        reservoir.Update(value);
    }

    reservoir.Size.Should().Be(100);

    var snapshot = reservoir.GetSnapshot();
    snapshot.Size.Should().Be(100);
    snapshot.Values.Should().OnlyContain(v => v >= 0 && v < 100);
}
public override void Setup()
{
    _fixture = new MetricsCoreTestFixture();
    _reservoir = new DefaultForwardDecayingReservoir();
    _scheduler = new DefaultTaskScheduler();

    // Continuously snapshot and reset the reservoir on a tight (10 ms)
    // interval for the duration of the benchmark/test run.
    _scheduler.Interval(
        TimeSpan.FromMilliseconds(10),
        TaskCreationOptions.None,
        () =>
        {
            _reservoir.GetSnapshot();
            _reservoir.Reset();
        });
}
private Task Tick()
{
    try
    {
        _reservoir.GetSnapshot();
        _reservoir.Reset();
    }
    catch (Exception)
    {
        // Swallowed so a failing tick cannot kill the scheduling loop.
        // NOTE(review): presumably intentional best-effort — confirm whether
        // the failure should be logged somewhere.
    }
    finally
    {
        // Re-arm the next tick, but only while we are not being disposed;
        // _syncLock guards the race between ticking and Dispose.
        lock (_syncLock)
        {
            if (!_disposing)
            {
                SetScheduler();
            }
        }
    }

    return Task.CompletedTask;
}