/// <summary> /// "A common feature of the above techniques—indeed, the key technique that /// allows us to track the decayed weights efficiently—is that they maintain /// counts and other quantities based on g(ti − L), and only scale by g(t − L) /// at query time. But while g(ti −L)/g(t−L) is guaranteed to lie between zero /// and one, the intermediate values of g(ti − L) could become very large. For /// polynomial functions, these values should not grow too large, and should be /// effectively represented in practice by floating point values without loss of /// precision. For exponential functions, these values could grow quite large as /// new values of (ti − L) become large, and potentially exceed the capacity of /// common floating point types. However, since the values stored by the /// algorithms are linear combinations of g values (scaled sums), they can be /// rescaled relative to a new landmark. That is, by the analysis of exponential /// decay in Section III-A, the choice of L does not affect the final result. We /// can therefore multiply each value based on L by a factor of exp(−α(L′ − L)), /// and obtain the correct value as if we had instead computed relative to a new /// landmark L′ (and then use this new L′ at query time). This can be done with /// a linear pass over whatever data structure is being used." /// </summary> /// <param name="now"></param> /// <param name="next"></param> private void Rescale(long now, long next) { if (!_nextScaleTime.CompareAndSet(next, now + RescaleThreshold)) { return; } _lock.EnterWriteLock(); try { var oldStartTime = _startTime; _startTime = Tick(); var keys = new List <double>(_values.Keys); foreach (var key in keys) { long value; _values.TryRemove(key, out value); _values.AddOrUpdate(key * Math.Exp(-_alpha * (_startTime - oldStartTime)), value, (k, v) => v); } } finally { _lock.ExitWriteLock(); } }
/// <summary> /// "A common feature of the above techniques—indeed, the key technique that /// allows us to track the decayed weights efficiently—is that they maintain /// counts and other quantities based on g(ti − L), and only scale by g(t − L) /// at query time. But while g(ti −L)/g(t−L) is guaranteed to lie between zero /// and one, the intermediate values of g(ti − L) could become very large. For /// polynomial functions, these values should not grow too large, and should be /// effectively represented in practice by floating point values without loss of /// precision. For exponential functions, these values could grow quite large as /// new values of (ti − L) become large, and potentially exceed the capacity of /// common floating point types. However, since the values stored by the /// algorithms are linear combinations of g values (scaled sums), they can be /// rescaled relative to a new landmark. That is, by the analysis of exponential /// decay in Section III-A, the choice of L does not affect the final result. We /// can therefore multiply each value based on L by a factor of exp(−α(L′ − L)), /// and obtain the correct value as if we had instead computed relative to a new /// landmark L′ (and then use this new L′ at query time). This can be done with /// a linear pass over whatever data structure is being used." /// </summary> /// <param name="now"></param> /// <param name="next"></param> private void Rescale(long now, long next) { if (_nextScaleTime.CompareAndSet(next, now + RESCALE_THRESHOLD)) { lockForRescale(); try { var oldStartTime = _startTime; _startTime = CurrentTimeInSeconds(); double scalingFactor = Math.Exp(-_alpha * (_startTime - oldStartTime)); var keys = new List <double>(_values.Keys); foreach (double key in keys) { WeightedSample sample = null; if (_values.TryRemove(key, out sample)) { WeightedSample newSample = new WeightedSample(sample.value, sample.weight * scalingFactor); _values.AddOrUpdate(key * scalingFactor, newSample, (k, v) => v); } } } finally { unlockForRescale(); } } }
/// <summary>
/// Clears all recorded values.
/// </summary>
public void Clear()
{
    _values.Clear();
    _count.Set(0);
    _startTime = CurrentTimeInSeconds();
    _nextScaleTime.Set(Tick() + RescaleThreshold);
}
/// <summary>
/// Creates a new ExponentiallyDecayingReservoir.
/// </summary>
/// <param name="size">The number of samples to keep in the sampling reservoir.</param>
/// <param name="alpha">The exponential decay factor; the higher this is, the more biased the sample will be towards newer values.</param>
/// <param name="clock">The clock used to timestamp samples and track rescaling.</param>
public ExponentiallyDecayingReservoir(int size, double alpha, Clock clock)
{
    _values = new ConcurrentDictionary<double, WeightedSample>();
    _lock = new ReaderWriterLockSlim(LockRecursionPolicy.SupportsRecursion);
    _alpha = alpha;
    _size = size;
    this.clock = clock;
    _count = new AtomicLong(0);
    _startTime = CurrentTimeInSeconds();
    _nextScaleTime = new AtomicLong(clock.getTick() + RESCALE_THRESHOLD);
}
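// Usage sketch, with assumptions: the size of 1028 and alpha of 0.015 are
// commonly used defaults for this kind of reservoir, not values defined in
// the code above; SomeClock stands in for whatever concrete Clock the library
// provides, and Update(long) is assumed to be the recording method on the
// rest of the class (not shown in this section).
var reservoir = new ExponentiallyDecayingReservoir(1028, 0.015, new SomeClock());
for (long i = 0; i < 10_000; i++)
{
    reservoir.Update(i);   // later values dominate the sample because of the exponential bias
}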
/// <summary>
/// Clears all recorded values.
/// </summary>
public void Clear()
{
    _values.Clear();
    _count.Set(0);
    _startTime = Tick();
}
/// <summary>
/// Clears all recorded values.
/// </summary>
public void Clear()
{
    _values.Clear();
    _count.Set(0);
    _startTime = CurrentTimeInSeconds();
}