Example #1
        /// <summary>
        /// "A common feature of the above techniques—indeed, the key technique that
        /// allows us to track the decayed weights efficiently—is that they maintain
        /// counts and other quantities based on g(ti − L), and only scale by g(t − L)
        /// at query time. But while g(ti − L)/g(t − L) is guaranteed to lie between zero
        /// and one, the intermediate values of g(ti − L) could become very large. For
        /// polynomial functions, these values should not grow too large, and should be
        /// effectively represented in practice by floating point values without loss of
        /// precision. For exponential functions, these values could grow quite large as
        /// new values of (ti − L) become large, and potentially exceed the capacity of
        /// common floating point types. However, since the values stored by the
        /// algorithms are linear combinations of g values (scaled sums), they can be
        /// rescaled relative to a new landmark. That is, by the analysis of exponential
        /// decay in Section III-A, the choice of L does not affect the final result. We
        /// can therefore multiply each value based on L by a factor of exp(−α(L′ − L)),
        /// and obtain the correct value as if we had instead computed relative to a new
        /// landmark L′ (and then use this new L′ at query time). This can be done with
        /// a linear pass over whatever data structure is being used."
        /// </summary>
        /// <param name="now"></param>
        /// <param name="next"></param>
        private void Rescale(long now, long next)
        {
            if (_nextScaleTime.CompareAndSet(next, now + RESCALE_THRESHOLD))
            {
                lockForRescale();
                try
                {
                    var oldStartTime = _startTime;
                    _startTime = CurrentTimeInSeconds();
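                    // exp(-alpha * (L' - L)): rebases every stored weight from the old landmark to the new one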
                    double scalingFactor = Math.Exp(-_alpha * (_startTime - oldStartTime));

                    var keys = new List<double>(_values.Keys);
                    foreach (double key in keys)
                    {
                        WeightedSample sample = null;
                        if (_values.TryRemove(key, out sample))
                        {
                            WeightedSample newSample = new WeightedSample(sample.value, sample.weight * scalingFactor);
                            _values.AddOrUpdate(key * scalingFactor, newSample, (k, v) => v);
                        }
                    }
                }
                finally
                {
                    unlockForRescale();
                }
            }
        }
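
The summary above quotes the forward-decay analysis this reservoir is based on: weights are stored relative to a landmark L, and moving to a new landmark L' only requires multiplying every stored weight by exp(-alpha * (L' - L)), which is exactly what Rescale does. A minimal standalone sketch (illustrative constants and names, not part of the reservoir code) showing that the query-time result is unchanged by the rebase:

using System;

class RescaleSketch
{
    static void Main()
    {
        const double alpha = 0.015;
        const double L  = 0;     // old landmark (seconds)
        const double L2 = 3600;  // new landmark L'
        const double ti = 1800;  // item arrival time
        const double t  = 7200;  // query time

        // Weight stored relative to L: g(ti - L) = exp(alpha * (ti - L))
        double stored = Math.Exp(alpha * (ti - L));

        // Query-time weight relative to L: g(ti - L) / g(t - L)
        double queryOld = stored / Math.Exp(alpha * (t - L));

        // Rebase the stored weight to L' by multiplying with exp(-alpha * (L' - L))
        double rescaled = stored * Math.Exp(-alpha * (L2 - L));

        // Query-time weight relative to L': g(ti - L') / g(t - L')
        double queryNew = rescaled / Math.Exp(alpha * (t - L2));

        // Both equal exp(-alpha * (t - ti)); the choice of landmark cancels out.
        Console.WriteLine($"{queryOld:G17} {queryNew:G17} {Math.Exp(-alpha * (t - ti)):G17}");
    }
}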
Example #2
        /// <summary>
        /// Adds an old value with a fixed timestamp to the reservoir.
        /// </summary>
        /// <param name="value">the value to be added</param>
        /// <param name="timestamp">the epoch timestamp of value in seconds</param>
        public void Update(long value, long timestamp)
        {
            rescaleIfNeeded();
            lockForRegularUsage();
            _lock.EnterReadLock();
            try
            {
                var            itemWeight = Weight(timestamp - _startTime);
                WeightedSample sample     = new WeightedSample(value, itemWeight);
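                // Each item gets priority = weight / u for a uniform u in (0, 1];
                // only the _size highest-priority samples are retained below.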
                var            random     = ThreadLocalRandom.NextNonzeroDouble();
                var            priority   = itemWeight / random;

                var newCount = _count.IncrementAndGet();

                if (newCount <= _size)
                {
                    _values.AddOrUpdate(priority, sample, (p, v) => v);
                }
                else
                {
                    var first = _values.Keys.Min();
                    if (first < priority)
                    {
                        _values.AddOrUpdate(priority, sample, (p, v) => v);

                        WeightedSample removed;
                        while (!_values.TryRemove(first, out removed))
                        {
                            // Another thread removed it first; retry with the new lowest priority
                            first = _values.Keys.Min();
                        }
                    }
                }
            }
            finally
            {
                unlockForRegularUsage();
                _lock.ExitReadLock();
            }
        }
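
The update path above is a weighted-reservoir step: an item of weight w receives priority w / u for a uniform u in (0, 1], and only the _size largest priorities survive, so heavier (more recent) items are more likely to stay in the sample. A self-contained sketch of that step in isolation (the SortedDictionary, constants, and names here are illustrative, not the project's types):

using System;
using System.Linq;
using System.Collections.Generic;

class PrioritySamplingSketch
{
    private const int Size = 4;
    private static readonly Random Rng = new Random();
    // priority -> value; a sorted map makes the smallest priority cheap to find
    private static readonly SortedDictionary<double, long> Values = new SortedDictionary<double, long>();

    static void Add(long value, double weight)
    {
        double u = 1.0 - Rng.NextDouble();   // uniform in (0, 1], never zero
        double priority = weight / u;

        if (Values.Count < Size)
        {
            Values[priority] = value;
        }
        else
        {
            // Evict the current minimum-priority entry only if the new item beats it
            double first = Values.Keys.First();
            if (first < priority)
            {
                Values.Remove(first);
                Values[priority] = value;
            }
        }
    }

    static void Main()
    {
        for (long v = 0; v < 20; v++)
        {
            Add(v, Math.Exp(0.1 * v));       // newer items carry larger weights
        }
        foreach (var kv in Values)
        {
            Console.WriteLine($"priority={kv.Key:F3} value={kv.Value}");
        }
    }
}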