///* "A common feature of the above techniques—indeed, the key technique that // * allows us to track the decayed weights efficiently—is that they maintain // * counts and other quantities based on g(ti − L), and only scale by g(t − L) // * at query time. But while g(ti −L)/g(t−L) is guaranteed to lie between zero // * and one, the intermediate values of g(ti − L) could become very large. For // * polynomial functions, these values should not grow too large, and should be // * effectively represented in practice by floating point values without loss of // * precision. For exponential functions, these values could grow quite large as // * new values of (ti − L) become large, and potentially exceed the capacity of // * common floating point types. However, since the values stored by the // * algorithms are linear combinations of g values (scaled sums), they can be // * rescaled relative to a new landmark. That is, by the analysis of exponential // * decay in Section III-A, the choice of L does not affect the final result. We // * can therefore multiply each value based on L by a factor of exp(−α(L′ − L)), // * and obtain the correct value as if we had instead computed relative to a new // * landmark L′ (and then use this new L′ at query time). This can be done with // * a linear pass over whatever data structure is being used." // */ private void Rescale() { bool lockTaken = false; try { [email protected](ref lockTaken); long oldStartTime = startTime.Value; this.startTime.SetValue(this.clock.Seconds); double scalingFactor = Math.Exp(-alpha * (startTime.Value - oldStartTime)); var keys = new List <double>(this.values.Keys); foreach (var key in keys) { WeightedSample sample = this.values[key]; this.values.Remove(key); double newKey = key * Math.Exp(-alpha * (startTime.Value - oldStartTime)); var newSample = new WeightedSample(sample.Value, sample.UserValue, sample.Weight * scalingFactor); this.values[newKey] = newSample; } // make sure the counter is in sync with the number of stored samples. this.count.SetValue(values.Count); } finally { if (lockTaken) { [email protected](); } } }
private void Update(long value, string userValue, long timestamp)
{
    bool lockTaken = false;
    try
    {
        this.@lock.Enter(ref lockTaken);

        // Forward-decay weight of the new sample relative to the landmark.
        double itemWeight = Math.Exp(alpha * (timestamp - startTime.Value));
        var sample = new WeightedSample(value, userValue, itemWeight);

        // Draw a uniform random number in (0, 1); redraw on an exact zero
        // to prevent division by zero below.
        double random = 0.0;
        while (random.Equals(0.0))
        {
            random = ThreadLocalRandom.NextDouble();
        }

        double priority = itemWeight / random;

        long newCount = count.Increment();
        if (newCount <= size)
        {
            // Reservoir not yet full: always admit the sample.
            this.values[priority] = sample;
        }
        else
        {
            // The last key holds the reservoir's lowest priority (the list
            // sorts descending); evict it only if the new sample outranks it.
            var first = this.values.Keys[this.values.Count - 1];
            if (first < priority)
            {
                this.values.Remove(first);
                this.values[priority] = sample;
            }
        }
    }
    finally
    {
        if (lockTaken)
        {
            this.@lock.Exit();
        }
    }
}
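// The eviction rule in Update, viewed in isolation: each incoming item gets
// priority weight / u for a uniform u in (0, 1), and only the `capacity`
// largest priorities survive, so heavier (more recent) items hold on to
// reservoir slots with proportionally higher probability. Below is a
// self-contained sketch of that selection rule; `PrioritySample` is a
// hypothetical helper, and unlike the reservoir's list above (which appears
// to sort descending, keeping its lowest priority at the end), this sketch
// uses the default ascending SortedList order and evicts index 0.
private static SortedList<double, long> PrioritySample(
    IEnumerable<(long Value, double Weight)> items, int capacity, Random rng)
{
    var kept = new SortedList<double, long>();
    foreach (var (value, weight) in items)
    {
        double u = rng.NextDouble();
        if (u == 0.0)
        {
            continue; // skip the measure-zero draw instead of dividing by zero
        }

        double priority = weight / u;
        if (kept.Count < capacity)
        {
            kept[priority] = value;         // reservoir not yet full
        }
        else if (kept.Keys[0] < priority)
        {
            kept.RemoveAt(0);               // evict the current lowest priority
            kept[priority] = value;         // admit the higher-priority newcomer
        }
    }

    return kept;
}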