/// <summary>
/// Trims the LRU cache if needed, evicting least recently used entries until the cache is under capacity.
/// </summary>
private void Trim()
{
    //
    // NB: We can have temporary oversubscription during concurrent accesses because we avoid to enter the write lock
    //     until absolutely necessary, so _cache.Count can be a dirty read.
    //
    if (_count >= _maxCapacity)
    {
        _lock.EnterWriteLock();
        try
        {
            //
            // NB: Re-check the capacity condition under the lock (while, not do-while). Another thread may
            //     have trimmed or cleared the cache between our dirty read of _count and acquiring the lock;
            //     unconditionally running the body once could dereference a null Last node on an emptied list,
            //     or evict one entry more than necessary.
            //
            while (_count >= _maxCapacity)
            {
                var entry = _lruList.Last;

                // The weak key may already have been collected; only the dictionary entry for a live key needs removal.
                if (entry.Key.TryGetTarget(out T key))
                {
                    _cache.Remove(key);
                }

                LruLinkedList.RemoveLast(ref _lruList);

                Interlocked.Decrement(ref _count);
#if DEBUG
                _lastEvicted = entry;
                _evictionCount++;
#endif
            }
        }
        finally
        {
            _lock.ExitWriteLock();
        }
    }
}
/// <summary>
/// Clears the cache under the write lock, removing the dictionary entry for every node whose weak key
/// is still alive, and resetting the LRU administration and count.
/// </summary>
/// <param name="disposing">Indicates whether the clear operation is triggered by a dispose operation.</param>
protected override void ClearCore(bool disposing)
{
    _lock.EnterWriteLock();
    try
    {
        // Walk the LRU list front-to-back; collected weak keys have no live dictionary entry to remove.
        for (var cursor = _lruList.First; cursor != null; cursor = cursor.Next)
        {
            if (cursor.Key.TryGetTarget(out T target))
            {
                _cache.Remove(target);
            }
        }

        _count = 0;
        LruLinkedList.Clear(ref _lruList);
#if DEBUG
        _accessCount = 0;
        _evictionCount = 0;
#endif
    }
    finally
    {
        _lock.ExitWriteLock();
    }
}
/// <summary>
/// Makes the specified node the most recently accessed one by moving it to the front of the LRU list.
/// </summary>
/// <param name="node">The node that was most recently accessed.</param>
private void MostRecent(ILruCacheEntry<WeakReference<T>, R> node)
{
    //
    // NB: We opted for a linked list approach for the LRU cache to have constant overhead for memoized function
    //     invocation, with a minor increment upon cache pruning. Alternatively, we could sort entries by their
    //     last access time, having less overhead during a lookup (just store the new access time) but at the
    //     expense of having to sort the entries in the Trim procedure. That'd cause a hiccup in lookup times.
    //
    // NB: If a ranking based eviction is desirable, resulting in higher lookup speeds but lower pruning speeds,
    //     one can use the CreateEvictedBy* methods on WeakMemoizationCacheFactory.
    //
    _lock.EnterWriteLock();
    try
    {
        // Opportunistically drop trailing nodes whose weak keys have been collected before promoting this node.
        for (var last = _lruList.Last; last != null && !last.Key.TryGetTarget(out _); last = _lruList.Last)
        {
            LruLinkedList.RemoveLast(ref _lruList);
            Interlocked.Decrement(ref _count);
        }

        LruLinkedList.MoveFirst(ref _lruList, node);
    }
    finally
    {
        _lock.ExitWriteLock();
    }
}
/// <summary>
/// Clears the cache and resets the LRU administration; in DEBUG builds the diagnostic counters are reset as well.
/// </summary>
/// <param name="disposing">Indicates whether the clear operation is triggered by a dispose operation.</param>
protected override void ClearCore(bool disposing)
{
    _cache.Clear();
    LruLinkedList.Clear(ref _lruList);
#if DEBUG
    _invocationCount = 0;
    _accessCount = 0;
    _evictionCount = 0;
    _lastEvicted = null;
#endif
}
/// <summary>
/// Trims the LRU cache if needed.
/// </summary>
private void Trim()
{
    // Evict from the tail of the LRU list (the least recently used entry) until we drop below capacity.
    while (_cache.Count >= _maxCapacity)
    {
        var victim = _lruList.Last;

        _cache.Remove(victim.Key);
        LruLinkedList.RemoveLast(ref _lruList);
#if DEBUG
        _lastEvicted = victim;
        _evictionCount++;
#endif
    }
}
/// <summary>
/// Makes the specified node the most recently accessed one by moving it to the front of the LRU list.
/// </summary>
/// <param name="node">The node that was most recently accessed.</param>
private void MostRecent(ILruCacheEntry<T, R> node)
{
    //
    // NB: A linked list keeps the LRU bookkeeping for a memoized invocation at constant cost; only pruning
    //     pays a little extra. The alternative — stamping entries with their last access time and sorting
    //     them inside Trim — would make lookups marginally cheaper but introduce a latency hiccup whenever
    //     the cache prunes.
    //
    // NB: If a ranking based eviction is desirable, resulting in higher lookup speeds but lower pruning
    //     speeds, one can use the CreateEvictedBy* methods on MemoizationCacheFactory.
    //
    LruLinkedList.MoveFirst(ref _lruList, node);
}
/// <summary>
/// Trims entries from the cache that pass the specified filter and whose selected metric satisfies the
/// trim predicate.
/// </summary>
/// <typeparam name="K">Type of the metric computed by the selector.</typeparam>
/// <param name="filter">Filter to select candidate entries for trimming.</param>
/// <param name="selector">Selector to compute the metric used by the trim predicate.</param>
/// <param name="shouldTrim">Predicate applied to the selected metric; entries for which it returns true are removed.</param>
/// <returns>The number of entries removed from the cache.</returns>
private int TrimBy<K>(Func<KeyValuePair<T, IValueOrError<R>>, bool> filter, Func<KeyValuePair<T, IValueOrError<R>>, K> selector, Func<K, bool> shouldTrim)
{
    var res = 0;

    //
    // NB: Capture the successor before a potential removal, so the traversal does not rely on
    //     LruLinkedList.Remove leaving the removed node's Next pointer intact.
    //
    var node = _lruList.First;

    while (node != null)
    {
        var next = node.Next;

        var kv = new KeyValuePair<T, IValueOrError<R>>(node.Key, node);

        if (filter(kv) && shouldTrim(selector(kv)))
        {
            LruLinkedList.Remove(ref _lruList, node);
            _cache.Remove(node.Key);
            res++;
        }

        node = next;
    }

    return res;
}
/// <summary>
/// Trims entries from the cache that pass the specified filter and whose selected metric satisfies the
/// trim predicate. Entries whose weak key has been collected are removed unconditionally.
/// </summary>
/// <typeparam name="K">Type of the metric computed by the selector.</typeparam>
/// <param name="filter">Filter to select candidate entries for trimming.</param>
/// <param name="selector">Selector to compute the metric used by the trim predicate.</param>
/// <param name="shouldTrim">Predicate applied to the selected metric; entries for which it returns true are removed.</param>
/// <returns>The number of entries removed from the cache, including entries whose weak key was collected.</returns>
private int TrimBy<K>(Func<KeyValuePair<T, IValueOrError<R>>, bool> filter, Func<KeyValuePair<T, IValueOrError<R>>, K> selector, Func<K, bool> shouldTrim)
{
    var res = 0;

    _lock.EnterWriteLock();
    try
    {
        //
        // NB: Capture the successor before a potential removal, so the traversal does not rely on
        //     LruLinkedList.Remove leaving the removed node's Next pointer intact.
        //
        var node = _lruList.First;

        while (node != null)
        {
            var next = node.Next;

            var shouldRemove = false;

            if (node.Key.TryGetTarget(out T key))
            {
                var kv = new KeyValuePair<T, IValueOrError<R>>(key, node);

                if (filter(kv) && shouldTrim(selector(kv)))
                {
                    _cache.Remove(key);
                    shouldRemove = true;
                }
            }
            else
            {
                // The weak key was collected; the entry is dead and gets removed regardless of the filter.
                shouldRemove = true;
            }

            if (shouldRemove)
            {
                LruLinkedList.Remove(ref _lruList, node);
                Interlocked.Decrement(ref _count);
                res++;
            }

            node = next;
        }
    }
    finally
    {
        _lock.ExitWriteLock();
    }

    return res;
}