/// <inheritdoc />
public virtual void ClearOfType<T>()
{
    Type targetType = typeof(T);
    var targetIsInterface = targetType.IsInterface;

    try
    {
        EnterWriteLock();

        // Snapshot the matching entries (ToArray) so we never remove from the
        // dictionary while still enumerating it.
        KeyValuePair<object, object>[] doomed = GetDictionaryEntries()
            .Where(entry =>
            {
                // entry.Value is a non-null Lazy<object>, but its value may be null.
                // Non-created values come back as NonCreatedValue and exceptions
                // come back as null; null values are removed too - never hurts.
                var value = SafeLazy.GetSafeLazyValue((Lazy<object?>)entry.Value, true);
                if (value == null)
                {
                    return true;
                }

                // Interface T: remove anything implementing it.
                // Class T: remove exact type matches only (not inherited types,
                // so compare on exact type, don't use "is").
                return targetIsInterface ? value is T : value.GetType() == targetType;
            })
            .ToArray();

        foreach (KeyValuePair<object, object> entry in doomed)
        {
            RemoveEntry((string)entry.Key);
        }
    }
    finally
    {
        ExitWriteLock();
    }
}
/// <summary>
/// Inserts (add-or-update) a value into the underlying System.Web cache,
/// evaluating the factory eagerly so throwing factories and null values
/// never end up in the cache.
/// </summary>
private void InsertImpl(string cacheKey, Func<object> getCacheItem, TimeSpan? timeout = null, bool isSliding = false, CacheDependency dependency = null)
{
    // NOTE - we still insert a Lazy<object>, but we can evaluate it right now:
    // if getCacheItem throws, nothing goes into the cache, and null values
    // are never stored (backward compat).
    var lazy = SafeLazy.GetSafeLazy(getCacheItem);
    if (lazy.Value == null)
    {
        return;
    }

    cacheKey = GetCacheKey(cacheKey);

    // Sliding and absolute expirations are mutually exclusive.
    DateTime absoluteExpiration = System.Web.Caching.Cache.NoAbsoluteExpiration;
    TimeSpan slidingExpiration = System.Web.Caching.Cache.NoSlidingExpiration;
    if (isSliding)
    {
        slidingExpiration = timeout ?? System.Web.Caching.Cache.NoSlidingExpiration;
    }
    else if (timeout != null)
    {
        absoluteExpiration = DateTime.Now.Add(timeout.Value);
    }

    try
    {
        _locker.EnterWriteLock();

        // NOTE: 'Insert' on System.Web.Caching.Cache actually does an add or update!
        _cache.Insert(cacheKey, lazy, dependency, absoluteExpiration, slidingExpiration, CacheItemPriority.Normal, null);
    }
    finally
    {
        if (_locker.IsWriteLockHeld)
        {
            _locker.ExitWriteLock();
        }
    }
}
public void ThreadTest()
{
    string HelloFunc() => "Hello";

    const int threadCount = 10;
    SafeLazy<string> helloLazy = LazyFactory.CreateSafeLazy(HelloFunc);

    var results = new string[threadCount];
    var workers = new Thread[threadCount];

    for (int i = 0; i < threadCount; ++i)
    {
        int slot = i; // capture a per-thread copy of the loop index
        workers[i] = new Thread(() => results[slot] = helloLazy.Get);
    }

    foreach (Thread worker in workers)
    {
        worker.Start();
    }

    foreach (Thread worker in workers)
    {
        worker.Join();
    }

    // Every thread must have observed the same lazily-created value.
    for (int i = 0; i < threadCount; ++i)
    {
        Assert.AreEqual(HelloFunc(), results[i]);
    }
}
/// <inheritdoc />
public object? Get(string key, Func<object?> factory)
{
    var cached = InnerCache.Get(key, () =>
    {
        // Force evaluation now - this may throw if the factory throws, and
        // then nothing goes into the cache.
        Lazy<object?> lazy = SafeLazy.GetSafeLazy(factory);
        var value = lazy.Value;

        // Null values are never stored (backward compat); everything else is
        // cloned / reset before it goes into the cache.
        if (value == null)
        {
            return null;
        }

        return CheckCloneableAndTracksChanges(value);
    });

    return CheckCloneableAndTracksChanges(cached);
}
/// <inheritdoc />
public void Insert(string key, Func<object?> factory, TimeSpan? timeout = null, bool isSliding = false, string[]? dependentFiles = null)
{
    InnerCache.Insert(
        key,
        () =>
        {
            // Force evaluation now - this may throw if the factory throws,
            // and then nothing goes into the cache.
            Lazy<object?> lazy = SafeLazy.GetSafeLazy(factory);
            var value = lazy.Value;

            // Null values are never stored (backward compat); everything else
            // is cloned / reset before it goes into the cache.
            return value == null ? null : CheckCloneableAndTracksChanges(value);
        },
        timeout,
        isSliding,
        dependentFiles);
}
/// <inheritdoc />
public virtual object? Get(string key)
{
    key = GetCacheKey(key);

    Lazy<object?>? lazy;
    try
    {
        EnterReadLock();
        lazy = GetEntry(key) as Lazy<object?>; // null when the key is absent
    }
    finally
    {
        ExitReadLock();
    }

    // Exceptions captured by the lazy are returned as null.
    return lazy == null ? null : SafeLazy.GetSafeLazyValue(lazy);
}
/// <summary>
/// Verifies that a SafeLazy created over a random-valued factory yields the
/// same value to every thread, even when threads start at staggered times.
/// </summary>
public void SleepingThreadsTest()
{
    int GetRandom() => this.random.Next(0, 100);

    SafeLazy<int> safeLazy = LazyFactory.CreateSafeLazy(GetRandom);

    const int n = 10;
    int[,] resultMatrix = new int[n, n];
    Thread[] threads = new Thread[n];

    for (int i = 0; i < n; ++i)
    {
        int k = i; // capture a per-thread copy of the loop index

        // BUGFIX: System.Random is not thread-safe - calling Next concurrently
        // from several worker threads (as the previous code did inside the
        // lambda) can corrupt its internal state. Draw this thread's sleep
        // duration here, on the test thread, before the workers start.
        int sleepMilliseconds = this.random.Next(0, 100) * 10;

        threads[i] = new Thread(() =>
        {
            Thread.Sleep(sleepMilliseconds);
            for (int j = 0; j < n; ++j)
            {
                resultMatrix[k, j] = safeLazy.Get;
            }
        });
    }

    foreach (var thread in threads)
    {
        thread.Start();
    }

    foreach (var thread in threads)
    {
        thread.Join();
    }

    // Every thread must have observed the same lazily-created value.
    int expected = resultMatrix[0, 0];
    for (int i = 0; i < n; ++i)
    {
        for (int j = 0; j < n; ++j)
        {
            Assert.AreEqual(expected, resultMatrix[i, j]);
        }
    }
}
/// <inheritdoc />
public virtual IEnumerable<object> SearchByKey(string keyStartsWith)
{
    // Keys are stored as "<CacheItemPrefix>-<publicKey>"; skip the prefix
    // plus the one-character separator before matching.
    var prefixLength = CacheItemPrefix.Length + 1;

    KeyValuePair<object, object>[] matches;
    try
    {
        EnterReadLock();

        // Evaluate while locked.
        matches = GetDictionaryEntries()
            .Where(entry => ((string)entry.Key).Substring(prefixLength).InvariantStartsWith(keyStartsWith))
            .ToArray();
    }
    finally
    {
        ExitReadLock();
    }

    // Exceptions come back as null; null values are never stored in the cache
    // (backward compat), so they are filtered out here.
    return matches
        .Select(entry => SafeLazy.GetSafeLazyValue((Lazy<object?>)entry.Value))
        .Where(value => value != null)!;
}
/// <inheritdoc />
public virtual void ClearOfType<T>(Func<string, T, bool> predicate)
{
    Type typeOfT = typeof(T);
    var isInterface = typeOfT.IsInterface;
    var plen = CacheItemPrefix.Length + 1;
    try
    {
        EnterWriteLock();
        foreach (KeyValuePair<object, object> entry in GetDictionaryEntries()
            .Where(x =>
            {
                // entry.Value is Lazy<object> and not null, its value may be null
                // remove null values as well, does not hurt
                // compare on exact type, don't use "is"
                // get non-created as NonCreatedValue & exceptions as null
                var value = SafeLazy.GetSafeLazyValue((Lazy<object?>)x.Value, true);
                if (value == null)
                {
                    return true;
                }

                // if T is an interface remove anything that implements that interface
                // otherwise remove exact types (not inherited types)
                return (isInterface ? value is T : value.GetType() == typeOfT)

                    // run predicate on the 'public key' part only, ie without prefix
                    && predicate(((string)x.Key).Substring(plen), (T)value);
            })

            // BUGFIX: snapshot the matches (as the parameterless ClearOfType<T>()
            // does) so RemoveEntry does not mutate the underlying dictionary
            // while it is still being lazily enumerated.
            .ToArray())
        {
            RemoveEntry((string)entry.Key);
        }
    }
    finally
    {
        ExitWriteLock();
    }
}
/// <inheritdoc />
public virtual IEnumerable<object?> SearchByRegex(string regex)
{
    const string prefix = CacheItemPrefix + "-";
    var compiled = new Regex(regex, RegexOptions.Compiled);
    var prefixLength = prefix.Length;

    KeyValuePair<object, object>[] matches;
    try
    {
        EnterReadLock();

        // Evaluate while locked.
        matches = GetDictionaryEntries()
            .Where(entry => compiled.IsMatch(((string)entry.Key).Substring(prefixLength)))
            .ToArray();
    }
    finally
    {
        ExitReadLock();
    }

    // Exceptions come back as null; null values are never stored in the cache
    // (backward compat), so they are filtered out here.
    return matches
        .Select(entry => SafeLazy.GetSafeLazyValue((Lazy<object?>)entry.Value))
        .Where(value => value != null);
}
/// <summary>
/// Gets a value from the underlying System.Web cache, creating and caching it
/// via <paramref name="factory"/> when absent. The factory runs inside a
/// Lazy so it executes outside the global lock; a captured factory exception
/// is re-thrown here, and null values are treated as "not cached".
/// </summary>
private object GetImpl(string key, Func<object> factory, TimeSpan? timeout, bool isSliding = false, CacheDependency dependency = null)
{
    key = GetCacheKey(key);

    // NOTE - because we don't know what getCacheItem does, how long it will take and whether it will hang,
    // getCacheItem should run OUTSIDE of the global application lock else we run into lock contention and
    // nasty performance issues.

    // So.... we insert a Lazy<object> in the cache while holding the global application lock, and then rely
    // on the Lazy lock to ensure that getCacheItem runs once and everybody waits on it, while the global
    // application lock has been released.

    // NOTE
    // The Lazy value creation may produce a null value.
    // Must make sure (for backward compatibility) that we pretend they are not in the cache.
    // So if we find an entry in the cache that already has its value created and is null,
    // pretend it was not there. If value is not already created, wait... and return null, that's
    // what prior code did.

    // NOTE
    // The Lazy value creation may throw.

    // So... the null value _will_ be in the cache but never returned

    Lazy<object> result;

    // Fast!
    // Only one thread can enter an UpgradeableReadLock at a time, but it does not prevent other
    // threads to enter a ReadLock in the meantime -- only upgrading to WriteLock will prevent all
    // reads. We first try with a normal ReadLock for maximum concurrency and take the penalty of
    // having to re-lock in case there's no value. Would need to benchmark to figure out whether
    // it's worth it, though...
    try
    {
        _locker.EnterReadLock();
        result = _cache.Get(key) as Lazy<object>; // null if key not found
    }
    finally
    {
        if (_locker.IsReadLockHeld)
        {
            _locker.ExitReadLock();
        }
    }

    var value = result == null ? null : SafeLazy.GetSafeLazyValue(result);
    if (value != null)
    {
        return value;
    }

    // Slow path: re-check under an upgradeable lock, then create + insert.
    using (var lck = new UpgradeableReadLock(_locker))
    {
        result = _cache.Get(key) as Lazy<object>; // null if key not found

        // cannot create value within the lock, so if result.IsValueCreated is false, just
        // do nothing here - means that if creation throws, a race condition could cause
        // more than one thread to reach the return statement below and throw - accepted.
        if (result == null || SafeLazy.GetSafeLazyValue(result, true) == null) // get non-created as NonCreatedValue & exceptions as null
        {
            result = SafeLazy.GetSafeLazy(factory);

            // Sliding and absolute expirations are mutually exclusive.
            var absolute = isSliding ? System.Web.Caching.Cache.NoAbsoluteExpiration : (timeout == null ? System.Web.Caching.Cache.NoAbsoluteExpiration : DateTime.Now.Add(timeout.Value));
            var sliding = isSliding == false ? System.Web.Caching.Cache.NoSlidingExpiration : (timeout ?? System.Web.Caching.Cache.NoSlidingExpiration);

            lck.UpgradeToWriteLock();

            //NOTE: 'Insert' on System.Web.Caching.Cache actually does an add or update!
            _cache.Insert(key, result, dependency, absolute, sliding, CacheItemPriority.Normal, null);
        }
    }

    // using GetSafeLazy and GetSafeLazyValue ensures that we don't cache
    // exceptions (but try again and again) and silently eat them - however at
    // some point we have to report them - so need to re-throw here

    // this does not throw anymore
    //return result.Value;

    value = result.Value; // will not throw (safe lazy)
    if (value is SafeLazy.ExceptionHolder eh)
    {
        eh.Exception.Throw(); // throw once!
    }

    return value;
}