// Return true if this caller won the race to load whatever would go at key.
// A winner must later call ReleaseCompeteLock(cache, key) so the next refresh can proceed.
private static bool GotCompeteLock(LocalCache cache, string key)
{
    var competeKey = key + "-cload";
    // Loop instead of recursing (the original recursed after clearing an abandoned
    // lock); behavior is identical but the retry is now explicitly bounded by the
    // loop rather than the call stack.
    while (true)
    {
        if (cache.SetNXSync(competeKey, DateTime.UtcNow))
        {
            // Winner, go do something expensive now.
            return true;
        }

        var lockTakenAt = cache.Get<DateTime>(competeKey);
        if (DateTime.UtcNow - lockTakenAt <= TimeSpan.FromMinutes(5))
        {
            // Somebody else holds a fresh lock — we lost the competition.
            return false;
        }

        // Somebody abandoned the lock (held > 5 minutes): clear it and try again.
        // NOTE(review): Remove + SetNX is not atomic — two callers can both observe
        // an abandoned lock and each "win" in sequence. Presumably acceptable for a
        // best-effort cache refresh; confirm LocalCache offers no atomic take-over.
        cache.Remove(competeKey);
    }
}
/// <summary>
/// Gets the value cached at <paramref name="key"/>, invoking <paramref name="lookup"/>
/// to (re)build it when it is missing or stale.
///
/// <paramref name="lookup"/> refreshes the data if necessary, receiving the old data
/// if we have it (null on a cold miss).
///
/// <paramref name="durationSecs"/> is the "time before stale" for the data;
/// <paramref name="serveStaleDataSecs"/> is the maximum amount of time to serve data
/// once it becomes stale.
///
/// Note that one unlucky caller when the data is stale will win the compete lock and
/// refresh the cache on a background task; everybody else will get stale data though.
/// </summary>
public static T GetSet<T>(this LocalCache cache, string key, Func<T, MicroContext, T> lookup, int durationSecs, int serveStaleDataSecs) where T : class
{
    var possiblyStale = cache.Get<GetSetWrapper<T>>(key);
    var localLockName = key;
    // Per-key lock object shared by every local caller interested in this key.
    var nullLoadLock = _getSetNullLocks.AddOrUpdate(localLockName, k => new object(), (k, old) => old);
    if (possiblyStale == null)
    {
        // We can't prevent multiple web server's from running this (well, we can but its probably overkill) but we can
        // at least stop the query from running multiple times on *this* web server
        lock (nullLoadLock)
        {
            // Double-checked load: another local thread may have filled the cache while we waited.
            possiblyStale = cache.Get<GetSetWrapper<T>>(key);
            if (possiblyStale == null)
            {
                T data;
                using (var ctx = new MicroContext())
                {
                    data = lookup(null, ctx); // cold miss: no old data to pass
                }
                possiblyStale = new GetSetWrapper<T>
                {
                    Data = data,
                    StaleAfter = DateTime.UtcNow + TimeSpan.FromSeconds(durationSecs)
                };
                // Cache lifetime exceeds staleness by serveStaleDataSecs so stale data stays servable.
                cache.Set(key, possiblyStale, durationSecs + serveStaleDataSecs);
                Interlocked.Increment(ref totalGetSetSync);
            }
        }
    }

    if (possiblyStale.StaleAfter > DateTime.UtcNow)
    {
        // Still fresh — serve it directly.
        return(possiblyStale.Data);
    }

    // Data is stale. Try (without blocking) to become the single refresher:
    // first the local per-key Monitor, then the cross-server compete lock.
    bool gotCompeteLock = false;
    if (Monitor.TryEnter(nullLoadLock, 0))
    {
        // it isn't actively being refreshed; we'll check for a mutex on the cache
        try
        {
            gotCompeteLock = GotCompeteLock(cache, key);
        }
        finally
        {
            Monitor.Exit(nullLoadLock);
        }
    }

    if (gotCompeteLock)
    {
        var old = possiblyStale.Data;
        var task = new Task(delegate
        {
            lock (nullLoadLock) // holding this lock allows us to locally short-circuit all the other threads that come asking
            {
                try
                {
                    var updated = new GetSetWrapper<T>();
                    using (var ctx = new MicroContext())
                    {
                        updated.Data = lookup(old, ctx); // refresh, passing the stale data
                        updated.StaleAfter = DateTime.UtcNow + TimeSpan.FromSeconds(durationSecs);
                    }
                    cache.Remove(key);
                    cache.Set(key, updated, durationSecs + serveStaleDataSecs);
                }
                finally
                {
                    // Always release the cross-server compete lock, even if lookup threw.
                    ReleaseCompeteLock(cache, key);
                }
            }
        });
        // Record success/failure counters; log faults so background exceptions aren't lost.
        task.ContinueWith(t =>
        {
            if (t.IsFaulted)
            {
                Interlocked.Increment(ref totalGetSetAsyncError);
                Current.LogException(t.Exception);
            }
            else
            {
                Interlocked.Increment(ref totalGetSetAsyncSuccess);
            }
        });
        task.Start();
    }

    // Serve the stale data while any refresh happens in the background.
    return(possiblyStale.Data);
}
// Called by a winner of GotCompeteLock, to make it so the next caller of
// GotCompeteLock for this key will get true.
private static void ReleaseCompeteLock(LocalCache cache, string key)
{
    var competeKey = key + "-cload";
    cache.Remove(competeKey);
}