/// <summary>
/// Releases this cached value back to the free list via <see cref="Cache{T}.Release"/>.
/// </summary>
void IDisposable.Dispose() => Cache<T>.Release(this);
/// <summary>
/// Releases a cached value, returning it to the free list so it can be reused.
/// Safe to call multiple times and from multiple threads; only the first call
/// has any effect, and <see cref="ICacheNotificationReceiver.OnFreed"/> is
/// invoked exactly once per release.
/// </summary>
/// <param name="cache">The cached value to release.</param>
/// <exception cref="System.ArgumentNullException">The cached value to release is null.</exception>
public static void Release(Cache<T> cache)
{
    if (cache == null)
    {
        throw new ArgumentNullException(nameof(cache));
    }

    // Fast path: already released by someone else — nothing to do.
    if (cache.isFree)
    {
        return;
    }

    // Acquire the spin lock with back-off instead of a raw busy-wait.
    var spinner = new SpinWait();
    while (Interlocked.CompareExchange(ref THREAD_LOCK_TOKEN, 1, 0) != 0)
    {
        spinner.SpinOnce();
    }

    // Claim the instance under the lock. Exactly one thread wins the claim,
    // which is what guarantees OnFreed below runs at most once (this closes
    // the concurrency hole the previous implementation acknowledged, where
    // OnFreed could run concurrently on several threads for the same value).
    bool claimed;
    try
    {
        claimed = !cache.isFree;
        cache.isFree = true;
    }
    finally
    {
        // Interlocked.Exchange gives a full fence so the release is visible
        // to other threads; a plain write has no such guarantee.
        Interlocked.Exchange(ref THREAD_LOCK_TOKEN, 0);
    }

    if (!claimed)
    {
        return;
    }

    // Notify outside the lock — OnFreed might do heavy work. The value is not
    // yet on the free list, so it cannot be claimed for reuse before this runs.
    if (IsNotificationReceiver)
    {
        (cache.Value as ICacheNotificationReceiver)?.OnFreed();
    }

    // Re-acquire the lock to put the instance on the free list.
    spinner = new SpinWait();
    while (Interlocked.CompareExchange(ref THREAD_LOCK_TOKEN, 1, 0) != 0)
    {
        spinner.SpinOnce();
    }

    try
    {
        var freeValues = FreeValues;
        var length = freeValues.Length;

        // Reuse an empty slot if one exists.
        for (int i = 0; i < length; i++)
        {
            if (object.ReferenceEquals(freeValues[i], null))
            {
                freeValues[i] = cache;
                return;
            }
        }

        // No free slot: grow the array, but never beyond MaxCacheSize.
        // Math.Max guards the length == 0 case (length * 2 would stay 0 and
        // newArr[length] would throw); Math.Min enforces the size cap. Since
        // length < MaxCacheSize here, newSize > length always holds.
        if (length < MaxCacheSize)
        {
            var newSize = Math.Min(Math.Max(length * 2, length + 1), MaxCacheSize);
            var newArr = new object[newSize];
            Array.Copy(freeValues, newArr, length);
            newArr[length] = cache;
            FreeValues = newArr;
        }
        // else: free list is full; the value stays marked free and is simply
        // dropped for the GC to collect.
    }
    finally
    {
        // try/finally ensures the lock is released even if the array
        // allocation throws; the old code would have dead-locked the pool.
        Interlocked.Exchange(ref THREAD_LOCK_TOKEN, 0);
    }
}