Example #1
        protected static void Unbounded_Trim1(IMemoizationCacheFactory mcf)
        {
            var res = mcf.Create<int, int>(x => x * 2, MemoizationOptions.None, EqualityComparer<int>.Default);

            Assert.AreEqual(42, res.GetOrAdd(21));
            Assert.AreEqual(1, res.Count);

            var trim = res.AsTrimmableByArgumentAndResult();

            Assert.IsNotNull(trim);

            trim.Trim(kv => kv.Key % 2 == 0);
            Assert.AreEqual(1, res.Count);

            trim.Trim(kv => kv.Key % 3 == 0);
            Assert.AreEqual(0, res.Count);

            Assert.AreEqual(42, res.GetOrAdd(21));
            Assert.AreEqual(1, res.Count);

            trim.Trim(kv => kv.Value % 2 != 0);
            Assert.AreEqual(1, res.Count);

            trim.Trim(kv => kv.Value % 3 == 0);
            Assert.AreEqual(0, res.Count);
        }
Example #2
        protected static void Unbounded_Trim2(IMemoizationCacheFactory mcf)
        {
            var res = mcf.Create<int, int>(x => 100 / x, MemoizationOptions.CacheException, EqualityComparer<int>.Default);

            Assert.ThrowsException<DivideByZeroException>(() => res.GetOrAdd(0));
            Assert.AreEqual(1, res.Count);

            Assert.AreEqual(50, res.GetOrAdd(2));
            Assert.AreEqual(2, res.Count);

            var trim = res.AsTrimmableByArgumentAndResult();

            Assert.IsNotNull(trim);

            trim.Trim(kv => kv.Key % 2 == 0);
            Assert.AreEqual(1, res.Count);

            var trimErr = res.AsTrimmableByArgumentAndResultOrError();

            Assert.IsNotNull(trimErr);

            trimErr.Trim(kv => kv.Value.Kind == ValueOrErrorKind.Error && kv.Key == 0);
            Assert.AreEqual(0, res.Count);

            trimErr.Trim(kv => kv.Key % 3 == 0);
            Assert.AreEqual(0, res.Count);

            Assert.ThrowsException<DivideByZeroException>(() => res.GetOrAdd(0));
            Assert.AreEqual(1, res.Count);

            Assert.AreEqual(50, res.GetOrAdd(2));
            Assert.AreEqual(2, res.Count);
        }
Example #3
        /// <summary>
        /// Creates a new intern cache for values of type <typeparamref name="T"/> that supports trimming entries.
        /// </summary>
        /// <typeparam name="T">The type of the values to intern.</typeparam>
        /// <param name="cacheFactory">The cache factory used to create an intern cache.</param>
        /// <param name="clone">Function to clone values in order to maintain intern cache entries. See remarks for more information.</param>
        /// <returns>Empty intern cache for values of type <typeparamref name="T"/>.</returns>
        /// <remarks>
        /// Intern caches perform an equality based lookup to locate an existing interned copy of the provided value
        /// in an internal dictionary. After looking up an interned value, the caller can discard the original value
        /// it provided to the Intern method. The interned value is kept in the intern cache using a weak reference,
        /// so it can be trimmed from the cache if no more strong references are held to it. In order to prevent the
        /// intern cache's dictionary from keeping the weak references alive, the keys in the dictionary are created
        /// using the specified <paramref name="clone"/> function. This causes the reference held in the value to be
        /// different from the one in the key, which is only used for equality checks. A trim operation locates weak
        /// references in the value slots with a dangling reference and removes the cache entry from the dictionary.
        /// </remarks>
        public static IWeakInternCache<T> CreateWeakInternCache<T>(this IMemoizationCacheFactory cacheFactory, Func<T, T> clone)
            where T : class
        {
            if (cacheFactory == null)
            {
                throw new ArgumentNullException(nameof(cacheFactory));
            }
            if (clone == null)
            {
                throw new ArgumentNullException(nameof(clone));
            }

            var cloneSafe = new Func<T, T>(x =>
            {
                var copy = clone(x);

                if (ReferenceEquals(copy, x))
                {
                    throw new InvalidOperationException("Clone function returned the same object reference.");
                }

                return copy;
            });

            var create = new Func<T, WeakReference<T>>(x => new WeakReference<T>(cloneSafe(x)));
            var res    = Memoizer.Create(cacheFactory).Memoize(create);

            return new Weak<T>(cloneSafe, res);
        }
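
        // Usage sketch (not part of the original source), assuming IWeakInternCache<T> exposes an
        // Intern method that returns the interned copy, as described in the remarks above. The
        // factory is supplied by the caller and WeakInternCache_Sample is a hypothetical name.
        protected static void WeakInternCache_Sample(IMemoizationCacheFactory mcf)
        {
            // Clone by copying the characters so the dictionary key never shares a reference
            // with the weakly held interned value.
            var cache = mcf.CreateWeakInternCache<string>(s => new string(s.ToCharArray()));

            var interned = cache.Intern(new string(new[] { 'f', 'o', 'o' }));

            // Interning an equal value returns the existing interned copy, so the new instance
            // passed in below can be discarded by the caller.
            Assert.AreSame(interned, cache.Intern(new string(new[] { 'f', 'o', 'o' })));
        }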
Example #4
        /// <summary>
        /// Creates a function memoizer that uses the specified <paramref name="factory"/> to create memoization caches.
        /// </summary>
        /// <param name="factory">Memoization cache factory to use when memoizing functions using the resulting memoizer.</param>
        /// <returns>Function memoizer to speed up function invocations by caching their result.</returns>
        public static IMemoizer Create(IMemoizationCacheFactory factory)
        {
            if (factory == null)
            {
                throw new ArgumentNullException(nameof(factory));
            }

            return new Impl(factory);
        }
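
        // Usage sketch (not part of the original source): memoizing a function through a memoizer
        // obtained from Create. The factory is supplied by the caller; Memoize_Sample is a
        // hypothetical name, and the sketch assumes the result of Memoize exposes the memoized
        // function through a Delegate property.
        protected static void Memoize_Sample(IMemoizationCacheFactory mcf)
        {
            var n = 0;
            var f = new Func<string, int>(s => { n++; return s.Length; });

            var res = Memoizer.Create(mcf).Memoize(f, MemoizationOptions.None, EqualityComparer<string>.Default);

            // The underlying function runs once for "foo"; the repeated call is served from the cache.
            Assert.AreEqual(3, res.Delegate("foo"));
            Assert.AreEqual(3, res.Delegate("foo"));
            Assert.AreEqual(1, n);
        }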
Example #5
        /// <summary>
        /// Creates a memoization cache factory that uses locking to access function memoization caches.
        /// This can be used to make a memoization cache safe for concurrent access, at the expense of potential lock contention.
        /// </summary>
        /// <param name="factory">The memoization cache factory to wrap with synchronized access behavior.</param>
        /// <returns>A memoization cache factory that wraps the specified <paramref name="factory"/> and adds synchronized access behavior to it.</returns>
        public static IMemoizationCacheFactory Synchronized(this IMemoizationCacheFactory factory)
        {
            if (factory == null)
            {
                throw new ArgumentNullException(nameof(factory));
            }

            return new SynchronizedFactory(factory);
        }
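
        // Usage sketch (not part of the original source): making caches produced by a factory safe
        // for concurrent use by wrapping the factory with Synchronized. The inner factory is
        // supplied by the caller; Synchronized_Sample is a hypothetical name, the final count
        // assertion assumes an unbounded inner factory, and Parallel.For comes from System.Threading.Tasks.
        protected static void Synchronized_Sample(IMemoizationCacheFactory mcf)
        {
            var cache = mcf.Synchronized().Create<int, int>(x => x * x, MemoizationOptions.None, EqualityComparer<int>.Default);

            // Concurrent lookups are serialized by the synchronized wrapper.
            Parallel.For(0, 100, i => Assert.AreEqual((i % 10) * (i % 10), cache.GetOrAdd(i % 10)));

            Assert.AreEqual(10, cache.Count);
        }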
Example #6
        /// <summary>
        /// Creates a memoization cache factory that keeps function memoization caches for each thread on which the memoized function gets invoked.
        /// This is useful to reduce cache access lock contention and can be used to make a memoization cache safe for concurrent access, at the expense of keeping a cache per thread.
        /// </summary>
        /// <param name="factory">The memoization cache factory to wrap with thread-local caching behavior.</param>
        /// <param name="exposeThreadLocalView">Indicates whether the caches returned from the resulting factory provide a thread-local view on the cache, for properties like Count and methods like Clear.</param>
        /// <returns>A memoization cache factory that wraps the specified <paramref name="factory"/> and adds thread-local isolation to it.</returns>
        public static IMemoizationCacheFactory WithThreadLocal(this IMemoizationCacheFactory factory, bool exposeThreadLocalView)
        {
            if (factory == null)
            {
                throw new ArgumentNullException(nameof(factory));
            }

            return new ThreadLocalFactory(factory, exposeThreadLocalView);
        }
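
        // Usage sketch (not part of the original source): giving each thread its own cache by
        // wrapping the factory with WithThreadLocal. The inner factory is supplied by the caller;
        // ThreadLocal_Sample is a hypothetical name, the assertions assume an unbounded inner
        // factory, and Thread comes from System.Threading.
        protected static void ThreadLocal_Sample(IMemoizationCacheFactory mcf)
        {
            var cache = mcf.WithThreadLocal(exposeThreadLocalView: true).Create<int, int>(x => x + 1, MemoizationOptions.None, EqualityComparer<int>.Default);

            Assert.AreEqual(43, cache.GetOrAdd(42));
            Assert.AreEqual(1, cache.Count);

            // Another thread populates its own isolated cache; with exposeThreadLocalView set to
            // true, the current thread's view of Count is unchanged.
            var t = new Thread(() => cache.GetOrAdd(1));
            t.Start();
            t.Join();

            Assert.AreEqual(1, cache.Count);
        }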
Example #7
        /// <summary>
        /// Creates a new intern cache for values of type <typeparamref name="T"/>.
        /// </summary>
        /// <typeparam name="T">The type of the values to intern.</typeparam>
        /// <param name="cacheFactory">The cache factory used to create an intern cache.</param>
        /// <param name="comparer">Comparer to determine the equality of instances of type <typeparamref name="T"/>.</param>
        /// <returns>Empty intern cache for values of type <typeparamref name="T"/>.</returns>
        public static IInternCache<T> CreateInternCache<T>(this IMemoizationCacheFactory cacheFactory, IEqualityComparer<T> comparer = null)
            where T : class
        {
            if (cacheFactory == null)
            {
                throw new ArgumentNullException(nameof(cacheFactory));
            }

            var create = new Func<T, T>(x => x);
            var res    = Memoizer.Create(cacheFactory).Memoize(create, MemoizationOptions.None, comparer);

            return new Strong<T>(res);
        }
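
        // Usage sketch (not part of the original source): deduplicating equal string instances
        // through a strongly held intern cache. The factory is supplied by the caller;
        // InternCache_Sample is a hypothetical name, and the sketch assumes IInternCache<T>
        // exposes an Intern method that returns the previously interned instance for equal values.
        protected static void InternCache_Sample(IMemoizationCacheFactory mcf)
        {
            var cache = mcf.CreateInternCache<string>(StringComparer.Ordinal);

            var first = cache.Intern(new string(new[] { 'b', 'a', 'r' }));
            var second = cache.Intern(new string(new[] { 'b', 'a', 'r' }));

            // Both lookups yield the same instance, so the second copy can be discarded.
            Assert.AreSame(first, second);
        }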
Example #8
        protected static void Unbounded_ErrorCaching(IMemoizationCacheFactory mcf)
        {
            var n     = 0;
            var cache = mcf.Create<string, int>(s => { n++; return s.Length; }, MemoizationOptions.CacheException, comparer: null);

            Assert.AreEqual(0, cache.Count);
            Assert.AreEqual(0, n);

            for (var i = 1; i <= 3; i++)
            {
                Assert.ThrowsException<NullReferenceException>(() => cache.GetOrAdd(argument: null));
                Assert.AreEqual(1, cache.Count);
                Assert.AreEqual(1, n);
            }
        }
Example #9
        protected static void Unbounded_Simple(IMemoizationCacheFactory mcf)
        {
            foreach (var options in new[] { MemoizationOptions.None, MemoizationOptions.CacheException })
            {
                var n     = 0;
                var cache = mcf.Create<string, int>(s => { n++; return s.Length; }, options, comparer: null);

                Assert.AreEqual(0, cache.Count);
                Assert.AreEqual(0, n);

                Assert.AreEqual(3, cache.GetOrAdd("foo"));
                Assert.AreEqual(1, cache.Count);
                Assert.AreEqual(1, n);

                Assert.AreEqual(3, cache.GetOrAdd("foo"));
                Assert.AreEqual(1, cache.Count);
                Assert.AreEqual(1, n);

                Assert.AreEqual(4, cache.GetOrAdd("test"));
                Assert.AreEqual(2, cache.Count);
                Assert.AreEqual(2, n);

                cache.Clear();

                Assert.AreEqual(0, cache.Count);
                Assert.AreEqual(2, n);

                Assert.AreEqual(3, cache.GetOrAdd("foo"));
                Assert.AreEqual(1, cache.Count);
                Assert.AreEqual(3, n);

                Assert.AreEqual(3, cache.GetOrAdd("foo"));
                Assert.AreEqual(1, cache.Count);
                Assert.AreEqual(3, n);

                Assert.AreEqual(4, cache.GetOrAdd("test"));
                Assert.AreEqual(2, cache.Count);
                Assert.AreEqual(4, n);

                Assert.IsTrue(!string.IsNullOrEmpty(cache.DebugView));
            }
        }
Example #10
 /// <summary>
 /// Creates a new expression hasher with the specified object pool capacity and memoization cache factory.
 /// </summary>
 /// <param name="poolCapacity">The size of the internal object pool to keep data structures that are required to compute a hash code.</param>
 /// <param name="cacheFactory">The memoization cache factory used to create memoization caches for hash code computations.</param>
 /// <remarks>
 /// The <paramref name="poolCapacity"/> parameter can be used to adjust the internal pool size when it's known that the
 /// hasher is used in specific configurations. For example, if the hasher is only used from a single thread, a pool size
 /// of <c>1</c> could suffice. In case one expects the overridden <see cref="GetHashCode(ObjectSlim)"/> method to reenter
 /// the hasher, the pool size could be adjusted to allow for such reentrancy. If possible, it is recommended to make such
 /// overridden <see cref="GetHashCode(ObjectSlim)"/> implementations call <see cref="GetHashCode(ExpressionSlim)"/> directly
 /// rather than causing the creation of a new hasher instance.
 ///
 /// Note that a pool size that's too small does not cause a functional regression; it merely can result in degraded hashing
 /// performance due to the allocation of internal data structures that cannot be reused.
 /// </remarks>
 public ExpressionSlimHasher(int poolCapacity, IMemoizationCacheFactory cacheFactory)
 {
     _pool     = new ObjectPool<ExpressionHashingVisitor>(CreateHashingVisitor, poolCapacity);
     _memoizer = Memoizer.Create(cacheFactory);
 }
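
 // Usage sketch (not part of the original source): constructing a hasher for single-threaded
 // use, following the guidance in the remarks above. CreateSingleThreadedHasher is a
 // hypothetical helper; the cache factory is supplied by the caller.
 private static ExpressionSlimHasher CreateSingleThreadedHasher(IMemoizationCacheFactory cacheFactory)
 {
     // A pool capacity of 1 suffices when the hasher is used from a single thread and
     // GetHashCode(ObjectSlim) overrides do not reenter the hasher.
     return new ExpressionSlimHasher(1, cacheFactory);
 }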
Example #11
 protected static void Unbounded_ArgumentChecking(IMemoizationCacheFactory mcf)
 {
     Assert.ThrowsException<ArgumentNullException>(() => mcf.Create(default(Func<int, int>), MemoizationOptions.None, EqualityComparer<int>.Default));
 }
Example #12
 public Impl(IMemoizationCacheFactory factory) => _factory = factory;
Example #13
 public ThreadLocalFactory(IMemoizationCacheFactory factory, bool exposeThreadLocalView)
 {
     _factory          = factory;
     _exposeGlobalView = !exposeThreadLocalView;
 }
Example #14
 public ThreadLocalFactory(IMemoizationCacheFactory factory) => _factory = factory;