/// <summary>
/// Builds a cache which automatically loads values when keys are requested, using the
/// supplied <see cref="CacheLoader{K, V}"/>.
/// This method does not alter the state of this <see cref="Caffeine{K, V}"/> instance,
/// so it can be invoked again to create multiple independent caches.
/// </summary>
/// <param name="loader">The cache loader used to obtain new values.</param>
/// <returns>A cache having the requested features.</returns>
public ILoadingCache<K, V> Build(CacheLoader<K, V> loader)
{
    RequireWeightWithWeigher();

    // TODO: implement the other type of loading cache.
    //return (IsBounded || DoesRefreshAfterWrite)
    //    ? (ILoadingCache<K, V>)new BoundedLoadingCache<K, V>(this, loader)
    //    : (ILoadingCache<K, V>)new UnboundedLoadingCache<K, V>(this, loader);
    return new BoundedLoadingCache<K, V>(this, loader);
}
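// Usage sketch (illustrative only): builds a loading cache from an already-configured
// builder. The names `builder`, `graphLoader`, and the Graph type are assumptions for
// this example, not members of this file. Lookups that miss the cache are computed
// through graphLoader.Load.
//
//   ILoadingCache<string, Graph> graphs = builder.Build(graphLoader);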
public void Refresh(K key)
{
    RequireNonNull<K>(key);

    long writeTime = 0L;
    long startTime = cache.StatsTicker.Ticks();
    V oldValue = cache.TryGetValueQuietly(key, ref writeTime);

    // When no current value exists, load from scratch; otherwise ask the loader to reload.
    Task<V> refresh;
    if (EqualityComparer<V>.Default.Equals(oldValue, default(V)))
    {
        refresh = CacheLoader.LoadAsync(key);
    }
    else
    {
        refresh = CacheLoader.ReloadAsync(key, oldValue);
    }

    refresh.ContinueWith<V>((t) =>
    {
        long loadTime = cache.StatsTicker.Ticks() - startTime;
        if (t.IsFaulted)
        {
            cache.StatsCounter.RecordLoadFailure(loadTime);
            return default(V);
        }

        V newValue = t.Result;
        bool discard = false;
        // TODO: add this implementation.
        //cache.AddOrUpdate(key, );
        if (discard && cache.HasRemovalListener)
        {
            cache.NotifyRemoval(key, newValue, RemovalCause.REPLACED);
        }

        if (EqualityComparer<V>.Default.Equals(newValue, default(V)))
        {
            cache.StatsCounter.RecordLoadFailure(loadTime);
        }
        else
        {
            cache.StatsCounter.RecordLoadSuccess(loadTime);
        }
        return newValue;
    });
}
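// Usage sketch (illustrative only, assuming Refresh is exposed on the loading-cache handle
// built above): proactively reloads an entry; the reload runs asynchronously, so any
// existing value continues to be returned until the new value is available.
//
//   graphs.Refresh("key1");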
public BoundedLoadingCache(Caffeine<K, V> builder, CacheLoader<K, V> loader)
    : base(builder, loader)
{
    if (loader == null)
    {
        throw new ArgumentNullException(nameof(loader), "loader is a required parameter");
    }

    // TODO: Removed all the catches, because they were really just throwing them anyway.
    mappingFunction = (key) => loader.Load(key);
}
public IAsyncLoadingCache<K, V> BuildAsync(CacheLoader<K, V> loader)
{
    return BuildAsync((AsyncCacheLoader<K, V>)loader);
}
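// Usage sketch (illustrative only): the synchronous loader is adapted to the asynchronous
// pipeline via the AsyncCacheLoader cast above. `builder` and `graphLoader` are the same
// assumed names as in the Build example.
//
//   IAsyncLoadingCache<string, Graph> asyncGraphs = builder.BuildAsync(graphLoader);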
internal bool HasLoadAll(CacheLoader<K, V> loader)
{
    return loader.HasBulkLoader;
}
public BoundedManualCache(Caffeine<K, V> builder, CacheLoader<K, V> loader)
{
    cache = CacheFactory<K, V>.Instance.NewBoundedLocalCache(builder, loader, false);
    isWeighted = builder.IsWeighted;
}
public BoundedLocalCacheStrongKeyStrongValueStatistics(Caffeine<K, V> builder, CacheLoader<K, V> loader, bool isAsync)
    : base(builder, loader, isAsync)
{
    statsCounter = builder.StatsCounter.Get();
}
public BoundedLocalCacheStrongKeyStrongValueStatisticsEvictsBySize(Caffeine<K, V> builder, CacheLoader<K, V> loader, bool isAsync)
    : base(builder, loader, isAsync)
{
    // Frequency sketch used by the admission policy; sized up front when an initial capacity is known.
    sketch = new FrequencySketch<K>();
    if (builder.HasInitialCapacity)
    {
        long capacity = Math.Min(builder.Maximum, builder.InitialCapacity);
        sketch.EnsureCapacity((ulong)capacity);
    }

    // Access-ordered deques backing the eden, probation, and protected regions of the eviction policy.
    accessOrderEdenDeque = (builder.Evicts || builder.DoesExpireAfterAccess)
        ? new AccessOrderDeque<Node<K, V>>()
        : null;
    accessOrderProbationDeque = new AccessOrderDeque<Node<K, V>>();
    accessOrderProtectedQueue = new AccessOrderDeque<Node<K, V>>();

    this.writeBuffer = new MpscGrowableArrayQueue<Task>(WRITE_BUFFER_MIN, WRITE_BUFFER_MAX);
}