/// <summary>
/// Builds the accumulated-hours cache: loads the per-account hour totals from
/// [Tabla General] and stores one cache entry per account number, expiring
/// after one day.
/// </summary>
public void CrearCacheHorasTotales()
{
    var cacheItemPolicy = new CacheItemPolicy { AbsoluteExpiration = DateTimeOffset.Now.AddDays(1) };
    string queryString = "SELECT No_Cuenta, sum(Horas_Acum) FROM [Tabla General] Group By No_Cuenta;";

    // Dispose the connection, command and reader deterministically — the
    // original leaked all three (no using/Close on connection).
    using (OdbcConnection con = new OdbcConnection(connectionString))
    using (OdbcCommand command = new OdbcCommand(queryString, con))
    {
        con.Open();
        using (OdbcDataReader reader = command.ExecuteReader())
        {
            while (reader.Read())
            {
                // NOTE(review): GetString on the SUM column assumes the ODBC
                // driver surfaces the aggregate as text — confirm; otherwise
                // reader.GetValue(1).ToString() would be safer.
                var cacheIt = new CacheItem(reader.GetString(0), reader.GetString(1));
                cache.Add(cacheIt, cacheItemPolicy);
            }
        }
    }
    Console.WriteLine("Caché creado con éxito!");
}
/// <summary>Loads an aggregate root from the cache by its string identifier.</summary>
/// <param name="id">Cache key of the aggregate.</param>
/// <returns>The cached <see cref="AggregateRoot"/>, or null when absent.</returns>
public AggregateRoot Get(string id)
{
    // Both branches of the original "#if NET451" conditional were identical,
    // so the preprocessor directive was dead weight and has been removed.
    return (AggregateRoot)_cache.Get(id);
}
/// <summary>Loads an aggregate root from the cache by its Guid identifier.</summary>
/// <param name="id">Aggregate identity; stored under its string form.</param>
/// <returns>The cached <see cref="AggregateRoot"/>, or null when absent.</returns>
public AggregateRoot Get(Guid id)
{
    // The original non-NET461 branch passed the raw Guid to _cache.Get, but
    // ObjectCache/MemoryCache keys are strings — use the string form on every
    // target framework. NOTE(review): assumes _cache is string-keyed; confirm.
    return (AggregateRoot)_cache.Get(id.ToString());
}
/// <summary>Asynchronously loads an aggregate root from the cache by Guid.</summary>
/// <param name="id">Aggregate identity; stored under its string form.</param>
/// <returns>A completed task holding the cached aggregate, or null when absent.</returns>
public Task <AggregateRoot> Get(Guid id)
{
    // The original non-NET452 branch passed the raw Guid to _cache.Get, but
    // ObjectCache/MemoryCache keys are strings — use the string form on every
    // target framework. NOTE(review): assumes _cache is string-keyed; confirm.
    return Task.FromResult((AggregateRoot)_cache.Get(id.ToString()));
}
/// <summary>
/// Basic MemoryCache semantics: AddOrGetExisting inserts when absent, and a
/// subsequent Add for an existing key does not overwrite the stored value.
/// </summary>
public void TestBasic()
{
    var memoryCache = new MemoryCache("Foo");
    memoryCache.AddOrGetExisting("Pop", 123, DateTimeOffset.MaxValue);
    memoryCache.AddOrGetExisting("Top", "Gun", DateTimeOffset.MaxValue);

    // Use DateTimeOffset.MaxValue directly: the original passed
    // DateTime.MaxValue, whose implicit conversion to DateTimeOffset throws
    // ArgumentOutOfRangeException in time zones ahead of UTC.
    // (The unused CacheItemPolicy local was also removed.)
    memoryCache.Add("Pop", 12, DateTimeOffset.MaxValue);

    Assert.AreEqual("Gun", memoryCache.Get("Top"));
    Assert.AreEqual(123, memoryCache.Get("Pop"));
}
/// <summary>
/// Moves a cached entry from one key to another, carrying over the remaining
/// TTL when one was recorded for the source key.
/// </summary>
/// <param name="keyFrom">Existing key to rename.</param>
/// <param name="keyTo">New key for the entry.</param>
/// <returns>true when the source key existed and differed from the target; otherwise false.</returns>
public bool RenameKey(string keyFrom, string keyTo)
{
    lock (_lockObj)
    {
        // Nothing to do when the source is absent or the rename is a no-op.
        if (!_cache.Contains(keyFrom) || keyFrom == keyTo)
        {
            return false;
        }

        System.Diagnostics.Debug.WriteLine("RenameKey: from " + keyFrom + " to " + keyTo);

        var entry = _cache.Get(keyFrom);
        _cache.Remove(keyFrom);

        // Preserve whatever lifetime is left on the old key.
        TimeSpan? remaining = null;
        if (_ttls.ContainsKey(keyFrom))
        {
            if (_ttls[keyFrom].HasValue)
            {
                remaining = _ttls[keyFrom].Value.Subtract(DateTime.UtcNow);
            }
            // Remove the existing TTL record.
            _ttls.Remove(keyFrom);
        }

        Add(keyTo, entry, remaining, When.Always);
        return true;
    }
}
/// <summary>
/// Looks up a cached value by key; returns null on a miss, and also on a
/// cache-key collision where the stored entry's original key does not equal
/// the requested key.
/// </summary>
public object Get(object key)
{
    if (key == null)
    {
        return null;
    }

    var cacheKey = GetCacheKey(key);
    if (log.IsDebugEnabled)
    {
        log.Debug(string.Format("Fetching object '{0}' from the cache.", cacheKey));
    }

    var cached = cache.Get(cacheKey);
    if (cached == null)
    {
        return null;
    }

    // Entries store the original key alongside the value; only hand the value
    // back when the keys really match.
    var entry = (DictionaryEntry)cached;
    return key.Equals(entry.Key) ? entry.Value : null;
}
/// <summary>
/// When a throttle entry already exists, AddOrIncrementWithExpiration must
/// bump its count by one and leave its expiration untouched.
/// </summary>
public void ExistingObject_IncrementByOneAndSetExpirationDate()
{
    // Arrange: seed the cache with an existing throttle entry.
    var throttleKey = new SimpleThrottleKey("test", "key");
    var limiter = new Limiter().Limit(1).Over(100);
    var cache = new MemoryCache("testing_cache");
    var repository = new MemoryThrottleRepository(cache);
    string id = repository.CreateThrottleKey(throttleKey, limiter);
    var seeded = new MemoryThrottleRepository.ThrottleCacheItem()
    {
        Count = 1,
        Expiration = new DateTime(2030, 1, 1)
    };
    cache.Set(id, seeded, seeded.Expiration);

    // Act
    repository.AddOrIncrementWithExpiration(throttleKey, limiter);

    // Assert: count bumped, expiration preserved.
    var stored = (MemoryThrottleRepository.ThrottleCacheItem)cache.Get(id);
    Assert.Equal(2L, stored.Count);
    Assert.Equal(new DateTime(2030, 1, 1), stored.Expiration);
}
/// <summary>
/// Fetches a cached value by key from the configured region, returning null
/// when the entry is missing or belongs to a different (colliding) key.
/// </summary>
/// <exception cref="ArgumentNullException">The key is null, empty or whitespace.</exception>
public object Get(string key)
{
    if (string.IsNullOrWhiteSpace(key))
    {
        throw new ArgumentNullException("key", "the key is null or empty.");
    }

    string cacheKey = GetCacheKey(key);
    object cached = cache.Get(cacheKey, regionName);
    if (cached == null)
    {
        return null;
    }

    // Entries carry their original key; guard against cache-key collisions.
    var entry = (DictionaryEntry)cached;
    if (key != entry.Key.ToString())
    {
        return null;
    }
    return entry.Value;
}
// NOTE(review): this method is truncated in this chunk — the cache-hit path
// and the closing braces are not visible here, so only the miss path (return
// null entry -> val = default) can be verified from this view.
public bool TryGetValue <T>(string key, out T val) { object?result = FBackend.Get(key); if (result == null) { val = default !;
/// <summary>Thread-safe cache lookup; yields null when the key is absent.</summary>
public override object Get(string key)
{
    lock (_sync)
    {
        if (!_cache.Contains(key))
        {
            return null;
        }
        return _cache.Get(key);
    }
}
/// <summary>
/// Reads an entry from the in-memory cache and wraps it in a completed task;
/// the result is null (as T) on a miss or when the stored type does not match.
/// </summary>
protected override Task <T> GetAsync <T>(
    CacheKey cacheKey, CancellationToken cancellationToken)
{
    var cached = _memoryCache.Get(cacheKey.Value) as T;
    return Task.FromResult(cached);
}
/// <summary>
/// Retrieves a value from cache by unique key.
/// </summary>
/// <param name="correlationId">Transaction id used to trace execution through the call chain (unused here).</param>
/// <param name="key">Unique key identifying a data object.</param>
/// <returns>A completed task holding the cached value, or null when absent.</returns>
/// <exception cref="ArgumentNullException">The key is null.</exception>
public Task <object> RetrieveAsync(string correlationId, string key)
{
    if (key == null)
    {
        throw new ArgumentNullException(nameof(key));
    }

    // The lookup is synchronous, so "await Task.FromResult(...)" only added an
    // async state machine for nothing; return the completed task directly.
    // (Argument validation now throws synchronously, which is the conventional
    // contract for Task-returning methods.)
    return Task.FromResult(_standardCache.Get(key));
}
/// <summary>
/// Decrements the numeric value stored under <paramref name="key"/> by
/// <paramref name="num"/> and returns the new value. A missing entry is
/// treated as 0 before the decrement.
/// NOTE(review): read-modify-write is not atomic — confirm callers serialize
/// access if this key is shared across threads.
/// </summary>
public double Decrease(string key, double num)
{
    double operatorValue = 0;
    var value = _memoryCache.Get(key);
    if (value != null)
    {
        operatorValue = (double)value;
    }
    operatorValue -= num;

    // BUG FIX: the original wrote the *old* value back into the cache, so the
    // decrement was never persisted. Store the updated value instead.
    _memoryCache.Set(
        key,
        operatorValue,
        new CacheItemPolicy { SlidingExpiration = _defaultExpireTimeSpan });
    return operatorValue;
}
/// <summary>
/// Returns a random record of the given entity type, caching the full
/// (column-less) record list per logical name to avoid repeated
/// RetrieveMultiple round-trips.
/// </summary>
/// <param name="logicalName">Entity logical name to sample from.</param>
/// <param name="service">Organization service used on a cache miss.</param>
/// <param name="r">Random source for picking the record.</param>
public static Entity GetRandomRecord(string logicalName, IOrganizationService service, Random r)
{
    if (Cache.Get(logicalName) is List <Entity> list)
    {
        // BUG FIX: Random.Next's upper bound is exclusive, so the original
        // r.Next(0, Count - 1) could never select the last record.
        return list[r.Next(list.Count)];
    }

    var query = new QueryExpression(logicalName)
    {
        NoLock = true,
        ColumnSet = new ColumnSet(false)
    };
    var result = service.RetrieveMultiple(query).Entities.ToList();
    var policy = new CacheItemPolicy();
    Cache.Set(logicalName, result, policy);
    return result[r.Next(result.Count)];
}
// getPersonTkn() - Get a person's token.
/// <summary>
/// Resolves the caller's token from the "majama60" request header via the
/// default MemoryCache. Returns null when the header is missing or no token
/// is cached under that id.
/// </summary>
public static token getPersonTkn()
{
    // Extract my tokenID
    string myTokenID = HttpContext.Current.Request.Headers["majama60"];

    // BUG FIX: a missing header yields null, and MemoryCache.Get(null) throws
    // ArgumentNullException — treat an absent header as "no token" instead.
    if (string.IsNullOrEmpty(myTokenID))
    {
        return null;
    }

    // Get token object from cache
    System.Runtime.Caching.MemoryCache memCache = System.Runtime.Caching.MemoryCache.Default;
    var res = memCache.Get(myTokenID);
    if (res == null)
    {
        return null;
    }
    return (token)res;
}
/// <summary>
/// Two-level cache lookup: the local dictionary first, then the shared
/// MemoryCache. <paramref name="hasCache"/> reports whether either level hit.
/// </summary>
/// <param name="cacheKey">Key to resolve.</param>
/// <param name="hasCache">Set to true when the key was found in either level.</param>
/// <returns>The cached item, or null on a miss (and, for the second level, on a type mismatch).</returns>
public ISQLCacheItem GetCache(string cacheKey, out bool hasCache)
{
    // Single dictionary lookup instead of the original ContainsKey + indexer
    // double lookup.
    if (m_cache.TryGetValue(cacheKey, out var local))
    {
        hasCache = true;
        return local;
    }

    if (_memoryCache.Contains(cacheKey))
    {
        hasCache = true;
        return _memoryCache.Get(cacheKey) as ISQLCacheItem;
    }

    hasCache = false;
    return null;
}
/// <summary>
/// When no throttle entry exists yet, AddOrIncrementWithExpiration must create
/// one with Count = 1 and an expiration derived from the limiter window.
/// </summary>
public void NewObject_SetsCountToOneWithExpiration()
{
    // Arrange: pin the repository clock so the computed expiration is deterministic.
    var throttleKey = new SimpleThrottleKey("test", "key");
    var limiter = new Limiter().Limit(1).Over(100);
    var cache = new MemoryCache("testing_cache");
    var repository = new MemoryThrottleRepository(cache);
    repository.CurrentDate = () => new DateTime(2030, 1, 1);
    string id = repository.CreateThrottleKey(throttleKey, limiter);

    // Act
    repository.AddOrIncrementWithExpiration(throttleKey, limiter);

    // Assert
    var stored = (MemoryThrottleRepository.ThrottleCacheItem)cache.Get(id);
    Assert.Equal(1L, stored.Count);
    // We're testing a future date by 100 seconds which is 40 seconds + 1 minute
    Assert.Equal(new DateTime(2030, 1, 1, 0, 1, 40), stored.Expiration);
}
/// <summary>
/// Loads an aggregate by id, preferring the in-memory cache and falling back
/// to the repository. A cached aggregate is caught up from the event store;
/// if the next stored event does not directly follow the cached version (a
/// gap — events were missed), the stale cache entry is evicted and the
/// aggregate is re-read from the repository.
/// </summary>
/// <typeparam name="T">Concrete aggregate type.</typeparam>
/// <param name="aggregateId">Identity of the aggregate to load.</param>
/// <returns>The up-to-date aggregate instance.</returns>
public T Get <T>(Guid aggregateId) where T : AggregateRoot
{
    var idstring = aggregateId.ToString();
    try
    {
        // Serialize access per aggregate id so concurrent loads cannot race
        // on the same cache entry.
        lock (_locks.GetOrAdd(idstring, _ => new object()))
        {
            T aggregate;
            if (IsTracked(aggregateId))
            {
                aggregate = (T)_cache.Get(idstring);
                // Events newer than the cached snapshot's version.
                var events = _eventStore.Get(aggregateId, aggregate.Version);
                if (events.Any() && events.First().Version != aggregate.Version + 1)
                {
                    // Version gap: the cached copy missed events — drop it and
                    // fall through to a full repository load below.
                    _cache.Remove(idstring);
                }
                else
                {
                    aggregate.LoadFromHistory(events);
                    return(aggregate);
                }
            }
            aggregate = _repository.Get <T>(aggregateId);
            _cache.Add(
                aggregateId.ToString(),
                aggregate,
                _policyFactory.Invoke());
            return(aggregate);
        }
    }
    catch (Exception)
    {
        // On any failure, purge the possibly-inconsistent entry before
        // propagating the original exception.
        _cache.Remove(idstring);
        throw;
    }
}
/// <summary>Verifies that looking up a null key yields null rather than throwing.</summary>
public void TestNullKeyGet()
{
    ICache cache = new MemoryCache();
    cache.Put("nunit", "value");

    object fetched = cache.Get(null);

    Assert.IsNull(fetched);
}
/// <summary>
/// Verifies MemoryCache sliding expiration: an entry read every 100ms stays
/// alive past its 2-second window, while entries that are never touched again
/// expire; once reads stop, the surviving entry expires as well.
/// </summary>
public void TestCacheSliding ()
{
    var config = new NameValueCollection ();
    // Disable memory-based eviction so only expiration can remove entries.
    config["cacheMemoryLimitMegabytes"] = 0.ToString ();
    config["physicalMemoryLimitPercentage"] = 100.ToString ();
    config["pollingInterval"] = new TimeSpan (0, 0, 1).ToString ();
    using (var mc = new MemoryCache ("TestCacheSliding", config))
    {
        Assert.AreEqual (0, mc.GetCount (), "#CSL1");
        var cip = new CacheItemPolicy();
        // The sliding expiration timeout has to be greater than 1 second because
        // .NET implementation ignores timeouts updates smaller than
        // CacheExpires.MIN_UPDATE_DELTA which is equal to 1.
        cip.SlidingExpiration = new TimeSpan (0, 0, 2);
        mc.Add("slidingtest", "42", cip);
        // These five share the same policy but are never read again, so their
        // sliding window is allowed to lapse.
        mc.Add("expire1", "1", cip);
        mc.Add("expire2", "2", cip);
        mc.Add("expire3", "3", cip);
        mc.Add("expire4", "4", cip);
        mc.Add("expire5", "5", cip);
        Assert.AreEqual (6, mc.GetCount (), "#CSL2");
        // Keep "slidingtest" hot for ~5s; each Get renews its 2s window.
        for (int i = 0; i < 50; i++)
        {
            global::System.Threading.Thread.Sleep (100);
            var item = mc.Get ("slidingtest");
            Assert.AreNotEqual (null, item, "#CSL3-" + i);
        }
        Assert.IsNull (mc.Get ("expire1"), "#CSL4-1");
        Assert.IsNull (mc.Get ("expire2"), "#CSL4-2");
        Assert.IsNull (mc.Get ("expire3"), "#CSL4-3");
        Assert.IsNull (mc.Get ("expire4"), "#CSL4-4");
        Assert.IsNull (mc.Get ("expire5"), "#CSL4-5");
        Assert.AreEqual (1, mc.GetCount (), "#CSL4");
        // Once no longer touched, the survivor must expire too.
        global::System.Threading.Thread.Sleep (4 * 1000);
        Assert.IsNull (mc.Get ("slidingtest"), "#CSL5a");
        Assert.AreEqual (0, mc.GetCount (), "#CSL5");
    }
}
/// <summary>
/// Verifies that entries expire according to their own lifetimes, not their
/// insertion order: short-lived entries added after long-lived ones are still
/// the first to go.
/// </summary>
public void TestCacheExpiryOrdering ()
{
    var config = new NameValueCollection ();
    // Disable memory-based eviction so only expiration can remove entries.
    config["cacheMemoryLimitMegabytes"] = 0.ToString ();
    config["physicalMemoryLimitPercentage"] = 100.ToString ();
    config["pollingInterval"] = new TimeSpan (0, 0, 1).ToString ();
    using (var mc = new MemoryCache ("TestCacheExpiryOrdering", config))
    {
        Assert.AreEqual (0, mc.GetCount (), "#CEO1");
        // add long lived items into the cache first
        for (int i = 0; i < 100; i++)
        {
            var cip = new CacheItemPolicy ();
            cip.SlidingExpiration = new TimeSpan (0, 0, 10);
            mc.Add ("long-" + i, i, cip);
        }
        Assert.AreEqual (100, mc.GetCount (), "#CEO2");
        // add shorter lived items into the cache, these should expire first
        for (int i = 0; i < 100; i++)
        {
            var cip = new CacheItemPolicy ();
            cip.SlidingExpiration = new TimeSpan(0, 0, 1);
            mc.Add ("short-" + i, i, cip);
        }
        Assert.AreEqual (200, mc.GetCount (), "#CEO3");
        // 4s is past the short 1s window but well inside the long 10s one.
        global::System.Threading.Thread.Sleep (4 * 1000);
        for (int i = 0; i < 100; i++)
        {
            Assert.IsNull (mc.Get ("short-" + i), "#CEO4-" + i);
        }
        Assert.AreEqual (100, mc.GetCount (), "#CEO4");
    }
}
/// <summary>
/// Exercises the cache's internal heap resizing: fills it past the resize
/// threshold, lets the short-lived half expire (which shrinks the heap), then
/// grows it again with fresh entries.
/// </summary>
public void TestCacheShrink ()
{
    // Entry counts chosen to straddle the implementation's heap-resize threshold.
    const int HEAP_RESIZE_THRESHOLD = 8192 + 2;
    const int HEAP_RESIZE_SHORT_ENTRIES = 2048;
    const int HEAP_RESIZE_LONG_ENTRIES = HEAP_RESIZE_THRESHOLD - HEAP_RESIZE_SHORT_ENTRIES;
    var config = new NameValueCollection ();
    // Disable memory-based eviction so only expiration can remove entries.
    config["cacheMemoryLimitMegabytes"] = 0.ToString ();
    config["physicalMemoryLimitPercentage"] = 100.ToString ();
    config["pollingInterval"] = new TimeSpan (0, 0, 1).ToString ();
    using (var mc = new MemoryCache ("TestCacheShrink", config))
    {
        Assert.AreEqual (0, mc.GetCount (), "#CS1");
        // add some short duration entries
        for (int i = 0; i < HEAP_RESIZE_SHORT_ENTRIES; i++)
        {
            var expireAt = DateTimeOffset.Now.AddSeconds (3);
            mc.Add ("short-" + i, i.ToString (), expireAt);
        }
        Assert.AreEqual (HEAP_RESIZE_SHORT_ENTRIES, mc.GetCount (), "#CS2");
        // add some long duration entries
        for (int i = 0; i < HEAP_RESIZE_LONG_ENTRIES; i++)
        {
            var expireAt = DateTimeOffset.Now.AddSeconds (12);
            mc.Add ("long-" + i, i.ToString (), expireAt);
        }
        Assert.AreEqual (HEAP_RESIZE_LONG_ENTRIES + HEAP_RESIZE_SHORT_ENTRIES, mc.GetCount(), "#CS3");
        // wait for the cache thread to expire the short duration items, this will also shrink the size of the cache
        global::System.Threading.Thread.Sleep (5 * 1000);
        for (int i = 0; i < HEAP_RESIZE_SHORT_ENTRIES; i++)
        {
            Assert.IsNull (mc.Get ("short-" + i), "#CS4-" + i);
        }
        Assert.AreEqual (HEAP_RESIZE_LONG_ENTRIES, mc.GetCount (), "#CS4");
        // add some new items into the cache, this will grow the cache again
        for (int i = 0; i < HEAP_RESIZE_LONG_ENTRIES; i++)
        {
            mc.Add("final-" + i, i.ToString (), DateTimeOffset.Now.AddSeconds (4));
        }
        Assert.AreEqual (HEAP_RESIZE_LONG_ENTRIES + HEAP_RESIZE_LONG_ENTRIES, mc.GetCount (), "#CS5");
    }
}
/// <summary>
/// Get Key on the MemoryCache server cache
/// </summary>
/// <typeparam name="T">Type of object</typeparam>
/// <param name="key">Key of object</param>
/// <returns>object</returns>
public override T Get <T>(string key)
{
    // Cast the raw cache entry straight to the requested type; a miss yields
    // null, which the cast passes through for reference types.
    return (T)Cache.Get(key);
}
/// <summary>
/// Retrieve cached object based on key
/// </summary>
/// <param name="key">Cache key to look up.</param>
/// <returns>The cached object, or null on a miss.</returns>
public object GetOnly(string key)
{
    var hit = _cache.Get(key);
    return hit;
}
/// <summary>
/// Gets a cache entry.
/// </summary>
/// <param name="key">Unique identifier of the cache entry.</param>
/// <returns>The cache entry, or null when absent.</returns>
public virtual object Get(string key) { return(_cache.Get(key)); }
/// <summary>
/// Micro-benchmark comparing MemoryCache set/get throughput for raw objects,
/// protobuf-serialized payloads, and serialized+compressed payloads.
/// Historical figures (1m iterations, blocking-async runs; see VCS history for
/// the full tables): raw storage is fastest by far; serialization costs ~2x on
/// Set; compression costs roughly 20x-100x on Set depending on payload size
/// but can shrink the stored footprint ~50x. Running the async variants
/// synchronously (.Result) added substantial overhead to those older numbers.
/// </summary>
private static void MemorySpeedPerformance()
{
    const int ITERATIONS = 30000;
    const string KEY = "impt-key";
    var value = new OneLong { Id = GenerateId() };

    // Runs op once untimed (warmup), then times ITERATIONS repetitions.
    // BUG FIX: the original started its first stopwatch *before* the warmup
    // pass, inflating the "Serialized + Compressed" Set figure relative to the
    // other sections, which all restarted the stopwatch after warmup.
    long Measure(Action op)
    {
        op();
        var sw = Stopwatch.StartNew();
        for (int i = 0; i < ITERATIONS; i++)
        {
            op();
        }
        return sw.ElapsedMilliseconds;
    }

    // --- serialize + compress round-trips ---
    MemoryCache cM = new MemoryCache("cM");
    long compressSet = Measure(() =>
    {
        byte[] serialized = ProtoBufSerializer.Serialize(value);
        byte[] compressed = SmartCompressor.Instance.Compress(serialized);
        cM.Set(KEY, compressed, null);
    });
    long compressGet = Measure(() =>
    {
        byte[] compressed = (byte[])cM.Get(KEY);
        byte[] decompressed = SmartCompressor.Instance.Decompress(compressed);
        ProtoBufSerializer.Deserialize<OneLong>(decompressed);
    });

    // --- serialize-only round-trips ---
    MemoryCache sM = new MemoryCache("sM");
    long serializeSet = Measure(() =>
    {
        byte[] serialized = ProtoBufSerializer.Serialize(value);
        sM.Set(KEY, serialized, null);
    });
    long serializeGet = Measure(() =>
    {
        byte[] serialized = (byte[])sM.Get(KEY);
        ProtoBufSerializer.Deserialize<OneLong>(serialized);
    });

    // --- raw object storage baseline ---
    MemoryCache rM = new MemoryCache("rM");
    long rawSet = Measure(() => rM.Set(KEY, value, null));
    long rawGet = Measure(() => rM.Get(KEY));

    Console.WriteLine("Memory Speed: (operations per second)");
    Console.WriteLine(" Set:");
    Console.WriteLine(" Raw: {0:#,##0.0#}", (float)ITERATIONS / rawSet * 1000.0);
    Console.WriteLine(
        " Serialized: {0:#,##0.0#} ({1:0.00})%",
        (float)ITERATIONS / serializeSet * 1000.0,
        (float)rawSet / serializeSet * 100.0);
    Console.WriteLine(
        " Serialized + Compressed: {0:#,##0.0#} ({1:0.00})%",
        (float)ITERATIONS / compressSet * 1000.0,
        (float)rawSet / compressSet * 100.0);
    Console.WriteLine(" Get:");
    Console.WriteLine(" Raw: {0:#,##0.0#}", (float)ITERATIONS / rawGet * 1000.0);
    Console.WriteLine(
        " Serialized: {0:#,##0.0#} ({1:0.00})%",
        (float)ITERATIONS / serializeGet * 1000.0,
        (float)rawGet / serializeGet * 100.0);
    Console.WriteLine(
        " Serialized + Compressed: {0:#,##0.0#} ({1:0.00})%",
        (float)ITERATIONS / compressGet * 1000.0,
        (float)rawGet / compressGet * 100.0);
}
/// <summary>Returns the cached token string, or null when none is cached.</summary>
public static string GetTokenFromCache()
{
    return _cache.Get(_key) as string;
}
/// <summary>
/// Mono-specific variant of the sliding-expiration test: uses the __Mono*
/// config knobs to emulate a single CPU and drive the internal timer at 1s.
/// An entry read every 100ms outlives its 1-second window while untouched
/// entries expire.
/// </summary>
public void TestCacheSliding ()
{
    var config = new NameValueCollection ();
    // Disable memory-based eviction so only expiration can remove entries.
    config["cacheMemoryLimitMegabytes"] = 0.ToString ();
    config["physicalMemoryLimitPercentage"] = 100.ToString ();
    config["__MonoEmulateOneCPU"] = true.ToString ();
    // it appears that pollingInterval does nothing, so we set the Mono timer as well
    config["pollingInterval"] = new TimeSpan (0, 0, 1).ToString ();
    config["__MonoTimerPeriod"] = 1.ToString ();
    using (var mc = new MemoryCache ("TestCacheSliding", config))
    {
        Assert.AreEqual (0, mc.GetCount (), "#CSL1");
        var cip = new CacheItemPolicy();
        cip.SlidingExpiration = new TimeSpan (0, 0, 1);
        mc.Add("slidingtest", "42", cip);
        // These five share the same policy but are never read again, so their
        // sliding window is allowed to lapse.
        mc.Add("expire1", "1", cip);
        mc.Add("expire2", "2", cip);
        mc.Add("expire3", "3", cip);
        mc.Add("expire4", "4", cip);
        mc.Add("expire5", "5", cip);
        Assert.AreEqual (6, mc.GetCount (), "#CSL2");
        // Keep "slidingtest" hot for ~5s; each Get renews its 1s window.
        for (int i = 0; i < 50; i++)
        {
            global::System.Threading.Thread.Sleep (100);
            var item = mc.Get ("slidingtest");
            Assert.AreNotEqual (null, item, "#CSL3-" + i);
        }
        Assert.AreEqual (1, mc.GetCount (), "#CSL4");
        // Once no longer touched, the survivor must expire too.
        global::System.Threading.Thread.Sleep (4 * 1000);
        Assert.AreEqual (0, mc.GetCount (), "#CSL5");
    }
}
/// <summary>
/// Reads the raw cache entry for <paramref name="key"/> and converts it to
/// <typeparamref name="T"/> via the <c>To&lt;T&gt;</c> extension.
/// NOTE(review): on a cache miss <c>Get</c> returns null and <c>To&lt;T&gt;</c>
/// is then invoked on a null reference — confirm it is a null-tolerant
/// extension method.
/// </summary>
protected override T GetCache <T>(string key) => _memoryCache.Get(key).To <T>();
/// <summary>
/// Gets a cache entry.
/// </summary>
/// <param name="key">Cache key.</param>
/// <returns>The cached value cast to <typeparamref name="T"/>.</returns>
public T GetCache <T>(string key) { return((T)mc.Get(key)); }
/// <summary>Reports whether a non-null entry exists for the given key.</summary>
public bool Exists(string key)
{
    var entry = cache.Get(key);
    return entry != null;
}
/// <summary>
/// Get cached item
/// </summary>
/// <typeparam name="T">Reference type the entry was stored as.</typeparam>
/// <param name="key">Key, scoped per type via CacheKey&lt;T&gt;.</param>
/// <returns>The cached instance, or null when absent.</returns>
public T Get <T>(NonNullable <string> key) where T : class
{
    var stored = _cache.Get(CacheKey <T>(key));
    return (T)stored;
}
/// <summary>
/// Exercises MemoryCache.Trim (Mono expectations): trimming 50% removes about
/// half the entries, and in the last scenario the evicted half is reported via
/// RemovedCallback in insertion order.
/// </summary>
public void Trim ()
{
    var config = new NameValueCollection ();
    config ["__MonoEmulateOneCPU"] = "true";
    var mc = new MemoryCache ("MyCache", config);
    for (int i = 0; i < 10; i++)
        mc.Set ("key" + i.ToString (), "value" + i.ToString (), null);
    Assert.AreEqual (10, mc.GetCount (), "#A1-1");
    long trimmed = mc.Trim (50);
    Assert.AreEqual (5, trimmed, "#A1-2");
    Assert.AreEqual (5, mc.GetCount (), "#A1-3");
    mc = new MemoryCache ("MyCache", config);
    // Only entries 11- are considered for removal
    for (int i = 0; i < 11; i++)
        mc.Set ("key" + i.ToString (), "value" + i.ToString (), null);
    Assert.AreEqual (11, mc.GetCount (), "#A2-1");
    trimmed = mc.Trim (50);
    Assert.AreEqual (6, trimmed, "#A2-2");
    Assert.AreEqual (5, mc.GetCount (), "#A2-3");
    mc = new MemoryCache ("MyCache", config);
    // Only entries 11- are considered for removal
    for (int i = 0; i < 125; i++)
        mc.Set ("key" + i.ToString (), "value" + i.ToString (), null);
    Assert.AreEqual (125, mc.GetCount (), "#A3-1");
    trimmed = mc.Trim (50);
    Assert.AreEqual (63, trimmed, "#A3-2");
    Assert.AreEqual (62, mc.GetCount (), "#A3-3");
    // Testing the removal order
    mc = new MemoryCache ("MyCache", config);
    var removed = new List <string> ();
    var cip = new CacheItemPolicy ();
    // Record every eviction so the order can be asserted afterwards.
    cip.RemovedCallback = (CacheEntryRemovedArguments args) => { removed.Add (args.CacheItem.Key); };
    for (int i = 0; i < 50; i++)
        mc.Set ("key" + i.ToString (), "value" + i.ToString (), cip);
    object value;
    // Touch every entry once so each carries an access timestamp.
    for (int i = 0; i < 50; i++)
        value = mc.Get ("key" + i.ToString ());
    trimmed = mc.Trim (50);
    Assert.AreEqual (25, mc.GetCount (), "#A4-1");
    Assert.AreEqual (25, trimmed, "#A4-2");
    Assert.AreEqual (25, removed.Count, "#A4-3");
    // The oldest 25 entries must have been evicted in insertion order.
    for (int i = 0; i < 25; i++)
        Assert.AreEqual ("key" + i.ToString (), removed [i], "#A5-" + i.ToString ());
}
/// <summary>
/// Exercises MemoryCache.Trim (.NET expectations): the freshest 10 entries are
/// exempt from trimming, and within the group selected for removal the entries
/// are reported most-frequently-used first (see the inline remark below).
/// </summary>
public void Trim ()
{
    var config = new NameValueCollection ();
    config ["__MonoEmulateOneCPU"] = "true";
    var mc = new MemoryCache ("MyCache", config);
    for (int i = 0; i < 10; i++)
        mc.Set ("key" + i.ToString (), "value" + i.ToString (), null);
    // .NET doesn't touch the freshest 10 entries
    Assert.AreEqual (10, mc.GetCount (), "#A1-1");
    long trimmed = mc.Trim (50);
    Assert.AreEqual (0, trimmed, "#A1-2");
    Assert.AreEqual (10, mc.GetCount (), "#A1-3");
    mc = new MemoryCache ("MyCache", config);
    // Only entries 11- are considered for removal
    for (int i = 0; i < 11; i++)
        mc.Set ("key" + i.ToString (), "value" + i.ToString (), null);
    Assert.AreEqual (11, mc.GetCount (), "#A2-1");
    trimmed = mc.Trim (50);
    Assert.AreEqual (1, trimmed, "#A2-2");
    Assert.AreEqual (10, mc.GetCount (), "#A2-3");
    mc = new MemoryCache ("MyCache", config);
    // Only entries 11- are considered for removal
    for (int i = 0; i < 125; i++)
        mc.Set ("key" + i.ToString (), "value" + i.ToString (), null);
    Assert.AreEqual (125, mc.GetCount (), "#A3-1");
    trimmed = mc.Trim (50);
    Assert.AreEqual (62, trimmed, "#A3-2");
    Assert.AreEqual (63, mc.GetCount (), "#A3-3");
    // Testing the removal order
    mc = new MemoryCache ("MyCache", config);
    var removed = new List <string> ();
    var cip = new CacheItemPolicy ();
    // Record every eviction so the order can be asserted afterwards.
    cip.RemovedCallback = (CacheEntryRemovedArguments args) => { removed.Add (args.CacheItem.Key); };
    for (int i = 0; i < 50; i++)
        mc.Set ("key" + i.ToString (), "value" + i.ToString (), cip);
    object value;
    // Touch every entry once so each carries an access timestamp.
    for (int i = 0; i < 50; i++)
        value = mc.Get ("key" + i.ToString ());
    trimmed = mc.Trim (50);
    Assert.AreEqual (25, mc.GetCount (), "#A4-1");
    Assert.AreEqual (25, trimmed, "#A4-2");
    Assert.AreEqual (25, removed.Count, "#A4-3");
    // OK, this is odd... The list is correct in terms of entries removed but the entries
    // are removed in the _MOST_ frequently used order, within the group selected for removal.
    for (int i = 24; i >= 0; i--)
    {
        int idx = 24 - i;
        Assert.AreEqual ("key" + i.ToString (), removed [idx], "#A5-" + idx.ToString ());
    }
}
/// <summary>Fetches the cached entry for <paramref name="key"/> and casts it to T.</summary>
public T Get <T>(string key)
{
    var entry = _cache.Get(key);
    return (T)entry;
}
/// <summary>
/// Gets the specified property for identifier.
/// </summary>
/// <param name="id">The identifier.</param>
/// <param name="propertyName">Name of the property.</param>
/// <returns>The cached property value, or null when absent.</returns>
public object Get(string id, string propertyName)
{
    // The composite cache key combines the id and the property name.
    var compositeKey = getPropertyId(id, propertyName);
    return _cache.Get(compositeKey);
}
/// <summary>Static convenience lookup against the shared cache; null on a miss.</summary>
public static object Get(string key)
{
    var entry = _cache.Get(key);
    return entry;
}