/// <summary>
/// Inserts a value into the underlying cache with an absolute expiration,
/// attaching the update callback when one has been configured.
/// Null values are never cached; a warning is logged instead.
/// </summary>
/// <param name="key">Cache entry key.</param>
/// <param name="value">Value to cache; ignored (and logged) when null.</param>
/// <param name="datetime">Absolute expiration time for the entry.</param>
private void Insert(string key, T value, DateTime datetime)
{
    if (value == null)
    {
        // Refuse to cache nulls; record enough context to trace the caller.
        BaseCache<T>.Logger.Warning(new Log.LogMessage()
            .SetMessage("LocalMemoryCacheProvider> null value")
            .SetLevel(Log.PriorityLevel.Warning)
            .SetCustomKeyValue("objectType", typeof(T).ToString())
            .SetCustomKeyValue("cache key", key)
        );
        return;
    }

    // Both code paths share the same absolute expiration; the callback is
    // only wired up when one was registered.
    var policy = new CacheItemPolicy { AbsoluteExpiration = datetime };
    if (this.OnUpdateCacheCallback != null)
    {
        policy.UpdateCallback = this.OnUpdateCacheCallback;
    }

    cache.Set(new CacheItem(key, value), policy);
}
/// <summary>
/// Stores <paramref name="value"/> in the in-memory cache under the key's
/// string value with the given absolute expiration.
/// </summary>
/// <param name="cacheKey">Key whose <c>Value</c> is used as the cache entry key.</param>
/// <param name="absoluteExpiration">Point in time at which the entry expires.</param>
/// <param name="value">Value to cache.</param>
/// <param name="cancellationToken">Unused: the underlying cache write is synchronous.</param>
/// <returns>An already-completed task.</returns>
protected override Task SetAsync<T>(
    CacheKey cacheKey,
    DateTimeOffset absoluteExpiration,
    T value,
    CancellationToken cancellationToken)
{
    _memoryCache.Set(cacheKey.Value, value, absoluteExpiration);

    // Task.CompletedTask avoids allocating a fresh Task<int> on every call,
    // which is what Task.FromResult(0) did.
    return Task.CompletedTask;
}
/// <summary>
/// Verifies that incrementing an existing throttle entry bumps the count by
/// one while leaving the stored expiration date untouched.
/// </summary>
public void ExistingObject_IncrementByOneAndSetExpirationDate()
{
    // Arrange: seed the cache with a throttle entry whose count is 1.
    var throttleKey = new SimpleThrottleKey("test", "key");
    var limiter = new Limiter().Limit(1).Over(100);
    var memoryCache = new MemoryCache("testing_cache");
    var repository = new MemoryThrottleRepository(memoryCache);
    string cacheId = repository.CreateThrottleKey(throttleKey, limiter);
    var seeded = new MemoryThrottleRepository.ThrottleCacheItem()
    {
        Count = 1,
        Expiration = new DateTime(2030, 1, 1)
    };
    memoryCache.Set(cacheId, seeded, seeded.Expiration);

    // Act
    repository.AddOrIncrementWithExpiration(throttleKey, limiter);

    // Assert: count incremented, expiration preserved.
    var stored = (MemoryThrottleRepository.ThrottleCacheItem)memoryCache.Get(cacheId);
    Assert.Equal(2L, stored.Count);
    Assert.Equal(new DateTime(2030, 1, 1), stored.Expiration);
}
/// <summary>
/// Sets the specified identifier.
/// </summary>
/// <param name="id">The identifier.</param>
/// <param name="propertyName">Name of the property.</param>
/// <param name="value">The value.</param>
/// <param name="ttl">The TTL; falls back to the configured default when omitted.</param>
/// <param name="overwrite">if set to <c>true</c>, an existing entry is replaced;
/// otherwise <c>Add</c> semantics apply (no overwrite).</param>
public void Set(string id, string propertyName, object value, TimeSpan? ttl = null, bool overwrite = true)
{
    if (!ttl.HasValue)
    {
        ttl = _defaultTtl;
    }

    var cacheKey = getPropertyId(id, propertyName);
    var policy = getCacheItemPolicy(ttl.Value);

    if (overwrite)
    {
        _cache.Set(cacheKey, value, policy);
    }
    else
    {
        _cache.Add(cacheKey, value, policy);
    }
}
/// <summary>
/// GetFlight with MemoryCache (5 second absolute expiration).
/// Returns the flight list for <paramref name="departure"/>, loading it from
/// the database only on a cache miss.
/// </summary>
private static List<Flight> GetFlight1(string departure)
{
    string cacheItemName = "FlightSet_" + departure;

    // Access the shared default cache instance.
    var cache = System.Runtime.Caching.MemoryCache.Default;
    var flightSet = cache[cacheItemName] as List<Flight>;

    if (flightSet != null)
    {
        // Data is already in cache.
        CUI.Print($"{DateTime.Now.ToLongTimeString()}: Cache hit", ConsoleColor.Green);
        return flightSet;
    }

    CUI.Print($"{DateTime.Now.ToLongTimeString()}: Cache missed", ConsoleColor.Red);
    using (var ctx = new WWWingsContext())
    {
        ctx.Log();
        // Load flights from the database.
        flightSet = ctx.FlightSet.Where(x => x.Departure == departure).ToList();
    }

    // Store flights in cache for five seconds.
    // (Alternative: policy.SlidingExpiration = new TimeSpan(0, 0, 0, 5);)
    var policy = new CacheItemPolicy
    {
        AbsoluteExpiration = DateTime.Now.AddSeconds(5)
    };
    cache.Set(cacheItemName, flightSet, policy);

    return flightSet;
}
/// <summary>
/// Caches the given aggregate under its identifier.
/// </summary>
/// <param name="id">Aggregate identifier used as the cache key.</param>
/// <param name="aggregate">Aggregate root instance to cache.</param>
public void Set(Guid id, AggregateRoot aggregate)
{
#if NET461
    // Full-framework build: keys are strings and each insert gets a policy
    // from the factory. NOTE(review): Add does not overwrite an existing
    // entry, while the Set below does — confirm this difference is intended.
    _cache.Add(id.ToString(), aggregate, _policyFactory.Invoke());
#else
    // Other targets: the cache accepts the Guid key directly with shared options.
    _cache.Set(id, aggregate, _cacheOptions);
#endif
}
/// <summary>
/// Caches the given aggregate under its string identifier.
/// </summary>
/// <param name="id">Aggregate identifier used as the cache key.</param>
/// <param name="aggregate">Aggregate root instance to cache.</param>
public void Set(string id, AggregateRoot aggregate)
{
#if NET451
    // Full-framework build: each insert gets a policy from the factory.
    // NOTE(review): Add does not overwrite an existing entry, while the Set
    // below does — confirm this difference is intended.
    _cache.Add(id, aggregate, _policyFactory.Invoke());
#else
    // Other targets: shared cache options are applied on every write.
    _cache.Set(id, aggregate, _cacheOptions);
#endif
}
/// <summary>
/// Caches the given aggregate under its identifier and returns a completed task.
/// </summary>
/// <param name="id">Aggregate identifier used as the cache key.</param>
/// <param name="aggregate">Aggregate root instance to cache.</param>
/// <returns>An already-completed task (the write is synchronous).</returns>
public Task Set(Guid id, AggregateRoot aggregate)
{
#if NET452
    // Full-framework build: keys are strings and each insert gets a policy
    // from the factory. NOTE(review): Add does not overwrite an existing
    // entry, while the Set below does — confirm this difference is intended.
    _cache.Add(id.ToString(), aggregate, _policyFactory.Invoke());
#else
    _cache.Set(id, aggregate, _cacheOptions);
#endif
    // Task.FromResult(0) rather than Task.CompletedTask: CompletedTask is not
    // available on the NET452 target this method also compiles for.
    return(Task.FromResult(0));
}
/// <summary>
/// Writes a value to the in-memory cache, applying a sliding expiration when
/// one is supplied; otherwise the entry gets a default (non-expiring) policy.
/// </summary>
/// <param name="key">Cache entry key.</param>
/// <param name="value">Value to store.</param>
/// <param name="expireTime">Optional sliding expiration window.</param>
protected override void SetCache(string key, object value, TimeSpan? expireTime = default(TimeSpan?))
{
    var cachePolicy = expireTime.HasValue
        ? new CacheItemPolicy { SlidingExpiration = expireTime.Value }
        : new CacheItemPolicy();

    _memoryCache.Set(key, value, cachePolicy);
}
/// <summary>
/// Decreases the numeric counter stored under <paramref name="key"/> by
/// <paramref name="num"/> and writes the new value back to the cache with the
/// default sliding expiration. A missing entry is treated as 0.
/// </summary>
/// <param name="key">Cache key of the counter.</param>
/// <param name="num">Amount to subtract.</param>
/// <returns>The counter value after the decrement.</returns>
public double Decrease(string key, double num)
{
    double operatorValue = 0;
    var value = _memoryCache.Get(key);
    if (value != null)
    {
        operatorValue = (double)value;
    }

    operatorValue -= num;

    // BUG FIX: the previous code stored the *old* raw value back into the
    // cache (and threw ArgumentNullException when the key was absent, since
    // MemoryCache.Set rejects null values). Store the updated counter instead.
    _memoryCache.Set(
        key,
        operatorValue,
        new CacheItemPolicy { SlidingExpiration = _defaultExpireTimeSpan });

    return operatorValue;
}
/// <summary>
/// Stores a batch of cache items, scoping each key to
/// <paramref name="partition"/> when one is given and honouring each item's
/// optional expiration.
/// </summary>
/// <param name="items">Items to store; each carries its own key and optional expiration.</param>
/// <param name="partition">Optional partition name prefixed onto every key.</param>
/// <returns>An already-completed task (the writes are synchronous).</returns>
protected override Task SetItemsAsync<T>(IEnumerable<CacheItem<T>> items, string partition)
{
    // Removed an unused "utcNow" local that was computed but never read.
    foreach (var item in items)
    {
        var key = item.Key;
        if (!string.IsNullOrEmpty(partition))
        {
            // Partitioned entries use the "$partition$|key" naming scheme.
            key = $"${partition}$|{key}";
        }

        if (item.Expiration.HasValue)
        {
            _client.Set(key, item, item.Expiration.Value, null);
        }
        else
        {
            _client.Set(key, item, null, null);
        }
    }

    return Task.CompletedTask;
}
/// <summary>
/// Set a cache item.
/// </summary>
/// <typeparam name="T">item type</typeparam>
/// <param name="key">item key</param>
/// <param name="item">item</param>
/// <returns>true if added to the cache otherwise false</returns>
public bool Set<T>(NonNullable<string> key, T item) where T : class
{
    // An existing entry is never replaced; report the no-op to the caller.
    if (Contains<T>(key))
    {
        return false;
    }

    var policy = new CacheItemPolicy
    {
        AbsoluteExpiration = DateTime.Now.AddMilliseconds(_lifetimeMilliseconds)
    };
    _cache.Set(CacheItem(key, item), policy);
    return true;
}
/// <summary>
/// Stores <paramref name="value"/> with a sliding expiration of
/// <paramref name="seconds"/> seconds, reusing one policy object per distinct
/// duration so the removal callback is registered exactly once per window size.
/// </summary>
/// <param name="key">Cache key.</param>
/// <param name="value">Value to store.</param>
/// <param name="seconds">Sliding expiration window, in seconds.</param>
public static void Set(string key, object value, int seconds)
{
    CacheItemPolicy policy;
    if (!_policies.TryGetValue(seconds, out policy))
    {
        policy = new CacheItemPolicy
        {
            RemovedCallback = RemovedCallback,
            SlidingExpiration = TimeSpan.FromSeconds(seconds)
        };
        // BUG FIX: use the indexer instead of Add so two callers racing on the
        // same duration cannot crash with "key already exists".
        // NOTE(review): Dictionary is still not safe under concurrent writers —
        // confirm the call pattern or switch to ConcurrentDictionary.
        _policies[seconds] = policy;
    }

    _cache.Set(key, value, policy);
}
/// <summary>
/// Stores an object in the cache. Null data is stored as the shared
/// <c>nullReference</c> sentinel (presumably so "cached null" can be told
/// apart from "not cached" — confirm against the corresponding getter).
/// </summary>
/// <param name="cacheKey">The cache key to store the object against.</param>
/// <param name="data">The data to store against the key.</param>
/// <param name="duration">The duration, in seconds, to cache the data for.</param>
public virtual void SetCachedObject<T>(string cacheKey, T data, int duration)
{
    // ReSharper disable once ConvertIfStatementToNullCoalescingExpression type conflict
    if (data == null)
    {
        // NOTE(review): the null sentinel uses Add (no overwrite of an existing
        // entry) with an *absolute* expiration, while real data uses Set
        // (overwrite) with a *sliding* one — confirm this asymmetry is intended.
        _cache.Add(cacheKey, nullReference, DateTime.Now.AddSeconds(duration));
    }
    else
    {
        var cacheItemPolicy = new CacheItemPolicy() //TODO to settings
        {
            SlidingExpiration = TimeSpan.FromSeconds(duration)
        };
        _cache.Set(cacheKey, data, cacheItemPolicy);
    }
}
/// <summary>
/// Returns a random record of the given entity type, caching the column-less
/// record list per logical name so the query runs only once per entity type.
/// </summary>
/// <param name="logicalName">Entity logical name to sample from.</param>
/// <param name="service">Organization service used for the query on a cache miss.</param>
/// <param name="r">Random source.</param>
/// <returns>A randomly chosen entity.</returns>
public static Entity GetRandomRecord(string logicalName, IOrganizationService service, Random r)
{
    if (Cache.Get(logicalName) is List<Entity> list)
    {
        // BUG FIX: Random.Next's upper bound is exclusive, so the previous
        // r.Next(0, Count - 1) could never return the last element.
        return list[r.Next(list.Count)];
    }

    var query = new QueryExpression(logicalName)
    {
        NoLock = true,
        ColumnSet = new ColumnSet(false)
    };
    var result = service.RetrieveMultiple(query).Entities.ToList();

    // Default policy: the list stays cached until evicted by the cache itself.
    var policy = new CacheItemPolicy();
    Cache.Set(logicalName, result, policy);

    return result[r.Next(result.Count)];
}
/// <summary>
/// Stores a value in the cache.
/// </summary>
/// <param name="key">Cache key; must not be null or whitespace.</param>
/// <param name="data">Value to cache; must not be null.</param>
/// <param name="expireTime">Expiration in minutes; only applied when greater than 0.</param>
/// <exception cref="ArgumentNullException">When key or data is missing.</exception>
public void Set(string key, dynamic data, int? expireTime)
{
    // BUG FIX: the original constructed these exceptions without throwing
    // them, so invalid arguments were silently ignored and the bad value
    // was cached anyway.
    if (string.IsNullOrWhiteSpace(key))
    {
        throw new ArgumentNullException(nameof(key), "Set方法的{key}参数值为空。");
    }
    if (data == null)
    {
        throw new ArgumentNullException(nameof(data), "Set方法的{data}参数值为空。");
    }

    CacheItemPolicy policy = null;
    if (expireTime.HasValue && expireTime > 0)
    {
        policy = new CacheItemPolicy();
        policy.AbsoluteExpiration = DateTime.Now + TimeSpan.FromMinutes(expireTime.Value);
    }

    // A null policy means the entry uses the cache's defaults (no expiration).
    cache.Set(new CacheItem(key, data), policy);
}
/// <summary>
/// Stores an object identified by a unique key in cache.
/// Passing a null value removes any existing entry instead of storing.
/// When the cache is at capacity it is trimmed before inserting.
/// </summary>
/// <param name="correlationId">Caller-supplied correlation id (not used by this method).</param>
/// <param name="key">Unique key identifying a data object.</param>
/// <param name="value">The data object to store.</param>
/// <param name="timeout">Time to live for the object in milliseconds; non-positive
/// values fall back to the default <c>Timeout</c>.</param>
/// <returns>The stored value, or null when the call removed the entry.</returns>
public async Task<object> StoreAsync(string correlationId, string key, object value, long timeout)
{
    if (key == null)
    {
        throw new ArgumentNullException(nameof(key));
    }

    // Shortcut to remove entry from the cache
    if (value == null)
    {
        if (_standardCache.Contains(key))
        {
            _standardCache.Remove(key);
        }
        return (null);
    }

    // Double-checked pattern: only one thread trims when the cache is full;
    // the re-check inside the lock avoids redundant trims.
    // NOTE(review): Trim(5) removes a percentage of entries — confirm 5% is
    // the intended pressure-relief amount.
    if (MaxSize <= _standardCache.GetCount())
    {
        lock (_lock)
        {
            if (MaxSize <= _standardCache.GetCount())
            {
                _standardCache.Trim(5);
            }
        }
    }

    // Non-positive timeouts fall back to the configured default TTL.
    timeout = timeout > 0 ? timeout : Timeout;
    _standardCache.Set(key, value, new CacheItemPolicy
    {
        SlidingExpiration = TimeSpan.FromMilliseconds(timeout)
    });

    return (await Task.FromResult(value));
}
/// <summary>
/// Adds or updates a cache entry, storing the original key/value pair as a
/// DictionaryEntry under the derived cache key within the configured region.
/// </summary>
/// <param name="key">Caller-visible key; must be non-empty.</param>
/// <param name="value">Value to store.</param>
/// <exception cref="ArgumentNullException">When <paramref name="key"/> is null or empty.</exception>
public void Put(string key, object value)
{
    if (string.IsNullOrWhiteSpace(key))
    {
        throw new ArgumentNullException("key", "the key is null or empty.");
    }

    // Lazily register the root key the first time anything is stored.
    if (!rootCacheKeyStored)
    {
        StoreRootCacheKey();
    }

    string cacheKey = GetCacheKey(key);

    // BUG FIX: the existence check previously probed the raw "key" while
    // entries are stored under the derived "cacheKey", so it was always false
    // and updates fell into the Add branch, which does not overwrite an
    // existing entry — existing values could never be replaced.
    if (cache.Contains(cacheKey, regionName))
    {
        cache.Set(cacheKey, new DictionaryEntry(key, value), policy, regionName);
    }
    else
    {
        cache.Add(cacheKey, new DictionaryEntry(key, value), policy, regionName);
    }
}
/// <summary>
/// Thin wrapper that forwards a key/value pair and its eviction policy to the
/// underlying cache instance.
/// </summary>
/// <param name="key">Cache entry key.</param>
/// <param name="value">Value to store.</param>
/// <param name="policy">Eviction/expiration policy for the entry.</param>
private void Set(string key, object value, sys.CacheItemPolicy policy)
{
    _cache.Set(key, value, policy);
}
public void Trim ()
{
    // Force the Mono-specific single-CPU emulation so the cache uses one
    // internal store, making the trim counts below deterministic.
    var config = new NameValueCollection ();
    config ["__MonoEmulateOneCPU"] = "true";

    // Trim(50) on 10 entries: this build removes half of them.
    var mc = new MemoryCache ("MyCache", config);
    for (int i = 0; i < 10; i++)
        mc.Set ("key" + i.ToString (), "value" + i.ToString (), null);
    Assert.AreEqual (10, mc.GetCount (), "#A1-1");
    long trimmed = mc.Trim (50);
    Assert.AreEqual (5, trimmed, "#A1-2");
    Assert.AreEqual (5, mc.GetCount (), "#A1-3");

    mc = new MemoryCache ("MyCache", config);
    // Only entries 11- are considered for removal
    for (int i = 0; i < 11; i++)
        mc.Set ("key" + i.ToString (), "value" + i.ToString (), null);
    Assert.AreEqual (11, mc.GetCount (), "#A2-1");
    trimmed = mc.Trim (50);
    Assert.AreEqual (6, trimmed, "#A2-2");
    Assert.AreEqual (5, mc.GetCount (), "#A2-3");

    mc = new MemoryCache ("MyCache", config);
    // Only entries 11- are considered for removal
    for (int i = 0; i < 125; i++)
        mc.Set ("key" + i.ToString (), "value" + i.ToString (), null);
    Assert.AreEqual (125, mc.GetCount (), "#A3-1");
    trimmed = mc.Trim (50);
    Assert.AreEqual (63, trimmed, "#A3-2");
    Assert.AreEqual (62, mc.GetCount (), "#A3-3");

    // Testing the removal order: touch every entry once, then trim half and
    // record what the removal callback saw.
    mc = new MemoryCache ("MyCache", config);
    var removed = new List <string> ();
    var cip = new CacheItemPolicy ();
    cip.RemovedCallback = (CacheEntryRemovedArguments args) => {
        removed.Add (args.CacheItem.Key);
    };
    for (int i = 0; i < 50; i++)
        mc.Set ("key" + i.ToString (), "value" + i.ToString (), cip);
    object value;
    for (int i = 0; i < 50; i++)
        value = mc.Get ("key" + i.ToString ());
    trimmed = mc.Trim (50);
    Assert.AreEqual (25, mc.GetCount (), "#A4-1");
    Assert.AreEqual (25, trimmed, "#A4-2");
    Assert.AreEqual (25, removed.Count, "#A4-3");
    // This build expects removal in insertion order (oldest first).
    for (int i = 0; i < 25; i++)
        Assert.AreEqual ("key" + i.ToString (), removed [i], "#A5-" + i.ToString ());
}
/// <summary>
/// Caches <paramref name="val"/> under <paramref name="key"/> with a sliding
/// expiration of <paramref name="expiresIn"/> and no absolute expiration.
/// </summary>
/// <typeparam name="T">Type of the cached value.</typeparam>
public void Set<T>(string key, T val, TimeSpan expiresIn)
{
    var policy = new CacheItemPolicy
    {
        AbsoluteExpiration = ObjectCache.InfiniteAbsoluteExpiration,
        SlidingExpiration = expiresIn
    };
    FBackend.Set(key, val, policy);
}
/// <summary>
/// Inserts an item into the cache.
/// </summary>
/// <typeparam name="T">Type of the data.</typeparam>
/// <param name="key">Unique identifier of the cache entry.</param>
/// <param name="value">Data to cache.</param>
public virtual void Set<T>(string key, T value)
{
    // Default policy: the entry has no explicit expiration.
    _cache.Set(key, value, new CacheItemPolicy());
}
private static void MemorySpeedPerformance()
{
    /*
     * Benchmarks throughput (ops/sec) of three cache-storage strategies:
     * raw object, protobuf-serialized bytes, and serialized+compressed bytes.
     *
     * Headline figures from the original runs (1m iterations, debug build,
     * OneLong payload; full tables abridged — see VCS history):
     *   - Raw set/get is the baseline.
     *   - Serialized set ~2x slower than raw; serialized get ~6-8x slower.
     *   - Compression set ~20-100x slower than raw depending on payload size,
     *     but can shrink storage to ~1/50th for compressible data.
     *   - Running the async variants synchronously (.Result) inflated the
     *     earlier numbers considerably.
     */
    const int ITERATIONS = 30000;
    const string KEY = "impt-key";
    var value = new OneLong { Id = GenerateId() };

    // --- serialized + compressed: Set ---
    MemoryCache cM = new MemoryCache("cM");
    Stopwatch sw = Stopwatch.StartNew();
    // warmup
    // NOTE(review): the stopwatch is started *before* this warmup, so
    // compressSet includes warmup cost — unlike every other timing below,
    // which restarts the stopwatch after its warmup. Confirm if intentional.
    byte[] serialize = ProtoBufSerializer.Serialize(value);
    byte[] compress = SmartCompressor.Instance.Compress(serialize);
    cM.Set(KEY, compress, null);
    for (int i = 0; i < ITERATIONS; i++)
    {
        serialize = ProtoBufSerializer.Serialize(value);
        compress = SmartCompressor.Instance.Compress(serialize);
        cM.Set(KEY, compress, null);
    }
    long compressSet = sw.ElapsedMilliseconds;

    // --- serialized + compressed: Get ---
    // warmup
    byte[] compressed = (byte[])cM.Get(KEY);
    byte[] decompressed = SmartCompressor.Instance.Decompress(compressed);
    ProtoBufSerializer.Deserialize<OneLong>(decompressed);
    sw = Stopwatch.StartNew();
    for (int i = 0; i < ITERATIONS; i++)
    {
        compressed = (byte[])cM.Get(KEY);
        decompressed = SmartCompressor.Instance.Decompress(compressed);
        ProtoBufSerializer.Deserialize<OneLong>(decompressed);
    }
    long compressGet = sw.ElapsedMilliseconds;

    // --- serialized only: Set ---
    MemoryCache sM = new MemoryCache("sM");
    // warmup
    serialize = ProtoBufSerializer.Serialize(value);
    sM.Set(KEY, serialize, null);
    sw = Stopwatch.StartNew();
    for (int i = 0; i < ITERATIONS; i++)
    {
        serialize = ProtoBufSerializer.Serialize(value);
        sM.Set(KEY, serialize, null);
    }
    long serializeSet = sw.ElapsedMilliseconds;

    // --- serialized only: Get ---
    // warmup
    compressed = (byte[])sM.Get(KEY);
    ProtoBufSerializer.Deserialize<OneLong>(compressed);
    sw = Stopwatch.StartNew();
    for (int i = 0; i < ITERATIONS; i++)
    {
        compressed = (byte[])sM.Get(KEY);
        ProtoBufSerializer.Deserialize<OneLong>(compressed);
    }
    long serializeGet = sw.ElapsedMilliseconds;

    // --- raw object: Set ---
    MemoryCache rM = new MemoryCache("rM");
    // warmup
    rM.Set(KEY, value, null);
    sw = Stopwatch.StartNew();
    for (int i = 0; i < ITERATIONS; i++)
    {
        rM.Set(KEY, value, null);
    }
    long rawSet = sw.ElapsedMilliseconds;

    // --- raw object: Get ---
    // warmup
    rM.Get(KEY);
    sw = Stopwatch.StartNew();
    for (int i = 0; i < ITERATIONS; i++)
    {
        rM.Get(KEY);
    }
    long rawGet = sw.ElapsedMilliseconds;

    // Report throughput plus each strategy's speed relative to raw.
    Console.WriteLine("Memory Speed: (operations per second)");
    Console.WriteLine(" Set:");
    Console.WriteLine(" Raw: {0:#,##0.0#}", (float)ITERATIONS / rawSet * 1000.0);
    Console.WriteLine(
        " Serialized: {0:#,##0.0#} ({1:0.00})%",
        (float)ITERATIONS / serializeSet * 1000.0,
        (float)rawSet / serializeSet * 100.0);
    Console.WriteLine(
        " Serialized + Compressed: {0:#,##0.0#} ({1:0.00})%",
        (float)ITERATIONS / compressSet * 1000.0,
        (float)rawSet / compressSet * 100.0);
    Console.WriteLine(" Get:");
    Console.WriteLine(" Raw: {0:#,##0.0#}", (float)ITERATIONS / rawGet * 1000.0);
    Console.WriteLine(
        " Serialized: {0:#,##0.0#} ({1:0.00})%",
        (float)ITERATIONS / serializeGet * 1000.0,
        (float)rawGet / serializeGet * 100.0);
    Console.WriteLine(
        " Serialized + Compressed: {0:#,##0.0#} ({1:0.00})%",
        (float)ITERATIONS / compressGet * 1000.0,
        (float)rawGet / compressGet * 100.0);
}
public void Trim ()
{
    // Force the single-CPU emulation so the cache uses one internal store,
    // making the trim counts below deterministic.
    var config = new NameValueCollection ();
    config ["__MonoEmulateOneCPU"] = "true";

    var mc = new MemoryCache ("MyCache", config);
    for (int i = 0; i < 10; i++)
        mc.Set ("key" + i.ToString (), "value" + i.ToString (), null);
    // .NET doesn't touch the freshest 10 entries
    Assert.AreEqual (10, mc.GetCount (), "#A1-1");
    long trimmed = mc.Trim (50);
    Assert.AreEqual (0, trimmed, "#A1-2");
    Assert.AreEqual (10, mc.GetCount (), "#A1-3");

    mc = new MemoryCache ("MyCache", config);
    // Only entries 11- are considered for removal
    for (int i = 0; i < 11; i++)
        mc.Set ("key" + i.ToString (), "value" + i.ToString (), null);
    Assert.AreEqual (11, mc.GetCount (), "#A2-1");
    trimmed = mc.Trim (50);
    Assert.AreEqual (1, trimmed, "#A2-2");
    Assert.AreEqual (10, mc.GetCount (), "#A2-3");

    mc = new MemoryCache ("MyCache", config);
    // Only entries 11- are considered for removal
    for (int i = 0; i < 125; i++)
        mc.Set ("key" + i.ToString (), "value" + i.ToString (), null);
    Assert.AreEqual (125, mc.GetCount (), "#A3-1");
    trimmed = mc.Trim (50);
    Assert.AreEqual (62, trimmed, "#A3-2");
    Assert.AreEqual (63, mc.GetCount (), "#A3-3");

    // Testing the removal order: touch every entry once, then trim half and
    // record what the removal callback saw.
    mc = new MemoryCache ("MyCache", config);
    var removed = new List <string> ();
    var cip = new CacheItemPolicy ();
    cip.RemovedCallback = (CacheEntryRemovedArguments args) => {
        removed.Add (args.CacheItem.Key);
    };
    for (int i = 0; i < 50; i++)
        mc.Set ("key" + i.ToString (), "value" + i.ToString (), cip);
    object value;
    for (int i = 0; i < 50; i++)
        value = mc.Get ("key" + i.ToString ());
    trimmed = mc.Trim (50);
    Assert.AreEqual (25, mc.GetCount (), "#A4-1");
    Assert.AreEqual (25, trimmed, "#A4-2");
    Assert.AreEqual (25, removed.Count, "#A4-3");
    // OK, this is odd... The list is correct in terms of entries removed but the entries
    // are removed in the _MOST_ frequently used order, within the group selected for removal.
    for (int i = 24; i >= 0; i--)
    {
        int idx = 24 - i;
        Assert.AreEqual ("key" + i.ToString (), removed [idx], "#A5-" + idx.ToString ());
    }
}
private static void MemorySizePerformance()
{
    /*
     * Measures the managed-heap cost of caching objects three ways:
     * raw, protobuf-serialized, and serialized+compressed.
     *
     * Headline figures from the original runs (10k objects; full table
     * abridged — see VCS history): serialization uses ~50-92% of raw memory
     * depending on string size; compression pays off only for large or
     * repetitive strings (e.g. 10,000/10,000 → serialized 50.7%,
     * compressed 1.8% of raw).
     */
    const int MAX_STRING_SIZE = 10000;
    const int REPEAT_STRING_SIZE = 10;
    const int ITERATIONS = 1000;
    const string KEY = "impt-key";

    // Settle the heap so GetTotalMemory(true) gives a stable baseline.
    GC.Collect();
    GC.WaitForPendingFinalizers();
    GC.Collect();
    GC.WaitForPendingFinalizers();

    // --- serialized + compressed ---
    long mCs = GC.GetTotalMemory(true);
    MemoryCache cM = new MemoryCache("cM");
    for (int i = 0; i < ITERATIONS; i++)
    {
        var m = new MultipleProperties
        {
            Id = GenerateId(),
            Name = GenerateString(MAX_STRING_SIZE, REPEAT_STRING_SIZE)
        };
        byte[] s = ProtoBufSerializer.Serialize(m);
        byte[] c = SmartCompressor.Instance.CompressAsync(s).Result;
        cM.Set(KEY + i.ToString(CultureInfo.InvariantCulture), c, null);
    }
    long mCe = GC.GetTotalMemory(true);
    long compressMemory = mCe - mCs;
    // Drop all entries and the cache itself before the next measurement.
    cM.Trim(100);
    cM.Dispose();
    // ReSharper disable once RedundantAssignment
    cM = null;

    GC.Collect();
    GC.WaitForPendingFinalizers();
    GC.Collect();
    GC.WaitForPendingFinalizers();

    // --- serialized only ---
    long mSs = GC.GetTotalMemory(true);
    MemoryCache sM = new MemoryCache("sM");
    for (int i = 0; i < ITERATIONS; i++)
    {
        var m = new MultipleProperties
        {
            Id = GenerateId(),
            Name = GenerateString(MAX_STRING_SIZE, REPEAT_STRING_SIZE)
        };
        byte[] s = ProtoBufSerializer.Serialize(m);
        sM.Set(KEY + i.ToString(CultureInfo.InvariantCulture), s, null);
    }
    long mSe = GC.GetTotalMemory(true);
    long serializeMemory = mSe - mSs;
    sM.Trim(100);
    sM.Dispose();
    // ReSharper disable once RedundantAssignment
    sM = null;

    GC.Collect();
    GC.WaitForPendingFinalizers();
    GC.Collect();
    GC.WaitForPendingFinalizers();

    // --- raw objects ---
    long mRs = GC.GetTotalMemory(true);
    MemoryCache rM = new MemoryCache("rM");
    for (int i = 0; i < ITERATIONS; i++)
    {
        var m = new MultipleProperties
        {
            Id = GenerateId(),
            Name = GenerateString(MAX_STRING_SIZE, REPEAT_STRING_SIZE)
        };
        rM.Set(KEY + i.ToString(CultureInfo.InvariantCulture), m, null);
    }
    long mRe = GC.GetTotalMemory(true);
    long rawMemory = mRe - mRs;
    rM.Trim(100);
    rM.Dispose();
    // ReSharper disable once RedundantAssignment
    rM = null;

    GC.Collect();
    GC.WaitForPendingFinalizers();
    GC.Collect();
    GC.WaitForPendingFinalizers();

    // Report each strategy's footprint and its percentage of the raw cost.
    Console.WriteLine("Memory Size:");
    Console.WriteLine(" Raw: {0:#,##0.#}KB", rawMemory / 1024.0);
    Console.WriteLine(
        " Serialized: {0:#,##0.#}KB ({1:0.00}%)",
        serializeMemory / 1024.0,
        ((float)serializeMemory / rawMemory) * 100);
    Console.WriteLine(
        " Serialized + Compressed: {0:#,##0.#}KB ({1:0.00}%)",
        compressMemory / 1024.0,
        ((float)compressMemory / rawMemory) * 100);
}