public void Cache_scenario_write_consistency_with_multiple_merges() {
    var r = new Random(1234);
    var keys = new Queue<string>();
    AddItems(keys, 200);
    using (var d = new FirkinDictionary<string, string>(
        _path,
        1024 * 1024,
        Serialization.SerializerRepository.GetByteArraySerializer<string>(),
        Serialization.SerializerRepository.GetStreamSerializer<string>()
    )) {
        // shadow dictionary tracks the expected final contents
        var dictionary = new Dictionary<string, string>();
        var n = 0;
        var t = 0;
        while (keys.Any()) {
            n++;
            t++;
            var key = keys.Dequeue();
            var v = TestUtil.GetRandomString(r);
            dictionary[key] = v;
            if (d.ContainsKey(key)) {

                // read before overwrite to exercise the read path
                var x = d[key];
            }
            d[key] = v;
            switch (r.Next(10)) {
            case 1:

                // re-enqueue the key so it gets overwritten later
                keys.Enqueue(key);
                break;
            case 4:
                AddItems(keys, 10);
                break;
            }
            if (n >= 3000) {

                // merge every 3000 writes
                d.Merge();
                n = 0;
            }
            if (t >= 20000) {
                break;
            }
        }
        foreach (var file in Directory.GetFiles(_path)) {

            // don't pass the filename as the format string, since braces in it would throw
            _log.DebugFormat("{0}", Path.GetFileName(file));
        }
        _log.DebugFormat("total items {0} after {1} iterations with {2} left in queue", d.Count, t, keys.Count);
        Assert.AreEqual(dictionary.Count, d.Count);
        foreach (var pair in dictionary) {
            Assert.AreEqual(pair.Value, d[pair.Key]);
        }
    }
}
public void Memory_consumption_parallel_writes_and_merges() {
    var r = new Random(1234);
    var keys = new Queue<string>();
    AddItems(keys, 200);
    var baseline = GC.GetTotalMemory(true);
    using (var d = new FirkinDictionary<string, string>(_path)) {
        var t = 0;
        var capture = "";
        var done = false;
        var n = 0;

        // background thread merges every 5000 writes, first removing a random
        // subset of keys to simulate expiration (note: Random is not thread-safe;
        // sharing r across threads is tolerated for this stress test)
        var merger = new Thread(() => {
            var m = 0;
            while (!done) {
                if (n >= 5000) {
                    m++;
                    var before = GC.GetTotalMemory(true);
                    Console.WriteLine(
                        "merge {0}, before {1:0.00}MB",
                        m,

                        // divide by 1024.0 so the {x:0.00} format gets a fractional value
                        (before - baseline) / 1024.0 / 1024.0
                    );
                    var expiredKeys = (from entry in d
                                       where entry.Value.Length != 0 && r.Next(4) == 1
                                       select entry.Key).ToArray();
                    foreach (var key in expiredKeys) {
                        d.Remove(key);
                    }
                    var during = GC.GetTotalMemory(true);
                    Console.WriteLine(
                        "merge {0}, during {1:0.00}MB",
                        m,
                        (during - baseline) / 1024.0 / 1024.0
                    );
                    d.Merge();
                    var after = GC.GetTotalMemory(true);
                    var c = d.Count;
                    Console.WriteLine(
                        "merge {0}, iteration {1}, items: {2}, after {3:0.00}MB, storage {4:0.00}bytes/item",
                        m,
                        t,
                        c,
                        (after - baseline) / 1024.0 / 1024.0,
                        (double)(after - baseline) / c
                    );
                    n = 0;
                }
            }
        }) { IsBackground = true };
        merger.Start();
        while (keys.Any()) {
            n++;
            t++;
            var key = keys.Dequeue();
            var v = TestUtil.GetRandomString(r);
            if (d.ContainsKey(key)) {

                // keep a reference to the value read so the read isn't optimized away
                var x = d[key];
                capture = ".." + x;
            }
            d[key] = v;
            switch (r.Next(10)) {
            case 1:
                keys.Enqueue(key);
                break;
            case 4:
                if (keys.Count < 200) {
                    AddItems(keys, 10);
                }
                break;
            }
            if (t >= 1000000) {
                break;
            }
            if (keys.Count < 50) {
                AddItems(keys, 100);
            }
        }
        done = true;
        merger.Join();
        Console.WriteLine("total items {0} after {1} iterations with {2} left in queue", d.Count, t, keys.Count);

        // guard against capture being shorter than 10 characters
        _log.Debug(capture.Substring(0, Math.Min(10, capture.Length)));
    }
}
public void Memory_consumption() {
    var r = new Random(1234);
    var keys = new Queue<string>();
    AddItems(keys, 200);
    var baseline = GC.GetTotalMemory(true);
    var capture = "";
    using (var d = new FirkinDictionary<string, string>(_path)) {
        var n = 0;
        var t = 0;
        var m = 0;
        while (keys.Any()) {
            n++;
            t++;
            var key = keys.Dequeue();
            var v = TestUtil.GetRandomString(r);
            if (d.ContainsKey(key)) {

                // keep a reference to the value read so the read isn't optimized away
                var x = d[key];
                capture = ".." + x;
            }
            d[key] = v;
            switch (r.Next(10)) {
            case 1:
                keys.Enqueue(key);
                break;
            case 4:
                if (keys.Count < 200) {
                    AddItems(keys, 10);
                }
                break;
            }
            if (n >= 5000) {

                // merge every 5000 writes and report memory use relative to baseline
                m++;
                var before = GC.GetTotalMemory(true);
                d.Merge();
                var after = GC.GetTotalMemory(true);
                var c = d.Count;
                Console.WriteLine(
                    "merge {0}, iteration {1}, items: {2}, before {3:0.00}MB, after {4:0.00}MB, storage {5:0.00}bytes/item",
                    m,
                    t,
                    c,

                    // divide by 1024.0 so the {x:0.00} format gets a fractional value
                    (before - baseline) / 1024.0 / 1024.0,
                    (after - baseline) / 1024.0 / 1024.0,
                    (double)(after - baseline) / c
                );
                n = 0;
            }
            if (t >= 200000) {
                break;
            }
            if (keys.Count < 50) {
                AddItems(keys, 100);
            }
        }
        Console.WriteLine("total items {0} after {1} iterations with {2} left in queue", d.Count, t, keys.Count);

        // guard against capture being shorter than 10 characters
        _log.Debug(capture.Substring(0, Math.Min(10, capture.Length)));
    }
}
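
// The AddItems helper used by all three tests is defined elsewhere in this
// fixture. A minimal sketch of what these tests assume it does -- enqueue
// `count` fresh random keys -- is below; this is a hypothetical stand-in, not
// the actual implementation, and GUID-based keys are an assumption.
private void AddItems(Queue<string> keys, int count) {
    for (var i = 0; i < count; i++) {

        // each GUID yields a key that has not been seen before, so the write
        // loops above mix inserts of new keys with overwrites of re-enqueued ones
        keys.Enqueue(Guid.NewGuid().ToString());
    }
}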