/// <summary>
/// Runs 200 random put/delete operations (with interleaved reads) against the store,
/// mirroring every mutation into an in-memory dictionary oracle, then merges and
/// verifies the store agrees with the oracle key-for-key and byte-for-byte.
/// </summary>
public void Read_write_delete_consistency_with_merge_before_read() {
    var rnd = new Random(1234); // fixed seed keeps the workload reproducible
    _hash = new FirkinHash<string>(_path, 10 * 1024);
    var expected = new Dictionary<string, byte[]>();
    for (var op = 0; op < 200; op++) {
        var key = "k" + rnd.Next(100);
        if (rnd.Next(4) == 3) {
            // ~1-in-4: delete from both the oracle and the store
            expected.Remove(key);
            _hash.Delete(key);
        } else {
            var payload = TestUtil.GetRandomBytes(rnd);
            expected[key] = payload;
            _hash.Put(key, payload.ToStream(), payload.Length);
        }
        // interleave a read of a random (possibly absent) key
        _hash.Get("k" + rnd.Next(100));
    }
    _hash.Merge();
    Assert.AreEqual(expected.Count, _hash.Count);
    foreach (var pair in expected) {
        Assert.AreEqual(0, pair.Value.Compare(_hash.Get(pair.Key).ReadBytes()));
    }
}
/// <summary>
/// After storing four keys (one of them overwritten) and merging, enumerating the
/// hash must yield the surviving values — and a second enumeration must yield the
/// same result again.
/// </summary>
public void Can_call_merge_and_enumerate_data() {
    _hash = new FirkinHash<string>(_path, 60);
    // helper: serialize a string value and store it under the given key
    Action<string, string> put = (key, value) => {
        var stream = value.ToStream();
        _hash.Put(key, stream, stream.Length);
    };
    put("foo1", "bar1");
    put("foo2", "bar2");
    put("foo3", "bar3");
    put("foo4", "bar4");
    put("foo1", "bar1x"); // overwrite foo1
    _hash.Merge();
    var expected = new[] { "bar1x", "bar2", "bar3", "bar4" };
    // enumerate twice to prove enumeration is repeatable after a merge
    Assert.AreEqual(expected, (from item in _hash orderby item.Key select item.Value.To<string>()).ToArray());
    Assert.AreEqual(expected, (from item in _hash orderby item.Key select item.Value.To<string>()).ToArray());
}
/// <summary>
/// Ten rounds of: 1000 random put/delete operations mirrored into a dictionary
/// oracle, then deletion of every other enumerated entry, then a merge. At the end
/// the store must agree with the oracle exactly.
/// </summary>
public void Read_write_delete_consistency_with_multiple_merges() {
    var rnd = new Random(1234); // fixed seed for reproducibility
    _hash = new FirkinHash<string>(_path, 10 * 2048);
    var expected = new Dictionary<string, byte[]>();
    for (var round = 0; round < 10; round++) {
        for (var op = 0; op < 1000; op++) {
            var key = "k" + rnd.Next(100);
            if (rnd.Next(4) == 3) {
                expected.Remove(key);
                _hash.Delete(key);
            } else {
                var payload = TestUtil.GetRandomBytes(rnd);
                expected[key] = payload;
                _hash.Put(key, payload.ToStream(), payload.Length);
            }
            _hash.Get("k" + rnd.Next(100));
        }
        // delete every other entry, starting with the first one enumerated
        // NOTE(review): entries are deleted while enumerating _hash; assumes the
        // FirkinHash enumerator tolerates concurrent deletes — confirm.
        var skip = true;
        foreach (var entry in _hash) {
            skip = !skip;
            if (skip) {
                continue;
            }
            expected.Remove(entry.Key);
            _hash.Delete(entry.Key);
        }
        _hash.Merge();
    }
    Assert.AreEqual(expected.Count, _hash.Count);
    foreach (var pair in expected) {
        Assert.AreEqual(0, pair.Value.Compare(_hash.Get(pair.Key).ReadBytes()));
    }
}
/// <summary>
/// After storing four keys (one overwritten) and merging, each key must still be
/// retrievable and the overwritten key must return its latest value.
/// </summary>
public void Can_call_merge_and_retrieve_data() {
    _hash = new FirkinHash<string>(_path, 60);
    // helper: serialize a string value and store it under the given key
    Action<string, string> put = (key, value) => {
        var stream = value.ToStream();
        _hash.Put(key, stream, stream.Length);
    };
    put("foo1", "bar1");
    put("foo2", "bar2");
    put("foo3", "bar3");
    put("foo4", "bar4");
    put("foo1", "bar1x"); // overwrite foo1
    _hash.Merge();
    Assert.AreEqual(4, _hash.Count);
    Assert.AreEqual("bar3", _hash.Get("foo3").To<string>());
    Assert.AreEqual("bar1x", _hash.Get("foo1").To<string>());
    Assert.AreEqual("bar2", _hash.Get("foo2").To<string>());
    Assert.AreEqual("bar4", _hash.Get("foo4").To<string>());
}
/// <summary>
/// Stores four keys (one overwritten), merges, disposes the hash, reopens it from
/// the same path, and verifies all values survived the merge + reload cycle.
/// </summary>
public void Can_call_merge_and_reload_hash_then_retrieve_data() {
    _hash = new FirkinHash<string>(_path, 30);
    // helper: serialize a string value and store it under the given key
    Action<string, string> put = (key, value) => {
        var stream = value.ToStream();
        _hash.Put(key, stream, stream.Length);
    };
    put("foo1", "bar1");
    put("foo2", "bar2");
    put("foo3", "bar3");
    put("foo4", "bar4");
    put("foo1", "bar1x"); // overwrite foo1
    _hash.Merge();
    _hash.Dispose();
    _log.DebugFormat("re-loading hash");
    _hash = new FirkinHash<string>(_path, 30);
    Assert.AreEqual("bar3", _hash.Get("foo3").To<string>());
    Assert.AreEqual("bar1x", _hash.Get("foo1").To<string>());
    Assert.AreEqual("bar2", _hash.Get("foo2").To<string>());
    Assert.AreEqual("bar4", _hash.Get("foo4").To<string>());
}
/// <summary>
/// Stores four keys (one overwritten), merges, disposes the hash, reopens it from
/// the same path, and verifies all values survived the merge + reload cycle.
/// </summary>
public void Can_call_merge_and_reload_hash_then_retrieve_data() {
    _hash = new FirkinHash<string>(_path, 30);
    // table-driven puts: last entry overwrites foo1
    foreach(var pair in new[] {
        new[] { "foo1", "bar1" },
        new[] { "foo2", "bar2" },
        new[] { "foo3", "bar3" },
        new[] { "foo4", "bar4" },
        new[] { "foo1", "bar1x" },
    }) {
        var stream = pair[1].ToStream();
        _hash.Put(pair[0], stream, stream.Length);
    }
    _hash.Merge();
    _hash.Dispose();
    _log.DebugFormat("re-loading hash");
    _hash = new FirkinHash<string>(_path, 30);
    Assert.AreEqual("bar3", _hash.Get("foo3").To<string>());
    Assert.AreEqual("bar1x", _hash.Get("foo1").To<string>());
    Assert.AreEqual("bar2", _hash.Get("foo2").To<string>());
    Assert.AreEqual("bar4", _hash.Get("foo4").To<string>());
}
/// <summary>
/// Consistency check with a merge in the middle of the workload: 500 random
/// put/delete operations mirrored into a dictionary oracle, a merge, then 500 more
/// operations. The store must agree with the oracle key-for-key and byte-for-byte.
/// </summary>
public void Read_write_delete_consistency_with_merge_in_middle() {
    var r = new Random(1234); // fixed seed for reproducibility
    _hash = new FirkinHash<string>(_path, 10 * 1024);
    var dictionary = new Dictionary<string, byte[]>();
    for(var i = 0; i < 500; i++) {
        var k = "k" + r.Next(100);
        if(r.Next(4) == 3) {
            // ~1-in-4: delete from both the oracle and the store
            dictionary.Remove(k);
            _hash.Delete(k);
        } else {
            var v = TestUtil.GetRandomBytes(r);
            dictionary[k] = v;
            _hash.Put(k, v.ToStream(), v.Length);
        }
        _hash.Get("k" + r.Next(100));
    }
    _hash.Merge();
    for(var i = 0; i < 500; i++) {
        var k = "k" + r.Next(100);
        // BUG FIX: was 'r.Next(5) == 5', which can never be true (Next(5) returns
        // 0..4), so the post-merge half of the test never exercised the delete
        // path. '== 4' restores the intended ~1-in-5 delete probability.
        if(r.Next(5) == 4) {
            dictionary.Remove(k);
            _hash.Delete(k);
        } else {
            var v = TestUtil.GetRandomBytes(r);
            dictionary[k] = v;
            _hash.Put(k, v.ToStream(), v.Length);
        }
        _hash.Get("k" + r.Next(100));
    }
    Assert.AreEqual(dictionary.Count, _hash.Count);
    foreach(var pair in dictionary) {
        Assert.AreEqual(0, pair.Value.Compare(_hash.Get(pair.Key).ReadBytes()));
    }
}
/// <summary>
/// After storing four keys (one overwritten) and merging, each key must still be
/// retrievable and the overwritten key must return its latest value.
/// </summary>
public void Can_call_merge_and_retrieve_data() {
    _hash = new FirkinHash<string>(_path, 60);
    // table-driven puts: last entry overwrites foo1
    foreach(var pair in new[] {
        new[] { "foo1", "bar1" },
        new[] { "foo2", "bar2" },
        new[] { "foo3", "bar3" },
        new[] { "foo4", "bar4" },
        new[] { "foo1", "bar1x" },
    }) {
        var stream = pair[1].ToStream();
        _hash.Put(pair[0], stream, stream.Length);
    }
    _hash.Merge();
    Assert.AreEqual(4, _hash.Count);
    Assert.AreEqual("bar3", _hash.Get("foo3").To<string>());
    Assert.AreEqual("bar1x", _hash.Get("foo1").To<string>());
    Assert.AreEqual("bar2", _hash.Get("foo2").To<string>());
    Assert.AreEqual("bar4", _hash.Get("foo4").To<string>());
}
/// <summary>
/// Hammers the hash from 10 worker threads with interleaved reads, writes and
/// deletes while the main thread periodically merges, then verifies the store
/// matches a dictionary oracle. Fixes over the previous version:
///  - the timeout message used format slot {2} twice, so modified.Count was never
///    reported (and the fourth argument was silently unused),
///  - the shared System.Random was used from multiple threads without locking
///    (Random is not thread-safe; concurrent use can corrupt its internal state),
///  - the faults list was written and read from different threads without a lock,
///  - a raw file name was passed to DebugFormat as the format string (a '{' in a
///    file name would throw a FormatException).
/// </summary>
public void Concurrent_read_write_delete_consistency_with_multiple_merges() {
    var r = new Random(1234);
    var id = 0;
    var keys = new Queue<string>();
    AddKeys(keys, 200, ref id);
    var mergeCounter = 0;
    var merges = 0;
    _hash = new FirkinHash<string>(_path, 100 * 2048);
    var dictionary = new Dictionary<string, byte[]>();
    var modified = new HashSet<string>();
    var workers = new List<Thread>();
    var faults = new List<Exception>();
    var iterations = 0;
    var maxIterations = 10000;
    for (var i = 0; i < 10; i++) {
        var workerId = i;
        var worker = new Thread(() => {
            try {
                _log.DebugFormat("worker {0} started", workerId);
                // stale reads of 'iterations' are acceptable: workers only overshoot
                // by at most one batch of increments
                while (iterations < maxIterations) {
                    var j = Interlocked.Increment(ref iterations);
                    if (j % 100 == 0) {
                        _log.DebugFormat("iteration {0}", j);
                    }
                    Interlocked.Increment(ref mergeCounter);
                    string k;
                    lock (keys) {
                        if (keys.Count < 10) {
                            AddKeys(keys, 100, ref id);
                        }
                        k = keys.Dequeue();
                    }
                    var entry = _hash.Get(k);
                    byte[] v;
                    lock (r) {
                        // FIX: System.Random is not thread-safe; serialize all access
                        v = TestUtil.GetRandomBytes(r);
                    }
                    if (entry != null) {
                        lock (keys) {
                            // each existing key is mutated by at most one worker
                            if (modified.Contains(k)) {
                                continue;
                            }
                            modified.Add(k);
                        }
                        // read the stored payload to exercise concurrent reads
                        var v2 = entry.ReadBytes();
                        int roll;
                        lock (r) {
                            roll = r.Next(4);
                        }
                        if (roll == 3) {
                            lock (dictionary) {
                                dictionary.Remove(k);
                            }
                            _hash.Delete(k);
                        } else {
                            lock (dictionary) {
                                dictionary[k] = v;
                            }
                            _hash.Put(k, v.ToStream(), v.Length);
                        }
                    } else {
                        lock (dictionary) {
                            dictionary[k] = v;
                        }
                        _hash.Put(k, v.ToStream(), v.Length);
                    }
                    lock (keys) {
                        if (!modified.Contains(k)) {
                            int requeueRoll;
                            lock (r) {
                                requeueRoll = r.Next(3);
                            }
                            if (requeueRoll == 1) {
                                keys.Enqueue(k);
                            }
                        }
                    }
                    Thread.Sleep(10);
                }
                _log.DebugFormat("worker {0} finished", workerId);
            } catch (Exception e) {
                // FIX: List<T> is not thread-safe; guard cross-thread mutation
                lock (faults) {
                    faults.Add(e);
                }
            }
        }) { IsBackground = true };
        worker.Start();
        workers.Add(worker);
    }
    var start = DateTime.UtcNow;
    while (iterations < maxIterations) {
        lock (faults) {
            if (faults.Any()) {
                throw faults.First();
            }
        }
        if (DateTime.UtcNow > start.AddMinutes(5)) {
            // FIX: last slot was '{2}' twice; '{3}' actually reports modified.Count
            throw new TimeoutException(string.Format(
                "didn't finish, merges: {0}, items: {1}, queue: {2}, existing modified: {3}",
                merges, _hash.Count, keys.Count, modified.Count));
        }
        // merge roughly every 2000 operations
        if (mergeCounter >= 2000) {
            merges++;
            mergeCounter = 0;
            _hash.Merge();
            _log.DebugFormat("merge {0} completed", merges);
        }
    }
    foreach (var worker in workers) {
        worker.Join();
    }
    var files = 0;
    foreach (var file in Directory.GetFiles(_path)) {
        // FIX: never pass arbitrary text as the format string itself
        _log.DebugFormat("{0}", Path.GetFileName(file));
        if (Path.GetExtension(file) == ".data") {
            files++;
        }
    }
    _log.DebugFormat("merges: {0}, items {1}, existing modified: {2}, files: {3}", merges, _hash.Count, modified.Count, files);
    Assert.AreEqual(dictionary.Count, _hash.Count);
    foreach (var pair in dictionary) {
        Assert.AreEqual(0, pair.Value.Compare(_hash.Get(pair.Key).ReadBytes()));
    }
}
/// <summary>
/// After storing four keys (one of them overwritten) and merging, enumerating the
/// hash must yield the surviving values — twice in a row, to prove enumeration is
/// repeatable after a merge.
/// </summary>
public void Can_call_merge_and_enumerate_data() {
    _hash = new FirkinHash<string>(_path, 60);
    // table-driven puts: last entry overwrites foo1
    foreach(var pair in new[] {
        new[] { "foo1", "bar1" },
        new[] { "foo2", "bar2" },
        new[] { "foo3", "bar3" },
        new[] { "foo4", "bar4" },
        new[] { "foo1", "bar1x" },
    }) {
        var stream = pair[1].ToStream();
        _hash.Put(pair[0], stream, stream.Length);
    }
    _hash.Merge();
    var expected = new[] { "bar1x", "bar2", "bar3", "bar4" };
    Assert.AreEqual(expected, (from item in _hash orderby item.Key select item.Value.To<string>()).ToArray());
    Assert.AreEqual(expected, (from item in _hash orderby item.Key select item.Value.To<string>()).ToArray());
}
/// <summary>
/// Ten rounds of: 1000 random put/delete operations mirrored into a dictionary
/// oracle, then deletion of every other enumerated entry, then a merge. At the end
/// the store must agree with the oracle exactly.
/// </summary>
public void Read_write_delete_consistency_with_multiple_merges() {
    var rnd = new Random(1234); // fixed seed for reproducibility
    _hash = new FirkinHash<string>(_path, 10 * 2048);
    var expected = new Dictionary<string, byte[]>();
    for(var round = 0; round < 10; round++) {
        for(var op = 0; op < 1000; op++) {
            var key = "k" + rnd.Next(100);
            if(rnd.Next(4) == 3) {
                expected.Remove(key);
                _hash.Delete(key);
            } else {
                var payload = TestUtil.GetRandomBytes(rnd);
                expected[key] = payload;
                _hash.Put(key, payload.ToStream(), payload.Length);
            }
            _hash.Get("k" + rnd.Next(100));
        }
        // delete every other entry, starting with the first one enumerated
        // NOTE(review): entries are deleted while enumerating _hash; assumes the
        // FirkinHash enumerator tolerates concurrent deletes — confirm.
        var skip = true;
        foreach(var entry in _hash) {
            skip = !skip;
            if(skip) {
                continue;
            }
            expected.Remove(entry.Key);
            _hash.Delete(entry.Key);
        }
        _hash.Merge();
    }
    Assert.AreEqual(expected.Count, _hash.Count);
    foreach(var pair in expected) {
        Assert.AreEqual(0, pair.Value.Compare(_hash.Get(pair.Key).ReadBytes()));
    }
}
/// <summary>
/// Hammers the hash from 10 worker threads with interleaved reads, writes and
/// deletes while the main thread periodically merges, then verifies the store
/// matches a dictionary oracle. Fixes over the previous version:
///  - the timeout message used format slot {2} twice, so modified.Count was never
///    reported (and the fourth argument was silently unused),
///  - the shared System.Random was used from multiple threads without locking
///    (Random is not thread-safe; concurrent use can corrupt its internal state),
///  - the faults list was written and read from different threads without a lock,
///  - a raw file name was passed to DebugFormat as the format string (a '{' in a
///    file name would throw a FormatException).
/// </summary>
public void Concurrent_read_write_delete_consistency_with_multiple_merges() {
    var r = new Random(1234);
    var id = 0;
    var keys = new Queue<string>();
    AddKeys(keys, 200, ref id);
    var mergeCounter = 0;
    var merges = 0;
    _hash = new FirkinHash<string>(_path, 100 * 2048);
    var dictionary = new Dictionary<string, byte[]>();
    var modified = new HashSet<string>();
    var workers = new List<Thread>();
    var faults = new List<Exception>();
    var iterations = 0;
    var maxIterations = 10000;
    for(var i = 0; i < 10; i++) {
        var workerId = i;
        var worker = new Thread(() => {
            try {
                _log.DebugFormat("worker {0} started", workerId);
                // stale reads of 'iterations' are acceptable: workers only overshoot
                // by at most one batch of increments
                while(iterations < maxIterations) {
                    var j = Interlocked.Increment(ref iterations);
                    if(j % 100 == 0) {
                        _log.DebugFormat("iteration {0}", j);
                    }
                    Interlocked.Increment(ref mergeCounter);
                    string k;
                    lock(keys) {
                        if(keys.Count < 10) {
                            AddKeys(keys, 100, ref id);
                        }
                        k = keys.Dequeue();
                    }
                    var entry = _hash.Get(k);
                    byte[] v;
                    lock(r) {
                        // FIX: System.Random is not thread-safe; serialize all access
                        v = TestUtil.GetRandomBytes(r);
                    }
                    if(entry != null) {
                        lock(keys) {
                            // each existing key is mutated by at most one worker
                            if(modified.Contains(k)) {
                                continue;
                            }
                            modified.Add(k);
                        }
                        // read the stored payload to exercise concurrent reads
                        var v2 = entry.ReadBytes();
                        int roll;
                        lock(r) {
                            roll = r.Next(4);
                        }
                        if(roll == 3) {
                            lock(dictionary) {
                                dictionary.Remove(k);
                            }
                            _hash.Delete(k);
                        } else {
                            lock(dictionary) {
                                dictionary[k] = v;
                            }
                            _hash.Put(k, v.ToStream(), v.Length);
                        }
                    } else {
                        lock(dictionary) {
                            dictionary[k] = v;
                        }
                        _hash.Put(k, v.ToStream(), v.Length);
                    }
                    lock(keys) {
                        if(!modified.Contains(k)) {
                            int requeueRoll;
                            lock(r) {
                                requeueRoll = r.Next(3);
                            }
                            if(requeueRoll == 1) {
                                keys.Enqueue(k);
                            }
                        }
                    }
                    Thread.Sleep(10);
                }
                _log.DebugFormat("worker {0} finished", workerId);
            } catch(Exception e) {
                // FIX: List<T> is not thread-safe; guard cross-thread mutation
                lock(faults) {
                    faults.Add(e);
                }
            }
        }) { IsBackground = true };
        worker.Start();
        workers.Add(worker);
    }
    var start = DateTime.UtcNow;
    while(iterations < maxIterations) {
        lock(faults) {
            if(faults.Any()) {
                throw faults.First();
            }
        }
        if(DateTime.UtcNow > start.AddMinutes(5)) {
            // FIX: last slot was '{2}' twice; '{3}' actually reports modified.Count
            throw new TimeoutException(string.Format(
                "didn't finish, merges: {0}, items: {1}, queue: {2}, existing modified: {3}",
                merges, _hash.Count, keys.Count, modified.Count));
        }
        // merge roughly every 2000 operations
        if(mergeCounter >= 2000) {
            merges++;
            mergeCounter = 0;
            _hash.Merge();
            _log.DebugFormat("merge {0} completed", merges);
        }
    }
    foreach(var worker in workers) {
        worker.Join();
    }
    var files = 0;
    foreach(var file in Directory.GetFiles(_path)) {
        // FIX: never pass arbitrary text as the format string itself
        _log.DebugFormat("{0}", Path.GetFileName(file));
        if(Path.GetExtension(file) == ".data") {
            files++;
        }
    }
    _log.DebugFormat("merges: {0}, items {1}, existing modified: {2}, files: {3}", merges, _hash.Count, modified.Count, files);
    Assert.AreEqual(dictionary.Count, _hash.Count);
    foreach(var pair in dictionary) {
        Assert.AreEqual(0, pair.Value.Compare(_hash.Get(pair.Key).ReadBytes()));
    }
}