public JournaledMemTable(string baseFileName, int version) {
    _baseFileName = baseFileName;
    _version = version;
    _memTable = new MemTable();

    // If the journal exists from a previous run, then load its data into the memtable
    string journalFile = Config.JournalFile(baseFileName, version);
    if (File.Exists(journalFile)) {
        var journalReader = new JournalReader(baseFileName, version);
        try {
            foreach (var pair in journalReader.Enumerate()) {
                _memTable.Add(pair.Key, pair.Value);
            }
        } finally {
            journalReader.Close();
        }
        _journal = new JournalWriter(baseFileName, version, true);
    } else {
        _journal = new JournalWriter(baseFileName, version, false);
    }
}
public void Close() {
    if (_journal != null) {
        _journal.Close();
    }
    _journal = null;
    _memTable = null;
}
public void LevelMergeDuplicateValuesTest() {
    string path = Path.GetFullPath("TestData\\LevelMergeDuplicateValuesTest");
    if (!Directory.Exists(path)) Directory.CreateDirectory(path);
    foreach (string file in Directory.GetFiles(path)) {
        File.Delete(file);
    }

    int num_tables_to_merge = 4;
    int items_per_table = 2500;
    int totalData = 0;
    for (int i = 0; i < num_tables_to_merge; i++) {
        var mt = new MemTable();
        for (int j = 0; j < items_per_table; j++) {
            int numToStore = j % 100;
            var key = new Key(new ByteArray(BitConverter.GetBytes(numToStore)));
            var value = new Value(BitConverter.GetBytes(j));
            mt.Add(key, value);
        }
        mt.WriteToSortedBlockTable("TestData\\LevelMergeDuplicateValuesTest", 0, i);
        totalData += mt.Size;
    }

    var cache = new RazorCache();
    var timer = new Stopwatch();
    timer.Start();
    Manifest mf = new Manifest("TestData\\LevelMergeDuplicateValuesTest");
    SortedBlockTable.MergeTables(cache, mf, 1, new List<PageRef>{
        new PageRef { Level = 0, Version = 0 },
        new PageRef { Level = 0, Version = 1 },
        new PageRef { Level = 0, Version = 2 },
        new PageRef { Level = 0, Version = 3 }
    }, ExceptionHandling.ThrowAll, null);
    timer.Stop();

    // Open the block table and scan it to check the stored values
    var sbt = new SortedBlockTable(cache, mf.BaseFileName, 1, 1);
    try {
        var pairs = sbt.Enumerate().ToList();
        Assert.AreEqual(100, pairs.Count());
        Assert.AreEqual(2400, BitConverter.ToInt32(pairs.First().Value.ValueBytes, 0));
    } finally {
        sbt.Close();
    }

    Console.WriteLine("Wrote a multilevel merge with duplicates at a throughput of {0} MB/s",
        (double)totalData / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
}
public void EnumerateFromKeys() {
    string path = Path.GetFullPath("TestData\\EnumerateFromKeys");
    if (!Directory.Exists(path)) Directory.CreateDirectory(path);

    List<KeyValuePair<Key, Value>> items = new List<KeyValuePair<Key, Value>>();
    int num_items = 10000;
    var mt = new MemTable();
    for (int i = 0; i < num_items; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
        items.Add(new KeyValuePair<Key, Value>(k0, v0));
    }
    mt.WriteToSortedBlockTable("TestData\\EnumerateFromKeys", 10, 10);

    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, "TestData\\EnumerateFromKeys", 10, 10);
    try {
        var indexCache = new RazorCache();

        var timer = new Stopwatch();
        timer.Start();
        Assert.AreEqual(10000, sbt.EnumerateFromKey(indexCache, new Key(new byte[] { 0 }, 0)).Count());
        timer.Stop();
        Console.WriteLine("Counted from beginning at a throughput of {0} MB/s",
            (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));

        items = items.OrderBy((a) => a.Key).ToList();

        timer.Reset();
        timer.Start();
        Assert.AreEqual(5000, sbt.EnumerateFromKey(indexCache, items[5000].Key).Count());
        timer.Stop();
        Console.WriteLine("Counted from halfway at a throughput of {0} MB/s",
            (double)mt.Size / 2 / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));

        Assert.AreEqual(0, sbt.EnumerateFromKey(indexCache,
            new Key(new byte[] { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF }, 0xFF)).Count());
    } finally {
        sbt.Close();
    }
}
public void DumpPrefixedSBT() {
    string path = Path.GetFullPath("TestData\\DumpPrefixedSBT");
    if (!Directory.Exists(path)) Directory.CreateDirectory(path);

    var mt = new MemTable();
    for (int i = 0; i < 10000; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
    }
    mt.WriteToSortedBlockTable("TestData\\DumpPrefixedSBT", 0, 10);

    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, "TestData\\DumpPrefixedSBT", 0, 10);
    foreach (var pair in sbt.EnumerateRaw()) {
        Console.WriteLine("Key: {0} Value: {1}", pair.Key.ToString(), pair.Value.ToString());
    }
}
public void AddAndLookupItems() {
    MemTable mt = new MemTable();

    List<KeyValuePair<Key, Value>> values = new List<KeyValuePair<Key, Value>>();
    for (int i = 0; i < 10000; i++) {
        var randomKey = Key.Random(40);
        var randomValue = Value.Random(256);
        values.Add(new KeyValuePair<Key, Value>(randomKey, randomValue));
        mt.Add(randomKey, randomValue);
    }

    Value value;
    foreach (var pair in values) {
        Assert.IsTrue(mt.Lookup(pair.Key, out value));
        Assert.AreEqual(pair.Value, value);
    }
    Assert.IsFalse(mt.Lookup(Key.Random(40), out value));

    Assert.AreEqual(10000 * (40 + 256), mt.Size);
    Assert.IsTrue(mt.Full);
}
public void SetItemsMultipleTimes() {
    MemTable mt = new MemTable();

    Dictionary<Key, Value> values = new Dictionary<Key, Value>();
    for (int i = 0; i < 10000; i++) {
        var randomKey = new Key(new ByteArray(BitConverter.GetBytes(i % 10)));
        var randomValue = Value.Random(256);
        values[randomKey] = randomValue;
        mt.Add(randomKey, randomValue);
    }

    Value value;
    foreach (var pair in values) {
        Assert.IsTrue(mt.Lookup(pair.Key, out value));
        Assert.AreEqual(pair.Value, value);
    }
    Assert.IsFalse(mt.Lookup(Key.Random(4), out value));

    // Only 10 distinct keys were written, so repeated sets must overwrite in place
    Assert.AreEqual(10, mt.Enumerate().Count());
    Assert.AreEqual(10, values.Count);
}
public void LevelMergeOutputTest() {
    string path = Path.GetFullPath("TestData\\LevelMergeOutputTest");
    if (!Directory.Exists(path)) Directory.CreateDirectory(path);

    int num_tables_to_merge = 4;
    int items_per_table = 2500;
    int totalData = 0;
    for (int i = 0; i < num_tables_to_merge; i++) {
        var mt = new MemTable();
        for (int j = 0; j < items_per_table; j++) {
            var randKey = Key.Random(40);
            var randVal = Value.Random(512);
            mt.Add(randKey, randVal);
        }
        mt.WriteToSortedBlockTable("TestData\\LevelMergeOutputTest", 0, i);
        totalData += mt.Size;
    }

    var cache = new RazorCache();
    var timer = new Stopwatch();
    timer.Start();
    Manifest mf = new Manifest("TestData\\LevelMergeOutputTest");
    SortedBlockTable.MergeTables(cache, mf, 1, new List<PageRef>{
        new PageRef { Level = 0, Version = 0 },
        new PageRef { Level = 0, Version = 1 },
        new PageRef { Level = 0, Version = 2 },
        new PageRef { Level = 0, Version = 3 }
    }, ExceptionHandling.ThrowAll, null);
    timer.Stop();

    Console.WriteLine("Wrote a multilevel merge at a throughput of {0} MB/s",
        (double)totalData / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
}
public void TestFileOpenSpeed() {
    string path = Path.GetFullPath("TestData\\TestFileOpenSpeed");
    if (!Directory.Exists(path)) Directory.CreateDirectory(path);

    var mt = new MemTable();
    for (int i = 0; i < 10000; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
    }
    mt.WriteToSortedBlockTable("TestData\\TestFileOpenSpeed", 0, 10);

    var openTables = new List<SortedBlockTable>();

    var cache = new RazorCache();
    var timer = new Stopwatch();
    timer.Start();
    for (int j = 0; j < 10000; j++) {
        var sbt = new SortedBlockTable(cache, "TestData\\TestFileOpenSpeed", 0, 10);
        openTables.Add(sbt);
    }
    timer.Stop();
    Console.WriteLine("Open block table {0} ms", timer.Elapsed.TotalMilliseconds / 10000);

    timer.Reset();
    timer.Start();
    for (int k = 0; k < 10000; k++) {
        openTables[k].Close();
    }
    timer.Stop();
    Console.WriteLine("Close block table {0} ms", timer.Elapsed.TotalMilliseconds / 10000);
}
public void WriteAndDumpSBT() {
    string path = Path.GetFullPath("TestData\\DumpPrefixedSBT");
    if (!Directory.Exists(path)) Directory.CreateDirectory(path);

    var mt = new MemTable();
    for (int i = 0; i < 10000; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
    }
    mt.WriteToSortedBlockTable("TestData\\DumpPrefixedSBT", 0, 10);

    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, "TestData\\DumpPrefixedSBT", 0, 10);
    sbt.DumpContents((msg) => Console.WriteLine(msg));
}
public void ReadKeys() {
    string path = Path.GetFullPath("TestData\\ReadKeys");
    if (!Directory.Exists(path)) Directory.CreateDirectory(path);

    var mt = new MemTable();
    for (int i = 0; i < 10000; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
    }
    mt.WriteToSortedBlockTable("TestData\\ReadKeys", 0, 10);

    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, "TestData\\ReadKeys", 0, 10);

    var timer = new Stopwatch();
    timer.Start();
    Assert.AreEqual(10000, sbt.Enumerate().Count());
    timer.Stop();
    Console.WriteLine("Counted sorted table at a throughput of {0} MB/s",
        (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));

    // Confirm that the items are sorted.
    Key lastKey = Key.Empty;
    timer.Reset();
    timer.Start();
    foreach (var pair in sbt.Enumerate()) {
        Assert.IsTrue(lastKey.CompareTo(pair.Key) < 0);
        lastKey = pair.Key;
    }
    timer.Stop();
    Console.WriteLine("Read & verify sorted table at a throughput of {0} MB/s",
        (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));

    sbt.Close();
}
public void RandomizedThreadedLookups() {
    string path = Path.GetFullPath("TestData\\RandomizedThreadedLookups");
    if (!Directory.Exists(path)) Directory.CreateDirectory(path);

    List<KeyValuePair<Key, Value>> items = new List<KeyValuePair<Key, Value>>();
    int num_items = 10000;
    var mt = new MemTable();
    for (int i = 0; i < num_items; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
        items.Add(new KeyValuePair<Key, Value>(k0, v0));
    }
    mt.WriteToSortedBlockTable("TestData\\RandomizedThreadedLookups", 10, 10);

    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, "TestData\\RandomizedThreadedLookups", 10, 10);
    var indexCache = new RazorCache();

    // Each of the 10 threads looks up a different slice of the items list
    List<Thread> threads = new List<Thread>();
    for (int t = 0; t < 10; t++) {
        threads.Add(new Thread((num) => {
            for (int k = 0; k < num_items / 10; k++) {
                var pair = items[k * (int)num];
                Value value;
                Assert.IsTrue(SortedBlockTable.Lookup("TestData\\RandomizedThreadedLookups", 10, 10, indexCache, pair.Key, out value, ExceptionHandling.ThrowAll, null));
                Assert.AreEqual(pair.Value, value);
            }
        }));
    }

    var timer = new Stopwatch();
    timer.Start();
    int threadNum = 0;
    threads.ForEach((t) => t.Start(threadNum++));
    threads.ForEach((t) => t.Join());
    timer.Stop();

    // Use TotalMilliseconds so the reported per-lookup figure matches the "ms" label
    Console.WriteLine("Randomized (threaded) read sbt table at a throughput of {0} MB/s (avg {1} ms per lookup)",
        (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0),
        timer.Elapsed.TotalMilliseconds / (double)num_items);

    sbt.Close();
}
public void RandomizedLookups() {
    string path = Path.GetFullPath("TestData\\RandomizedKeys");
    if (!Directory.Exists(path)) Directory.CreateDirectory(path);

    List<KeyValuePair<Key, Value>> items = new List<KeyValuePair<Key, Value>>();
    int num_items = 10000;
    var mt = new MemTable();
    for (int i = 0; i < num_items; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
        items.Add(new KeyValuePair<Key, Value>(k0, v0));
    }
    mt.WriteToSortedBlockTable("TestData\\RandomizedKeys", 10, 10);

    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, "TestData\\RandomizedKeys", 10, 10);
    var indexCache = new RazorCache();

    var timer = new Stopwatch();
    timer.Start();
    foreach (var pair in items) {
        Value value;
        Assert.IsTrue(SortedBlockTable.Lookup("TestData\\RandomizedKeys", 10, 10, indexCache, pair.Key, out value, ExceptionHandling.ThrowAll, null));
        Assert.AreEqual(pair.Value, value);
    }
    timer.Stop();

    Value randomValue;
    Assert.IsFalse(SortedBlockTable.Lookup("TestData\\RandomizedKeys", 10, 10, indexCache, Key.Random(40), out randomValue, ExceptionHandling.ThrowAll, null));

    // Use TotalMilliseconds so the reported per-lookup figure matches the "ms" label
    Console.WriteLine("Randomized read sbt table at a throughput of {0} MB/s (avg {1} ms per lookup)",
        (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0),
        timer.Elapsed.TotalMilliseconds / (double)num_items);

    sbt.Close();
}
public void SnapshotEnumerator() {
    // This test is designed to highlight inefficiencies in the memtable snapshotting mechanism
    // (fixed now with snapshot-able tree)
    MemTable mt = new MemTable();
    for (int i = 0; i < 10000; i++) {
        var randomKey = new Key(new ByteArray(BitConverter.GetBytes(i)));
        var randomValue = Value.Random(256);
        mt.Add(randomKey, randomValue);
    }

    Stopwatch timer = new Stopwatch();
    timer.Start();
    for (int k = 0; k < 100; k++) {
        Assert.AreEqual(10000, mt.GetEnumerableSnapshot().Count());
    }
    timer.Stop();
    Console.WriteLine("Elapsed Time: {0}ms", timer.ElapsedMilliseconds);
}
public void LevelMergeReadTest() {
    string path = Path.GetFullPath("TestData\\LevelMergeReadTest");
    if (!Directory.Exists(path)) Directory.CreateDirectory(path);

    int num_tables_to_merge = 4;
    int items_per_table = 2500;
    int totalData = 0;
    for (int i = 0; i < num_tables_to_merge; i++) {
        var mt = new MemTable();
        for (int j = 0; j < items_per_table; j++) {
            var randKey = Key.Random(40);
            var randVal = Value.Random(512);
            mt.Add(randKey, randVal);
        }
        mt.WriteToSortedBlockTable("TestData\\LevelMergeReadTest", 0, i);
        totalData += mt.Size;
    }

    var tables = new List<IEnumerable<KeyValuePair<Key, Value>>>();
    var sbts = new List<SortedBlockTable>();
    var cache = new RazorCache();
    for (int j = 0; j < num_tables_to_merge; j++) {
        var sbt = new SortedBlockTable(cache, "TestData\\LevelMergeReadTest", 0, j);
        tables.Add(sbt.Enumerate());
        sbts.Add(sbt);
    }

    int ct = 0;
    Key key = Key.FromBytes(new byte[] { 0, 0 });
    var timer = new Stopwatch();
    timer.Start();
    foreach (var pair in MergeEnumerator.Merge(tables, p => p.Key)) {
        Assert.True(key.CompareTo(pair.Key) < 0);
        key = pair.Key;
        ct++;
    }
    timer.Stop();
    sbts.ForEach(s => s.Close());

    Console.WriteLine("Scanned through a multilevel merge at a throughput of {0} MB/s",
        (double)totalData / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
}
public void WriteMemTableToSsTable() {
    string path = Path.GetFullPath("TestData\\WriteMemTableToSsTable");
    if (!Directory.Exists(path)) Directory.CreateDirectory(path);

    MemTable mt = new MemTable();
    for (int i = 0; i < 10000; i++) {
        var randomKey = Key.Random(40);
        var randomValue = Value.Random(256);
        mt.Add(randomKey, randomValue);
    }

    var timer = new Stopwatch();
    timer.Start();
    mt.WriteToSortedBlockTable("TestData\\WriteMemTableToSsTable", 0, 1);
    timer.Stop();
    Console.WriteLine("Wrote sorted table at a throughput of {0} MB/s",
        (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
}
public void LevelMergeReadTest2() {
    string path = Path.GetFullPath("TestData\\LevelMergeReadTest2");
    if (!Directory.Exists(path)) Directory.CreateDirectory(path);

    int num_tables_to_merge = 4;
    int items_per_table = 2500;
    int totalData = 0;
    for (int i = 0; i < num_tables_to_merge; i++) {
        var mt = new MemTable();
        for (int j = 0; j < items_per_table; j++) {
            var randKey = Key.Random(40);
            var randVal = Value.Random(512);
            mt.Add(randKey, randVal);
        }
        mt.WriteToSortedBlockTable("TestData\\LevelMergeReadTest2", 0, i);
        totalData += mt.Size;
    }

    var cache = new RazorCache();
    int ct = 0;
    Key key = new Key(new ByteArray(new byte[] { 0 }));
    var timer = new Stopwatch();
    timer.Start();
    foreach (var pair in SortedBlockTable.EnumerateMergedTablesPreCached(cache, "TestData\\LevelMergeReadTest2", new List<PageRef>{
        new PageRef { Level = 0, Version = 0 },
        new PageRef { Level = 0, Version = 1 },
        new PageRef { Level = 0, Version = 2 },
        new PageRef { Level = 0, Version = 3 }
    }, ExceptionHandling.ThrowAll, null)) {
        Assert.True(key.CompareTo(pair.Key) < 0);
        key = pair.Key;
        ct++;
    }
    timer.Stop();

    Console.WriteLine("Scanned through a multilevel merge at a throughput of {0} MB/s",
        (double)totalData / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
}
public void AddAndLookupItemsPersisted() {
    string path = Path.GetFullPath("TestData\\AddAndLookupItemsPersisted");
    if (!Directory.Exists(path)) Directory.CreateDirectory(path);

    JournalWriter jw = new JournalWriter("TestData\\AddAndLookupItemsPersisted", 523, false);

    List<KeyValuePair<Key, Value>> values = new List<KeyValuePair<Key, Value>>();
    for (int i = 0; i < 10000; i++) {
        var randomKey = Key.Random(40);
        var randomValue = Value.Random(256);
        values.Add(new KeyValuePair<Key, Value>(randomKey, randomValue));
        jw.Add(randomKey, randomValue);
    }
    jw.Close();

    // Rebuild the memtable from the journal and verify every pair survived the round trip
    MemTable mtl = new MemTable();
    mtl.ReadFromJournal("TestData\\AddAndLookupItemsPersisted", 523);

    Value value;
    foreach (var pair in values) {
        Assert.IsTrue(mtl.Lookup(pair.Key, out value));
        Assert.AreEqual(pair.Value, value);
    }
    Assert.IsFalse(mtl.Lookup(Key.Random(40), out value));

    Assert.AreEqual(10000 * (40 + 256), mtl.Size);
    Assert.IsTrue(mtl.Full);
}