public void RandomizedThreadedLookups() {
    string path = Path.GetFullPath("TestData\\RandomizedThreadedLookups");
    if (!Directory.Exists(path)) {
        Directory.CreateDirectory(path);
    }

    // Build a memtable of random key/value pairs and flush it to a sorted block table.
    List<KeyValuePair<Key, Value>> items = new List<KeyValuePair<Key, Value>>();
    int num_items = 10000;
    var mt = new MemTable();
    for (int i = 0; i < num_items; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
        items.Add(new KeyValuePair<Key, Value>(k0, v0));
    }
    mt.WriteToSortedBlockTable("TestData\\RandomizedThreadedLookups", 10, 10);

    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, "TestData\\RandomizedThreadedLookups", 10, 10);
    var indexCache = new RazorCache();

    // Run ten lookup threads concurrently against the same table.
    List<Thread> threads = new List<Thread>();
    for (int t = 0; t < 10; t++) {
        threads.Add(new Thread((num) => {
            for (int k = 0; k < num_items / 10; k++) {
                var pair = items[k * (int)num];
                Value value;
                Assert.IsTrue(SortedBlockTable.Lookup("TestData\\RandomizedThreadedLookups", 10, 10, indexCache, pair.Key, out value, ExceptionHandling.ThrowAll, null));
                Assert.AreEqual(pair.Value, value);
            }
        }));
    }

    var timer = new Stopwatch();
    timer.Start();
    int threadNum = 0;
    threads.ForEach((t) => t.Start(threadNum++));
    threads.ForEach((t) => t.Join());
    timer.Stop();

    Console.WriteLine("Randomized (threaded) read sbt table at a throughput of {0} MB/s (avg {1} ms per lookup)",
        (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0),
        timer.Elapsed.TotalMilliseconds / num_items);

    sbt.Close();
}
static void DumpFile(string baseDir, int level, int version) {
    RazorCache cache = new RazorCache();
    var tablefile = new SortedBlockTable(cache, baseDir, level, version);
    try {
        tablefile.DumpContents(msg => Console.WriteLine(msg));
    } finally {
        tablefile.Close();
    }
}
public void EnumerateFromKeys() {
    string path = Path.GetFullPath("TestData\\EnumerateFromKeys");
    if (!Directory.Exists(path)) {
        Directory.CreateDirectory(path);
    }

    List<KeyValuePair<Key, Value>> items = new List<KeyValuePair<Key, Value>>();
    int num_items = 10000;
    var mt = new MemTable();
    for (int i = 0; i < num_items; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
        items.Add(new KeyValuePair<Key, Value>(k0, v0));
    }
    mt.WriteToSortedBlockTable("TestData\\EnumerateFromKeys", 10, 10);

    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, "TestData\\EnumerateFromKeys", 10, 10);
    try {
        var indexCache = new RazorCache();

        // Enumerating from the lowest possible key should return every item.
        var timer = new Stopwatch();
        timer.Start();
        Assert.AreEqual(10000, sbt.EnumerateFromKey(indexCache, new Key(new byte[] { 0 }, 0)).Count());
        timer.Stop();
        Console.WriteLine("Counted from beginning at a throughput of {0} MB/s", (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));

        // Enumerating from the median key should return the second half of the items.
        items = items.OrderBy((a) => a.Key).ToList();
        timer.Reset();
        timer.Start();
        Assert.AreEqual(5000, sbt.EnumerateFromKey(indexCache, items[5000].Key).Count());
        timer.Stop();
        Console.WriteLine("Counted from halfway at a throughput of {0} MB/s", (double)mt.Size / 2 / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));

        // Enumerating from a key beyond any of the stored keys should return nothing.
        Assert.AreEqual(0, sbt.EnumerateFromKey(indexCache, new Key(new byte[] { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF }, 0xFF)).Count());
    } finally {
        sbt.Close();
    }
}
public void LevelMergeDuplicateValuesTest() {
    string path = Path.GetFullPath("TestData\\LevelMergeDuplicateValuesTest");
    if (!Directory.Exists(path)) {
        Directory.CreateDirectory(path);
    }
    foreach (string file in Directory.GetFiles(path)) {
        File.Delete(file);
    }

    // Write four level-0 tables that all reuse the same 100 keys, so the merge must resolve duplicates.
    int num_tables_to_merge = 4;
    int items_per_table = 2500;
    int totalData = 0;
    for (int i = 0; i < num_tables_to_merge; i++) {
        var mt = new MemTable();
        for (int j = 0; j < items_per_table; j++) {
            int numToStore = j % 100;
            var key = new Key(new ByteArray(BitConverter.GetBytes(numToStore)));
            var value = new Value(BitConverter.GetBytes(j));
            mt.Add(key, value);
        }
        mt.WriteToSortedBlockTable("TestData\\LevelMergeDuplicateValuesTest", 0, i);
        totalData += mt.Size;
    }

    var cache = new RazorCache();
    var timer = new Stopwatch();
    timer.Start();
    Manifest mf = new Manifest("TestData\\LevelMergeDuplicateValuesTest");
    SortedBlockTable.MergeTables(cache, mf, 1, new List<PageRef> {
        new PageRef { Level = 0, Version = 0 },
        new PageRef { Level = 0, Version = 1 },
        new PageRef { Level = 0, Version = 2 },
        new PageRef { Level = 0, Version = 3 }
    }, ExceptionHandling.ThrowAll, null);
    timer.Stop();

    // Open the merged block table and scan it to check the stored values.
    var sbt = new SortedBlockTable(cache, mf.BaseFileName, 1, 1);
    try {
        var pairs = sbt.Enumerate().ToList();
        Assert.AreEqual(100, pairs.Count());
        Assert.AreEqual(2400, BitConverter.ToInt32(pairs.First().Value.ValueBytes, 0));
    } finally {
        sbt.Close();
    }
    Console.WriteLine("Wrote a multilevel merge with duplicates at a throughput of {0} MB/s", (double)totalData / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
}
public void RandomizedLookups() {
    string path = Path.GetFullPath("TestData\\RandomizedKeys");
    if (!Directory.Exists(path)) {
        Directory.CreateDirectory(path);
    }

    List<KeyValuePair<Key, Value>> items = new List<KeyValuePair<Key, Value>>();
    int num_items = 10000;
    var mt = new MemTable();
    for (int i = 0; i < num_items; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
        items.Add(new KeyValuePair<Key, Value>(k0, v0));
    }
    mt.WriteToSortedBlockTable("TestData\\RandomizedKeys", 10, 10);

    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, "TestData\\RandomizedKeys", 10, 10);
    var indexCache = new RazorCache();

    // Every key that was written must be found with its original value.
    var timer = new Stopwatch();
    timer.Start();
    foreach (var pair in items) {
        Value value;
        Assert.IsTrue(SortedBlockTable.Lookup("TestData\\RandomizedKeys", 10, 10, indexCache, pair.Key, out value, ExceptionHandling.ThrowAll, null));
        Assert.AreEqual(pair.Value, value);
    }
    timer.Stop();

    // A key that was never written should not be found.
    Value randomValue;
    Assert.IsFalse(SortedBlockTable.Lookup("TestData\\RandomizedKeys", 10, 10, indexCache, Key.Random(40), out randomValue, ExceptionHandling.ThrowAll, null));

    Console.WriteLine("Randomized read sbt table at a throughput of {0} MB/s (avg {1} ms per lookup)",
        (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0),
        timer.Elapsed.TotalMilliseconds / num_items);

    sbt.Close();
}
public void ReadKeys() {
    string path = Path.GetFullPath("TestData\\ReadKeys");
    if (!Directory.Exists(path)) {
        Directory.CreateDirectory(path);
    }

    var mt = new MemTable();
    for (int i = 0; i < 10000; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
    }
    mt.WriteToSortedBlockTable("TestData\\ReadKeys", 0, 10);

    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, "TestData\\ReadKeys", 0, 10);

    var timer = new Stopwatch();
    timer.Start();
    Assert.AreEqual(10000, sbt.Enumerate().Count());
    timer.Stop();
    Console.WriteLine("Counted sorted table at a throughput of {0} MB/s", (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));

    // Confirm that the items are sorted.
    Key lastKey = Key.Empty;
    timer.Reset();
    timer.Start();
    foreach (var pair in sbt.Enumerate()) {
        Assert.IsTrue(lastKey.CompareTo(pair.Key) < 0);
        lastKey = pair.Key;
    }
    timer.Stop();
    Console.WriteLine("Read & verify sorted table at a throughput of {0} MB/s", (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));

    sbt.Close();
}
static void CheckBlockTableFiles(string baseDir) {
    Console.WriteLine("Checking Block Table Files '{0}'", baseDir);

    RazorCache cache = new RazorCache();
    foreach (string file in Directory.GetFiles(baseDir, "*.sbt", SearchOption.TopDirectoryOnly)) {
        // Table file names encode the level and version as "<level>-<version>.sbt".
        var fileparts = Path.GetFileNameWithoutExtension(file).Split('-');
        int level = int.Parse(fileparts[0]);
        int version = int.Parse(fileparts[1]);

        Console.WriteLine("Level: {0} Version: {1}", level, version);

        var tablefile = new SortedBlockTable(cache, baseDir, level, version);
        try {
            tablefile.ScanCheck();
        } finally {
            tablefile.Close();
        }
    }
}
public void V1SortedBlockTableFile() {
    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, @"..\FormatTestData\V1", 0, 10);
    Assert.AreEqual(1000, sbt.Enumerate().Count());

    // Confirm that the items are sorted.
    KeyEx lastKey = KeyEx.Empty;
    foreach (var pair in sbt.Enumerate()) {
        Assert.IsTrue(lastKey.CompareTo(pair.Key) < 0);
        lastKey = pair.Key;
    }
    sbt.Close();
}