public void ReadKeys() {
    // Writes 10,000 random key/value pairs through a MemTable into a sorted
    // block table on disk, then verifies the table's count and key ordering,
    // reporting read throughput for both passes.
    const string tableDir = "TestData\\ReadKeys"; // single source for the path literal
    string path = Path.GetFullPath(tableDir);
    if (!Directory.Exists(path)) {
        Directory.CreateDirectory(path);
    }

    var mt = new MemTable();
    for (int i = 0; i < 10000; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
    }
    mt.WriteToSortedBlockTable(tableDir, 0, 10);

    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, tableDir, 0, 10);
    try {
        var timer = new Stopwatch();
        timer.Start();
        Assert.AreEqual(10000, sbt.Enumerate().Count());
        timer.Stop();
        Console.WriteLine("Counted sorted table at a throughput of {0} MB/s",
            (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));

        // Confirm that the items are sorted.
        Key lastKey = Key.Empty;
        timer.Reset();
        timer.Start();
        foreach (var pair in sbt.Enumerate()) {
            Assert.IsTrue(lastKey.CompareTo(pair.Key) < 0);
            lastKey = pair.Key;
        }
        timer.Stop();
        Console.WriteLine("Read & verify sorted table at a throughput of {0} MB/s",
            (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
    } finally {
        // Release the table's file handle even when an assertion above throws;
        // otherwise subsequent tests touching the same directory can fail.
        sbt.Close();
    }
}
public void LevelMergeReadTest() {
    // Writes four sorted block tables of random data, then merge-enumerates
    // them with MergeEnumerator and asserts the merged stream is strictly
    // ascending by key, reporting merge-scan throughput.
    string path = Path.GetFullPath("TestData\\LevelMergeReadTest");
    if (!Directory.Exists(path)) {
        Directory.CreateDirectory(path);
    }

    int num_tables_to_merge = 4;
    int items_per_table = 2500;
    int totalData = 0;
    for (int i = 0; i < num_tables_to_merge; i++) {
        var mt = new MemTable();
        for (int j = 0; j < items_per_table; j++) {
            var randKey = Key.Random(40);
            var randVal = Value.Random(512);
            mt.Add(randKey, randVal);
        }
        mt.WriteToSortedBlockTable("TestData\\LevelMergeReadTest", 0, i);
        totalData += mt.Size;
    }

    var tables = new List<IEnumerable<KeyValuePair<Key, Value>>>();
    var sbts = new List<SortedBlockTable>();
    var cache = new RazorCache();
    for (int j = 0; j < num_tables_to_merge; j++) {
        var sbt = new SortedBlockTable(cache, "TestData\\LevelMergeReadTest", 0, j);
        tables.Add(sbt.Enumerate());
        sbts.Add(sbt);
    }

    try {
        int ct = 0;
        Key key = Key.FromBytes(new byte[] { 0, 0 });
        var timer = new Stopwatch();
        timer.Start();
        foreach (var pair in MergeEnumerator.Merge(tables, p => p.Key)) {
            Assert.True(key.CompareTo(pair.Key) < 0);
            key = pair.Key;
            ct++;
        }
        timer.Stop();
        Console.WriteLine("Scanned through a multilevel merge at a throughput of {0} MB/s",
            (double)totalData / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
    } finally {
        // Close every opened table even if the ordering assertion fails,
        // so the files are not left locked for later tests.
        sbts.ForEach(s => s.Close());
    }
}
public void LevelMergeDuplicateValuesTest() {
    // Writes four tables whose keys repeat (j % 100), merges them into level 1,
    // and verifies that the merge keeps exactly one entry per distinct key and
    // that the surviving value is the newest one (2400 for key 0).
    string workingDir = Path.GetFullPath("TestData\\LevelMergeDuplicateValuesTest");
    if (!Directory.Exists(workingDir)) {
        Directory.CreateDirectory(workingDir);
    }
    // Start from a clean slate so stale tables cannot affect the merge.
    foreach (string stale in Directory.GetFiles(workingDir)) {
        File.Delete(stale);
    }

    int tableCount = 4;
    int itemsPerTable = 2500;
    int bytesWritten = 0;
    for (int tableIdx = 0; tableIdx < tableCount; tableIdx++) {
        var memTable = new MemTable();
        for (int item = 0; item < itemsPerTable; item++) {
            // Only 100 distinct keys; later writes overwrite earlier ones.
            int keyNumber = item % 100;
            var dupKey = new Key(new ByteArray(BitConverter.GetBytes(keyNumber)));
            var payload = new Value(BitConverter.GetBytes(item));
            memTable.Add(dupKey, payload);
        }
        memTable.WriteToSortedBlockTable("TestData\\LevelMergeDuplicateValuesTest", 0, tableIdx);
        bytesWritten += memTable.Size;
    }

    var cache = new RazorCache();
    var timer = new Stopwatch();
    timer.Start();
    Manifest mf = new Manifest("TestData\\LevelMergeDuplicateValuesTest");
    // Merge all four level-0 pages into level 1.
    var pages = new List<PageRef>();
    for (int version = 0; version < tableCount; version++) {
        pages.Add(new PageRef { Level = 0, Version = version });
    }
    SortedBlockTable.MergeTables(cache, mf, 1, pages, ExceptionHandling.ThrowAll, null);
    timer.Stop();

    // Open the merged table and confirm deduplication kept the latest values.
    var merged = new SortedBlockTable(cache, mf.BaseFileName, 1, 1);
    try {
        var entries = merged.Enumerate().ToList();
        Assert.AreEqual(100, entries.Count());
        Assert.AreEqual(2400, BitConverter.ToInt32(entries.First().Value.ValueBytes, 0));
    } finally {
        merged.Close();
    }
    Console.WriteLine("Wrote a multilevel merge with duplicates at a throughput of {0} MB/s",
        (double)bytesWritten / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
}
public void LevelMergeDuplicateValuesTest() {
    // Exercises SortedBlockTable.MergeTables on overlapping key ranges:
    // four level-0 tables share the same 100 keys, so the merged level-1
    // table must contain 100 entries holding the most recent values.
    string targetPath = Path.GetFullPath("TestData\\LevelMergeDuplicateValuesTest");
    if (!Directory.Exists(targetPath)) {
        Directory.CreateDirectory(targetPath);
    }
    // Remove leftovers from previous runs before writing new tables.
    foreach (string leftover in Directory.GetFiles(targetPath)) {
        File.Delete(leftover);
    }

    int numTables = 4;
    int entriesPerTable = 2500;
    int dataSize = 0;
    for (int t = 0; t < numTables; t++) {
        var table = new MemTable();
        for (int n = 0; n < entriesPerTable; n++) {
            // Key space wraps every 100 entries, producing duplicates.
            var k = new Key(new ByteArray(BitConverter.GetBytes(n % 100)));
            var v = new Value(BitConverter.GetBytes(n));
            table.Add(k, v);
        }
        table.WriteToSortedBlockTable("TestData\\LevelMergeDuplicateValuesTest", 0, t);
        dataSize += table.Size;
    }

    var cache = new RazorCache();
    var timer = new Stopwatch();
    timer.Start();
    Manifest mf = new Manifest("TestData\\LevelMergeDuplicateValuesTest");
    // Collapse level-0 versions 0..3 into a single level-1 table.
    var pageRefs = Enumerable.Range(0, numTables)
        .Select(v => new PageRef { Level = 0, Version = v })
        .ToList();
    SortedBlockTable.MergeTables(cache, mf, 1, pageRefs, ExceptionHandling.ThrowAll, null);
    timer.Stop();

    // Re-open the merged output and validate its contents.
    var result = new SortedBlockTable(cache, mf.BaseFileName, 1, 1);
    try {
        var all = result.Enumerate().ToList();
        Assert.AreEqual(100, all.Count());
        // Key 0 was last written at j == 2400, so that value must survive.
        Assert.AreEqual(2400, BitConverter.ToInt32(all.First().Value.ValueBytes, 0));
    } finally {
        result.Close();
    }
    Console.WriteLine("Wrote a multilevel merge with duplicates at a throughput of {0} MB/s",
        (double)dataSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
}
public void LevelMergeReadTest() {
    // Builds four sorted block tables of random entries, streams them through
    // MergeEnumerator.Merge, and asserts the combined sequence is strictly
    // ordered by key; prints the observed scan throughput.
    string path = Path.GetFullPath("TestData\\LevelMergeReadTest");
    if (!Directory.Exists(path)) {
        Directory.CreateDirectory(path);
    }

    int num_tables_to_merge = 4;
    int items_per_table = 2500;
    int totalData = 0;
    for (int i = 0; i < num_tables_to_merge; i++) {
        var mt = new MemTable();
        for (int j = 0; j < items_per_table; j++) {
            var randKey = Key.Random(40);
            var randVal = Value.Random(512);
            mt.Add(randKey, randVal);
        }
        mt.WriteToSortedBlockTable("TestData\\LevelMergeReadTest", 0, i);
        totalData += mt.Size;
    }

    var tables = new List<IEnumerable<KeyValuePair<Key, Value>>>();
    var sbts = new List<SortedBlockTable>();
    var cache = new RazorCache();
    for (int j = 0; j < num_tables_to_merge; j++) {
        var sbt = new SortedBlockTable(cache, "TestData\\LevelMergeReadTest", 0, j);
        tables.Add(sbt.Enumerate());
        sbts.Add(sbt);
    }

    try {
        int ct = 0;
        Key key = Key.FromBytes(new byte[] { 0, 0 });
        var timer = new Stopwatch();
        timer.Start();
        foreach (var pair in MergeEnumerator.Merge(tables, p => p.Key)) {
            Assert.True(key.CompareTo(pair.Key) < 0);
            key = pair.Key;
            ct++;
        }
        timer.Stop();
        Console.WriteLine("Scanned through a multilevel merge at a throughput of {0} MB/s",
            (double)totalData / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
    } finally {
        // A failed ordering assertion must not leave the table files open.
        sbts.ForEach(s => s.Close());
    }
}
public void ReadKeys() {
    // Persists 10,000 random pairs to a sorted block table, then checks both
    // the entry count and the strict key ordering on re-read, timing each pass.
    const string tableDir = "TestData\\ReadKeys"; // one definition of the path literal
    string path = Path.GetFullPath(tableDir);
    if (!Directory.Exists(path)) {
        Directory.CreateDirectory(path);
    }

    var mt = new MemTable();
    for (int i = 0; i < 10000; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
    }
    mt.WriteToSortedBlockTable(tableDir, 0, 10);

    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, tableDir, 0, 10);
    try {
        var timer = new Stopwatch();
        timer.Start();
        Assert.AreEqual(10000, sbt.Enumerate().Count());
        timer.Stop();
        Console.WriteLine("Counted sorted table at a throughput of {0} MB/s",
            (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));

        // Confirm that the items are sorted.
        Key lastKey = Key.Empty;
        timer.Reset();
        timer.Start();
        foreach (var pair in sbt.Enumerate()) {
            Assert.IsTrue(lastKey.CompareTo(pair.Key) < 0);
            lastKey = pair.Key;
        }
        timer.Stop();
        Console.WriteLine("Read & verify sorted table at a throughput of {0} MB/s",
            (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
    } finally {
        // Close the table even when an assertion throws so the file handle
        // is not leaked into subsequent tests.
        sbt.Close();
    }
}
public void V1SortedBlockTableFile() {
    // Backward-compatibility check: the current reader must still open a
    // checked-in V1-format table, see all 1000 entries, and read them in
    // strictly ascending key order.
    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, @"..\FormatTestData\V1", 0, 10);
    try {
        Assert.AreEqual(1000, sbt.Enumerate().Count());

        // Confirm that the items are sorted.
        KeyEx lastKey = KeyEx.Empty;
        foreach (var pair in sbt.Enumerate()) {
            Assert.IsTrue(lastKey.CompareTo(pair.Key) < 0);
            lastKey = pair.Key;
        }
    } finally {
        // Previously Close() was skipped when an assertion failed, leaving the
        // shared fixture file locked; finally guarantees it is released.
        sbt.Close();
    }
}