static void DumpFile(string baseDir, int level, int version) {
    RazorCache cache = new RazorCache();
    var tablefile = new SortedBlockTable(cache, baseDir, level, version);
    try {
        tablefile.DumpContents(msg => Console.WriteLine(msg));
    } finally {
        tablefile.Close();
    }
}
public void LevelMergeDuplicateValuesTest() {
    string path = Path.GetFullPath("TestData\\LevelMergeDuplicateValuesTest");
    if (!Directory.Exists(path))
        Directory.CreateDirectory(path);
    foreach (string file in Directory.GetFiles(path))
        File.Delete(file);

    int num_tables_to_merge = 4;
    int items_per_table = 2500;
    int totalData = 0;
    // Write 4 tables that all contain the same 100 keys (j % 100), so the
    // merge must collapse the duplicates down to one value per key.
    for (int i = 0; i < num_tables_to_merge; i++) {
        var mt = new MemTable();
        for (int j = 0; j < items_per_table; j++) {
            int numToStore = j % 100;
            var key = new Key(new ByteArray(BitConverter.GetBytes(numToStore)));
            var value = new Value(BitConverter.GetBytes(j));
            mt.Add(key, value);
        }
        mt.WriteToSortedBlockTable(path, 0, i);
        totalData += mt.Size;
    }

    var cache = new RazorCache();
    var timer = new Stopwatch();
    timer.Start();
    Manifest mf = new Manifest(path);
    SortedBlockTable.MergeTables(cache, mf, 1, new List<PageRef> {
        new PageRef { Level = 0, Version = 0 },
        new PageRef { Level = 0, Version = 1 },
        new PageRef { Level = 0, Version = 2 },
        new PageRef { Level = 0, Version = 3 }
    }, ExceptionHandling.ThrowAll, null);
    timer.Stop();

    // Open the merged block table and scan it to check the stored values.
    var sbt = new SortedBlockTable(cache, mf.BaseFileName, 1, 1);
    try {
        var pairs = sbt.Enumerate().ToList();
        Assert.AreEqual(100, pairs.Count);
        // 2400 is the last value written for key 0 (the largest multiple of
        // 100 below 2500), so it must survive the merge.
        Assert.AreEqual(2400, BitConverter.ToInt32(pairs.First().Value.ValueBytes, 0));
    } finally {
        sbt.Close();
    }
    Console.WriteLine("Wrote a multilevel merge with duplicates at a throughput of {0} MB/s",
        (double)totalData / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
}
public static bool Lookup(string baseFileName, int level, int version, RazorCache cache, Key key, out Value value, ExceptionHandling exceptionHandling, Action<string> logger) {
    SortedBlockTable sbt = new SortedBlockTable(cache, baseFileName, level, version);
    try {
        // Use the block index to find the single data block that could
        // contain the key, then search just that block.
        int dataBlockNum = FindBlockForKey(baseFileName, level, version, cache, key);
        if (dataBlockNum >= 0 && dataBlockNum < sbt._dataBlocks) {
            byte[] block = sbt.ReadBlock(LocalThreadAllocatedBlock(), dataBlockNum);
            return SearchBlockForKey(block, key, out value);
        }
    } finally {
        sbt.Close();
    }
    value = Value.Empty;
    return false;
}
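// Hedged usage sketch, not from the source: how a caller might probe a single
// table file through the static Lookup above. The directory, level, version,
// and key bytes here are illustrative assumptions.
static void LookupExample() {
    var cache = new RazorCache();
    var key = new Key(new ByteArray(BitConverter.GetBytes(42)));
    Value value;
    if (SortedBlockTable.Lookup("TestData\\Example", 0, 1, cache, key, out value, ExceptionHandling.ThrowAll, null)) {
        Console.WriteLine("Found {0} value bytes", value.ValueBytes.Length);
    }
}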
public void EnumerateFromKeys() {
    string path = Path.GetFullPath("TestData\\EnumerateFromKeys");
    if (!Directory.Exists(path))
        Directory.CreateDirectory(path);

    List<KeyValuePair<Key, Value>> items = new List<KeyValuePair<Key, Value>>();
    int num_items = 10000;
    var mt = new MemTable();
    for (int i = 0; i < num_items; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
        items.Add(new KeyValuePair<Key, Value>(k0, v0));
    }
    mt.WriteToSortedBlockTable(path, 10, 10);

    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, path, 10, 10);
    try {
        var indexCache = new RazorCache();
        var timer = new Stopwatch();

        // Enumerating from the lowest possible key should return every item.
        timer.Start();
        Assert.AreEqual(10000, sbt.EnumerateFromKey(indexCache, new Key(new byte[] { 0 }, 0)).Count());
        timer.Stop();
        Console.WriteLine("Counted from beginning at a throughput of {0} MB/s",
            (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));

        // Starting at the median key should return exactly the upper half.
        items = items.OrderBy(a => a.Key).ToList();
        timer.Reset();
        timer.Start();
        Assert.AreEqual(5000, sbt.EnumerateFromKey(indexCache, items[5000].Key).Count());
        timer.Stop();
        Console.WriteLine("Counted from halfway at a throughput of {0} MB/s",
            (double)mt.Size / 2 / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));

        // A key past the end of the keyspace should yield nothing.
        Assert.AreEqual(0, sbt.EnumerateFromKey(indexCache, new Key(new byte[] { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF }, 0xFF)).Count());
    } finally {
        sbt.Close();
    }
}
static void CheckBlockTableFiles(string baseDir) {
    Console.WriteLine("Checking Block Table Files '{0}'", baseDir);
    RazorCache cache = new RazorCache();
    // Table files are named "<level>-<version>.sbt"; parse both parts back
    // out of the file name so each table can be opened and scanned.
    foreach (string file in Directory.GetFiles(baseDir, "*.sbt", SearchOption.TopDirectoryOnly)) {
        var fileparts = Path.GetFileNameWithoutExtension(file).Split('-');
        int level = int.Parse(fileparts[0]);
        int version = int.Parse(fileparts[1]);
        var tablefile = new SortedBlockTable(cache, baseDir, level, version);
        Console.WriteLine("Level: {0} Version: {1}", level, version);
        try {
            tablefile.ScanCheck();
        } finally {
            tablefile.Close();
        }
    }
}
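// Hedged illustration based on the parsing above: table files appear to be
// named "<level>-<version>.sbt" inside the base directory. TableFileName is a
// hypothetical helper showing that convention; the real Config.SortedBlockTableFile
// may differ in details.
static string TableFileName(string baseDir, int level, int version) {
    return Path.Combine(baseDir, string.Format("{0}-{1}.sbt", level, version));
}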
public Key[] GetBlockTableIndex(string baseName, int level, int version) {
    string fileName = Config.SortedBlockTableFile(baseName, level, version);
    Key[] index;
    if (_blockIndexCache.TryGetValue(fileName, out index)) {
        return index;
    }
    // Cache miss: open the table just long enough to read its key index,
    // then cache the index for subsequent lookups.
    var sbt = new SortedBlockTable(null, baseName, level, version);
    try {
        index = sbt.GetIndex();
        _blockIndexCache.Set(fileName, index);
        return index;
    } finally {
        sbt.Close();
    }
}
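// Hedged sketch of how a cached block index can be used, assuming Key
// implements IComparable<Key> (the tests call key.CompareTo). FindCandidateBlock
// is a hypothetical helper, not the library's FindBlockForKey: it binary-searches
// the first-key-per-block index for the block that could hold a lookup key.
static int FindCandidateBlock(Key[] index, Key key) {
    int pos = Array.BinarySearch(index, key);
    // An exact hit means the key is the first key of block 'pos'; otherwise
    // ~pos is the insertion point, so the key can only live in the block just
    // before it (-1 means the key sorts before the entire table).
    return pos >= 0 ? pos : ~pos - 1;
}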
Value InternalGet(Key lookupKey) {
    Value output = Value.Empty;
    var rotatedMemTable = _rotatedJournaledMemTable; // Capture a copy of the rotated table if there is one.

    // Check the current memtable first.
    if (_currentJournaledMemTable.Lookup(lookupKey, out output)) {
        return output;
    }
    // Then check the table that is in rotation, if any.
    if (rotatedMemTable != null) {
        if (rotatedMemTable.Lookup(lookupKey, out output)) {
            return output;
        }
    }
    // Finally check the files on disk.
    using (var manifest = _manifest.GetLatestManifest()) {
        // Level 0 pages may have overlapping key ranges, so every one must be
        // checked, newest version first.
        var zeroPages = manifest.GetPagesAtLevel(0).OrderByDescending(page => page.Version);
        foreach (var page in zeroPages) {
            if (SortedBlockTable.Lookup(_manifest.BaseFileName, page.Level, page.Version, _cache, lookupKey, out output, _exceptionHandling, _manifest.Logger)) {
                return output;
            }
        }
        // Higher levels hold disjoint key ranges, so the page index narrows
        // the search to at most one page per level.
        for (int level = 1; level < manifest.NumLevels; level++) {
            var page = manifest.FindPageForKey(level, lookupKey);
            if (page != null && SortedBlockTable.Lookup(_manifest.BaseFileName, page.Level, page.Version, _cache, lookupKey, out output, _exceptionHandling, _manifest.Logger)) {
                return output;
            }
        }
    }
    return Value.Empty; // Not found: return the empty value, not null.
}
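// Hedged usage sketch, assuming the byte[]-based KeyValueStore.Set/Get surface
// that RazorDB documents; the path and key/value strings are illustrative
// assumptions. A Get call like this is ultimately served by InternalGet above.
static void GetExample() {
    using (var db = new KeyValueStore("TestData\\Example")) {
        db.Set(Encoding.UTF8.GetBytes("hello"), Encoding.UTF8.GetBytes("world"));
        byte[] found = db.Get(Encoding.UTF8.GetBytes("hello"));
        Console.WriteLine(Encoding.UTF8.GetString(found));
    }
}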
public void LevelMergeReadTest() {
    string path = Path.GetFullPath("TestData\\LevelMergeReadTest");
    if (!Directory.Exists(path))
        Directory.CreateDirectory(path);

    int num_tables_to_merge = 4;
    int items_per_table = 2500;
    int totalData = 0;
    for (int i = 0; i < num_tables_to_merge; i++) {
        var mt = new MemTable();
        for (int j = 0; j < items_per_table; j++) {
            var randKey = Key.Random(40);
            var randVal = Value.Random(512);
            mt.Add(randKey, randVal);
        }
        mt.WriteToSortedBlockTable(path, 0, i);
        totalData += mt.Size;
    }

    // Open all four tables and merge-enumerate them as a single sorted stream.
    var tables = new List<IEnumerable<KeyValuePair<Key, Value>>>();
    var sbts = new List<SortedBlockTable>();
    var cache = new RazorCache();
    for (int j = 0; j < num_tables_to_merge; j++) {
        var sbt = new SortedBlockTable(cache, path, 0, j);
        tables.Add(sbt.Enumerate());
        sbts.Add(sbt);
    }

    int ct = 0;
    Key key = Key.FromBytes(new byte[] { 0, 0 });
    var timer = new Stopwatch();
    timer.Start();
    foreach (var pair in MergeEnumerator.Merge(tables, p => p.Key)) {
        // The merged stream must be strictly ascending.
        Assert.True(key.CompareTo(pair.Key) < 0);
        key = pair.Key;
        ct++;
    }
    timer.Stop();
    sbts.ForEach(s => s.Close());

    Console.WriteLine("Scanned through a multilevel merge at a throughput of {0} MB/s",
        (double)totalData / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
}
public void RandomizedLookups() {
    string path = Path.GetFullPath("TestData\\RandomizedKeys");
    if (!Directory.Exists(path))
        Directory.CreateDirectory(path);

    List<KeyValuePair<Key, Value>> items = new List<KeyValuePair<Key, Value>>();
    int num_items = 10000;
    var mt = new MemTable();
    for (int i = 0; i < num_items; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
        items.Add(new KeyValuePair<Key, Value>(k0, v0));
    }
    mt.WriteToSortedBlockTable(path, 10, 10);

    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, path, 10, 10);
    var indexCache = new RazorCache();

    // Every key that was written must be found with its original value.
    var timer = new Stopwatch();
    timer.Start();
    foreach (var pair in items) {
        Value value;
        Assert.IsTrue(SortedBlockTable.Lookup(path, 10, 10, indexCache, pair.Key, out value, ExceptionHandling.ThrowAll, null));
        Assert.AreEqual(pair.Value, value);
    }
    timer.Stop();

    // A fresh random 40-byte key is essentially guaranteed not to collide
    // with any stored key, so this lookup should miss.
    Value randomValue;
    Assert.IsFalse(SortedBlockTable.Lookup(path, 10, 10, indexCache, Key.Random(40), out randomValue, ExceptionHandling.ThrowAll, null));

    Console.WriteLine("Randomized read sbt table at a throughput of {0} MB/s (avg {1} ms per lookup)",
        (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0),
        timer.Elapsed.TotalMilliseconds / num_items);
    sbt.Close();
}
public void TestFileOpenSpeed() {
    string path = Path.GetFullPath("TestData\\TestFileOpenSpeed");
    if (!Directory.Exists(path))
        Directory.CreateDirectory(path);

    var mt = new MemTable();
    for (int i = 0; i < 10000; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
    }
    mt.WriteToSortedBlockTable(path, 0, 10);

    var openTables = new List<SortedBlockTable>();
    var cache = new RazorCache();
    var timer = new Stopwatch();

    // Time 10,000 opens of the same table, then 10,000 closes.
    timer.Start();
    for (int j = 0; j < 10000; j++) {
        var sbt = new SortedBlockTable(cache, path, 0, 10);
        openTables.Add(sbt);
    }
    timer.Stop();
    Console.WriteLine("Open block table: avg {0} ms", timer.Elapsed.TotalMilliseconds / 10000);

    timer.Reset();
    timer.Start();
    for (int k = 0; k < 10000; k++) {
        openTables[k].Close();
    }
    timer.Stop();
    Console.WriteLine("Close block table: avg {0} ms", timer.Elapsed.TotalMilliseconds / 10000);
}
public void ReadKeys() {
    string path = Path.GetFullPath("TestData\\ReadKeys");
    if (!Directory.Exists(path))
        Directory.CreateDirectory(path);

    var mt = new MemTable();
    for (int i = 0; i < 10000; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
    }
    mt.WriteToSortedBlockTable(path, 0, 10);

    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, path, 0, 10);
    var timer = new Stopwatch();
    timer.Start();
    Assert.AreEqual(10000, sbt.Enumerate().Count());
    timer.Stop();
    Console.WriteLine("Counted sorted table at a throughput of {0} MB/s",
        (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));

    // Confirm that the items come back in strictly ascending key order.
    Key lastKey = Key.Empty;
    timer.Reset();
    timer.Start();
    foreach (var pair in sbt.Enumerate()) {
        Assert.IsTrue(lastKey.CompareTo(pair.Key) < 0);
        lastKey = pair.Key;
    }
    timer.Stop();
    Console.WriteLine("Read & verify sorted table at a throughput of {0} MB/s",
        (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
    sbt.Close();
}
public void RandomizedThreadedLookups() {
    string path = Path.GetFullPath("TestData\\RandomizedThreadedLookups");
    if (!Directory.Exists(path))
        Directory.CreateDirectory(path);

    List<KeyValuePair<Key, Value>> items = new List<KeyValuePair<Key, Value>>();
    int num_items = 10000;
    var mt = new MemTable();
    for (int i = 0; i < num_items; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
        items.Add(new KeyValuePair<Key, Value>(k0, v0));
    }
    mt.WriteToSortedBlockTable(path, 10, 10);

    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, path, 10, 10);
    var indexCache = new RazorCache();

    // Ten threads each look up a disjoint slice of the items concurrently:
    // thread n reads every 10th item starting at offset n.
    List<Thread> threads = new List<Thread>();
    for (int t = 0; t < 10; t++) {
        threads.Add(new Thread(num => {
            for (int k = 0; k < num_items / 10; k++) {
                var pair = items[k * 10 + (int)num];
                Value value;
                Assert.IsTrue(SortedBlockTable.Lookup(path, 10, 10, indexCache, pair.Key, out value, ExceptionHandling.ThrowAll, null));
                Assert.AreEqual(pair.Value, value);
            }
        }));
    }

    var timer = new Stopwatch();
    timer.Start();
    int threadNum = 0;
    threads.ForEach(t => t.Start(threadNum++));
    threads.ForEach(t => t.Join());
    timer.Stop();

    Console.WriteLine("Randomized (threaded) read sbt table at a throughput of {0} MB/s (avg {1} ms per lookup)",
        (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0),
        timer.Elapsed.TotalMilliseconds / num_items);
    sbt.Close();
}
public static void RunTableMergePass(KeyValueStore kvStore) {
    try {
        Interlocked.Increment(ref kvStore.mergeCount);

        lock (kvStore.mergeLock) {
            RazorCache cache = kvStore.Cache;
            Manifest manifest = kvStore.Manifest;

            while (true) {
                bool mergedDuringLastPass = false;

                using (var manifestInst = kvStore.Manifest.GetLatestManifest()) {
                    // Handle level 0: its pages can overlap, so merge all of them together.
                    if (manifestInst.GetNumPagesAtLevel(0) >= Config.MaxPagesOnLevel(0)) {
                        mergedDuringLastPass = true;
                        // Grab up to twice the normal page budget if the pages are
                        // available (this happens during heavy write pressure).
                        int level0PagesToTake = Config.MaxPagesOnLevel(0) * 2;
                        var inputPageRecords = manifestInst.GetPagesAtLevel(0).OrderBy(p => p.Version).Take(level0PagesToTake).ToList();
                        var startKey = inputPageRecords.Min(p => p.FirstKey);
                        var endKey = inputPageRecords.Max(p => p.LastKey);
                        // Any level 1 pages overlapping the input key range must join the merge.
                        var mergePages = manifestInst.FindPagesForKeyRange(1, startKey, endKey).AsPageRefs().ToList();
                        var allInputPages = inputPageRecords.AsPageRefs().Concat(mergePages).ToList();
                        var outputPages = SortedBlockTable.MergeTables(cache, manifest, 1, allInputPages, ExceptionHandling.ThrowAll, null).ToList();
                        manifest.ModifyPages(outputPages, allInputPages);
                        manifest.LogMessage("Merge Level 0 => InputPages: {0} OutputPages: {1}",
                            string.Join(",", allInputPages.Select(p => string.Format("{0}-{1}", p.Level, p.Version)).ToArray()),
                            string.Join(",", outputPages.Select(p => string.Format("{0}-{1}", p.Level, p.Version)).ToArray()));
                    }

                    // Handle the remaining levels: merge only one page upward at a time.
                    for (int level = 1; level < manifestInst.NumLevels - 1; level++) {
                        if (manifestInst.GetNumPagesAtLevel(level) >= Config.MaxPagesOnLevel(level)) {
                            mergedDuringLastPass = true;
                            var inputPage = manifest.NextMergePage(level);
                            var mergePages = manifestInst.FindPagesForKeyRange(level + 1, inputPage.FirstKey, inputPage.LastKey).ToList();
                            var inputPageRecords = mergePages.Concat(new PageRecord[] { inputPage });
                            var allInputPages = inputPageRecords.AsPageRefs().ToList();
                            // Materialize the result so the merge runs once even though
                            // the output pages are consumed several times below.
                            var outputPages = SortedBlockTable.MergeTables(cache, manifest, level + 1, allInputPages, ExceptionHandling.ThrowAll, null).ToList();

                            // Notify when a merge happens; implemented primarily for testing.
                            if (kvStore.MergeCallback != null) {
                                kvStore.MergeCallback(level, inputPageRecords, outputPages);
                            }

                            manifest.ModifyPages(outputPages, allInputPages);
                            manifest.LogMessage("Merge Level >0 => InputPages: {0} OutputPages: {1}",
                                string.Join(",", allInputPages.Select(p => string.Format("{0}-{1}", p.Level, p.Version)).ToArray()),
                                string.Join(",", outputPages.Select(p => string.Format("{0}-{1}", p.Level, p.Version)).ToArray()));
                        }
                    }
                }

                if (!mergedDuringLastPass) {
                    return; // No more merging is needed; this pass is finished.
                }
            }
        }
    } finally {
        Interlocked.Decrement(ref kvStore.mergeCount);
    }
}
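// Hedged sketch, not from the source: one way a background worker might drive
// merge passes. "TableManager" as the enclosing class name and the already-open
// "store" instance are assumptions for illustration; mergeCount/mergeLock above
// already make concurrent invocations safe.
static void QueueMergePass(KeyValueStore store) {
    ThreadPool.QueueUserWorkItem(_ => {
        try {
            TableManager.RunTableMergePass(store);
        } catch (Exception ex) {
            store.Manifest.LogMessage("Background merge pass failed: {0}", ex.Message);
        }
    });
}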