// Initializes the shared Object Store host: validates configuration, sizes the
// single shared RazorCache, and creates the per-name store dictionaries.
// Throws ArgumentNullException when config or objectMetadata is null.
public ObjectStore(Config config, ObjectMetadataStore objectMetadata) {
    if (null == config) {
        throw new ArgumentNullException(nameof(config));
    }
    if (null == objectMetadata) {
        throw new ArgumentNullException(nameof(objectMetadata));
    }
    _config = config;
    _objectMetadata = objectMetadata;
    // Create a single cache instance that is shared across all Object Stores.
    // Compute the byte size in 64-bit space and convert with overflow checking:
    // the original 32-bit cast-then-multiply silently wrapped for configured
    // cache sizes of 2048 MB or more, producing a bogus (possibly negative) size.
    int cacheSizeBytes = checked((int)((long)config.ObjectStoreCacheSize * 1024L * 1024L));
    // Index cache is set to a fifth the size of the data cache size.
    _cache = new RazorCache((int)Math.Ceiling((double)cacheSizeBytes / 5), cacheSizeBytes);
    _stores = new Dictionary<string, ExpiringObjectStore>(StringComparer.OrdinalIgnoreCase);
    _secondaryStores = new Dictionary<string, ExpiringObjectStore>(StringComparer.OrdinalIgnoreCase);
    _secondaryStoreExists = new Dictionary<string, bool>();
    // Start the Object Store cleanup timer if cleanup is enabled
    if (_config.ObjectStoreAutoClose && _config.ObjectStoreAutoCloseTimeout > 0) {
        _StartObjectStoreCleanup();
    }
}
//Routing
// Registers a static file route for the given URL. Razor templates (*.cshtml)
// are additionally pre-compiled into the template cache; compilation failures
// are reported via OnException and, when Debug is set, rendered into the
// cached template so the error is visible in the served page.
// Throws FileNotFoundException when the file does not exist.
public void AddFile(string url, string path) {
    FileInfo i = new FileInfo(path);
    if (!i.Exists) {
        throw new FileNotFoundException($"File '{path}' not found");
    }
    // Ordinal case-insensitive comparison instead of ToLower(): culture-sensitive
    // lowercasing can mis-match the extension (e.g. Turkish dotless-I locales).
    if (string.Equals(i.Extension, ".cshtml", StringComparison.OrdinalIgnoreCase)) {
        try {
            RazorCache.AddTemplate(i.Name, File.ReadAllText(i.FullName));
        } catch (Exception ex) {
            OnException?.Invoke("AddFile", ex);
            if (Debug) {
                // Escape '@' so the exception text is not re-parsed as Razor syntax.
                RazorCache.AddTemplate(i.Name, $"Exception found in Razor Template:\n {ex.Message.Replace("@", "@@")}");
            } else {
                RazorCache.AddTemplate(i.Name, $"Exception found in Razor Template, enable debug to see exception");
            }
        }
    }
    Routing.AddFileRoute(url, i.FullName);
}
// Writes 10k random key/value pairs to a sorted block table and dumps the raw
// (prefix-compressed) records to the console.
public void DumpPrefixedSBT() {
    string path = Path.GetFullPath("TestData\\DumpPrefixedSBT");
    if (!Directory.Exists(path)) {
        Directory.CreateDirectory(path);
    }
    var mt = new MemTable();
    for (int i = 0; i < 10000; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
    }
    mt.WriteToSortedBlockTable("TestData\\DumpPrefixedSBT", 0, 10);
    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, "TestData\\DumpPrefixedSBT", 0, 10);
    try {
        foreach (var pair in sbt.EnumerateRaw()) {
            Console.WriteLine("Key: {0} Value: {1}", pair.Key.ToString(), pair.Value.ToString());
        }
    } finally {
        // Fix: the original never closed the table, leaking the file handle
        // (every sibling test closes its SortedBlockTable).
        sbt.Close();
    }
}
// Writes 10k random pairs to a sorted block table, then runs 10 concurrent
// threads performing Lookup calls against it to exercise thread-safety of the
// static SortedBlockTable.Lookup path.
public void RandomizedThreadedLookups() {
    string path = Path.GetFullPath("TestData\\RandomizedThreadedLookups");
    if (!Directory.Exists(path)) { Directory.CreateDirectory(path); }
    // Remember every pair written so each lookup can be verified.
    List <KeyValuePair <Key, Value> > items = new List <KeyValuePair <Key, Value> > ();
    int num_items = 10000;
    var mt = new MemTable();
    for (int i = 0; i < num_items; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
        items.Add(new KeyValuePair <Key, Value> (k0, v0));
    }
    // Flush the memtable to disk as level 10 / version 10.
    mt.WriteToSortedBlockTable("TestData\\RandomizedThreadedLookups", 10, 10);
    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, "TestData\\RandomizedThreadedLookups", 10, 10);
    var indexCache = new RazorCache();
    List <Thread> threads = new List <Thread> ();
    for (int t = 0; t < 10; t++) {
        // Each thread gets its number as the ParameterizedThreadStart argument.
        threads.Add(new Thread((num) => {
            for (int k = 0; k < num_items / 10; k++) {
                // NOTE(review): index k * num means thread 0 looks up items[0]
                // a thousand times; a disjoint stripe would be k * 10 + num —
                // looks unintended, confirm before changing.
                var pair = items [k * (int)num];
                Value value;
                Assert.IsTrue(SortedBlockTable.Lookup("TestData\\RandomizedThreadedLookups", 10, 10, indexCache, pair.Key, out value, ExceptionHandling.ThrowAll, null));
                Assert.AreEqual(pair.Value, value);
            }
        }));
    }
    var timer = new Stopwatch();
    timer.Start();
    int threadNum = 0;
    // ForEach runs sequentially, so threadNum++ is not racy here.
    threads.ForEach((t) => t.Start(threadNum++));
    threads.ForEach((t) => t.Join());
    timer.Stop();
    Console.WriteLine("Randomized (threaded) read sbt table at a throughput of {0} MB/s (avg {1} ms per lookup)", (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0), (double)timer.Elapsed.TotalSeconds / (double)num_items);
    sbt.Close();
}
// Opens the sorted block table identified by (baseFileName, level, version),
// resolving its on-disk path and loading the table metadata footer.
public SortedBlockTable(RazorCache cache, string baseFileName, int level, int version) {
    // Record the identity of this table file.
    _cache = cache;
    _baseFileName = baseFileName;
    _level = level;
    _version = version;
    // Resolve the concrete file path for this (level, version) pair,
    // then read the metadata so the block counts are available immediately.
    _path = Config.SortedBlockTableFile(baseFileName, level, version);
    ReadMetadata();
}
// Writes four level-0 tables of random data, then scans them through
// EnumerateMergedTablesPreCached and asserts the merged stream is strictly
// ascending by key.
public void LevelMergeReadTest2() {
    string path = Path.GetFullPath("TestData\\LevelMergeReadTest2");
    if (!Directory.Exists(path)) { Directory.CreateDirectory(path); }
    int num_tables_to_merge = 4;
    int items_per_table = 2500;
    int totalData = 0; // total bytes written, used for the throughput figure
    for (int i = 0; i < num_tables_to_merge; i++) {
        var mt = new MemTable();
        for (int j = 0; j < items_per_table; j++) {
            var randKey = Key.Random(40);
            var randVal = Value.Random(512);
            mt.Add(randKey, randVal);
        }
        // Each table becomes level 0, version i.
        mt.WriteToSortedBlockTable("TestData\\LevelMergeReadTest2", 0, i);
        totalData += mt.Size;
    }
    var cache = new RazorCache();
    int ct = 0;
    // Start below any randomly generated key so the first comparison passes.
    Key key = new Key(new ByteArray(new byte[] { 0 }));
    var timer = new Stopwatch();
    timer.Start();
    foreach (var pair in SortedBlockTable.EnumerateMergedTablesPreCached(cache, "TestData\\LevelMergeReadTest2", new List <PageRef> { new PageRef { Level = 0, Version = 0 }, new PageRef { Level = 0, Version = 1 }, new PageRef { Level = 0, Version = 2 }, new PageRef { Level = 0, Version = 3 } }, ExceptionHandling.ThrowAll, null)) {
        // Merged enumeration must be strictly ascending.
        Assert.True(key.CompareTo(pair.Key) < 0);
        key = pair.Key;
        ct++;
    }
    timer.Stop();
    Console.WriteLine("Scanned through a multilevel merge at a throughput of {0} MB/s", (double)totalData / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
}
// CLI helper: opens one sorted block table and prints its contents to stdout.
static void DumpFile(string baseDir, int level, int version) {
    RazorCache cache = new RazorCache();
    var table = new SortedBlockTable(cache, baseDir, level, version);
    try {
        table.DumpContents(Console.WriteLine);
    } finally {
        // Release the file handle even if the dump throws.
        table.Close();
    }
}
// Writes four level-0 tables of random data, opens all of them, and scans them
// through MergeEnumerator.Merge directly, asserting strictly ascending key order.
public void LevelMergeReadTest() {
    string path = Path.GetFullPath("TestData\\LevelMergeReadTest");
    if (!Directory.Exists(path)) { Directory.CreateDirectory(path); }
    int num_tables_to_merge = 4;
    int items_per_table = 2500;
    int totalData = 0; // total bytes written, used for the throughput figure
    for (int i = 0; i < num_tables_to_merge; i++) {
        var mt = new MemTable();
        for (int j = 0; j < items_per_table; j++) {
            var randKey = Key.Random(40);
            var randVal = Value.Random(512);
            mt.Add(randKey, randVal);
        }
        mt.WriteToSortedBlockTable("TestData\\LevelMergeReadTest", 0, i);
        totalData += mt.Size;
    }
    // Keep the enumerables and the tables separately: the tables must stay open
    // for the duration of the merge and are closed afterwards.
    var tables = new List <IEnumerable <KeyValuePair <Key, Value> > >();
    var sbts = new List <SortedBlockTable>();
    var cache = new RazorCache();
    for (int j = 0; j < num_tables_to_merge; j++) {
        var sbt = new SortedBlockTable(cache, "TestData\\LevelMergeReadTest", 0, j);
        tables.Add(sbt.Enumerate());
        sbts.Add(sbt);
    }
    int ct = 0;
    // Start below any randomly generated key so the first comparison passes.
    Key key = Key.FromBytes(new byte[] { 0, 0 });
    var timer = new Stopwatch();
    timer.Start();
    foreach (var pair in MergeEnumerator.Merge(tables, p => p.Key)) {
        Assert.True(key.CompareTo(pair.Key) < 0);
        key = pair.Key;
        ct++;
    }
    timer.Stop();
    sbts.ForEach(s => s.Close());
    Console.WriteLine("Scanned through a multilevel merge at a throughput of {0} MB/s", (double)totalData / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
}
// CLI helper: opens the key/value store at baseDir and runs a full scan check.
static void CheckDatabase(string baseDir) {
    Console.WriteLine("Checking Key Value Store '{0}'", baseDir);
    var blockCache = new RazorCache();
    var store = new KeyValueStore(baseDir, blockCache);
    try {
        store.ScanCheck();
    } finally {
        // Always close the store so journals/handles are released.
        store.Close();
    }
}
// CLI helper: opens the store at baseDir and deletes table pages that are no
// longer referenced by the manifest.
static void RemoveOrphanedTables(string baseDir) {
    Console.WriteLine("Removing Orphaned Tables '{0}'", baseDir);
    var blockCache = new RazorCache();
    var store = new KeyValueStore(baseDir, blockCache);
    try {
        store.RemoveOrphanedPages();
    } finally {
        // Always close the store so journals/handles are released.
        store.Close();
    }
}
// Writes four level-0 tables of random data and merges them into level 1 via
// SortedBlockTable.MergeTables, reporting write throughput.
public void LevelMergeOutputTest() {
    string path = Path.GetFullPath("TestData\\LevelMergeOutputTest");
    if (!Directory.Exists(path)) { Directory.CreateDirectory(path); }
    int num_tables_to_merge = 4;
    int items_per_table = 2500;
    int totalData = 0; // total bytes written, used for the throughput figure
    for (int i = 0; i < num_tables_to_merge; i++) {
        var mt = new MemTable();
        for (int j = 0; j < items_per_table; j++) {
            var randKey = Key.Random(40);
            var randVal = Value.Random(512);
            mt.Add(randKey, randVal);
        }
        mt.WriteToSortedBlockTable("TestData\\LevelMergeOutputTest", 0, i);
        totalData += mt.Size;
    }
    var cache = new RazorCache();
    var timer = new Stopwatch();
    timer.Start();
    // The manifest allocates version numbers for the merged output pages.
    Manifest mf = new Manifest("TestData\\LevelMergeOutputTest");
    SortedBlockTable.MergeTables(cache, mf, 1, new List <PageRef> { new PageRef { Level = 0, Version = 0 }, new PageRef { Level = 0, Version = 1 }, new PageRef { Level = 0, Version = 2 }, new PageRef { Level = 0, Version = 3 } }, ExceptionHandling.ThrowAll, null);
    timer.Stop();
    Console.WriteLine("Wrote a multilevel merge at a throughput of {0} MB/s", (double)totalData / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
}
// Writes 10k random pairs and verifies EnumerateFromKey: counting from the
// lowest possible key yields all items, from the median key yields half, and
// from past the highest key yields none.
public void EnumerateFromKeys() {
    string path = Path.GetFullPath("TestData\\EnumerateFromKeys");
    if (!Directory.Exists(path)) { Directory.CreateDirectory(path); }
    List <KeyValuePair <Key, Value> > items = new List <KeyValuePair <Key, Value> > ();
    int num_items = 10000;
    var mt = new MemTable();
    for (int i = 0; i < num_items; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
        items.Add(new KeyValuePair <Key, Value> (k0, v0));
    }
    mt.WriteToSortedBlockTable("TestData\\EnumerateFromKeys", 10, 10);
    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, "TestData\\EnumerateFromKeys", 10, 10);
    try {
        var indexCache = new RazorCache();
        var timer = new Stopwatch();
        timer.Start();
        // A single zero byte sorts below every 40-byte random key.
        Assert.AreEqual(10000, sbt.EnumerateFromKey(indexCache, new Key(new byte[] { 0 }, 0)).Count());
        timer.Stop();
        Console.WriteLine("Counted from beginning at a throughput of {0} MB/s", (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
        // Sort the recorded pairs so items[5000] is the true median key.
        items = items.OrderBy((a) => a.Key).ToList();
        timer.Reset();
        timer.Start();
        Assert.AreEqual(5000, sbt.EnumerateFromKey(indexCache, items [5000].Key).Count());
        timer.Stop();
        Console.WriteLine("Counted from halfway at a throughput of {0} MB/s", (double)mt.Size / 2 / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
        // An all-0xFF key sorts above every stored key, so nothing is returned.
        Assert.AreEqual(0, sbt.EnumerateFromKey(indexCache, new Key(new byte[] { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF }, 0xFF)).Count());
    } finally {
        sbt.Close();
    }
}
// Initializes a new instance of the KeyValueStore.
// Creates the store directory if needed, loads (or creates) the manifest,
// recovers from any partially-completed journal rotation, and opens the
// journaled memtable for the current version. A null cache gets a private
// RazorCache instance.
public KeyValueStore(string baseFileName, RazorCache cache) {
    if (!Directory.Exists(baseFileName)) {
        Directory.CreateDirectory(baseFileName);
    }
    _manifest = new Manifest(baseFileName);
    _manifest.Logger = RazorDBx.C5.Logger.Log;
    int memTableVersion = _manifest.CurrentVersion(0);
    // Check for a previously aborted journal rotation.
    CheckForIncompleteJournalRotation(baseFileName, memTableVersion);
    // Create new journal for this run (and potentially load from disk, if there was data loaded previously).
    _currentJournaledMemTable = new JournaledMemTable(_manifest.BaseFileName, memTableVersion);
    // Idiom: null-coalescing instead of the equivalent conditional expression.
    _cache = cache ?? new RazorCache();
}
// Writes 10k random pairs to a sorted block table and verifies that every one
// of them can be found via SortedBlockTable.Lookup; also verifies a fresh
// random key (almost certainly absent) is reported as not found.
public void RandomizedLookups() {
    string path = Path.GetFullPath("TestData\\RandomizedKeys");
    if (!Directory.Exists(path)) { Directory.CreateDirectory(path); }
    // Remember every pair written so each lookup can be verified.
    List <KeyValuePair <Key, Value> > items = new List <KeyValuePair <Key, Value> > ();
    int num_items = 10000;
    var mt = new MemTable();
    for (int i = 0; i < num_items; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
        items.Add(new KeyValuePair <Key, Value> (k0, v0));
    }
    mt.WriteToSortedBlockTable("TestData\\RandomizedKeys", 10, 10);
    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, "TestData\\RandomizedKeys", 10, 10);
    var indexCache = new RazorCache();
    var timer = new Stopwatch();
    timer.Start();
    foreach (var pair in items) {
        Value value;
        Assert.IsTrue(SortedBlockTable.Lookup("TestData\\RandomizedKeys", 10, 10, indexCache, pair.Key, out value, ExceptionHandling.ThrowAll, null));
        Assert.AreEqual(pair.Value, value);
    }
    timer.Stop();
    // Negative case: a new random 40-byte key should not be present.
    Value randomValue;
    Assert.IsFalse(SortedBlockTable.Lookup("TestData\\RandomizedKeys", 10, 10, indexCache, Key.Random(40), out randomValue, ExceptionHandling.ThrowAll, null));
    Console.WriteLine("Randomized read sbt table at a throughput of {0} MB/s (avg {1} ms per lookup)", (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0), (double)timer.Elapsed.TotalSeconds / (double)num_items);
    sbt.Close();
}
// Writes four level-0 tables that all reuse the same 100 keys (j % 100), merges
// them into level 1, and verifies the merge deduplicates: 100 pairs survive and
// the first key holds the last value written for it within a memtable
// (j == 2400 is the last j with numToStore == 0).
public void LevelMergeDuplicateValuesTest() {
    string path = Path.GetFullPath("TestData\\LevelMergeDuplicateValuesTest");
    if (!Directory.Exists(path)) Directory.CreateDirectory(path);
    // Start from a clean directory so stale pages don't affect the merge.
    foreach (string file in Directory.GetFiles(path)) { File.Delete(file); }
    int num_tables_to_merge = 4;
    int items_per_table = 2500;
    int totalData = 0; // total bytes written, used for the throughput figure
    for (int i = 0; i < num_tables_to_merge; i++) {
        var mt = new MemTable();
        for (int j = 0; j < items_per_table; j++) {
            // Only 100 distinct keys; later adds overwrite earlier ones in the memtable.
            int numToStore = j % 100;
            var key = new Key(new ByteArray(BitConverter.GetBytes(numToStore)));
            var value = new Value(BitConverter.GetBytes(j));
            mt.Add(key, value);
        }
        mt.WriteToSortedBlockTable("TestData\\LevelMergeDuplicateValuesTest", 0, i);
        totalData += mt.Size;
    }
    var cache = new RazorCache();
    var timer = new Stopwatch();
    timer.Start();
    Manifest mf = new Manifest("TestData\\LevelMergeDuplicateValuesTest");
    SortedBlockTable.MergeTables(cache, mf, 1, new List<PageRef>{ new PageRef { Level = 0, Version = 0}, new PageRef { Level = 0, Version = 1}, new PageRef { Level = 0, Version = 2}, new PageRef { Level = 0, Version = 3} }, ExceptionHandling.ThrowAll, null);
    timer.Stop();
    // Open the block table and scan it to check the stored values
    var sbt = new SortedBlockTable(cache, mf.BaseFileName, 1, 1);
    try {
        var pairs = sbt.Enumerate().ToList();
        Assert.AreEqual(100, pairs.Count());
        Assert.AreEqual(2400, BitConverter.ToInt32(pairs.First().Value.ValueBytes, 0));
    } finally {
        sbt.Close();
    }
    Console.WriteLine("Wrote a multilevel merge with duplicates at a throughput of {0} MB/s", (double)totalData / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
}
// Writes 10k random pairs and verifies EnumerateFromKey: counting from the
// lowest possible key yields all items, from the median key yields half, and
// from past the highest key yields none.
public void EnumerateFromKeys() {
    string path = Path.GetFullPath("TestData\\EnumerateFromKeys");
    if (!Directory.Exists(path)) Directory.CreateDirectory(path);
    List<KeyValuePair<Key, Value>> items = new List<KeyValuePair<Key, Value>>();
    int num_items = 10000;
    var mt = new MemTable();
    for (int i = 0; i < num_items; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
        items.Add(new KeyValuePair<Key, Value>(k0, v0));
    }
    mt.WriteToSortedBlockTable("TestData\\EnumerateFromKeys", 10, 10);
    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, "TestData\\EnumerateFromKeys", 10, 10);
    try {
        var indexCache = new RazorCache();
        var timer = new Stopwatch();
        timer.Start();
        // A single zero byte sorts below every 40-byte random key.
        Assert.AreEqual(10000, sbt.EnumerateFromKey(indexCache, new Key(new byte[] { 0 }, 0)).Count());
        timer.Stop();
        Console.WriteLine("Counted from beginning at a throughput of {0} MB/s", (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
        // Sort the recorded pairs so items[5000] is the true median key.
        items = items.OrderBy((a) => a.Key).ToList();
        timer.Reset();
        timer.Start();
        Assert.AreEqual(5000, sbt.EnumerateFromKey(indexCache, items[5000].Key).Count());
        timer.Stop();
        Console.WriteLine("Counted from halfway at a throughput of {0} MB/s", (double)mt.Size / 2 / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
        // An all-0xFF key sorts above every stored key, so nothing is returned.
        Assert.AreEqual(0, sbt.EnumerateFromKey(indexCache, new Key(new byte[] { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF }, 0xFF)).Count());
    } finally {
        sbt.Close();
    }
}
// Micro-benchmark: writes one sorted block table, then opens and closes it
// 10,000 times, reporting the average open and close latency in ms.
public void TestFileOpenSpeed() {
    string path = Path.GetFullPath("TestData\\TestFileOpenSpeed");
    if (!Directory.Exists(path)) { Directory.CreateDirectory(path); }
    var mt = new MemTable();
    for (int i = 0; i < 10000; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
    }
    mt.WriteToSortedBlockTable("TestData\\TestFileOpenSpeed", 0, 10);
    // Keep every opened table alive so the close phase can be timed separately.
    var openTables = new List <SortedBlockTable> ();
    var cache = new RazorCache();
    var timer = new Stopwatch();
    timer.Start();
    for (int j = 0; j < 10000; j++) {
        var sbt = new SortedBlockTable(cache, "TestData\\TestFileOpenSpeed", 0, 10);
        openTables.Add(sbt);
    }
    timer.Stop();
    Console.WriteLine("Open block table {0} ms", timer.Elapsed.TotalMilliseconds / 10000);
    timer.Reset();
    timer.Start();
    for (int k = 0; k < 10000; k++) {
        openTables [k].Close();
    }
    timer.Stop();
    Console.WriteLine("Close block table {0} ms", timer.Elapsed.TotalMilliseconds / 10000);
}
// Writes 10k random pairs, then enumerates the resulting table twice: once to
// count the records and once to verify strictly ascending key order.
public void ReadKeys() {
    string path = Path.GetFullPath("TestData\\ReadKeys");
    if (!Directory.Exists(path)) { Directory.CreateDirectory(path); }
    var mt = new MemTable();
    for (int i = 0; i < 10000; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
    }
    mt.WriteToSortedBlockTable("TestData\\ReadKeys", 0, 10);
    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, "TestData\\ReadKeys", 0, 10);
    var timer = new Stopwatch();
    timer.Start();
    Assert.AreEqual(10000, sbt.Enumerate().Count());
    timer.Stop();
    Console.WriteLine("Counted sorted table at a throughput of {0} MB/s", (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
    // Confirm that the items are sorted.
    Key lastKey = Key.Empty; // Key.Empty sorts below every stored key
    timer.Reset();
    timer.Start();
    foreach (var pair in sbt.Enumerate()) {
        Assert.IsTrue(lastKey.CompareTo(pair.Key) < 0);
        lastKey = pair.Key;
    }
    timer.Stop();
    Console.WriteLine("Read & verify sorted table at a throughput of {0} MB/s", (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
    sbt.Close();
}
// CLI helper: scan-checks every *.sbt table file found directly in baseDir.
// Table files are named "<level>-<version>.sbt".
static void CheckBlockTableFiles(string baseDir) {
    Console.WriteLine("Checking Block Table Files '{0}'", baseDir);
    RazorCache cache = new RazorCache();
    foreach (string file in Directory.GetFiles(baseDir, "*.sbt", SearchOption.TopDirectoryOnly)) {
        // Recover (level, version) from the file name.
        string[] parts = Path.GetFileNameWithoutExtension(file).Split('-');
        int level = int.Parse(parts[0]);
        int version = int.Parse(parts[1]);
        Console.WriteLine("Level: {0} Version: {1}", level, version);
        var table = new SortedBlockTable(cache, baseDir, level, version);
        try {
            table.ScanCheck();
        } finally {
            // Release the handle before moving to the next file.
            table.Close();
        }
    }
}
// Writes 10k random key/value pairs to a sorted block table and dumps the raw
// (prefix-compressed) records to the console.
public void DumpPrefixedSBT() {
    string path = Path.GetFullPath("TestData\\DumpPrefixedSBT");
    if (!Directory.Exists(path)) Directory.CreateDirectory(path);
    var mt = new MemTable();
    for (int i = 0; i < 10000; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
    }
    mt.WriteToSortedBlockTable("TestData\\DumpPrefixedSBT", 0, 10);
    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, "TestData\\DumpPrefixedSBT", 0, 10);
    try {
        foreach (var pair in sbt.EnumerateRaw()) {
            Console.WriteLine("Key: {0} Value: {1}", pair.Key.ToString(), pair.Value.ToString());
        }
    } finally {
        // Fix: the original never closed the table, leaking the file handle
        // (every sibling test closes its SortedBlockTable).
        sbt.Close();
    }
}
// Writes 10k random key/value pairs to a sorted block table and dumps its
// decoded contents to the console.
public void WriteAndDumpSBT() {
    string path = Path.GetFullPath("TestData\\DumpPrefixedSBT");
    if (!Directory.Exists(path)) {
        Directory.CreateDirectory(path);
    }
    var mt = new MemTable();
    for (int i = 0; i < 10000; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
    }
    mt.WriteToSortedBlockTable("TestData\\DumpPrefixedSBT", 0, 10);
    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, "TestData\\DumpPrefixedSBT", 0, 10);
    try {
        sbt.DumpContents((msg) => Console.WriteLine(msg));
    } finally {
        // Fix: the original never closed the table, leaking the file handle
        // (every sibling test closes its SortedBlockTable).
        sbt.Close();
    }
}
// Writes 10k random key/value pairs to a sorted block table and dumps its
// decoded contents to the console.
public void WriteAndDumpSBT() {
    string path = Path.GetFullPath("TestData\\DumpPrefixedSBT");
    if (!Directory.Exists(path)) Directory.CreateDirectory(path);
    var mt = new MemTable();
    for (int i = 0; i < 10000; i++) {
        var k0 = Key.Random(40);
        var v0 = Value.Random(200);
        mt.Add(k0, v0);
    }
    mt.WriteToSortedBlockTable("TestData\\DumpPrefixedSBT", 0, 10);
    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, "TestData\\DumpPrefixedSBT", 0, 10);
    try {
        sbt.DumpContents((msg) => Console.WriteLine(msg));
    } finally {
        // Fix: the original never closed the table, leaking the file handle
        // (every sibling test closes its SortedBlockTable).
        sbt.Close();
    }
}
// CLI helper: opens the store at baseDir and deletes table pages that are no
// longer referenced by the manifest, echoing manifest log output to the console.
static void RemoveOrphanedTables(string baseDir) {
    Console.WriteLine("Removing Orphaned Tables '{0}'", baseDir);
    var blockCache = new RazorCache();
    var store = new KeyValueStore(baseDir, blockCache);
    // Surface the manifest's log messages while cleanup runs.
    store.Manifest.Logger = Console.WriteLine;
    try {
        store.RemoveOrphanedPages();
    } finally {
        // Always close the store so journals/handles are released.
        store.Close();
    }
}
// Locates the data block that could contain the given key, using the cached
// block index (the first key of every data block, in sorted order).
// Returns -1 when the key sorts below the first block's first key.
static int FindBlockForKey(string baseFileName, int level, int version, RazorCache indexCache, Key key) {
    Key[] blockIndex = indexCache.GetBlockTableIndex(baseFileName, level, version);
    int blockNum = Array.BinarySearch(blockIndex, key);
    // A negative result is the bitwise complement of the insertion point;
    // the candidate block is the one immediately before that point.
    return blockNum < 0 ? ~blockNum - 1 : blockNum;
}
// Writes four level-0 tables that all reuse the same 100 keys (j % 100), merges
// them into level 1, and verifies the merge deduplicates: 100 pairs survive and
// the first key holds the last value written for it within a memtable
// (j == 2400 is the last j with numToStore == 0).
public void LevelMergeDuplicateValuesTest() {
    string path = Path.GetFullPath("TestData\\LevelMergeDuplicateValuesTest");
    if (!Directory.Exists(path)) { Directory.CreateDirectory(path); }
    // Start from a clean directory so stale pages don't affect the merge.
    foreach (string file in Directory.GetFiles(path)) { File.Delete(file); }
    int num_tables_to_merge = 4;
    int items_per_table = 2500;
    int totalData = 0; // total bytes written, used for the throughput figure
    for (int i = 0; i < num_tables_to_merge; i++) {
        var mt = new MemTable();
        for (int j = 0; j < items_per_table; j++) {
            // Only 100 distinct keys; later adds overwrite earlier ones in the memtable.
            int numToStore = j % 100;
            var key = new Key(new ByteArray(BitConverter.GetBytes(numToStore)));
            var value = new Value(BitConverter.GetBytes(j));
            mt.Add(key, value);
        }
        mt.WriteToSortedBlockTable("TestData\\LevelMergeDuplicateValuesTest", 0, i);
        totalData += mt.Size;
    }
    var cache = new RazorCache();
    var timer = new Stopwatch();
    timer.Start();
    Manifest mf = new Manifest("TestData\\LevelMergeDuplicateValuesTest");
    SortedBlockTable.MergeTables(cache, mf, 1, new List <PageRef> { new PageRef { Level = 0, Version = 0 }, new PageRef { Level = 0, Version = 1 }, new PageRef { Level = 0, Version = 2 }, new PageRef { Level = 0, Version = 3 } }, ExceptionHandling.ThrowAll, null);
    timer.Stop();
    // Open the block table and scan it to check the stored values
    var sbt = new SortedBlockTable(cache, mf.BaseFileName, 1, 1);
    try {
        var pairs = sbt.Enumerate().ToList();
        Assert.AreEqual(100, pairs.Count());
        Assert.AreEqual(2400, BitConverter.ToInt32(pairs.First().Value.ValueBytes, 0));
    } finally {
        sbt.Close();
    }
    Console.WriteLine("Wrote a multilevel merge with duplicates at a throughput of {0} MB/s", (double)totalData / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
}
// Merge-enumerates the given table pages in ascending key order.
// "PreCached": each table's contents are fully materialized into memory
// (Enumerate().ToList()) before merging, trading memory for sequential reads.
// The finally clause runs when the iterator completes OR is disposed early,
// so the tables are always closed.
// NOTE(review): exceptionHandling and logger are accepted but not referenced
// in this body — presumably forwarded in another overload; confirm.
public static IEnumerable<KeyValuePair<Key, Value>> EnumerateMergedTablesPreCached(RazorCache cache, string baseFileName, IEnumerable<PageRef> tableSpecs, ExceptionHandling exceptionHandling, Action<string> logger) {
    // Open every requested (level, version) page up front.
    var tables = tableSpecs.Select (pageRef => new SortedBlockTable (cache, baseFileName, pageRef.Level, pageRef.Version)).ToList ();
    try {
        foreach (var pair in MergeEnumerator.Merge(tables.Select(t => t.Enumerate().ToList().AsEnumerable()), t => t.Key))
            yield return pair;
    } finally {
        tables.ForEach (t => t.Close ());
    }
}
// Backward-compatibility test: opens a canned V1-format sorted block table,
// checks it contains exactly 1000 records, and verifies strict ascending
// key order across the whole table.
public void V1SortedBlockTableFile() {
    var cache = new RazorCache();
    var sbt = new SortedBlockTable(cache, @"..\FormatTestData\V1", 0, 10);
    Assert.AreEqual(1000, sbt.Enumerate().Count());
    // Confirm that the items are sorted.
    var previous = KeyEx.Empty; // sorts below every stored key
    foreach (var pair in sbt.Enumerate()) {
        Assert.IsTrue(previous.CompareTo(pair.Key) < 0);
        previous = pair.Key;
    }
    sbt.Close();
}
// Writes four level-0 tables of random data and merges them into level 1 via
// SortedBlockTable.MergeTables, reporting write throughput.
public void LevelMergeOutputTest() {
    string path = Path.GetFullPath ("TestData\\LevelMergeOutputTest");
    if (!Directory.Exists (path)) Directory.CreateDirectory (path);
    int num_tables_to_merge = 4;
    int items_per_table = 2500;
    int totalData = 0; // total bytes written, used for the throughput figure
    for (int i = 0; i < num_tables_to_merge; i++) {
        var mt = new MemTable ();
        for (int j = 0; j < items_per_table; j++) {
            var randKey = Key.Random (40);
            var randVal = Value.Random (512);
            mt.Add (randKey, randVal);
        }
        mt.WriteToSortedBlockTable ("TestData\\LevelMergeOutputTest", 0, i);
        totalData += mt.Size;
    }
    var cache = new RazorCache ();
    var timer = new Stopwatch ();
    timer.Start ();
    // The manifest allocates version numbers for the merged output pages.
    Manifest mf = new Manifest ("TestData\\LevelMergeOutputTest");
    SortedBlockTable.MergeTables (cache, mf, 1, new List<PageRef> { new PageRef { Level = 0, Version = 0}, new PageRef { Level = 0, Version = 1}, new PageRef { Level = 0, Version = 2}, new PageRef { Level = 0, Version = 3} }, ExceptionHandling.ThrowAll, null);
    timer.Stop ();
    Console.WriteLine ("Wrote a multilevel merge at a throughput of {0} MB/s", (double)totalData / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
}
// Writes four level-0 tables of random data, then scans them through
// EnumerateMergedTablesPreCached and asserts the merged stream is strictly
// ascending by key.
public void LevelMergeReadTest2() {
    string path = Path.GetFullPath ("TestData\\LevelMergeReadTest2");
    if (!Directory.Exists (path)) Directory.CreateDirectory (path);
    int num_tables_to_merge = 4;
    int items_per_table = 2500;
    int totalData = 0; // total bytes written, used for the throughput figure
    for (int i = 0; i < num_tables_to_merge; i++) {
        var mt = new MemTable ();
        for (int j = 0; j < items_per_table; j++) {
            var randKey = Key.Random (40);
            var randVal = Value.Random (512);
            mt.Add (randKey, randVal);
        }
        mt.WriteToSortedBlockTable ("TestData\\LevelMergeReadTest2", 0, i);
        totalData += mt.Size;
    }
    var cache = new RazorCache ();
    int ct = 0;
    // Start below any randomly generated key so the first comparison passes.
    Key key = new Key (new ByteArray (new byte[] { 0 }));
    var timer = new Stopwatch ();
    timer.Start ();
    foreach (var pair in SortedBlockTable.EnumerateMergedTablesPreCached(cache, "TestData\\LevelMergeReadTest2", new List<PageRef>{ new PageRef { Level = 0, Version = 0}, new PageRef { Level = 0, Version = 1}, new PageRef { Level = 0, Version = 2}, new PageRef { Level = 0, Version = 3} }, ExceptionHandling.ThrowAll, null)) {
        // Merged enumeration must be strictly ascending.
        Assert.True (key.CompareTo (pair.Key) < 0);
        key = pair.Key;
        ct++;
    }
    timer.Stop ();
    Console.WriteLine ("Scanned through a multilevel merge at a throughput of {0} MB/s", (double)totalData / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
}
// Looks up a single key in the (level, version) table under baseFileName.
// Returns true with the stored value on a hit; false with Value.Empty on a miss.
// NOTE(review): exceptionHandling and logger are accepted but not referenced
// in this body — confirm whether they should be forwarded.
public static bool Lookup(string baseFileName, int level, int version, RazorCache cache, Key key, out Value value, ExceptionHandling exceptionHandling, Action<string> logger) {
    // Open the table (loads metadata, incl. the data block count).
    SortedBlockTable sbt = new SortedBlockTable (cache, baseFileName, level, version);
    try {
        // Use the cached block index to find the single block that could hold
        // the key; a negative result means the key sorts below all blocks.
        int dataBlockNum = FindBlockForKey (baseFileName, level, version, cache, key);
        if (dataBlockNum >= 0 && dataBlockNum < sbt._dataBlocks) {
            // Read just that one block (into a thread-local buffer) and scan it.
            byte[] block = sbt.ReadBlock (LocalThreadAllocatedBlock (), dataBlockNum);
            return SearchBlockForKey (block, key, out value);
        }
    } finally {
        sbt.Close ();
    }
    value = Value.Empty;
    return false;
}
// Enumerates all key/value pairs with keys >= key, in ascending order.
// An empty key enumerates the whole table. Uses double-buffered async block
// reads: while one block is being parsed, the next is already being fetched.
public IEnumerable<KeyValuePair<Key, Value>> EnumerateFromKey(RazorCache indexCache, Key key) {
    if (!FileExists)
        yield break;
    // Find the first data block that could contain the key.
    int startingBlock;
    if (key.Length == 0) {
        startingBlock = 0;
    } else {
        startingBlock = FindBlockForKey(_baseFileName, _level, _version, indexCache, key);
        // Negative means the key sorts below the first block; start at block 0.
        if (startingBlock < 0)
            startingBlock = 0;
    }
    if (startingBlock < _dataBlocks) {
        // Two buffers, double-buffered: parse one while the other is being read.
        byte[] allocBlockA = new byte[Config.SortedBlockSize];
        byte[] allocBlockB = new byte[Config.SortedBlockSize];
        byte[] currentBlock = allocBlockA;
        var asyncResult = BeginReadBlock(currentBlock, startingBlock);
        try {
            for (int i = startingBlock; i < _dataBlocks; i++) {
                // wait on last block read to complete so we can start processing the data
                byte[] block = EndReadBlock(asyncResult);
                asyncResult = null;
                // Go ahead and kick off the next block read asynchronously while we parse the last one.
                // Fix: the original guard (i < _dataBlocks) is always true inside
                // this loop, so the final iteration issued a wasted read of block
                // _dataBlocks — one past the last data block.
                if (i < _dataBlocks - 1) {
                    SwapBlocks(allocBlockA, allocBlockB, ref currentBlock); // swap the blocks so we can issue another disk i/o
                    asyncResult = BeginReadBlock(currentBlock, i + 1);
                }
                int offset = 2; // reset offset, start after tree root pointer
                // On the first block, we need to seek to the key first (if we don't have an empty key)
                if (i == startingBlock && key.Length != 0) {
                    while (offset >= 0) {
                        var pair = ReadPair(block, ref offset);
                        if (pair.Key.CompareTo(key) >= 0) {
                            yield return pair;
                            break;
                        }
                    }
                }
                // Yield the rest of the block (ReadPair sets offset negative at the end).
                while (offset >= 0)
                    yield return ReadPair(block, ref offset);
            }
        } finally {
            // Drain any outstanding read if enumeration is abandoned mid-way.
            if (asyncResult != null)
                EndReadBlock(asyncResult);
        }
    }
}
// Micro-benchmark: writes one sorted block table, then opens and closes it
// 10,000 times, reporting the average open and close latency in ms.
public void TestFileOpenSpeed() {
    string path = Path.GetFullPath ("TestData\\TestFileOpenSpeed");
    if (!Directory.Exists (path)) Directory.CreateDirectory (path);
    var mt = new MemTable ();
    for (int i = 0; i < 10000; i++) {
        var k0 = Key.Random (40);
        var v0 = Value.Random (200);
        mt.Add (k0, v0);
    }
    mt.WriteToSortedBlockTable ("TestData\\TestFileOpenSpeed", 0, 10);
    // Keep every opened table alive so the close phase can be timed separately.
    var openTables = new List<SortedBlockTable> ();
    var cache = new RazorCache ();
    var timer = new Stopwatch ();
    timer.Start ();
    for (int j = 0; j < 10000; j++) {
        var sbt = new SortedBlockTable (cache, "TestData\\TestFileOpenSpeed", 0, 10);
        openTables.Add (sbt);
    }
    timer.Stop ();
    Console.WriteLine ("Open block table {0} ms", timer.Elapsed.TotalMilliseconds / 10000);
    timer.Reset ();
    timer.Start ();
    for (int k = 0; k < 10000; k++) {
        openTables [k].Close ();
    }
    timer.Stop ();
    Console.WriteLine ("Close block table {0} ms", timer.Elapsed.TotalMilliseconds / 10000);
}
// Writes 10k random pairs, then enumerates the resulting table twice: once to
// count the records and once to verify strictly ascending key order.
public void ReadKeys() {
    string path = Path.GetFullPath ("TestData\\ReadKeys");
    if (!Directory.Exists (path)) Directory.CreateDirectory (path);
    var mt = new MemTable ();
    for (int i = 0; i < 10000; i++) {
        var k0 = Key.Random (40);
        var v0 = Value.Random (200);
        mt.Add (k0, v0);
    }
    mt.WriteToSortedBlockTable ("TestData\\ReadKeys", 0, 10);
    var cache = new RazorCache ();
    var sbt = new SortedBlockTable (cache, "TestData\\ReadKeys", 0, 10);
    var timer = new Stopwatch ();
    timer.Start ();
    Assert.AreEqual (10000, sbt.Enumerate ().Count ());
    timer.Stop ();
    Console.WriteLine ("Counted sorted table at a throughput of {0} MB/s", (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
    // Confirm that the items are sorted.
    Key lastKey = Key.Empty; // Key.Empty sorts below every stored key
    timer.Reset ();
    timer.Start ();
    foreach (var pair in sbt.Enumerate()) {
        Assert.IsTrue (lastKey.CompareTo (pair.Key) < 0);
        lastKey = pair.Key;
    }
    timer.Stop ();
    Console.WriteLine ("Read & verify sorted table at a throughput of {0} MB/s", (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
    sbt.Close ();
}
// Writes 10k random pairs to a sorted block table, then runs 10 concurrent
// threads performing Lookup calls against it to exercise thread-safety of the
// static SortedBlockTable.Lookup path.
public void RandomizedThreadedLookups() {
    string path = Path.GetFullPath ("TestData\\RandomizedThreadedLookups");
    if (!Directory.Exists (path)) Directory.CreateDirectory (path);
    // Remember every pair written so each lookup can be verified.
    List<KeyValuePair<Key, Value>> items = new List<KeyValuePair<Key, Value>> ();
    int num_items = 10000;
    var mt = new MemTable ();
    for (int i = 0; i < num_items; i++) {
        var k0 = Key.Random (40);
        var v0 = Value.Random (200);
        mt.Add (k0, v0);
        items.Add (new KeyValuePair<Key, Value> (k0, v0));
    }
    mt.WriteToSortedBlockTable ("TestData\\RandomizedThreadedLookups", 10, 10);
    var cache = new RazorCache ();
    var sbt = new SortedBlockTable (cache, "TestData\\RandomizedThreadedLookups", 10, 10);
    var indexCache = new RazorCache ();
    List<Thread> threads = new List<Thread> ();
    for (int t = 0; t < 10; t++) {
        // Each thread gets its number as the ParameterizedThreadStart argument.
        threads.Add (new Thread ((num) => {
            for (int k = 0; k < num_items / 10; k++) {
                // NOTE(review): index k * num means thread 0 looks up items[0]
                // a thousand times; a disjoint stripe would be k * 10 + num —
                // looks unintended, confirm before changing.
                var pair = items [k * (int)num];
                Value value;
                Assert.IsTrue (SortedBlockTable.Lookup ("TestData\\RandomizedThreadedLookups", 10, 10, indexCache, pair.Key, out value, ExceptionHandling.ThrowAll, null));
                Assert.AreEqual (pair.Value, value);
            }
        }));
    }
    var timer = new Stopwatch ();
    timer.Start ();
    int threadNum = 0;
    // ForEach runs sequentially, so threadNum++ is not racy here.
    threads.ForEach ((t) => t.Start (threadNum++));
    threads.ForEach ((t) => t.Join ());
    timer.Stop ();
    Console.WriteLine ("Randomized (threaded) read sbt table at a throughput of {0} MB/s (avg {1} ms per lookup)", (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0), (double)timer.Elapsed.TotalSeconds / (double)num_items);
    sbt.Close ();
}
// Merges the given source pages into destinationLevel, splitting the output
// into multiple pages so no page exceeds Config.MaxSortedBlockTableSize and no
// page spans more of the next level's key range than allowed.
// Returns the page records created.
public static IEnumerable<PageRecord> MergeTables(RazorCache cache, Manifest mf, int destinationLevel, IEnumerable<PageRef> tableSpecs, ExceptionHandling exceptionHandling, Action<string> logger) {
    var orderedTableSpecs = tableSpecs.OrderByPagePriority ();
    var outputTables = new List<PageRecord> ();
    SortedBlockTableWriter writer = null;
    Key firstKey = new Key ();
    Key lastKey = new Key ();
    Key maxKey = new Key (); // Maximum key we can span with this table to avoid covering more than 10 pages in the destination

    // Starts a new output page whose first key is the given pair's key.
    Action<KeyValuePair<Key, Value>> OpenPage = (pair) => {
        writer = new SortedBlockTableWriter (mf.BaseFileName, destinationLevel, mf.NextVersion (destinationLevel));
        firstKey = pair.Key;
        using (var m = mf.GetLatestManifest())
            maxKey = m.FindSpanningLimit (destinationLevel + 1, firstKey);
    };
    // Finishes the current output page and records its key range.
    Action ClosePage = () => {
        writer.Close ();
        outputTables.Add (new PageRecord (destinationLevel, writer.Version, firstKey, lastKey));
        writer = null;
    };

    foreach (var pair in EnumerateMergedTablesPreCached(cache, mf.BaseFileName, orderedTableSpecs, exceptionHandling, logger)) {
        // Bug fix: close a full (or span-limited) page BEFORE opening/writing.
        // The original opened first and closed second, leaving writer null and
        // throwing NullReferenceException on the next WritePair whenever a page
        // filled up or hit its spanning limit.
        if (writer != null && (writer.WrittenSize >= Config.MaxSortedBlockTableSize || (!maxKey.IsEmpty && pair.Key.CompareTo (maxKey) >= 0)))
            ClosePage ();
        if (writer == null)
            OpenPage (pair);
        writer.WritePair (pair.Key, pair.Value);
        lastKey = pair.Key;
    }
    // Flush the final partially-filled page, if any.
    if (writer != null)
        ClosePage ();
    return outputTables;
}
// Writes four level-0 tables of random data, opens all of them, and scans them
// through MergeEnumerator.Merge directly, asserting strictly ascending key order.
public void LevelMergeReadTest() {
    string path = Path.GetFullPath ("TestData\\LevelMergeReadTest");
    if (!Directory.Exists (path)) Directory.CreateDirectory (path);
    int num_tables_to_merge = 4;
    int items_per_table = 2500;
    int totalData = 0; // total bytes written, used for the throughput figure
    for (int i = 0; i < num_tables_to_merge; i++) {
        var mt = new MemTable ();
        for (int j = 0; j < items_per_table; j++) {
            var randKey = Key.Random (40);
            var randVal = Value.Random (512);
            mt.Add (randKey, randVal);
        }
        mt.WriteToSortedBlockTable ("TestData\\LevelMergeReadTest", 0, i);
        totalData += mt.Size;
    }
    // Keep the enumerables and the tables separately: the tables must stay open
    // for the duration of the merge and are closed afterwards.
    var tables = new List<IEnumerable<KeyValuePair<Key, Value>>> ();
    var sbts = new List<SortedBlockTable> ();
    var cache = new RazorCache ();
    for (int j = 0; j < num_tables_to_merge; j++) {
        var sbt = new SortedBlockTable (cache, "TestData\\LevelMergeReadTest", 0, j);
        tables.Add (sbt.Enumerate ());
        sbts.Add (sbt);
    }
    int ct = 0;
    // Start below any randomly generated key so the first comparison passes.
    Key key = Key.FromBytes (new byte[] { 0, 0 });
    var timer = new Stopwatch ();
    timer.Start ();
    foreach (var pair in MergeEnumerator.Merge(tables, p => p.Key)) {
        Assert.True (key.CompareTo (pair.Key) < 0);
        key = pair.Key;
        ct++;
    }
    timer.Stop ();
    sbts.ForEach (s => s.Close ());
    Console.WriteLine ("Scanned through a multilevel merge at a throughput of {0} MB/s", (double)totalData / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
}
// Deletes ALL data in the store: closes the journal, table manager, and every
// secondary index; removes every file and folder under the store's base path;
// then re-creates a fresh manifest, memtable, cache, and secondary-index map.
public void Truncate() {
    // Shut everything down before deleting files out from under it.
    _currentJournaledMemTable.Close();
    TableManager.Default.Close(this);
    foreach (var pair in _secondaryIndexes) {
        pair.Value.Close(FastClose);
    }
    string basePath = Path.GetFullPath(Manifest.BaseFileName);
    // Best-effort deletes: failures are logged to the manifest, not thrown.
    foreach (string file in Directory.GetFiles(basePath, "*.*", SearchOption.AllDirectories)) {
        Helper.DeleteFile(file, false, (msg) => { Manifest.LogMessage(msg); });
    }
    foreach (string dir in Directory.GetDirectories(basePath, "*.*", SearchOption.AllDirectories)) {
        Helper.DeleteFolder(dir, false, (msg) => { Manifest.LogMessage(msg); });
    }
    // Rebuild the empty store state.
    _manifest = new Manifest(basePath);
    _currentJournaledMemTable = new JournaledMemTable(_manifest.BaseFileName, _manifest.CurrentVersion(0));
    // NOTE(review): this replaces whatever cache was supplied at construction
    // (possibly shared with other stores) with a private one — confirm intended.
    _cache = new RazorCache();
    _secondaryIndexes = new Dictionary<string, KeyValueStore>(StringComparer.OrdinalIgnoreCase);
    Manifest.LogMessage("Database Truncated.");
}
// Writes 10k random pairs to a sorted block table and verifies that every one
// of them can be found via SortedBlockTable.Lookup; also verifies a fresh
// random key (almost certainly absent) is reported as not found.
public void RandomizedLookups() {
    string path = Path.GetFullPath ("TestData\\RandomizedKeys");
    if (!Directory.Exists (path)) Directory.CreateDirectory (path);
    // Remember every pair written so each lookup can be verified.
    List<KeyValuePair<Key, Value>> items = new List<KeyValuePair<Key, Value>> ();
    int num_items = 10000;
    var mt = new MemTable ();
    for (int i = 0; i < num_items; i++) {
        var k0 = Key.Random (40);
        var v0 = Value.Random (200);
        mt.Add (k0, v0);
        items.Add (new KeyValuePair<Key, Value> (k0, v0));
    }
    mt.WriteToSortedBlockTable ("TestData\\RandomizedKeys", 10, 10);
    var cache = new RazorCache ();
    var sbt = new SortedBlockTable (cache, "TestData\\RandomizedKeys", 10, 10);
    var indexCache = new RazorCache ();
    var timer = new Stopwatch ();
    timer.Start ();
    foreach (var pair in items) {
        Value value;
        Assert.IsTrue (SortedBlockTable.Lookup ("TestData\\RandomizedKeys", 10, 10, indexCache, pair.Key, out value, ExceptionHandling.ThrowAll, null));
        Assert.AreEqual (pair.Value, value);
    }
    timer.Stop ();
    // Negative case: a new random 40-byte key should not be present.
    Value randomValue;
    Assert.IsFalse (SortedBlockTable.Lookup ("TestData\\RandomizedKeys", 10, 10, indexCache, Key.Random (40), out randomValue, ExceptionHandling.ThrowAll, null));
    Console.WriteLine ("Randomized read sbt table at a throughput of {0} MB/s (avg {1} ms per lookup)", (double)mt.Size / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0), (double)timer.Elapsed.TotalSeconds / (double)num_items);
    sbt.Close ();
}