// Crash-recovery test: launches an external RazorTest.exe process that populates the
// store, kills it mid-operation (possibly mid-merge), then re-opens the database and
// verifies the surviving data still enumerates in strictly ascending key order.
public void CrashTestOnMerge() {
    string path = Path.GetFullPath ("TestData\\CrashTestOnMerge");
    // Start from a clean store so the child process creates all the data.
    using (var db = new KeyValueStore(path))
        db.Truncate ();
    // Named, cross-process event: the child signals it when it has finished writing.
    var doneSetting = new EventWaitHandle (false, EventResetMode.ManualReset, "CrashTestOnMerge");
    doneSetting.Reset ();
    // Locate RazorTest.exe next to the currently executing test assembly.
    string testPath = Path.Combine (Path.GetDirectoryName (Assembly.GetExecutingAssembly ().GetName ().CodeBase), "RazorTest.exe");
    var process = Process.Start (testPath, "CrashTestOnMerge");
    doneSetting.WaitOne (30000); // wait up to 30s for the child to finish its writes
    process.Kill (); // simulate a hard crash
    process.WaitForExit ();
    // Open the database created by the other program
    using (var db = new KeyValueStore(path)) {
        db.Manifest.Logger = (msg) => Console.WriteLine (msg);
        Console.WriteLine ("Begin enumeration.");
        ByteArray lastKey = new ByteArray ();
        int ct = 0;
        foreach (var pair in db.Enumerate()) {
            ByteArray k = new ByteArray (pair.Key);
            // Keys must come back in strictly increasing order despite the crash.
            Assert.True (lastKey.CompareTo (k) < 0);
            lastKey = k;
            ct++;
        }
        // NOTE(review): assumes the child wrote exactly 50,000 items before signalling
        // the event — confirm against the RazorTest.exe "CrashTestOnMerge" driver.
        Assert.AreEqual (50000, ct);
        Console.WriteLine ("Found {0} items in the crashed database.", ct);
    }
}
// Executes merge passes until the store is fully compacted. Level 0 is special:
// all of its pages (up to twice the configured max, under write pressure) are merged
// into level 1 at once, together with any overlapping level-1 pages. Levels >= 1
// promote one page at a time into the next level. The sweep repeats until a full
// pass performs no merges. mergeCount brackets the pass so schedulers can see a
// merge is in flight; mergeLock serializes concurrent merge threads.
public static void RunTableMergePass(KeyValueStore kvStore) {
    try {
        Interlocked.Increment (ref kvStore.mergeCount);
        lock (kvStore.mergeLock) {
            RazorCache cache = kvStore.Cache;
            Manifest manifest = kvStore.Manifest;
            while (true) {
                bool mergedDuringLastPass = false;
                // Work from a stable snapshot of the manifest for this sweep.
                using (var manifestInst = kvStore.Manifest.GetLatestManifest()) {
                    if (manifestInst.GetNumPagesAtLevel (0) >= Config.MaxPagesOnLevel (0)) { // Handle level 0 (merge all pages)
                        mergedDuringLastPass = true;
                        int Level0PagesToTake = Config.MaxPagesOnLevel (0) * 2; // Grab more pages if they are available (this happens during heavy write pressure)
                        // Oldest pages first, so overlapping versions merge in write order.
                        var inputPageRecords = manifestInst.GetPagesAtLevel (0).OrderBy (p => p.Version).Take (Level0PagesToTake).ToList ();
                        var startKey = inputPageRecords.Min (p => p.FirstKey);
                        var endKey = inputPageRecords.Max (p => p.LastKey);
                        // Level-1 pages overlapping the level-0 key range must be merged too.
                        var mergePages = manifestInst.FindPagesForKeyRange (1, startKey, endKey).AsPageRefs ().ToList ();
                        var allInputPages = inputPageRecords.AsPageRefs ().Concat (mergePages).ToList ();
                        var outputPages = SortedBlockTable.MergeTables (cache, manifest, 1, allInputPages, ExceptionHandling.ThrowAll, null).ToList ();
                        // Swap the input pages for the merged output pages in the manifest.
                        manifest.ModifyPages (outputPages, allInputPages);
                        manifest.LogMessage ("Merge Level 0 => InputPages: {0} OutputPages:{1}",
                            string.Join (",", allInputPages.Select (p => string.Format ("{0}-{1}", p.Level, p.Version)).ToArray ()),
                            string.Join (",", outputPages.Select (p => string.Format ("{0}-{1}", p.Level, p.Version)).ToArray ())
                        );
                    }
                    for (int level = 1; level < manifestInst.NumLevels - 1; level++) { // handle the rest of the levels (merge only one page upwards)
                        if (manifestInst.GetNumPagesAtLevel (level) >= Config.MaxPagesOnLevel (level)) {
                            mergedDuringLastPass = true;
                            // Manifest tracks which page is next in line to be pushed up.
                            var inputPage = manifest.NextMergePage (level);
                            var mergePages = manifestInst.FindPagesForKeyRange (level + 1, inputPage.FirstKey, inputPage.LastKey).ToList ();
                            var inputPageRecords = mergePages.Concat (new PageRecord[] { inputPage });
                            var allInputPages = inputPageRecords.AsPageRefs ().ToList ();
                            var outputPages = SortedBlockTable.MergeTables (cache, manifest, level + 1, allInputPages, ExceptionHandling.ThrowAll, null);
                            // Notify if a merge happened, implemented for testing primarily
                            if (kvStore.MergeCallback != null)
                                kvStore.MergeCallback (level, inputPageRecords, outputPages);
                            manifest.ModifyPages (outputPages, allInputPages);
                            manifest.LogMessage ("Merge Level >0 => InputPages: {0} OutputPages:{1}",
                                string.Join (",", allInputPages.Select (p => string.Format ("{0}-{1}", p.Level, p.Version)).ToArray ()),
                                string.Join (",", outputPages.Select (p => string.Format ("{0}-{1}", p.Level, p.Version)).ToArray ())
                            );
                        }
                    }
                }
                if (!mergedDuringLastPass)
                    return; // No more merging is needed, we are finished with this pass
            }
        }
    } finally {
        Interlocked.Decrement (ref kvStore.mergeCount);
    }
}
// Measures random-read throughput on values larger than a sorted block
// (forcing the large-value roll-over path) and verifies every value reads
// back identical to what was written.
public void LargeDataSetGetTest() {
    string path = Path.GetFullPath ("TestData\\LargeDataSetGetTest");
    int totalSize = 0;
    int num_items = 500;
    var timer = new Stopwatch ();
    using (var db = new KeyValueStore(path)) {
        db.Truncate ();
        // Generate a data value that is larger than the block size.
        var value = ByteArray.Random (Config.SortedBlockSize + 256);
        // Do it enough times to ensure a roll-over
        for (int i = 0; i < num_items; i++) {
            var key = BitConverter.GetBytes (i);
            db.Set (key, value.InternalBytes);
            totalSize += value.InternalBytes.Length;
        }
        // Time only the read-back phase.
        timer.Start ();
        for (int i = 0; i < num_items; i++) {
            var key = BitConverter.GetBytes (i);
            Assert.AreEqual (value.InternalBytes, db.Get (key));
        }
        timer.Stop ();
        // BUGFIX: the average lookup time was computed in seconds but the message
        // labels it "ms" — use TotalMilliseconds so units match the label.
        Console.WriteLine ("Randomized read throughput of {0} MB/s (avg {1} ms per lookup)",
            (double)totalSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0),
            timer.Elapsed.TotalMilliseconds / (double)num_items);
    }
}
// Runs an integrity scan over the key-value store located at baseDir,
// guaranteeing the store handle is closed even if the scan throws.
static void CheckDatabase(string baseDir) {
    Console.WriteLine ("Checking Key Value Store '{0}'", baseDir);
    var store = new KeyValueStore (baseDir, new RazorCache ());
    try {
        store.ScanCheck ();
    } finally {
        store.Close ();
    }
}
// Writes records under three different values of the "NumberType" secondary
// index, then re-opens the store and verifies Find returns exactly the records
// indexed under each value (and nothing for unknown indexes/values).
public void AddObjectsAndLookup() {
    string path = Path.GetFullPath ("TestData\\AddObjectsAndLookup");
    using (var db = new KeyValueStore(path)) {
        db.Truncate ();
        var indexed = new SortedDictionary<string, byte[]> ();
        indexed ["NumberType"] = Encoding.UTF8.GetBytes ("Fib");
        db.Set (BitConverter.GetBytes (112), Encoding.UTF8.GetBytes ("112"), indexed);
        db.Set (BitConverter.GetBytes (1123), Encoding.UTF8.GetBytes ("1123"), indexed);
        db.Set (BitConverter.GetBytes (11235), Encoding.UTF8.GetBytes ("11235"), indexed);
        db.Set (BitConverter.GetBytes (112358), Encoding.UTF8.GetBytes ("112358"), indexed);
        indexed ["NumberType"] = Encoding.UTF8.GetBytes ("Seq");
        db.Set (BitConverter.GetBytes (1), Encoding.UTF8.GetBytes ("1"), indexed);
        db.Set (BitConverter.GetBytes (2), Encoding.UTF8.GetBytes ("2"), indexed);
        db.Set (BitConverter.GetBytes (3), Encoding.UTF8.GetBytes ("3"), indexed);
        db.Set (BitConverter.GetBytes (4), Encoding.UTF8.GetBytes ("4"), indexed);
        indexed ["NumberType"] = Encoding.UTF8.GetBytes ("Zero");
        db.Set (BitConverter.GetBytes (0), Encoding.UTF8.GetBytes ("0"), indexed);
    }
    using (var db = new KeyValueStore(path)) {
        var zeros = db.Find ("NumberType", Encoding.UTF8.GetBytes ("Zero")).ToList ();
        Assert.AreEqual (1, zeros.Count ());
        Assert.AreEqual ("0", Encoding.UTF8.GetString (zeros [0].Value));
        var seqs = db.Find ("NumberType", Encoding.UTF8.GetBytes ("Seq")).ToList ();
        Assert.AreEqual (4, seqs.Count ());
        Assert.AreEqual ("1", Encoding.UTF8.GetString (seqs [0].Value));
        Assert.AreEqual ("2", Encoding.UTF8.GetString (seqs [1].Value));
        Assert.AreEqual ("3", Encoding.UTF8.GetString (seqs [2].Value));
        Assert.AreEqual ("4", Encoding.UTF8.GetString (seqs [3].Value));
        var fib = db.Find ("NumberType", Encoding.UTF8.GetBytes ("Fib")).ToList ();
        // BUGFIX: this assertion previously re-checked seqs.Count() (copy-paste
        // error); it must validate the count of the fib result set.
        Assert.AreEqual (4, fib.Count ());
        // Results come back ordered by the little-endian key bytes.
        Assert.AreEqual ("1123", Encoding.UTF8.GetString (fib [0].Value));
        Assert.AreEqual ("112", Encoding.UTF8.GetString (fib [1].Value));
        Assert.AreEqual ("11235", Encoding.UTF8.GetString (fib [2].Value));
        Assert.AreEqual ("112358", Encoding.UTF8.GetString (fib [3].Value));
        // Lookups against unknown indexes or unindexed values return empty sets.
        var non = db.Find ("NoIndex", new byte[] { 23 }).ToList ();
        Assert.AreEqual (0, non.Count ());
        non = db.Find ("NumberType", Encoding.UTF8.GetBytes ("Unfound")).ToList ();
        Assert.AreEqual (0, non.Count ());
    }
}
// Returns the KeyValueStore backing the named secondary index, creating and
// caching it on first use. Thread-safe via the _secondaryIndexes lock.
KeyValueStore GetSecondaryIndex(string IndexName) {
    lock (_secondaryIndexes) {
        KeyValueStore store;
        if (_secondaryIndexes.TryGetValue(IndexName, out store))
            return store;
        store = new KeyValueStore(Config.IndexBaseName(Manifest.BaseFileName, IndexName), _cache);
        // Forward index log output to the parent logger, tagged with the index name.
        if (Manifest.Logger != null)
            store.Manifest.Logger = msg => Manifest.Logger(string.Format("{0}: {1}", IndexName, msg));
        _secondaryIndexes.Add(IndexName, store);
        return store;
    }
}
// Called after a write to the store; schedules a background merge pass if no
// merge is currently running and the pause interval since the last scheduled
// pass has elapsed.
public void MarkKeyValueStoreAsModified(KeyValueStore kvStore) {
    // Only schedule a merge run if no merging is happening
    if (kvStore.mergeCount != 0)
        return;
    // determine if we've reached the next time threshold for another update
    long now = Stopwatch.GetTimestamp();
    long threshold = kvStore.ticksTillNextMerge;
    if (now > threshold) {
        // Schedule a tablemerge run on the threadpool
        ThreadPool.QueueUserWorkItem(state => RunTableMergePass(kvStore));
    }
    kvStore.ticksTillNextMerge = now + pauseTime;
}
// Deletes entries from the named secondary index whose value equals `value`,
// scanning only keys that begin with the `startAt` prefix (e.g. all entries for
// one projectID). The scan stops at the first key outside the prefix range.
public void RemoveIndexRangeForValue(string indexName, byte[] startAt, byte[] value) {
    KeyValueStore indexStore = GetSecondaryIndex(indexName);
    foreach (var pair in indexStore.EnumerateFromKey(startAt)) {
        // Exit index removal when the prefix (the projectID) in the index key changes.
        // BUGFIX: the range check must run BEFORE the delete — previously a matching
        // value on the first key *past* the prefix range was deleted before breaking.
        // NOTE(review): assumes every enumerated key is at least startAt.Length bytes — confirm.
        if (ByteArray.CompareMemCmp(startAt, 0, pair.Key, 0, startAt.Length) != 0) {
            break;
        }
        if (ByteArray.CompareMemCmp(pair.Value, value) == 0) {
            indexStore.Delete(pair.Key);
        }
    }
}
// Write-notification hook: queues a background merge pass when no merge is in
// flight and enough time has passed since the last one was scheduled.
public void MarkKeyValueStoreAsModified(KeyValueStore kvStore) {
    // Only schedule a merge run if no merging is happening
    if (kvStore.mergeCount == 0) {
        // determine if we've reached the next time threshold for another update
        long currentTicks = Stopwatch.GetTimestamp ();
        long nextMergeTicks = kvStore.ticksTillNextMerge;
        if (currentTicks > nextMergeTicks) {
            // Schedule a tablemerge run on the threadpool
            ThreadPool.QueueUserWorkItem (state => RunTableMergePass (kvStore));
        }
        kvStore.ticksTillNextMerge = currentTicks + pauseTime;
    }
}
// Counts every entry currently stored in the named secondary index.
public int CountIndex(string indexName) {
    var store = GetSecondaryIndex(indexName);
    int total = 0;
    foreach (var entry in store.Enumerate()) {
        total++;
    }
    return total;
}
// Verifies that deleting a primary record does NOT immediately remove its
// secondary-index entry (cleanup is lazy), and that CleanIndex purges the
// stale entry afterwards.
public void IndexClean() {
    string path = Path.GetFullPath ("TestData\\IndexClean");
    using (var db = new KeyValueStore(path)) {
        db.Truncate ();
        db.Manifest.Logger = msg => Console.WriteLine (msg);
        db.Set (Encoding.UTF8.GetBytes ("KeyA"), Encoding.UTF8.GetBytes ("ValueA:1"), new Dictionary<string, byte[]> { { "Idx", Encoding.UTF8.GetBytes("1") } });
        db.Set (Encoding.UTF8.GetBytes ("KeyB"), Encoding.UTF8.GetBytes ("ValueB:2"), new Dictionary<string, byte[]> { { "Idx", Encoding.UTF8.GetBytes("2") } });
        db.Set (Encoding.UTF8.GetBytes ("KeyC"), Encoding.UTF8.GetBytes ("ValueC:3"), new Dictionary<string, byte[]> { { "Idx", Encoding.UTF8.GetBytes("3") } });
        var lookupValue = db.Find ("Idx", Encoding.UTF8.GetBytes ("3")).Single ();
        Assert.AreEqual ("ValueC:3", Encoding.UTF8.GetString (lookupValue.Value));
        Assert.AreEqual ("KeyC", Encoding.UTF8.GetString (lookupValue.Key));
        // Delete the primary record; its index entry is expected to linger.
        db.Delete (Encoding.UTF8.GetBytes ("KeyC"));
    }
    // Open the index directly and confirm that the lookup key is still there
    using (var db = new KeyValueStore(Path.Combine(path, "Idx")))
        Assert.AreEqual (3, db.Enumerate ().Count ());
    using (var db = new KeyValueStore(path))
        db.CleanIndex ("Idx");
    // Open the index directly and confirm that the lookup key is now gone
    using (var db = new KeyValueStore(Path.Combine(path, "Idx")))
        Assert.AreEqual (2, db.Enumerate ().Count ());
}
// Shutdown hook: flushes outstanding compaction work for the store by running
// one final, synchronous merge pass before close completes.
public void Close(KeyValueStore kvStore) {
    RunTableMergePass (kvStore);
}
// Interlaces small (10-byte) and near-maximum large values on random keys and
// verifies each one reads back in the expected size class.
public void TestLargeAndSmallOddWrites() {
    // BUGFIX: the data directory was "TestData\\TestLargeAndSmallInterlacedWrites",
    // which did not match this test's name and risked sharing on-disk state with
    // another test; keep the per-test directory unique.
    string path = Path.GetFullPath ("TestData\\TestLargeAndSmallOddWrites");
    using (var db = new KeyValueStore(path)) {
        db.Truncate ();
        // Create a random set of keybytes
        List<byte[]> keys = new List<byte[]> ();
        for (int i = 0; i < 10; i++)
            keys.Add (Key.Random (10).KeyBytes);
        // Set Odds to large
        for (int i = 0; i < keys.Count; i++) {
            var k = keys [i];
            var v = ((i & 1) == 1) ? GenerateBlock (Config.MaxLargeValueSize - 100) : GenerateBlock (10);
            db.Set (k, v);
        }
        // Now check the results
        for (int i = 0; i < keys.Count; i++) {
            var k = keys [i];
            var v = db.Get (k);
            CheckBlock (v);
            if ((i & 1) == 0)
                Assert.Less (v.Length, 100, " i = {0} should be small, but size={1}", i, v.Length);
            else
                Assert.Greater (v.Length, 100, " i = {0} should be large, but size={1}", i, v.Length);
        }
    }
}
// Writes a single value of exactly Config.MaxLargeValueSize bytes.
// NOTE(review): the name implies this write should be rejected as too large;
// presumably an ExpectedException attribute (not visible in this chunk) asserts
// the throw — confirm against the attribute on this method.
public void TestTooLargeData() {
    string path = Path.GetFullPath ("TestData\\TestTooLargeData");
    using (var db = new KeyValueStore(path))
        db.Set (Key.Random (10).KeyBytes, ByteArray.Random (Config.MaxLargeValueSize).InternalBytes);
}
// Deletes table pages no longer referenced by the manifest of the store at
// baseDir, guaranteeing the store handle is closed even on failure.
static void RemoveOrphanedTables(string baseDir) {
    Console.WriteLine ("Removing Orphaned Tables '{0}'", baseDir);
    var store = new KeyValueStore (baseDir, new RazorCache ());
    try {
        store.RemoveOrphanedPages ();
    } finally {
        store.Close ();
    }
}
// Verifies that updating a record's indexed value leaves the OLD index entry
// in place (still findable), and that RemoveFromIndex explicitly deletes it.
public void LookupOldDataFromIndex() {
    string path = Path.GetFullPath ("TestData\\LookupOldDataFromIndex");
    using (var db = new KeyValueStore(path)) {
        db.Truncate ();
        db.Manifest.Logger = msg => Console.WriteLine (msg);
        db.Set (Encoding.UTF8.GetBytes ("KeyA"), Encoding.UTF8.GetBytes ("ValueA:1"), new Dictionary<string, byte[]> { { "Idx", Encoding.UTF8.GetBytes("1") } });
        db.Set (Encoding.UTF8.GetBytes ("KeyB"), Encoding.UTF8.GetBytes ("ValueB:2"), new Dictionary<string, byte[]> { { "Idx", Encoding.UTF8.GetBytes("2") } });
        db.Set (Encoding.UTF8.GetBytes ("KeyC"), Encoding.UTF8.GetBytes ("ValueC:3"), new Dictionary<string, byte[]> { { "Idx", Encoding.UTF8.GetBytes("3") } });
        var lookupValue = db.Find ("Idx", Encoding.UTF8.GetBytes ("3")).Single ();
        Assert.AreEqual ("ValueC:3", Encoding.UTF8.GetString (lookupValue.Value));
        Assert.AreEqual ("KeyC", Encoding.UTF8.GetString (lookupValue.Key));
        // Re-index KeyC under "4"; the entry under "3" is not removed automatically.
        db.Set (Encoding.UTF8.GetBytes ("KeyC"), Encoding.UTF8.GetBytes ("ValueC:4"), new Dictionary<string, byte[]> { { "Idx", Encoding.UTF8.GetBytes("4") } });
        lookupValue = db.Find ("Idx", Encoding.UTF8.GetBytes ("4")).Single ();
        Assert.AreEqual ("ValueC:4", Encoding.UTF8.GetString (lookupValue.Value));
        Assert.AreEqual ("KeyC", Encoding.UTF8.GetString (lookupValue.Key));
        // The stale entry under "3" is still present until explicitly removed.
        Assert.True (db.Find ("Idx", Encoding.UTF8.GetBytes ("3")).Any ());
        db.RemoveFromIndex (Encoding.UTF8.GetBytes ("KeyC"), new Dictionary<string, byte[]> { { "Idx", Encoding.UTF8.GetBytes("3") } });
        Assert.False (db.Find ("Idx", Encoding.UTF8.GetBytes ("3")).Any ());
    }
}
// Shutdown hook: runs one final, synchronous merge pass so no compaction work
// is left pending when the store closes.
public void Close(KeyValueStore kvStore) {
    RunTableMergePass(kvStore);
}
// Runs merge passes to completion. Level 0 merges all of its pages (plus any
// overlapping level-1 pages) into level 1 in a single step; each higher level
// promotes one page at a time into the level above it. Sweeps repeat until a
// full sweep performs no merge. mergeCount marks a merge as in-flight for the
// scheduler; mergeLock serializes concurrent passes.
public static void RunTableMergePass(KeyValueStore kvStore) {
    try {
        Interlocked.Increment(ref kvStore.mergeCount);
        lock (kvStore.mergeLock) {
            RazorCache cache = kvStore.Cache;
            Manifest manifest = kvStore.Manifest;
            while (true) {
                bool mergedDuringLastPass = false;
                // Take a stable snapshot of the manifest for this sweep.
                using (var manifestInst = kvStore.Manifest.GetLatestManifest()) {
                    if (manifestInst.GetNumPagesAtLevel(0) >= Config.MaxPagesOnLevel(0)) // Handle level 0 (merge all pages)
                    {
                        mergedDuringLastPass = true;
                        int Level0PagesToTake = Config.MaxPagesOnLevel(0) * 2; // Grab more pages if they are available (this happens during heavy write pressure)
                        // Oldest level-0 pages first, so versions merge in write order.
                        var inputPageRecords = manifestInst.GetPagesAtLevel(0).OrderBy(p => p.Version).Take(Level0PagesToTake).ToList();
                        var startKey = inputPageRecords.Min(p => p.FirstKey);
                        var endKey = inputPageRecords.Max(p => p.LastKey);
                        // Overlapping level-1 pages must participate in the merge.
                        var mergePages = manifestInst.FindPagesForKeyRange(1, startKey, endKey).AsPageRefs().ToList();
                        var allInputPages = inputPageRecords.AsPageRefs().Concat(mergePages).ToList();
                        var outputPages = SortedBlockTable.MergeTables(cache, manifest, 1, allInputPages, ExceptionHandling.ThrowAll, null).ToList();
                        // Swap input pages for the merged output pages in the manifest.
                        manifest.ModifyPages(outputPages, allInputPages);
                        manifest.LogMessage("Merge Level 0 => InputPages: {0} OutputPages:{1}",
                            string.Join(",", allInputPages.Select(p => string.Format("{0}-{1}", p.Level, p.Version)).ToArray()),
                            string.Join(",", outputPages.Select(p => string.Format("{0}-{1}", p.Level, p.Version)).ToArray())
                        );
                    }
                    for (int level = 1; level < manifestInst.NumLevels - 1; level++) // handle the rest of the levels (merge only one page upwards)
                    {
                        if (manifestInst.GetNumPagesAtLevel(level) >= Config.MaxPagesOnLevel(level)) {
                            mergedDuringLastPass = true;
                            // Manifest tracks which page is next to be promoted.
                            var inputPage = manifest.NextMergePage(level);
                            var mergePages = manifestInst.FindPagesForKeyRange(level + 1, inputPage.FirstKey, inputPage.LastKey).ToList();
                            var inputPageRecords = mergePages.Concat(new PageRecord[] { inputPage });
                            var allInputPages = inputPageRecords.AsPageRefs().ToList();
                            var outputPages = SortedBlockTable.MergeTables(cache, manifest, level + 1, allInputPages, ExceptionHandling.ThrowAll, null);
                            // Notify if a merge happened, implemented for testing primarily
                            if (kvStore.MergeCallback != null) {
                                kvStore.MergeCallback(level, inputPageRecords, outputPages);
                            }
                            manifest.ModifyPages(outputPages, allInputPages);
                            manifest.LogMessage("Merge Level >0 => InputPages: {0} OutputPages:{1}",
                                string.Join(",", allInputPages.Select(p => string.Format("{0}-{1}", p.Level, p.Version)).ToArray()),
                                string.Join(",", outputPages.Select(p => string.Format("{0}-{1}", p.Level, p.Version)).ToArray())
                            );
                        }
                    }
                }
                if (!mergedDuringLastPass) {
                    return; // No more merging is needed, we are finished with this pass
                }
            }
        }
    } finally {
        Interlocked.Decrement(ref kvStore.mergeCount);
    }
}
// Exercises FindStartsWith on a secondary index: an exact Find returns the
// single matching record, while a prefix search returns every indexed value
// beginning with the prefix, longest key first.
public void FindStartsWith() {
    string path = Path.GetFullPath ("TestData\\FindStartsWith");
    using (var db = new KeyValueStore(path)) {
        db.Truncate ();
        var indexEntry = new SortedDictionary<string, byte[]> ();
        // Index each number under its own decimal string representation.
        foreach (var number in new[] { 112, 1123, 11235, 112358 }) {
            var text = number.ToString ();
            indexEntry ["Bytes"] = Encoding.UTF8.GetBytes (text);
            db.Set (BitConverter.GetBytes (number), Encoding.UTF8.GetBytes (text), indexEntry);
        }
    }
    using (var db = new KeyValueStore(path)) {
        var exactMatches = db.Find ("Bytes", Encoding.UTF8.GetBytes ("1123")).ToList ();
        Assert.AreEqual (1, exactMatches.Count ());
        Assert.AreEqual ("1123", Encoding.UTF8.GetString (exactMatches [0].Value));
        var prefixMatches = db.FindStartsWith ("Bytes", Encoding.UTF8.GetBytes ("1123")).ToList ();
        Assert.AreEqual (3, prefixMatches.Count ());
        Assert.AreEqual ("112358", Encoding.UTF8.GetString (prefixMatches [0].Value));
        Assert.AreEqual ("11235", Encoding.UTF8.GetString (prefixMatches [1].Value));
        Assert.AreEqual ("1123", Encoding.UTF8.GetString (prefixMatches [2].Value));
    }
}
// Writes 1000 records indexed by i % 100, re-indexes one record under a new
// bucket, and verifies that RemoveFromIndex deletes the stale entry from the
// old bucket (index entry count for bucket 0 drops from 10 to 9).
public void RemoveUpdatedValuesFromIndex2() {
    string path = Path.GetFullPath ("TestData\\RemoveUpdatedValuesFromIndex2");
    var timer = new Stopwatch ();
    using (var db = new KeyValueStore(path)) {
        db.Truncate ();
        int totalSize = 0;
        db.Manifest.Logger = msg => Console.WriteLine (msg);
        var indexed = new SortedDictionary<string, byte[]> ();
        int num_items = 1000;
        timer.Start ();
        for (int i = 0; i < num_items; i++) {
            indexed ["Mod"] = BitConverter.GetBytes (i % 100);
            db.Set (BitConverter.GetBytes (i), BitConverter.GetBytes (i), indexed);
            totalSize += 8 + 4;
        }
        timer.Stop ();
        Console.WriteLine ("Wrote data (with indexing) at a throughput of {0} MB/s", (double)totalSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
        timer.Reset ();
        timer.Start ();
        var ctModZeros = db.Find ("Mod", BitConverter.GetBytes ((int)0)).Count ();
        timer.Stop ();
        // 1000 items spread over 100 buckets => 10 items per bucket.
        Assert.AreEqual (10, ctModZeros);
        Console.WriteLine ("Scanned index at a throughput of {0} items/s", (double)ctModZeros / timer.Elapsed.TotalSeconds);
    }
    // Open the index directly and see if the data is there
    using (var db = new KeyValueStore(Path.Combine(path, "Mod"))) {
        int num_vals = db.EnumerateFromKey (BitConverter.GetBytes ((int)0)).Count (pair => pair.Key.Take (4).All (b => b == 0));
        Assert.AreEqual (10, num_vals);
    }
    // Re-open the main key-value store and move key 200 from index bucket 0
    // (200 % 100) to bucket 1 (201 % 100).
    using (var db = new KeyValueStore(path)) {
        var indexed = new SortedDictionary<string, byte[]> ();
        indexed ["Mod"] = BitConverter.GetBytes (201 % 100);
        db.Set (BitConverter.GetBytes (200), BitConverter.GetBytes (200), indexed);
        // Explicitly clean the stale entry from the old bucket of the index.
        db.RemoveFromIndex (BitConverter.GetBytes (200), new Dictionary<string, byte[]> { { "Mod", BitConverter.GetBytes(200 % 100) } });
    }
    // Open the index again directly and confirm that the lookup key is gone now as well
    using (var db = new KeyValueStore(Path.Combine(path, "Mod"))) {
        int num_vals = db.EnumerateFromKey (BitConverter.GetBytes ((int)0)).Count (pair => pair.Key.Take (4).All (b => b == 0));
        Assert.AreEqual (9, num_vals);
    }
}
// Lazily creates and caches the KeyValueStore that backs the named secondary
// index; all access to the cache dictionary is guarded by its own lock.
KeyValueStore GetSecondaryIndex(string IndexName) {
    lock (_secondaryIndexes) {
        KeyValueStore existing;
        if (_secondaryIndexes.TryGetValue (IndexName, out existing))
            return existing;
        var created = new KeyValueStore (Config.IndexBaseName (Manifest.BaseFileName, IndexName), _cache);
        // Tag forwarded log lines with the index name for easier diagnosis.
        if (Manifest.Logger != null)
            created.Manifest.Logger = msg => Manifest.Logger (string.Format ("{0}: {1}", IndexName, msg));
        _secondaryIndexes.Add (IndexName, created);
        return created;
    }
}
// Stress test: writes one million indexed records (enough to trigger background
// merging) and then verifies an index scan still returns the exact expected
// count, while reporting write and scan throughput.
public void AddObjectsAndLookupWhileMerging() {
    string path = Path.GetFullPath ("TestData\\AddObjectsAndLookupWhileMerging");
    var timer = new Stopwatch ();
    using (var db = new KeyValueStore(path)) {
        db.Truncate ();
        int totalSize = 0;
        db.Manifest.Logger = msg => Console.WriteLine (msg);
        var indexed = new SortedDictionary<string, byte[]> ();
        int num_items = 1000000;
        timer.Start ();
        for (int i = 0; i < num_items; i++) {
            indexed ["Mod"] = BitConverter.GetBytes (i % 100);
            db.Set (BitConverter.GetBytes (i), BitConverter.GetBytes (i * 1000), indexed);
            totalSize += 8 + 4; // key (4) + value (4) + index key (4) bytes written per item
        }
        timer.Stop ();
        Console.WriteLine ("Wrote data (with indexing) at a throughput of {0} MB/s", (double)totalSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
        timer.Reset ();
        timer.Start ();
        var ctModZeros = db.Find ("Mod", BitConverter.GetBytes ((int)0)).Count ();
        timer.Stop ();
        // 1,000,000 items over 100 buckets => 10,000 per bucket.
        Assert.AreEqual (10000, ctModZeros);
        Console.WriteLine ("Scanned index at a throughput of {0} items/s", (double)ctModZeros / timer.Elapsed.TotalSeconds);
    }
}
// Fills a store (including a secondary index) with random records, truncates
// it, and verifies truncation leaves exactly one empty journal file ("0.jf")
// and no subdirectories on disk.
public void TruncateTest() {
    string path = Path.GetFullPath ("TestData\\TruncateTest");
    using (var db = new KeyValueStore(path)) {
        var indexSpec = new SortedDictionary<string, byte[]> ();
        for (int item = 0; item < 15000; item++) {
            indexSpec ["RandomIndex"] = ByteArray.Random (20).InternalBytes;
            db.Set (ByteArray.Random (40).InternalBytes, ByteArray.Random (256).InternalBytes, indexSpec);
        }
    }
    using (var db = new KeyValueStore(path))
        db.Truncate ();
    // Only the empty journal should survive the truncate.
    var remainingFiles = Directory.GetFiles (path, "*.*", SearchOption.AllDirectories);
    Assert.AreEqual (new string[] { Path.GetFullPath(Path.Combine(path, "0.jf")) }, remainingFiles);
    var remainingDirs = Directory.GetDirectories (path, "*.*", SearchOption.AllDirectories);
    Assert.AreEqual (new string[0], remainingDirs);
}