// Writes num_items values larger than a sorted block (forcing block roll-over),
// then reads every key back, verifying contents and reporting read throughput.
public void LargeDataSetGetTest() {
    string path = Path.GetFullPath ("TestData\\LargeDataSetGetTest");
    int totalSize = 0;
    int num_items = 500;
    var timer = new Stopwatch ();

    using (var db = new KeyValueStore(path)) {
        db.Truncate ();

        // Generate a data value that is larger than the block size.
        var value = ByteArray.Random (Config.SortedBlockSize + 256);

        // Do it enough times to ensure a roll-over
        for (int i = 0; i < num_items; i++) {
            var key = BitConverter.GetBytes (i);
            db.Set (key, value.InternalBytes);
            totalSize += value.InternalBytes.Length;
        }

        // Time only the read phase.
        timer.Start ();
        for (int i = 0; i < num_items; i++) {
            var key = BitConverter.GetBytes (i);
            Assert.AreEqual (value.InternalBytes, db.Get (key));
        }
        timer.Stop ();

        // BUGFIX: the message reports "ms per lookup", so convert with
        // TotalMilliseconds; the original divided TotalSeconds by num_items,
        // under-reporting per-lookup latency by a factor of 1000.
        Console.WriteLine ("Randomized read throughput of {0} MB/s (avg {1} ms per lookup)",
            (double)totalSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0),
            timer.Elapsed.TotalMilliseconds / (double)num_items);
    }
}
// Interleaves large (near MaxLargeValueSize) and small (10-byte) values under
// random keys — odd indices large, even indices small — then verifies each key
// reads back a block of the expected size class.
public void TestLargeAndSmallOddWrites() {
    // BUGFIX: path previously said "TestLargeAndSmallInterlacedWrites" (copied
    // from another test); sharing a data directory between tests lets one
    // test's leftover store corrupt the other's run.
    string path = Path.GetFullPath ("TestData\\TestLargeAndSmallOddWrites");

    using (var db = new KeyValueStore(path)) {
        db.Truncate ();

        // Create a random set of keybytes
        List<byte[]> keys = new List<byte[]> ();
        for (int i = 0; i < 10; i++)
            keys.Add (Key.Random (10).KeyBytes);

        // Set Odds to large
        for (int i = 0; i < keys.Count; i++) {
            var k = keys [i];
            var v = ((i & 1) == 1) ? GenerateBlock (Config.MaxLargeValueSize - 100) : GenerateBlock (10);
            db.Set (k, v);
        }

        // Now check the results
        for (int i = 0; i < keys.Count; i++) {
            var k = keys [i];
            var v = db.Get (k);
            CheckBlock (v);
            if ((i & 1) == 0)
                Assert.Less (v.Length, 100, " i = {0} should be small, but size={1}", i, v.Length);
            else
                Assert.Greater (v.Length, 100, " i = {0} should be large, but size={1}", i, v.Length);
        }
    }
}