public void BasicPersistentGetAndSet() {
    string path = Path.GetFullPath("TestData\\BasicPersistentGetAndSet");

    // Write ten key/value pairs, then dispose the store so everything is flushed to disk.
    using (var db = new KeyValueStore(path)) {
        db.Truncate();
        for (int n = 0; n < 10; n++) {
            db.Set(BitConverter.GetBytes(n), Encoding.UTF8.GetBytes("Number " + n.ToString()));
        }
    }

    // Reopen the store and verify persistence: keys 0-9 round-trip, keys 10-14 were never written.
    using (var db = new KeyValueStore(path)) {
        for (int n = 0; n < 15; n++) {
            byte[] fetched = db.Get(BitConverter.GetBytes(n));
            if (n < 10) {
                Assert.AreEqual(Encoding.UTF8.GetBytes("Number " + n.ToString()), fetched);
            } else {
                Assert.IsNull(fetched);
            }
        }
    }
}
public void LargeDataSetGetTest() {
    string path = Path.GetFullPath("TestData\\LargeDataSetGetTest");
    int totalSize = 0;
    int num_items = 500;
    var timer = new Stopwatch();

    using (var db = new KeyValueStore(path)) {
        db.Truncate();

        // Generate a data value that is larger than the block size.
        var value = ByteArray.Random(Config.SortedBlockSize + 256);

        // Do it enough times to ensure a roll-over
        for (int i = 0; i < num_items; i++) {
            var key = BitConverter.GetBytes(i);
            db.Set(key, value.InternalBytes);
            totalSize += value.InternalBytes.Length;
        }

        // Read every item back and confirm the oversized value survived intact.
        timer.Start();
        for (int i = 0; i < num_items; i++) {
            var key = BitConverter.GetBytes(i);
            Assert.AreEqual(value.InternalBytes, db.Get(key));
        }
        timer.Stop();

        // FIX: the "{1} ms" placeholder previously printed TotalSeconds / num_items
        // (i.e. seconds per lookup); use TotalMilliseconds so the label is accurate.
        Console.WriteLine("Randomized read throughput of {0} MB/s (avg {1} ms per lookup)",
            (double)totalSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0),
            timer.Elapsed.TotalMilliseconds / (double)num_items);
    }
}
public void TestLargeAndSmallOddWrites() {
    // FIX: the data directory previously said "TestLargeAndSmallInterlacedWrites",
    // which does not match this test's name and risks colliding with another
    // test's on-disk state. Each test should own a uniquely-named directory.
    string path = Path.GetFullPath("TestData\\TestLargeAndSmallOddWrites");

    using (var db = new KeyValueStore(path)) {
        db.Truncate();

        // Create a random set of keybytes
        List<byte[]> keys = new List<byte[]>();
        for (int i = 0; i < 10; i++) {
            keys.Add(Key.Random(10).KeyBytes);
        }

        // Set Odds to large values (near the max large-value size), evens to small ones.
        for (int i = 0; i < keys.Count; i++) {
            var k = keys[i];
            var v = ((i & 1) == 1) ? GenerateBlock(Config.MaxLargeValueSize - 100) : GenerateBlock(10);
            db.Set(k, v);
        }

        // Now check the results: block contents are valid and sizes match parity.
        for (int i = 0; i < keys.Count; i++) {
            var k = keys[i];
            var v = db.Get(k);
            CheckBlock(v);
            if ((i & 1) == 0) {
                Assert.Less(v.Length, 100, " i = {0} should be small, but size={1}", i, v.Length);
            } else {
                Assert.Greater(v.Length, 100, " i = {0} should be large, but size={1}", i, v.Length);
            }
        }
    }
}
public void GetAndSetWithDelete() {
    string path = Path.GetFullPath("TestData\\GetAndSetWithDelete");

    // Phase 1: write keys 0-9, then delete a few (including a repeat delete of 3
    // and a delete of 30, which was never written) before closing the store.
    using (var db = new KeyValueStore(path)) {
        db.Truncate();
        for (int n = 0; n < 10; n++) {
            db.Set(BitConverter.GetBytes(n), Encoding.UTF8.GetBytes("Number " + n.ToString()));
        }
        db.Delete(BitConverter.GetBytes(3));
        db.Delete(BitConverter.GetBytes(30));
        db.Delete(BitConverter.GetBytes(7));
        db.Delete(BitConverter.GetBytes(1));
        db.Delete(BitConverter.GetBytes(3));
    }

    // Phase 2: reopen and verify deletions persisted. Keys 1, 3, 7 are gone,
    // the remaining written keys survive, and keys >= 10 never existed.
    using (var db = new KeyValueStore(path)) {
        for (int n = 0; n < 15; n++) {
            byte[] fetched = db.Get(BitConverter.GetBytes(n));
            if (n == 3 || n == 1 || n == 7) {
                Assert.IsNull(fetched);
            } else if (n < 10) {
                Assert.AreEqual(Encoding.UTF8.GetBytes("Number " + n.ToString()), fetched);
            } else {
                Assert.IsNull(fetched);
            }
        }
    }
}
public void BulkSetWithDelete() {
    int numItems = 100000;
    string path = Path.GetFullPath("TestData\\BulkSetWithDelete");
    if (Directory.Exists(path))
        Directory.Delete(path, true);
    Directory.CreateDirectory(path);

    using (var db = new KeyValueStore(path)) {
        db.Manifest.Logger = msg => Console.WriteLine(msg);
        db.Truncate();

        // Bulk-write numItems sequential keys.
        Stopwatch timer = new Stopwatch();
        timer.Start();
        for (int i = 0; i < numItems; i++) {
            byte[] key = BitConverter.GetBytes(i);
            byte[] value = Encoding.UTF8.GetBytes("Number " + i.ToString());
            db.Set(key, value);
        }
        timer.Stop();
        Console.WriteLine("Wrote {0} items in {1}s", numItems, timer.Elapsed.TotalSeconds);

        int skip = 1000;
        timer.Reset();
        timer.Start();
        // Delete every skip-th item in reverse order.
        // FIX: the loop previously started at j = numItems, deleting key 100000
        // which was never written (off-by-one); start at the last written key.
        for (int j = numItems - 1; j >= 0; j--) {
            if (j % skip == 0) {
                byte[] key = BitConverter.GetBytes(j);
                db.Delete(key);
            }
        }
        timer.Stop();
        Console.WriteLine("Deleted every {0}-th item in {1}s", skip, timer.Elapsed.TotalSeconds);

        // Now check all the results: deleted keys read null, everything else round-trips.
        timer.Reset();
        timer.Start();
        for (int k = 0; k < numItems; k++) {
            byte[] key = BitConverter.GetBytes(k);
            byte[] value = db.Get(key);
            if (k % skip == 0) {
                Assert.IsNull(value);
            } else {
                Assert.AreEqual(Encoding.UTF8.GetBytes("Number " + k.ToString()), value, string.Format("{0}", k));
            }
        }
        timer.Stop();
        Console.WriteLine("Read and check every item in {0}s", timer.Elapsed.TotalSeconds);
    }
}
public void BulkSetThreadedGetWhileReMerging() {
    string path = Path.GetFullPath("TestData\\BulkSetThreadedGetWhileReMerging");
    var timer = new Stopwatch();
    int totalSize = 0;
    var items = new Dictionary<ByteArray, ByteArray>();

    using (var db = new KeyValueStore(path)) {
        db.Truncate();
        db.Manifest.Logger = (msg) => Console.WriteLine(msg);

        // Write enough random items to trigger background merging.
        timer.Start();
        int totalItems = 105000;
        for (int i = 0; i < totalItems; i++) {
            var randomKey = ByteArray.Random(40);
            var randomValue = ByteArray.Random(256);
            db.Set(randomKey.InternalBytes, randomValue.InternalBytes);
            items[randomKey] = randomValue;
            totalSize += randomKey.Length + randomValue.Length;
        }
        timer.Stop();
        Console.WriteLine("Wrote sorted table at a throughput of {0} MB/s",
            (double)totalSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));

        // Read back concurrently from multiple threads while the merge may still be running.
        List<KeyValuePair<ByteArray, ByteArray>> itemsList = items.ToList();
        int numThreads = 10;
        List<Thread> threads = new List<Thread>();
        for (int j = 0; j < numThreads; j++) {
            threads.Add(new Thread((num) => {
                int itemsPerThread = totalItems / numThreads;
                for (int i = 0; i < itemsPerThread; i++) {
                    try {
                        // FIX: the index was previously "i * (int)num", which made
                        // thread 0 re-read index 0 every iteration and made the other
                        // threads overlap instead of partitioning the data. Give each
                        // thread its own contiguous slice of itemsList.
                        int idx = (int)num * itemsPerThread + i;
                        byte[] value = db.Get(itemsList[idx].Key.InternalBytes);
                        Assert.AreEqual(itemsList[idx].Value, new ByteArray(value));
                    } catch (Exception /*e*/) {
                        //Console.WriteLine("Key: {0}\n{1}", insertedItem.Key, e);
                        //Debugger.Launch();
                        //db.Get(insertedItem.Key.InternalBytes);
                        //db.Manifest.LogContents();
                        throw;
                    }
                }
            }));
        }

        timer.Reset();
        Console.WriteLine("Begin randomized read back.");
        timer.Start();
        for (int k = 0; k < numThreads; k++) {
            threads[k].Start(k);
        }
        threads.ForEach(t => t.Join());
        timer.Stop();

        // FIX: the "{1} ms" placeholder previously printed TotalSeconds per lookup;
        // use TotalMilliseconds so the label is accurate.
        Console.WriteLine("Randomized read throughput of {0} MB/s (avg {1} ms per lookup)",
            (double)totalSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0),
            timer.Elapsed.TotalMilliseconds / (double)items.Count);
    }
}
public void BulkSetGetWhileReMerging() {
    string path = Path.GetFullPath("TestData\\BulkSetGetWhileReMerging");
    var timer = new Stopwatch();
    int totalSize = 0;
    var items = new Dictionary<ByteArray, ByteArray>();

    using (var db = new KeyValueStore(path)) {
        db.Truncate();
        db.Manifest.Logger = (msg) => Console.WriteLine(msg);

        // Write enough random items to trigger background merging.
        timer.Start();
        for (int i = 0; i < 105000; i++) {
            var randomKey = ByteArray.Random(40);
            var randomValue = ByteArray.Random(256);
            db.Set(randomKey.InternalBytes, randomValue.InternalBytes);
            items[randomKey] = randomValue;
            totalSize += randomKey.Length + randomValue.Length;
        }
        timer.Stop();
        Console.WriteLine("Wrote sorted table at a throughput of {0} MB/s",
            (double)totalSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));

        // Read every inserted item back while the merge may still be in progress.
        timer.Reset();
        Console.WriteLine("Begin randomized read back.");
        timer.Start();
        foreach (var insertedItem in items) {
            try {
                byte[] value = db.Get(insertedItem.Key.InternalBytes);
                Assert.AreEqual(insertedItem.Value, new ByteArray(value));
            } catch (Exception /*e*/) {
                //Console.WriteLine("Key: {0}\n{1}", insertedItem.Key, e);
                //Debugger.Launch();
                //db.Get(insertedItem.Key.InternalBytes);
                //db.Manifest.LogContents();
                throw;
            }
        }
        timer.Stop();

        // FIX: the "{1} ms" placeholder previously printed TotalSeconds per lookup
        // (i.e. seconds, mislabeled as ms); use TotalMilliseconds for an accurate label.
        Console.WriteLine("Randomized read throughput of {0} MB/s (avg {1} ms per lookup)",
            (double)totalSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0),
            timer.Elapsed.TotalMilliseconds / (double)items.Count);
    }
}
public void RotationReadRaceTest() {
    string path = Path.GetFullPath("TestData\\RotationReadRaceTest");

    using (var db = new KeyValueStore(path)) {
        db.Truncate();

        // Write just enough items to force a memtable page rotation.
        int num_items = 58900;
        Console.WriteLine("Writing {0} items.", num_items);
        for (int n = 0; n < num_items; n++) {
            db.Set(BitConverter.GetBytes(n), Encoding.UTF8.GetBytes("Number " + n.ToString()));
        }

        Console.WriteLine("Read 1 item.");
        // Even though the page is rotated, but not written to disk yet, we should be able to query for the data anyway.
        {
            byte[] fetched = db.Get(BitConverter.GetBytes(0));
            Assert.AreEqual(Encoding.UTF8.GetBytes("Number 0"), fetched);
        }

        Console.WriteLine("Check Manifest.");
        // There is a chance that this could happen fast enough to make this assertion fail on some machines, but it should be unlikely.
        // The goal is to reproduce the race condition. If this assert succeeds then we have reproduced it.
        using (var mf = db.Manifest.GetLatestManifest()) {
            Assert.IsFalse(mf.GetPagesAtLevel(0).Length > 0);
        }
        Console.WriteLine("Done Checking Manifest.");
    }
    Console.WriteLine("Closed.");

    // After disposal the rotated page must have been flushed, so level 0 now has pages.
    using (var db = new KeyValueStore(path)) {
        using (var mf = db.Manifest.GetLatestManifest()) {
            Assert.IsTrue(mf.GetPagesAtLevel(0).Length > 0);
        }
    }
    Console.WriteLine("Done.");
}
// Fetch the raw stored bytes for key from the store and unwrap them
// through _GetValueFromStoredValue before returning.
private byte[] _Get(KeyValueStore store, byte[] key) {
    return _GetValueFromStoredValue(store.Get(key));
}