Enumerate() public method

public Enumerate ( ) : IEnumerable<KeyValuePair<byte[], byte[]>>
Return IEnumerable<KeyValuePair<byte[], byte[]>>
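
A minimal usage sketch follows (assuming the RazorDB namespace is referenced; the store path below is illustrative only and not part of the API):

        // Minimal sketch: open a store and walk every key/value pair in sorted key order.
        // "TestData\\EnumerateSketch" is a hypothetical example location.
        using (var db = new KeyValueStore(Path.GetFullPath("TestData\\EnumerateSketch"))) {
            foreach (KeyValuePair<byte[], byte[]> pair in db.Enumerate()) {
                Console.WriteLine("{0} => {1} bytes", BitConverter.ToString(pair.Key), pair.Value.Length);
            }
        }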
Example #1
        public void LargeDataEnumerateTest()
        {
            string path = Path.GetFullPath("TestData\\LargeDataEnumerateTest");
            int totalSize = 0;
            int num_items = 500;
            var timer = new Stopwatch();

            using (var db = new KeyValueStore(path)) {
                db.Truncate();

                // Generate a data value that is larger than the block size.
                var value = ByteArray.Random(Config.SortedBlockSize + 256);

                // Do it enough times to ensure a roll-over
                for (int i = 0; i < num_items; i++) {
                    var key = BitConverter.GetBytes(i).Reverse().ToArray(); // reverse to big-endian byte order so the keys sort numerically
                    db.Set(key, value.InternalBytes);
                    totalSize += value.InternalBytes.Length;
                }

                int j=0;
                timer.Start();
                foreach (var pair in db.Enumerate()) {
                    var key = BitConverter.GetBytes(j).Reverse().ToArray();
                    Assert.AreEqual(key, pair.Key);
                    Assert.AreEqual(value.InternalBytes, pair.Value);
                    j++;
                }
                timer.Stop();

                Console.WriteLine("Randomized read throughput of {0} MB/s (avg {1} ms per lookup)", (double)totalSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0), (double)timer.Elapsed.TotalSeconds / (double)num_items);

            }
        }
Example #2
        public void CleanIndex(string indexName)
        {
            KeyValueStore indexStore = GetSecondaryIndex(indexName);
            // Gather every key in the primary store so stale index entries can be detected.
            var allValueStoreItems = new HashSet<ByteArray>(this.Enumerate().Select(item => new ByteArray(item.Key)));

            foreach (var indexItem in indexStore.Enumerate())
            {
                byte[] itemKey = KeyValueStore.ItemKeyFromIndex(indexItem);
                if (!allValueStoreItems.Contains(new ByteArray(itemKey)))
                {
                    indexStore.Delete(indexItem.Key);
                }
            }
        }
Example #3
        public void CrashTestBeforeMerge()
        {
            string path = Path.GetFullPath("TestData\\CrashTestBeforeMerge");
            using (var db = new KeyValueStore(path)) {
                db.Truncate();
            }

            var doneSetting = new EventWaitHandle(false, EventResetMode.ManualReset, "CrashTestBeforeMerge");
            doneSetting.Reset();

            string testPath = Path.Combine(Path.GetDirectoryName(Assembly.GetExecutingAssembly().GetName().CodeBase), "RazorTest.exe");
            var process = Process.Start(testPath, "CrashTestBeforeMerge");

            doneSetting.WaitOne(30000);
            process.Refresh();
            if (!process.HasExited) {
                try { process.Kill(); } catch { }
                process.WaitForExit();
            }

            // Open the database created by the other program
            using (var db = new KeyValueStore(path)) {

                db.Manifest.Logger = (msg) => Console.WriteLine(msg);

                Console.WriteLine("Begin enumeration.");
                ByteArray lastKey = new ByteArray();
                int ct = 0;
                foreach (var pair in db.Enumerate()) {
                    ByteArray k = new ByteArray(pair.Key);
                    Assert.True(lastKey.CompareTo(k) < 0);
                    lastKey = k;
                    ct++;
                }
                Assert.AreEqual(10000, ct);
                Console.WriteLine("Found {0} items in the crashed database.", ct);
            }
        }
Example #4
        public int CountIndex(string indexName)
        {
            KeyValueStore indexStore = GetSecondaryIndex(indexName);

            return(indexStore.Enumerate().Count());
        }
Example #5
        public void DumpKeySpaceUsed()
        {
            double valueBytes = 0L;
            double keyBytes = 0L;
            double dupBytes = 0L;
            double totalRecords = 0L;

            Action<string> dumpFolderBytes = (folder) => {
                double tableValBytes = 0L;
                double tableKeyBytes = 0L;
                double tableDupBytes = 0L;
                double tableRecords = 0L;
                byte[] lastkey = null;
                using (var kvs = new KeyValueStore(folder)) {
                    foreach (var pair in kvs.Enumerate()) {
                        tableRecords++;
                        tableValBytes += pair.Value.Length;
                        tableKeyBytes += pair.Key.Length;
                        if (lastkey != null) {
                            // Count the length of the shared prefix with the previous key.
                            int i = 0;
                            for (i = 0; i < lastkey.Length && i < pair.Key.Length; i++)
                                if (lastkey[i] != pair.Key[i])
                                    break;
                            tableDupBytes += i;
                        }
                        lastkey = pair.Key;
                    }
                }
                valueBytes += tableValBytes;
                keyBytes += tableKeyBytes;
                dupBytes += tableDupBytes;
                totalRecords += tableRecords;
                Console.WriteLine("{0} Total Bytes: {1}", folder, tableValBytes + tableKeyBytes);
                Console.WriteLine("         #Records: {0}", tableRecords);
                Console.WriteLine("      Key   Bytes: {0}", tableKeyBytes);
                Console.WriteLine("      Value Bytes: {0}", tableValBytes);
                Console.WriteLine("      Dupl. Bytes: {0}", tableDupBytes);
                Console.WriteLine(" %Savings in keys: {0}%", tableDupBytes / tableKeyBytes * 100);
                Console.WriteLine(" %Savings overall: {0}%", tableDupBytes / (tableValBytes + tableKeyBytes) * 100);
                Console.WriteLine();
            };

            var baseFolder = @"d:\ncoverdata\ncover";
            foreach (var folder in Directory.GetDirectories(baseFolder, "*", SearchOption.AllDirectories))
                dumpFolderBytes(folder);

            Console.WriteLine("Total KeyValueStore Bytes: {0}", valueBytes + keyBytes);
            Console.WriteLine("         #Records: {0}", totalRecords);
            Console.WriteLine("      Key   Bytes: {0}", keyBytes);
            Console.WriteLine("      Value Bytes: {0}", valueBytes);
            Console.WriteLine("      Dupl. Bytes: {0}", dupBytes);
            Console.WriteLine(" %Savings in keys: {0}%", dupBytes / keyBytes * 100);
            Console.WriteLine(" %Savings overall: {0}%", dupBytes / (valueBytes + keyBytes) * 100);
        }
Example #6
        public void BulkSetEnumerateAllWithMissingSBT_ThrowAll()
        {
            string path = Path.GetFullPath("TestData\\BulkSetEnumerateAllWithMissingSBT_ThrowAll"+DateTime.Now.Ticks.ToString());
            var timer = new Stopwatch();
            int totalSize = 0;
            int readSize = 0;
            Action<string> logger = (msg) => { Console.WriteLine(msg); };
            using (var db = new KeyValueStore(path)) {
                db.Truncate();
                timer.Start();
                for (int i = 0; i < 500000; i++) {
                    var randomKey = BitConverter.GetBytes(i);
                    var randomValue = BitConverter.GetBytes(i);
                    db.Set(randomKey, randomValue);

                    readSize += randomKey.Length + randomValue.Length;
                    totalSize += randomKey.Length + randomValue.Length;
                }
                timer.Stop();
                Console.WriteLine("Wrote sorted table at a throughput of {0} MB/s", (double)totalSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
            }

            // delete the sbt files
            var files = Directory.GetFiles(path, "*.sbt");
            foreach(var fname in files)
                File.Delete(fname);

            // Re-open the database; with ThrowAll in effect, enumerating should fail on the missing table files.
            Console.WriteLine("Begin enumeration.");
            RazorDB.Config.ExceptionHandling = ExceptionHandling.ThrowAll;
            Assert.Throws(typeof(FileNotFoundException), () => {
                using (var db = new KeyValueStore(path)) {
                    foreach (var pair in db.Enumerate()) { } // drain the enumerator so the missing .sbt files are actually read
                }
            });
        }
Example #7
        public void BulkSetEnumerateAllWithMissingSBT_AttemptRecovery()
        {
            try {
                RazorDB.Config.ExceptionHandling = ExceptionHandling.AttemptRecovery;

                string path = Path.GetFullPath("TestData\\BulkSetEnumerateAllWithMissingSBT_AttemptRecovery");
                var timer = new Stopwatch();
                int totalSize = 0;
                int readSize = 0;
                Action<string> logger = (msg) => { Console.WriteLine(msg); };
                using (var db = new KeyValueStore(path)) {
                    db.Truncate();
                    timer.Start();
                    for (int i = 0; i < 500000; i++) {
                        var randomKey = BitConverter.GetBytes(i);
                        var randomValue = BitConverter.GetBytes(i);
                        db.Set(randomKey, randomValue);

                        readSize += randomKey.Length + randomValue.Length;
                        totalSize += randomKey.Length + randomValue.Length;
                    }
                    timer.Stop();
                    Console.WriteLine("Wrote sorted table at a throughput of {0} MB/s", (double)totalSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
                }

                // delete the sbt files
                var files = Directory.GetFiles(path, "*.sbt");
                foreach (var fname in files)
                    File.Delete(fname);

                // Re-open the database; with AttemptRecovery, enumeration should skip over the missing table files.
                Console.WriteLine("Begin enumeration.");
                using (var db = new KeyValueStore(path)) {
                    timer.Reset();
                    timer.Start();
                    ByteArray lastKey = ByteArray.Empty;
                    int ct = 0;
                    foreach (var pair in db.Enumerate()) {
                        try {
                            ByteArray k = new ByteArray(pair.Key);
                            ByteArray v = new ByteArray(pair.Value);
                            Assert.AreEqual(k, v);
                            Assert.True(lastKey.CompareTo(k) < 0);
                            lastKey = k;
                            ct++;
                        } catch (Exception /*e*/) {
                            //Console.WriteLine("Key: {0}\n{1}",insertedItem.Key,e);
                            //Debugger.Launch();
                            //db.Get(insertedItem.Key.InternalBytes);
                            //db.Manifest.LogContents();
                            throw;
                        }
                    }
                    timer.Stop();
                    Assert.AreEqual(80568, ct);
                    Console.WriteLine("Enumerated read throughput of {0} MB/s (avg {1} ms per 1000 items)", (double)readSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0), (double)timer.Elapsed.TotalSeconds / (double)105);
                }

                // add some more records after deleting files
                using (var db = new KeyValueStore(path)) {
                    timer.Start();
                    // add 2,000,000 new keys
                    for (int i = 1000000; i < 3000000; i++) {
                        var randomKey = BitConverter.GetBytes(i);
                        var randomValue = BitConverter.GetBytes(i);
                        db.Set(randomKey, randomValue);

                        readSize += randomKey.Length + randomValue.Length;
                        totalSize += randomKey.Length + randomValue.Length;
                    }
                    timer.Stop();
                    Console.WriteLine("Wrote sorted table at a throughput of {0} MB/s", (double)totalSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
                }

                // Close and re-open the database to force all the sstable merging to complete.
                Console.WriteLine("Begin enumeration.");
                using (var db = new KeyValueStore(path)) {
                    timer.Reset();
                    timer.Start();
                    ByteArray lastKey = ByteArray.Empty;
                    int ct = 0;
                    foreach (var pair in db.Enumerate()) {
                        try {
                            ByteArray k = new ByteArray(pair.Key);
                            ByteArray v = new ByteArray(pair.Value);
                            Assert.AreEqual(k, v);
                            Assert.True(lastKey.CompareTo(k) < 0);
                            lastKey = k;
                            ct++;
                        } catch (Exception /*e*/) {
                            //Console.WriteLine("Key: {0}\n{1}",insertedItem.Key,e);
                            //Debugger.Launch();
                            //db.Get(insertedItem.Key.InternalBytes);
                            //db.Manifest.LogContents();
                            throw;
                        }
                    }
                    timer.Stop();
                    Assert.AreEqual(2080568, ct);
                    Console.WriteLine("Enumerated read throughput of {0} MB/s (avg {1} ms per 1000 items)", (double)readSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0), (double)timer.Elapsed.TotalSeconds / (double)105);
                }
            } finally {
                RazorDB.Config.ExceptionHandling = ExceptionHandling.ThrowAll;
            }
        }
Example #8
        public void BulkSetEnumerateAll2()
        {
            string path = Path.GetFullPath("TestData\\BulkSetEnumerateAll2");
            var timer = new Stopwatch();
            int totalSize = 0;
            int readSize = 0;

            using (var db = new KeyValueStore(path)) {
                db.Truncate();

                db.Manifest.Logger = (msg) => Console.WriteLine(msg);

                timer.Start();
                for (int i = 0; i < 105000; i++) {
                    var randomKey = BitConverter.GetBytes(i);
                    var randomValue = BitConverter.GetBytes(i);
                    db.Set(randomKey, randomValue);

                    readSize += randomKey.Length + randomValue.Length;
                    totalSize += randomKey.Length + randomValue.Length;
                }
                timer.Stop();
                Console.WriteLine("Wrote sorted table at a throughput of {0} MB/s", (double)totalSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));

                timer.Reset();
                Console.WriteLine("Begin enumeration.");
                timer.Start();
                ByteArray lastKey = ByteArray.Empty;
                int ct = 0;
                foreach (var pair in db.Enumerate()) {
                    try {
                        ByteArray k = new ByteArray(pair.Key);
                        ByteArray v = new ByteArray(pair.Value);
                        Assert.AreEqual(k, v);
                        Assert.True(lastKey.CompareTo(k) < 0);
                        lastKey = k;
                        ct++;
                    } catch (Exception /*e*/) {
                        //Console.WriteLine("Key: {0}\n{1}",insertedItem.Key,e);
                        //Debugger.Launch();
                        //db.Get(insertedItem.Key.InternalBytes);
                        //db.Manifest.LogContents();
                        throw;
                    }
                }
                timer.Stop();
                Assert.AreEqual(105000, ct, "105000 items should be enumerated.");

                Console.WriteLine("Enumerated read throughput of {0} MB/s (avg {1} ms per 1000 items)", (double)readSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0), (double)timer.Elapsed.TotalSeconds / (double)105);
            }
        }
Example #9
        public void BulkSetBulkEnumerateWithCache()
        {
            string path = Path.GetFullPath("TestData\\BulkSetBulkEnumerateWithCache");
            var timer = new Stopwatch();
            int totalSize = 0;
            int readSize = 0;
            int num_items = 100000;

            using (var db = new KeyValueStore(path)) {
                db.Truncate();

                db.Manifest.Logger = (msg) => Console.WriteLine(msg);

                timer.Start();
                for (int i = 0; i < num_items; i++) {
                    var randomKey = ByteArray.Random(40);
                    var randomValue = ByteArray.Random(256);
                    db.Set(randomKey.InternalBytes, randomValue.InternalBytes);

                    readSize += randomKey.Length + randomValue.Length;
                    totalSize += randomKey.Length + randomValue.Length;
                }
                timer.Stop();
                Console.WriteLine("Wrote sorted table at a throughput of {0} MB/s", (double)totalSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));

                timer.Reset();
                timer.Start();
                Assert.AreEqual(num_items, db.Enumerate().Count());
                timer.Stop();
                Console.WriteLine("Enumerated read throughput of {0} MB/s", (double)readSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));

                timer.Reset();
                timer.Start();
                Assert.AreEqual(num_items, db.Enumerate().Count());
                timer.Stop();
                Console.WriteLine("Enumerated (second pass) read throughput of {0} MB/s", (double)readSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
            }
        }
Example #10
        public void BulkSetBulkEnumerateWhileMerging()
        {
            string path = Path.GetFullPath("TestData\\BulkSetBulkEnumerateWhileMerging");
            var timer = new Stopwatch();
            int totalSize = 0;
            int readSize = 0;
            int num_items = 100000;

            using (var db = new KeyValueStore(path)) {
                db.Truncate();

                db.Manifest.Logger = (msg) => Console.WriteLine(msg);

                timer.Start();
                for (int i = 0; i < num_items; i++) {
                    var randomKey = ByteArray.Random(40);
                    var randomValue = ByteArray.Random(256);
                    db.Set(randomKey.InternalBytes, randomValue.InternalBytes);

                    readSize += randomKey.Length + randomValue.Length;
                    totalSize += randomKey.Length + randomValue.Length;
                }
                timer.Stop();
                Console.WriteLine("Wrote sorted table at a throughput of {0} MB/s", (double)totalSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));

                timer.Reset();
                Console.WriteLine("Begin enumeration.");
                timer.Start();
                ByteArray lastKey = new ByteArray();
                int ct = 0;
                foreach (var pair in db.Enumerate()) {
                    try {
                        ByteArray k = new ByteArray(pair.Key);
                        Assert.True(lastKey.CompareTo(k) < 0);
                        lastKey = k;
                        ct++;
                    } catch (Exception e) {
                        Console.WriteLine("Key: {0}\n{1}",pair.Key,e);
                        Debugger.Launch();
                        throw;
                    }
                }
                timer.Stop();
                Assert.AreEqual(num_items, ct, num_items.ToString() + " items should be enumerated.");

                Console.WriteLine("Enumerated read throughput of {0} MB/s", (double)readSize / timer.Elapsed.TotalSeconds / (1024.0 * 1024.0));
            }
        }
Example #11
        public void TestJournalFileGrowth()
        {
            string path = Path.GetFullPath("TestData\\TestJournalFileGrowth");
            // Open the database and overwrite a single key enough times to exercise journal rotation
            using (var db = new KeyValueStore(path)) {
                db.Truncate();

                ByteArray key = ByteArray.Random(40);
                for (int i = 0; i < 100000; i++) {
                    ByteArray value = ByteArray.Random(400);

                    db.Set(key.InternalBytes, value.InternalBytes);
                }

                var journalfileName = Config.JournalFile(db.Manifest.BaseFileName, db.Manifest.CurrentVersion(0));
                var journalLength = new FileInfo(journalfileName).Length;
                // Make sure the journal is smaller than the max memtable size.
                Assert.LessOrEqual(journalLength, Config.MaxMemTableSize);

                // Double check to be sure that the contents of the database are correct in this case.
                int count = 0;
                foreach (var value in db.Enumerate()) {
                    Assert.AreEqual(key.InternalBytes, value.Key);
                    count++;
                }
                Assert.AreEqual(1, count);
            }
        }
Example #12
        public void V1ReadLargeObjectsFromDatastore()
        {
            using (var db = new KeyValueStore(@"..\FormatTestData\V1LargeObjectStore")) {
                foreach (var pair in db.Enumerate()) {
                    Assert.AreEqual(40, pair.Key.Length);
                    Assert.AreEqual(Config.MaxSmallValueSize * 100, pair.Value.Length);
                }
            }
        }
Example #13
        public void IndexClean()
        {
            string path = Path.GetFullPath("TestData\\IndexClean");

            using (var db = new KeyValueStore(path)) {
                db.Truncate();
                db.Manifest.Logger = msg => Console.WriteLine(msg);

                db.Set(Encoding.UTF8.GetBytes("KeyA"), Encoding.UTF8.GetBytes("ValueA:1"), new Dictionary<string, byte[]> { { "Idx", Encoding.UTF8.GetBytes("1") } });
                db.Set(Encoding.UTF8.GetBytes("KeyB"), Encoding.UTF8.GetBytes("ValueB:2"), new Dictionary<string, byte[]> { { "Idx", Encoding.UTF8.GetBytes("2") } });
                db.Set(Encoding.UTF8.GetBytes("KeyC"), Encoding.UTF8.GetBytes("ValueC:3"), new Dictionary<string, byte[]> { { "Idx", Encoding.UTF8.GetBytes("3") } });

                var lookupValue = db.Find("Idx", Encoding.UTF8.GetBytes("3")).Single();
                Assert.AreEqual("ValueC:3", Encoding.UTF8.GetString(lookupValue.Value));
                Assert.AreEqual("KeyC", Encoding.UTF8.GetString(lookupValue.Key));

                db.Delete(Encoding.UTF8.GetBytes("KeyC"));
            }

            // Open the index directly and confirm that the lookup key is still there
            using (var db = new KeyValueStore(Path.Combine(path, "Idx"))) {
                Assert.AreEqual(3, db.Enumerate().Count());
            }

            using (var db = new KeyValueStore(path)) {
                db.CleanIndex("Idx");
            }

            // Open the index directly and confirm that the lookup key is now gone
            using (var db = new KeyValueStore(Path.Combine(path, "Idx"))) {
                Assert.AreEqual(2, db.Enumerate().Count());
            }
        }