/// <summary>
/// Verify that a custom hash comparison delegate is honored by
/// HashDatabase.Compare.
/// </summary>
public void TestHashComparison()
{
    testName = "TestHashComparison";
    SetUpTest(true);
    string dbPath = testHome + "/" + testName + ".db";

    // Open a hash database whose entries are compared via EntryComparison.
    HashDatabaseConfig cfg = new HashDatabaseConfig();
    cfg.Creation = CreatePolicy.IF_NEEDED;
    cfg.HashComparison = new EntryComparisonDelegate(EntryComparison);
    HashDatabase database = HashDatabase.Open(dbPath, cfg);

    /*
     * The comparison returns the lowest byte of the first entry
     * minus the lowest byte of the second entry.
     */
    int result = database.Compare(
        new DatabaseEntry(BitConverter.GetBytes(2)),
        new DatabaseEntry(BitConverter.GetBytes(2)));
    Assert.AreEqual(0, result);

    // Lowest byte of 256 is 0, so 0 - 1 gives a negative result.
    result = database.Compare(
        new DatabaseEntry(BitConverter.GetBytes(256)),
        new DatabaseEntry(BitConverter.GetBytes(1)));
    Assert.Greater(0, result);

    database.Close();
}
/// <summary>
/// Exercise Stats() and FastStats() on a hash database before and
/// after inserting and deleting records.
/// </summary>
public void TestStats()
{
    testName = "TestStats";
    SetUpTest(true);
    string dbPath = testHome + "/" + testName + ".db";

    HashDatabaseConfig cfg = new HashDatabaseConfig();
    ConfigCase1(cfg);
    HashDatabase database = HashDatabase.Open(dbPath, cfg);

    // Statistics on the freshly created (empty) database.
    HashStats stats = database.Stats();
    HashStats fastStats = database.FastStats();
    ConfirmStatsPart1Case1(stats);
    ConfirmStatsPart1Case1(fastStats);

    // Put 100 records into the database.
    PutRecordCase1(database, null);
    stats = database.Stats();
    ConfirmStatsPart2Case1(stats);

    // Delete some data to get some free pages.
    byte[] bigArray = new byte[262144];
    database.Delete(new DatabaseEntry(bigArray));
    stats = database.Stats();
    ConfirmStatsPart3Case1(stats);

    database.Close();
}
/// <summary>
/// Confirm that a secondary hash database reports the duplicates
/// policy it was opened with.
/// </summary>
public void TestDuplicates()
{
    testName = "TestDuplicates";
    SetUpTest(true);
    string primaryPath = testHome + "/" + testName + ".db";
    string secondaryPath = testHome + "/" + testName + "_sec.db";

    // Open a primary hash database.
    HashDatabaseConfig primaryCfg = new HashDatabaseConfig();
    primaryCfg.Creation = CreatePolicy.ALWAYS;
    HashDatabase primary = HashDatabase.Open(dbFileName(primaryPath), primaryCfg);

    // Open a secondary hash database with sorted duplicates.
    SecondaryHashDatabaseConfig secondaryCfg =
        new SecondaryHashDatabaseConfig(null, null);
    secondaryCfg.Primary = primary;
    secondaryCfg.Duplicates = DuplicatesPolicy.SORTED;
    secondaryCfg.Creation = CreatePolicy.IF_NEEDED;
    SecondaryHashDatabase secondary =
        SecondaryHashDatabase.Open(secondaryPath, secondaryCfg);

    // Confirm the duplicates policy in the opened secondary database.
    Assert.AreEqual(DuplicatesPolicy.SORTED, secondary.Duplicates);

    secondary.Close();
    primary.Close();
}

// Local identity helper would not exist in the original; open directly.
private static string dbFileName(string path)
{
    return path;
}
/// <summary>
/// Putting the same key/data pair twice with PutNoDuplicate must
/// raise KeyExistException, which is surfaced as ExpectedTestException.
/// </summary>
public void TestKeyExistException()
{
    testName = "TestKeyExistException";
    SetUpTest(true);
    string dbPath = testHome + "/" + testName + ".db";

    HashDatabaseConfig cfg = new HashDatabaseConfig();
    cfg.Creation = CreatePolicy.ALWAYS;
    cfg.Duplicates = DuplicatesPolicy.SORTED;
    HashDatabase database = HashDatabase.Open(dbPath, cfg);

    // Put the same record into the database twice.
    DatabaseEntry key = new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("1"));
    DatabaseEntry data = new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("1"));
    try
    {
        database.PutNoDuplicate(key, data);
        database.PutNoDuplicate(key, data);
    }
    catch (KeyExistException)
    {
        // The duplicate insert was rejected as intended.
        throw new ExpectedTestException();
    }
    finally
    {
        database.Close();
    }
}
/*
 * Configure and open hash databases for inverted index.
 */
// Opens (creating if needed) the two hash databases backing the
// inverted index: "inverted_index.db" and "max_freq.db".
public InvertedIndex(DocumentsCatalogue documentsCatalogue, DatabaseEnvironment env)
{
    this.documentsCatalogue = documentsCatalogue;
    this.env = env.env;
    /* Configure the database. */
    // FreeThreaded allows the handle to be used from multiple threads.
    // CacheInfo(1, 0, 128): presumably a 1 GB cache split into 128
    // regions — confirm against CacheInfo(gbytes, bytes, nCaches).
    var hashDatabaseConfig = new HashDatabaseConfig()
    {
        Duplicates = DuplicatesPolicy.NONE,
        Creation = CreatePolicy.IF_NEEDED,
        FreeThreaded = true,
        CacheSize = new CacheInfo(1, 0, 128),
        // Env = env.env,
    };
    /* Create the database if does not already exist and open the database file. */
    try
    {
        hashDatabase = HashDatabase.Open("inverted_index.db", hashDatabaseConfig);
        maxFreqDatabase = HashDatabase.Open("max_freq.db", hashDatabaseConfig);
        //Console.WriteLine("{0} open.", dbFileName);
    }
    catch (Exception e)
    {
        // Console.WriteLine("Error opening {0}.", dbFileName);
        Console.WriteLine(e.Message);
        // NOTE(review): the failure is swallowed and the constructor
        // returns with null database handles, so later use of this
        // object will throw NullReferenceException. Consider rethrowing.
        return;
    }
}
/// <summary>
/// PutNoDuplicate is invalid on a database configured for unsorted
/// duplicates; the resulting DatabaseException is surfaced as
/// ExpectedTestException.
/// </summary>
public void TestPutNoDuplicateWithUnsortedDuplicate()
{
    testName = "TestPutNoDuplicateWithUnsortedDuplicate";
    SetUpTest(true);
    string dbPath = testHome + "/" + testName + ".db";

    HashDatabaseConfig cfg = new HashDatabaseConfig();
    cfg.Creation = CreatePolicy.ALWAYS;
    cfg.Duplicates = DuplicatesPolicy.UNSORTED;
    cfg.ErrorPrefix = testName;
    HashDatabase database = HashDatabase.Open(dbPath, cfg);

    DatabaseEntry key = new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("1"));
    DatabaseEntry data = new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("1"));
    try
    {
        database.PutNoDuplicate(key, data);
    }
    catch (DatabaseException)
    {
        throw new ExpectedTestException();
    }
    finally
    {
        database.Close();
    }
}
/// <summary>
/// Opens the backing hash database at the directory given by the
/// "db_path" application setting; the database file name is the
/// concrete factory type's name.
/// </summary>
/// <exception cref="System.IO.FileNotFoundException">
/// Thrown when the "db_path" setting is missing or empty.
/// </exception>
protected HatUserFactory()
{
    string path;
    try
    {
        path = ConfigurationSettings.AppSettings["db_path"];
    }
    catch (Exception ex)
    {
        throw new System.IO.FileNotFoundException("db_path setting not found", ex);
    }
    // Bug fix: the AppSettings indexer returns null for a missing key
    // instead of throwing, so the catch above never fires in that case
    // and a null path reached HashDatabase.Open. Fail fast with a
    // clear error instead.
    if (string.IsNullOrEmpty(path))
    {
        throw new System.IO.FileNotFoundException("db_path setting not found");
    }
    try
    {
        var cfg = new HashDatabaseConfig();
        cfg.Duplicates = DuplicatesPolicy.SORTED;
        cfg.ErrorPrefix = "HatUserFactoryError_";
        cfg.Creation = CreatePolicy.IF_NEEDED;
        // 64 KB cache in a single region, 8 KB pages.
        cfg.CacheSize = new CacheInfo(0, 64 * 1024, 1);
        cfg.PageSize = 8 * 1024;
        db = HashDatabase.Open(path, this.GetType().Name, cfg);
    }
    catch (DatabaseException ex)
    {
        // Log and rethrow: callers must not receive a half-initialized factory.
        Console.WriteLine(ex.Message);
        throw;
    }
}
/// <summary>
/// Configure a secondary hash database from XML and verify the
/// configuration round-trips.
/// </summary>
public void TestConfig()
{
    testName = "TestConfig";
    SetUpTest(true);
    string dbPath = testHome + "/" + testName + ".db";
    XmlElement xmlElem = Configuration.TestSetUp(testFixtureName, testName);

    // Open a primary hash database.
    HashDatabaseConfig primaryCfg = new HashDatabaseConfig();
    primaryCfg.Creation = CreatePolicy.IF_NEEDED;
    HashDatabase primary = HashDatabase.Open(dbPath, primaryCfg);

    // Build the secondary configuration from XML and confirm it.
    SecondaryHashDatabaseConfig secondaryCfg =
        new SecondaryHashDatabaseConfig(primary, null);
    Config(xmlElem, ref secondaryCfg, true);
    Confirm(xmlElem, secondaryCfg, true);

    // Close the primary hash database.
    primary.Close();
}
/// <summary>
/// Insert ten distinct records with PutNoDuplicate and confirm one
/// of them is present afterwards.
/// </summary>
public void TestPutNoDuplicate()
{
    testName = "TestPutNoDuplicate";
    SetUpTest(true);
    string dbPath = testHome + "/" + testName + ".db";

    HashDatabaseConfig cfg = new HashDatabaseConfig();
    cfg.Creation = CreatePolicy.ALWAYS;
    cfg.Duplicates = DuplicatesPolicy.SORTED;
    cfg.TableSize = 20;
    HashDatabase database = HashDatabase.Open(dbPath, cfg);

    // Store keys 1..10, each mapped to itself.
    for (int i = 1; i <= 10; i++)
    {
        DatabaseEntry key = new DatabaseEntry(BitConverter.GetBytes(i));
        DatabaseEntry data = new DatabaseEntry(BitConverter.GetBytes(i));
        database.PutNoDuplicate(key, data);
    }

    Assert.IsTrue(database.Exists(
        new DatabaseEntry(BitConverter.GetBytes((int)5))));
    database.Close();
}
/// <summary>
/// Open (creating if needed) a hash database under the given home
/// directory and return both the database handle and a cursor on it.
/// </summary>
/// <param name="home">Directory holding the database file.</param>
/// <param name="name">Base file name; ".db" is appended.</param>
/// <param name="db">Receives the opened database.</param>
/// <param name="cursor">Receives a cursor on the database.</param>
public void GetHashDBAndCursor(string home, string name,
    out HashDatabase db, out HashCursor cursor)
{
    HashDatabaseConfig cfg = new HashDatabaseConfig();
    cfg.Creation = CreatePolicy.IF_NEEDED;
    db = HashDatabase.Open(home + "/" + name + ".db", cfg);
    cursor = db.Cursor();
}
/// <summary>
/// Verify that assigning Msgfile redirects database statistics
/// output to the given file.
/// </summary>
public void TestMessageFile()
{
    testName = "TestMessageFile";
    SetUpTest(true);

    // Configure and open an environment.
    DatabaseEnvironmentConfig envConfig = new DatabaseEnvironmentConfig();
    envConfig.Create = true;
    envConfig.UseMPool = true;
    DatabaseEnvironment env = DatabaseEnvironment.Open(testHome, envConfig);

    // Configure and open a database.
    HashDatabaseConfig DBConfig = new HashDatabaseConfig();
    DBConfig.Env = env;
    DBConfig.Creation = CreatePolicy.IF_NEEDED;
    string DBFileName = testName + ".db";
    HashDatabase db = HashDatabase.Open(DBFileName, DBConfig);

    // Confirm message file does not exist.
    string messageFile = testHome + "/" + "msgfile";
    Assert.AreEqual(false, File.Exists(messageFile));

    // Call set_msgfile() of db.
    db.Msgfile = messageFile;

    // Print db statistics to the message file.
    db.PrintStats(true);

    // Confirm the message file exists now.
    Assert.AreEqual(true, File.Exists(messageFile));
    db.Msgfile = "";

    // Read the third line of the message file. The reader is disposed
    // via using so the handle is released even if the assertion below
    // throws (the original leaked it in that case).
    string line;
    using (System.IO.StreamReader file =
        new System.IO.StreamReader(messageFile))
    {
        line = file.ReadLine();
        line = file.ReadLine();
        line = file.ReadLine();
    }

    // Confirm the expected header line. Expected value goes first per
    // NUnit convention; the original had the arguments reversed.
    Assert.AreEqual("DB handle information:", line);

    // Close database and environment.
    db.Close();
    env.Close();
}
/// <summary>
/// Verify that Cursor.Insert with InsertLocation.AFTER places a new
/// duplicate record directly after the cursor's current record.
/// </summary>
public void TestInsertToLoc()
{
    testName = "TestInsertToLoc";
    testHome = testFixtureHome + "/" + testName;
    string dbPath = testHome + "/" + testName + ".db";
    Configuration.ClearDir(testHome);

    /*
     * Open database and cursor. The database must allow unsorted
     * duplicates to insert before/after a certain record.
     */
    HashDatabaseConfig cfg = new HashDatabaseConfig();
    cfg.Creation = CreatePolicy.IF_NEEDED;
    cfg.Duplicates = DuplicatesPolicy.UNSORTED;
    HashDatabase database = HashDatabase.Open(dbPath, cfg);
    HashCursor cursor = database.Cursor();

    // Add record("key", "data") into database.
    AddOneByCursor(cursor);

    // Insert record("key", "data1") after record("key", "data").
    DatabaseEntry newData =
        new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("data1"));
    cursor.Insert(newData, Cursor.InsertLocation.AFTER);

    /*
     * Move the cursor back to record("key", "data") and confirm that
     * the next record is the one just inserted.
     */
    KeyValuePair<DatabaseEntry, DatabaseEntry> pair =
        new KeyValuePair<DatabaseEntry, DatabaseEntry>(
            new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("key")),
            new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("data")));
    Assert.IsTrue(cursor.Move(pair, true));
    Assert.IsTrue(cursor.MoveNext());
    Assert.AreEqual(ASCIIEncoding.ASCII.GetBytes("key"),
        cursor.Current.Key.Data);
    Assert.AreEqual(ASCIIEncoding.ASCII.GetBytes("data1"),
        cursor.Current.Value.Data);

    cursor.Close();
    database.Close();
}
/// <summary>
/// Create and open the database of the configured DBType; for
/// BTree/Sequence databases without sorted duplicates, also create
/// an auto-increment sequence backed by the database.
/// </summary>
/// <param name="config">Type-specific database configuration; must match DBType.</param>
private void CreateDB(DatabaseConfig config)
{
    string dbFile = Path.Combine(home, dbName + dbFileEx);
    // NOTE(review): the Path.Combine(home, ...) result above is
    // immediately overwritten here (dead store) — the file is actually
    // placed under the current directory, not under "home". Confirm
    // which location was intended.
    db_File = dbFile = dbName + dbFileEx;
    dbFile = Path.Combine(Environment.CurrentDirectory, db_File);
    switch (DBType)
    {
        case DBType.BTree:
        case DBType.Sequence:
            {
                BTreeDatabaseConfig dbcfg = config as BTreeDatabaseConfig;
                if (DBType == DBType.Sequence)
                {
                    dbcfg.Duplicates = DuplicatesPolicy.NONE;
                }
                db = BTreeDatabase.Open(db_File, dbName, dbcfg);
                if (dbcfg.Duplicates != DuplicatesPolicy.SORTED)
                {
                    /* Configure and initialize sequence. */
                    // Wrapping sequence over the full Int64 range,
                    // keyed by the "excs_sequence" record.
                    seqConfig = new SequenceConfig
                    {
                        BackingDatabase = db,
                        Creation = CreatePolicy.IF_NEEDED,
                        Increment = true,
                        InitialValue = Int64.MaxValue,
                        key = new DatabaseEntry()
                    };
                    seqConfig.SetRange(Int64.MinValue, Int64.MaxValue);
                    seqConfig.Wrap = true;
                    DbtFromString(seqConfig.key, "excs_sequence");
                    seq = new Sequence(seqConfig);
                }
            }
            break;
        case DBType.Hash:
            db = HashDatabase.Open(dbFile, config as HashDatabaseConfig);
            break;
        case DBType.Recno:
            db = RecnoDatabase.Open(dbFile, config as RecnoDatabaseConfig);
            break;
        case DBType.Queue:
            db = QueueDatabase.Open(dbFile, config as QueueDatabaseConfig);
            break;
        default:
            // Fall back to BTree for any unrecognized type.
            db = BTreeDatabase.Open(dbFile, config as BTreeDatabaseConfig);
            break;
    }
}
/// <summary>
/// Demo program: open a hash database, store one key/value pair
/// (attempting it twice to show duplicate handling), then dump all
/// key/data pairs to the console.
/// </summary>
static void Main(string[] args)
{
    try
    {
        var config = new HashDatabaseConfig();
        config.Duplicates = DuplicatesPolicy.UNSORTED;
        config.Creation = CreatePolicy.IF_NEEDED;
        config.CacheSize = new CacheInfo(0, 64 * 1024, 1);
        config.PageSize = 8 * 1024;
        Database db = HashDatabase.Open("d:\\test.db", "hat_db", config);
        Console.WriteLine("db opened");

        var key = new DatabaseEntry();
        var data = new DatabaseEntry();
        key.Data = System.Text.Encoding.ASCII.GetBytes("key1");
        data.Data = System.Text.Encoding.ASCII.GetBytes("val1");
        try
        {
            // Second Put of the identical pair exercises duplicate handling.
            db.Put(key, data);
            db.Put(key, data);
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }

        using (var cursorHandle = db.Cursor())
        {
            System.Text.ASCIIEncoding ascii = new ASCIIEncoding();
            /* Walk through the database and print out key/data pairs. */
            Console.WriteLine("All key : data pairs:");
            foreach (KeyValuePair<DatabaseEntry, DatabaseEntry> entry in cursorHandle)
            {
                Console.WriteLine("{0}::{1}",
                    ascii.GetString(entry.Key.Data),
                    ascii.GetString(entry.Value.Data));
            }
        }

        db.Close();
        Console.WriteLine("db closed");
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
    }
    Console.ReadLine();
}
/// <summary>
/// Open a new hash database configured from XML and confirm the
/// configuration took effect on the open handle.
/// </summary>
public void TestOpenNewHashDB()
{
    testName = "TestOpenNewHashDB";
    SetUpTest(true);
    string dbPath = testHome + "/" + testName + ".db";
    XmlElement xmlElem = Configuration.TestSetUp(testFixtureName, testName);

    HashDatabaseConfig cfg = new HashDatabaseConfig();
    HashDatabaseConfigTest.Config(xmlElem, ref cfg, true);

    HashDatabase database = HashDatabase.Open(dbPath, cfg);
    Confirm(xmlElem, database, true);
    database.Close();
}
/// <summary>
/// AddUnique on a sorted-duplicates database must reject a key/data
/// pair that is already present.
/// </summary>
public void TestAddUnique()
{
    testName = "TestAddUnique";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);

    /*
     * Open a database and cursor. To put no duplicate data,
     * the database should be set to be sorted.
     */
    HashDatabaseConfig cfg = new HashDatabaseConfig();
    cfg.Creation = CreatePolicy.IF_NEEDED;
    cfg.Duplicates = DuplicatesPolicy.SORTED;
    HashDatabase database =
        HashDatabase.Open(testHome + "/" + testName + ".db", cfg);
    HashCursor cursor = database.Cursor();

    // Add record("key", "data") into database.
    AddOneByCursor(cursor);

    // Adding the identical record again must fail.
    KeyValuePair<DatabaseEntry, DatabaseEntry> pair =
        new KeyValuePair<DatabaseEntry, DatabaseEntry>(
            new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("key")),
            new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("data")));
    try
    {
        cursor.AddUnique(pair);
    }
    catch (KeyExistException)
    {
        // Expected: the pair already exists.
    }
    finally
    {
        cursor.Close();
        database.Close();
    }
}
/// <summary>
/// Configure a custom hash function on a secondary hash database and
/// confirm the delegate is reachable and working through the open
/// handle.
/// </summary>
public void TestHashFunction()
{
    testName = "TestHashFunction";
    testHome = testFixtureHome + "/" + testName;
    string primaryPath = testHome + "/" + testName + ".db";
    string secondaryPath = testHome + "/" + testName + "_sec.db";
    Configuration.ClearDir(testHome);

    // Open a primary hash database.
    HashDatabaseConfig primaryCfg = new HashDatabaseConfig();
    primaryCfg.Creation = CreatePolicy.IF_NEEDED;
    HashDatabase primary = HashDatabase.Open(primaryPath, primaryCfg);

    // Define a hash function and open a secondary hash database.
    SecondaryHashDatabaseConfig secondaryCfg =
        new SecondaryHashDatabaseConfig(primary, null);
    secondaryCfg.HashFunction = new HashFunctionDelegate(HashFunction);
    secondaryCfg.Creation = CreatePolicy.IF_NEEDED;
    SecondaryHashDatabase secondary =
        SecondaryHashDatabase.Open(secondaryPath, secondaryCfg);

    /*
     * Call the configured hash function through the open secondary
     * database; the test delegate maps this input to 0, which shows
     * the configured function is the one in effect.
     */
    uint hashValue = secondary.HashFunction(BitConverter.GetBytes(1));
    Assert.AreEqual(0, hashValue);

    // Close all.
    secondary.Close();
    primary.Close();
}
/// <summary>
/// PutNoDuplicate of the same pair twice within a transaction must
/// raise KeyExistException, surfaced as ExpectedTestException.
/// </summary>
public void TestPutNoDuplicateWithTxn()
{
    testName = "TestPutNoDuplicateWithTxn";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);

    // Open a transactional environment.
    DatabaseEnvironmentConfig envConfig = new DatabaseEnvironmentConfig();
    envConfig.Create = true;
    envConfig.UseLogging = true;
    envConfig.UseMPool = true;
    envConfig.UseTxns = true;
    DatabaseEnvironment env = DatabaseEnvironment.Open(testHome, envConfig);

    // Open a hash database within a transaction.
    Transaction txn = env.BeginTransaction();
    HashDatabaseConfig cfg = new HashDatabaseConfig();
    cfg.Creation = CreatePolicy.IF_NEEDED;
    cfg.Duplicates = DuplicatesPolicy.SORTED;
    cfg.Env = env;
    HashDatabase database = HashDatabase.Open(testName + ".db", cfg, txn);

    DatabaseEntry dbt = new DatabaseEntry(BitConverter.GetBytes((int)100));
    database.PutNoDuplicate(dbt, dbt, txn);
    try
    {
        // Second insert of the same pair must be rejected.
        database.PutNoDuplicate(dbt, dbt, txn);
    }
    catch (KeyExistException)
    {
        throw new ExpectedTestException();
    }
    finally
    {
        // Close all.
        database.Close();
        txn.Commit();
        env.Close();
    }
}
/// <summary>
/// Configure a custom hash function on a hash database and confirm
/// it is invoked through the open handle.
/// </summary>
public void TestHashFunction()
{
    testName = "TestHashFunction";
    SetUpTest(true);
    string dbPath = testHome + "/" + testName + ".db";

    HashDatabaseConfig cfg = new HashDatabaseConfig();
    cfg.Creation = CreatePolicy.IF_NEEDED;
    cfg.HashFunction = new HashFunctionDelegate(HashFunction);
    HashDatabase database = HashDatabase.Open(dbPath, cfg);

    // The test hash function changes the lowest byte to 0.
    uint hashValue = database.HashFunction(BitConverter.GetBytes(1));
    Assert.AreEqual(0, hashValue);

    database.Close();
}
/// <summary>
/// Configure a custom Compare delegate on a secondary hash database
/// and verify it through SecondaryHashDatabase.Compare.
/// </summary>
public void TestCompare()
{
    testName = "TestCompare";
    testHome = testFixtureHome + "/" + testName;
    string primaryPath = testHome + "/" + testName + ".db";
    string secondaryPath = testHome + "/" + testName + "_sec.db";
    Configuration.ClearDir(testHome);

    // Open a primary hash database.
    HashDatabaseConfig primaryCfg = new HashDatabaseConfig();
    primaryCfg.Creation = CreatePolicy.ALWAYS;
    HashDatabase primary = HashDatabase.Open(primaryPath, primaryCfg);

    // Open a secondary hash database with a custom comparison.
    SecondaryHashDatabaseConfig secondaryCfg =
        new SecondaryHashDatabaseConfig(null, null);
    secondaryCfg.Creation = CreatePolicy.IF_NEEDED;
    secondaryCfg.Primary = primary;
    secondaryCfg.Compare =
        new EntryComparisonDelegate(SecondaryEntryComparison);
    SecondaryHashDatabase secondary =
        SecondaryHashDatabase.Open(secondaryPath, secondaryCfg);

    /*
     * Run the configured comparison through the open handle and
     * check the sign of its result.
     */
    DatabaseEntry lhs = new DatabaseEntry(BitConverter.GetBytes((int)257));
    DatabaseEntry rhs = new DatabaseEntry(BitConverter.GetBytes((int)255));
    Assert.Less(0, secondary.Compare(lhs, rhs));

    secondary.Close();
    primary.Close();
}
/// <summary>
/// Opens the hash database backing this repository, creating it
/// under the DATA_DIR directory if necessary. Database errors are
/// routed to the repository's logger.
/// </summary>
/// <param name="databaseName">Database file name (without extension), logger category, and error prefix.</param>
/// <param name="tableSize">Value passed to HashDatabaseConfig.TableSize.</param>
/// <param name="loggerService">Factory used to create this repository's logger.</param>
protected Repository(string databaseName, uint tableSize, ILoggerFactory loggerService)
{
    logger = loggerService.CreateLogger(databaseName);
    path = Environment.GetEnvironmentVariable("DATA_DIR");

    var databaseConfig = new HashDatabaseConfig
    {
        Creation = CreatePolicy.IF_NEEDED,
        CacheSize = new CacheInfo(1, 0, 1),
        // Forward BerkeleyDB error callbacks into the logger.
        ErrorFeedback = (prefix, message) =>
        {
            logger.LogCritical($"{prefix}: {message}");
        },
        ErrorPrefix = databaseName,
        Duplicates = DuplicatesPolicy.UNSORTED,
        TableSize = tableSize
    };

    db = HashDatabase.Open(
        Path.Combine(path, databaseName + ".db"), databaseConfig);
}
/// <summary>
/// Initializes a new instance of the <see cref="AtomDictionary"/> class.
/// </summary>
/// <param name="databaseName">Name of the TripleT database this dictionary belongs to.</param>
public AtomDictionary(string databaseName)
{
    //
    // names for each of the files involved
    var nameStr2Long = String.Format("{0}.dict.str.dat", databaseName);
    var nameLong2Str = String.Format("{0}.dict.int.dat", databaseName);
    m_fileNextValue = String.Format("{0}.dict.dat", databaseName);
    //
    // we manually keep a file containing the value for our auto-incrementing index integer
    // assigned to new string values inserted into the dictionary. BerkeleyDB does not
    // support such a feature...
    if (File.Exists(m_fileNextValue))
    {
        // Resume the counter from the persisted little-endian Int64.
        using (var sr = new BinaryReader(File.Open(m_fileNextValue,
            FileMode.Open, FileAccess.Read, FileShare.Read)))
        {
            m_next = sr.ReadInt64();
        }
    }
    else
    {
        // Fresh dictionary: identifiers start at 1 (0 stays unused).
        m_next = 1;
    }
    //
    // configuration for the dictionary databases. memory allocated for the caches is
    // hardcoded here.
    var config = new HashDatabaseConfig();
    config.Duplicates = DuplicatesPolicy.NONE;
    config.CacheSize = new CacheInfo(0, 256 * 1024 * 1024, 4);
    config.PageSize = 512;
    config.Creation = CreatePolicy.IF_NEEDED;
    //
    // opening the databases...
    // Both directions of the mapping share one configuration object.
    m_dbStr2Long = HashDatabase.Open(nameStr2Long, config);
    m_dbLong2Str = HashDatabase.Open(nameLong2Str, config);
}
/// <summary>
/// A database created as HASH must report DatabaseType.HASH when
/// reopened through the generic Database API.
/// </summary>
public void TestOpenExistingHashDB()
{
    testName = "TestOpenExistingHashDB";
    SetUpTest(true);
    string dbPath = testHome + "/" + testName + ".db";

    // Create a fresh hash database, then close it.
    HashDatabaseConfig hashCfg = new HashDatabaseConfig();
    hashCfg.Creation = CreatePolicy.ALWAYS;
    HashDatabase hashDatabase = HashDatabase.Open(dbPath, hashCfg);
    hashDatabase.Close();

    // Reopen through the generic API and check the reported type.
    DatabaseConfig genericCfg = new DatabaseConfig();
    Database reopened = Database.Open(dbPath, genericCfg);
    Assert.AreEqual(reopened.Type, DatabaseType.HASH);
    reopened.Close();
}
/*
 * Configure and open hash database for documents.
 * Each document has a unique ID and is stored in the database with hash key the document's ID and value the document.
 */
// NOTE(review): the env parameter is not used in this constructor —
// compare with InvertedIndex, which assigns env.env. Confirm whether
// the databases were meant to be opened inside the environment.
public DocumentsCatalogue(DatabaseEnvironment env)
{
    /* Configure the database. */
    // FreeThreaded allows the handle to be used from multiple threads.
    var hashDatabaseConfig = new HashDatabaseConfig
    {
        Duplicates = DuplicatesPolicy.NONE,
        Creation = CreatePolicy.IF_NEEDED,
        FreeThreaded = true,
    };
    /* Create the database if does not already exist and open the database file. */
    try
    {
        hashDatabase = HashDatabase.Open("documents_catalogue.db", hashDatabaseConfig);
        urlDatabase = HashDatabase.Open("visited_urls.db", hashDatabaseConfig);
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
        // NOTE(review): the failure is swallowed and the constructor
        // returns with null database handles; later use of this object
        // will throw NullReferenceException. Consider rethrowing.
        return;
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="Index"/> class and
/// opens its backing BerkeleyDB hash database.
/// </summary>
/// <param name="databaseName">Name of the database this index belongs to.</param>
/// <param name="sBucket">The s bucket belonging to this database.</param>
/// <param name="pBucket">The p bucket belonging to this database.</param>
/// <param name="oBucket">The o bucket belonging to this database.</param>
public Index(string databaseName, Bucket sBucket, Bucket pBucket, Bucket oBucket)
{
    m_sBucket = sBucket;
    m_pBucket = pBucket;
    m_oBucket = oBucket;

    /*
     * The index is stored in a BerkeleyDB hash database; its
     * configuration (cache size, page size) is hardcoded here.
     */
    var indexDbName = String.Format("{0}.index.dat", databaseName);
    var dbConfig = new HashDatabaseConfig();
    dbConfig.Duplicates = DuplicatesPolicy.NONE;
    dbConfig.CacheSize = new CacheInfo(0, 256 * 1024 * 1024, 4);
    dbConfig.PageSize = 512;
    dbConfig.Creation = CreatePolicy.IF_NEEDED;

    // Open the index database.
    m_db = HashDatabase.Open(indexDbName, dbConfig);
}
/// <summary>
/// Exercise Stats()/FastStats() inside a transaction, optionally
/// with explicit isolation degrees, mirroring TestStats.
/// </summary>
/// <param name="home">Environment home directory.</param>
/// <param name="name">Base name for the database file (".db" is appended).</param>
/// <param name="ifIsolation">Whether to pass explicit Isolation levels.</param>
public void StatsInTxn(string home, string name, bool ifIsolation)
{
    DatabaseEnvironmentConfig envConfig = new DatabaseEnvironmentConfig();
    EnvConfigCase1(envConfig);
    DatabaseEnvironment env = DatabaseEnvironment.Open(home, envConfig);

    // Open the database within its own transaction.
    Transaction openTxn = env.BeginTransaction();
    HashDatabaseConfig dbConfig = new HashDatabaseConfig();
    ConfigCase1(dbConfig);
    dbConfig.Env = env;
    HashDatabase db = HashDatabase.Open(name + ".db", dbConfig, openTxn);
    openTxn.Commit();

    Transaction statsTxn = env.BeginTransaction();
    HashStats stats;
    HashStats fastStats;
    if (ifIsolation == false)
    {
        stats = db.Stats(statsTxn);
        // Bug fix: the original called Stats() here, so FastStats was
        // never actually exercised in this test.
        fastStats = db.FastStats(statsTxn);
    }
    else
    {
        stats = db.Stats(statsTxn, Isolation.DEGREE_ONE);
        fastStats = db.FastStats(statsTxn, Isolation.DEGREE_ONE);
    }
    ConfirmStatsPart1Case1(stats);
    // Confirm the fast statistics as well, matching TestStats.
    ConfirmStatsPart1Case1(fastStats);

    // Put 100 records into the database.
    PutRecordCase1(db, statsTxn);
    if (ifIsolation == false)
    {
        stats = db.Stats(statsTxn);
    }
    else
    {
        stats = db.Stats(statsTxn, Isolation.DEGREE_TWO);
    }
    ConfirmStatsPart2Case1(stats);

    // Delete some data to get some free pages.
    byte[] bigArray = new byte[262144];
    db.Delete(new DatabaseEntry(bigArray), statsTxn);
    if (ifIsolation == false)
    {
        stats = db.Stats(statsTxn);
    }
    else
    {
        stats = db.Stats(statsTxn, Isolation.DEGREE_THREE);
    }
    ConfirmStatsPart3Case1(stats);

    statsTxn.Commit();
    db.Close();
    env.Close();
}
/// <summary>
/// Fill a hash database, delete ranges of records, then compact it
/// and verify that compaction reports work done and that the file
/// shrinks on disk.
/// </summary>
public void TestCompactWithoutTxn()
{
    int i, nRecs;
    nRecs = 10000;
    testName = "TestCompactWithoutTxn";
    SetUpTest(true);
    string hashDBFileName = testHome + "/" + testName + ".db";

    HashDatabaseConfig hashDBConfig = new HashDatabaseConfig();
    hashDBConfig.Creation = CreatePolicy.ALWAYS;
    // The minimum page size
    hashDBConfig.PageSize = 512;
    hashDBConfig.HashComparison = new EntryComparisonDelegate(dbIntCompare);
    using (HashDatabase hashDB = HashDatabase.Open(
        hashDBFileName, hashDBConfig))
    {
        DatabaseEntry key;
        DatabaseEntry data;

        // Fill the database with entries from 0 to 9999
        for (i = 0; i < nRecs; i++)
        {
            key = new DatabaseEntry(BitConverter.GetBytes(i));
            data = new DatabaseEntry(BitConverter.GetBytes(i));
            hashDB.Put(key, data);
        }

        /*
         * Delete entries below 500, between 3000 and
         * 5000 and above 7000
         */
        for (i = 0; i < nRecs; i++)
        {
            if (i < 500 || i > 7000 || (i < 5000 && i > 3000))
            {
                key = new DatabaseEntry(BitConverter.GetBytes(i));
                hashDB.Delete(key);
            }
        }

        // Flush so the on-disk size reflects the deletions before
        // measuring.
        hashDB.Sync();
        long fileSize = new FileInfo(hashDBFileName).Length;

        // Compact database
        CompactConfig cCfg = new CompactConfig();
        cCfg.FillPercentage = 30;
        cCfg.Pages = 10;
        cCfg.Timeout = 1000;
        cCfg.TruncatePages = true;
        cCfg.start = new DatabaseEntry(BitConverter.GetBytes(1));
        cCfg.stop = new DatabaseEntry(BitConverter.GetBytes(7000));
        CompactData compactData = hashDB.Compact(cCfg);

        // At least one compaction counter must be non-zero, i.e. the
        // compaction did some observable work.
        Assert.IsFalse((compactData.Deadlocks == 0) &&
            (compactData.Levels == 0) &&
            (compactData.PagesExamined == 0) &&
            (compactData.PagesFreed == 0) &&
            (compactData.PagesTruncated == 0));

        // After truncation the file must be smaller than before.
        hashDB.Sync();
        long compactedFileSize = new FileInfo(hashDBFileName).Length;
        Assert.Less(compactedFileSize, fileSize);
    }
}
/*
 * Test the external file database with or without environment.
 * 1. Config and open the environment;
 * 2. Verify the environment external file configs;
 * 3. Config and open the database;
 * 4. Verify the database external file configs;
 * 5. Insert and verify some external file data by database methods;
 * 6. Insert some external file data by cursor, update it and verify
 * the update by database stream and cursor;
 * 7. Verify the stats;
 * 8. Close all handles.
 * If "blobdbt" is true, set the data DatabaseEntry.ExternalFile as
 * true, otherwise make the data DatabaseEntry reach the external file
 * threshold in size.
 */
void TestBlobHashDatabase(uint env_threshold, string env_blobdir,
    uint db_threshold, string db_blobdir, bool blobdbt)
{
    // Nothing to test unless at least one threshold enables external files.
    if (env_threshold == 0 && db_threshold == 0)
    {
        return;
    }

    string hashDBName = testHome + "/" + testName + ".db";

    Configuration.ClearDir(testHome);
    HashDatabaseConfig cfg = new HashDatabaseConfig();
    cfg.Creation = CreatePolicy.ALWAYS;
    // Default external-file root; overridden by explicit blob dirs below.
    string blrootdir = "__db_bl";

    // Open the environment and verify the external file config.
    if (env_threshold > 0)
    {
        DatabaseEnvironmentConfig envConfig =
            new DatabaseEnvironmentConfig();
        envConfig.AutoCommit = true;
        envConfig.Create = true;
        envConfig.UseMPool = true;
        envConfig.UseLogging = true;
        envConfig.UseTxns = true;
        envConfig.UseLocking = true;
        envConfig.ExternalFileThreshold = env_threshold;
        if (env_blobdir != null)
        {
            envConfig.ExternalFileDir = env_blobdir;
            blrootdir = env_blobdir;
        }
        DatabaseEnvironment env = DatabaseEnvironment.Open(
            testHome, envConfig);
        if (env_blobdir == null)
        {
            Assert.IsNull(env.ExternalFileDir);
        }
        else
        {
            Assert.AreEqual(0,
                env.ExternalFileDir.CompareTo(env_blobdir));
        }
        Assert.AreEqual(env_threshold, env.ExternalFileThreshold);
        cfg.Env = env;
        // With an environment the file name is relative to its home.
        hashDBName = testName + ".db";
    }

    // Open the database and verify the external file config.
    if (db_threshold > 0)
    {
        cfg.ExternalFileThreshold = db_threshold;
    }
    if (db_blobdir != null)
    {
        cfg.ExternalFileDir = db_blobdir;
        /*
         * The external file directory setting in the database
         * is effective only when it is opened without
         * an environment.
         */
        if (cfg.Env == null)
        {
            blrootdir = db_blobdir;
        }
    }

    HashDatabase db = HashDatabase.Open(hashDBName, cfg);
    // The database-level threshold wins over the environment's.
    Assert.AreEqual(
        db_threshold > 0 ? db_threshold : env_threshold,
        db.ExternalFileThreshold);
    if (db_blobdir == null && cfg.Env == null)
    {
        Assert.IsNull(db.ExternalFileDir);
    }
    else
    {
        Assert.AreEqual(0, db.ExternalFileDir.CompareTo(blrootdir));
    }

    // Insert and verify some external file data by database
    // methods.
    string[] records = { "a", "b", "c", "d", "e", "f", "g", "h",
        "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t",
        "u", "v", "w", "x", "y", "z" };
    DatabaseEntry kdbt = new DatabaseEntry();
    DatabaseEntry ddbt = new DatabaseEntry();
    byte[] kdata, ddata;
    string str;
    KeyValuePair<DatabaseEntry, DatabaseEntry> pair;
    ddbt.ExternalFile = blobdbt;
    Assert.AreEqual(blobdbt, ddbt.ExternalFile);
    for (int i = 0; i < records.Length; i++)
    {
        kdata = BitConverter.GetBytes(i);
        str = records[i];
        if (!blobdbt)
        {
            // Grow the value past the external-file threshold so it is
            // stored externally even without the ExternalFile flag.
            for (int j = 0; j < db_threshold; j++)
            {
                str = str + records[i];
            }
        }
        ddata = Encoding.ASCII.GetBytes(str);
        kdbt.Data = kdata;
        ddbt.Data = ddata;
        db.Put(kdbt, ddbt);
        try
        {
            pair = db.Get(kdbt);
        }
        catch (DatabaseException)
        {
            // Clean up before signalling the failure.
            db.Close();
            if (cfg.Env != null)
            {
                cfg.Env.Close();
            }
            throw new TestException();
        }
        Assert.AreEqual(ddata, pair.Value.Data);
    }

    /*
     * Insert some external file data by cursor, update it and
     * verify the update by database stream.
     */
    kdata = BitConverter.GetBytes(records.Length);
    ddata = Encoding.ASCII.GetBytes("abc");
    kdbt.Data = kdata;
    ddbt.Data = ddata;
    ddbt.ExternalFile = true;
    Assert.IsTrue(ddbt.ExternalFile);
    pair = new KeyValuePair<DatabaseEntry, DatabaseEntry>(kdbt, ddbt);
    CursorConfig dbcConfig = new CursorConfig();
    Transaction txn = null;
    if (cfg.Env != null)
    {
        txn = cfg.Env.BeginTransaction();
    }
    HashCursor cursor = db.Cursor(dbcConfig, txn);
    cursor.Add(pair);
    DatabaseStreamConfig dbsc = new DatabaseStreamConfig();
    dbsc.SyncPerWrite = true;
    DatabaseStream dbs = cursor.DbStream(dbsc);
    Assert.AreNotEqual(null, dbs);
    Assert.IsFalse(dbs.GetConfig.ReadOnly);
    Assert.IsTrue(dbs.GetConfig.SyncPerWrite);
    // "abc" is 3 bytes; read it back through the stream.
    Assert.AreEqual(3, dbs.Size());
    DatabaseEntry sdbt = dbs.Read(0, 3);
    Assert.IsNotNull(sdbt);
    Assert.AreEqual(ddata, sdbt.Data);
    // Append "defg" at offset 3, growing the value to "abcdefg".
    sdbt = new DatabaseEntry(Encoding.ASCII.GetBytes("defg"));
    Assert.IsTrue(dbs.Write(sdbt, 3));
    Assert.AreEqual(7, dbs.Size());
    sdbt = dbs.Read(0, 7);
    Assert.IsNotNull(sdbt);
    Assert.AreEqual(Encoding.ASCII.GetBytes("abcdefg"), sdbt.Data);
    dbs.Close();

    /*
     * Verify the database stream can not write when it is
     * configured to be read-only.
     */
    dbsc.ReadOnly = true;
    dbs = cursor.DbStream(dbsc);
    Assert.IsTrue(dbs.GetConfig.ReadOnly);
    try
    {
        Assert.IsFalse(dbs.Write(sdbt, 7));
        throw new TestException();
    }
    catch (DatabaseException)
    {
    }
    dbs.Close();

    // Verify the update by cursor.
    Assert.IsTrue(cursor.Move(kdbt, true));
    pair = cursor.Current;
    Assert.AreEqual(Encoding.ASCII.GetBytes("abcdefg"), pair.Value.Data);
    cursor.Close();
    if (cfg.Env != null)
    {
        txn.Commit();
    }

    /*
     * Verify the external files are created in the expected
     * location.
     * This part of test is disabled since BTreeDatabase.BlobSubDir
     * is not exposed to users.
     */
    //if (cfg.Env != null)
    //    blrootdir = testHome + "/" + blrootdir;
    //string blobdir = blrootdir + "/" + db.BlobSubDir;
    //Assert.AreEqual(records.Length + 1,
    //    Directory.GetFiles(blobdir, "__db.bl*").Length);
    //Assert.AreEqual(1,
    //    Directory.GetFiles(blobdir, "__db_blob_meta.db").Length);

    // Verify the stats.
    // 26 records from the loop plus one added via the cursor.
    HashStats st = db.Stats();
    Assert.AreEqual(records.Length + 1, st.nExternalFiles);

    // Close all handles.
    db.Close();
    if (cfg.Env != null)
    {
        cfg.Env.Close();
    }

    /*
     * Remove the default external file directory
     * when it is not under the test home.
     */
    if (db_blobdir == null && cfg.Env == null)
    {
        Directory.Delete("__db_bl", true);
    }
}
/*
 * Verify foreign key delete semantics for every ForeignKeyDeleteAction:
 *   ABORT   - deleting a referenced foreign key throws
 *             ForeignConflictException and leaves all records intact.
 *   CASCADE - deleting the foreign key also deletes the referencing
 *             primary/secondary records.
 *   NULLIFY - the foreign key is deleted, the referencing secondary
 *             records are nullified (via the Nullify delegate), and the
 *             primary records survive.
 * The test builds a primary/foreign/secondary database triple of the
 * requested access method (BTREE, HASH, QUEUE or RECNO).
 */
public void TestForeignKeyDelete(DatabaseType dbtype,
    ForeignKeyDeleteAction action)
{
	SetUpTest(true);
	string dbFileName = testHome + "/" + testName + ".db";
	string fdbFileName = testHome + "/" + testName + "foreign.db";
	string sdbFileName = testHome + "/" + testName + "sec.db";
	Database primaryDB, fdb;
	SecondaryDatabase secDB;

	// Open the primary database and the foreign database with the
	// access-method-specific configuration.
	if (dbtype == DatabaseType.BTREE) {
		BTreeDatabaseConfig btConfig =
		    new BTreeDatabaseConfig();
		btConfig.Creation = CreatePolicy.ALWAYS;
		primaryDB = BTreeDatabase.Open(dbFileName, btConfig);
		fdb = BTreeDatabase.Open(fdbFileName, btConfig);
	} else if (dbtype == DatabaseType.HASH) {
		HashDatabaseConfig hConfig =
		    new HashDatabaseConfig();
		hConfig.Creation = CreatePolicy.ALWAYS;
		primaryDB = HashDatabase.Open(dbFileName, hConfig);
		fdb = HashDatabase.Open(fdbFileName, hConfig);
	} else if (dbtype == DatabaseType.QUEUE) {
		QueueDatabaseConfig qConfig =
		    new QueueDatabaseConfig();
		qConfig.Creation = CreatePolicy.ALWAYS;
		qConfig.Length = 4;
		primaryDB = QueueDatabase.Open(dbFileName, qConfig);
		fdb = QueueDatabase.Open(fdbFileName, qConfig);
	} else if (dbtype == DatabaseType.RECNO) {
		RecnoDatabaseConfig rConfig =
		    new RecnoDatabaseConfig();
		rConfig.Creation = CreatePolicy.ALWAYS;
		primaryDB = RecnoDatabase.Open(dbFileName, rConfig);
		fdb = RecnoDatabase.Open(fdbFileName, rConfig);
	} else {
		throw new ArgumentException("Invalid DatabaseType");
	}

	// Open the secondary database with a foreign key constraint on
	// fdb.  NULLIFY additionally requires a nullifier callback.
	if (dbtype == DatabaseType.BTREE) {
		SecondaryBTreeDatabaseConfig secbtConfig =
		    new SecondaryBTreeDatabaseConfig(primaryDB,
		    new SecondaryKeyGenDelegate(SecondaryKeyGen));
		secbtConfig.Creation = CreatePolicy.ALWAYS;
		secbtConfig.Duplicates = DuplicatesPolicy.SORTED;
		if (action == ForeignKeyDeleteAction.NULLIFY) {
			secbtConfig.SetForeignKeyConstraint(fdb, action,
			    new ForeignKeyNullifyDelegate(Nullify));
		} else {
			secbtConfig.SetForeignKeyConstraint(fdb, action);
		}
		secDB = SecondaryBTreeDatabase.Open(sdbFileName,
		    secbtConfig);
	} else if (dbtype == DatabaseType.HASH) {
		SecondaryHashDatabaseConfig sechConfig =
		    new SecondaryHashDatabaseConfig(primaryDB,
		    new SecondaryKeyGenDelegate(SecondaryKeyGen));
		sechConfig.Creation = CreatePolicy.ALWAYS;
		sechConfig.Duplicates = DuplicatesPolicy.SORTED;
		if (action == ForeignKeyDeleteAction.NULLIFY) {
			sechConfig.SetForeignKeyConstraint(fdb, action,
			    new ForeignKeyNullifyDelegate(Nullify));
		} else {
			sechConfig.SetForeignKeyConstraint(fdb, action);
		}
		secDB = SecondaryHashDatabase.Open(sdbFileName,
		    sechConfig);
	} else if (dbtype == DatabaseType.QUEUE) {
		SecondaryQueueDatabaseConfig secqConfig =
		    new SecondaryQueueDatabaseConfig(primaryDB,
		    new SecondaryKeyGenDelegate(SecondaryKeyGen));
		secqConfig.Creation = CreatePolicy.ALWAYS;
		secqConfig.Length = 4;
		if (action == ForeignKeyDeleteAction.NULLIFY) {
			secqConfig.SetForeignKeyConstraint(fdb, action,
			    new ForeignKeyNullifyDelegate(Nullify));
		} else {
			secqConfig.SetForeignKeyConstraint(fdb, action);
		}
		secDB = SecondaryQueueDatabase.Open(sdbFileName,
		    secqConfig);
	} else if (dbtype == DatabaseType.RECNO) {
		SecondaryRecnoDatabaseConfig secrConfig =
		    new SecondaryRecnoDatabaseConfig(primaryDB,
		    new SecondaryKeyGenDelegate(SecondaryKeyGen));
		secrConfig.Creation = CreatePolicy.ALWAYS;
		if (action == ForeignKeyDeleteAction.NULLIFY) {
			secrConfig.SetForeignKeyConstraint(fdb, action,
			    new ForeignKeyNullifyDelegate(Nullify));
		} else {
			secrConfig.SetForeignKeyConstraint(fdb, action);
		}
		secDB = SecondaryRecnoDatabase.Open(sdbFileName,
		    secrConfig);
	} else {
		throw new ArgumentException("Invalid DatabaseType");
	}

	/* Use integer keys for Queue/Recno support. */
	fdb.Put(new DatabaseEntry(BitConverter.GetBytes(100)),
	    new DatabaseEntry(BitConverter.GetBytes(1001)));
	fdb.Put(new DatabaseEntry(BitConverter.GetBytes(200)),
	    new DatabaseEntry(BitConverter.GetBytes(2002)));
	fdb.Put(new DatabaseEntry(BitConverter.GetBytes(300)),
	    new DatabaseEntry(BitConverter.GetBytes(3003)));
	primaryDB.Put(new DatabaseEntry(BitConverter.GetBytes(1)),
	    new DatabaseEntry(BitConverter.GetBytes(100)));
	primaryDB.Put(new DatabaseEntry(BitConverter.GetBytes(2)),
	    new DatabaseEntry(BitConverter.GetBytes(200)));
	// Sorted-duplicate support exists only for BTREE/HASH, so only
	// those types get a second record referencing foreign key 100.
	if (dbtype == DatabaseType.BTREE || dbtype == DatabaseType.HASH) {
		primaryDB.Put(
		    new DatabaseEntry(BitConverter.GetBytes(3)),
		    new DatabaseEntry(BitConverter.GetBytes(100)));
	}

	/*
	 * Delete a referenced foreign key.  Only the ABORT action is
	 * expected to raise ForeignConflictException here.
	 */
	try {
		fdb.Delete(
		    new DatabaseEntry(BitConverter.GetBytes(100)));
	} catch (ForeignConflictException) {
		Assert.AreEqual(ForeignKeyDeleteAction.ABORT, action);
	}

	if (action == ForeignKeyDeleteAction.ABORT) {
		// Nothing was deleted: all three databases still hold
		// their records.
		Assert.IsTrue(secDB.Exists(
		    new DatabaseEntry(BitConverter.GetBytes(100))));
		Assert.IsTrue(primaryDB.Exists(
		    new DatabaseEntry(BitConverter.GetBytes(1))));
		Assert.IsTrue(fdb.Exists(
		    new DatabaseEntry(BitConverter.GetBytes(100))));
	} else if (action == ForeignKeyDeleteAction.CASCADE) {
		/*
		 * The delete cascaded everywhere.  Queue/Recno report a
		 * deleted slot via KeyEmptyException rather than a
		 * false return from Exists().
		 */
		try {
			Assert.IsFalse(secDB.Exists(
			    new DatabaseEntry(BitConverter.GetBytes(100))));
		} catch (KeyEmptyException) {
			Assert.IsTrue(dbtype == DatabaseType.QUEUE ||
			    dbtype == DatabaseType.RECNO);
		}
		try {
			Assert.IsFalse(primaryDB.Exists(
			    new DatabaseEntry(BitConverter.GetBytes(1))));
		} catch (KeyEmptyException) {
			Assert.IsTrue(dbtype == DatabaseType.QUEUE ||
			    dbtype == DatabaseType.RECNO);
		}
		try {
			Assert.IsFalse(fdb.Exists(
			    new DatabaseEntry(BitConverter.GetBytes(100))));
		} catch (KeyEmptyException) {
			Assert.IsTrue(dbtype == DatabaseType.QUEUE ||
			    dbtype == DatabaseType.RECNO);
		}
	} else if (action == ForeignKeyDeleteAction.NULLIFY) {
		/*
		 * The secondary record was nullified and the foreign
		 * key removed, but the primary record remains.
		 */
		try {
			Assert.IsFalse(secDB.Exists(
			    new DatabaseEntry(BitConverter.GetBytes(100))));
		} catch (KeyEmptyException) {
			Assert.IsTrue(dbtype == DatabaseType.QUEUE ||
			    dbtype == DatabaseType.RECNO);
		}
		Assert.IsTrue(primaryDB.Exists(
		    new DatabaseEntry(BitConverter.GetBytes(1))));
		try {
			Assert.IsFalse(fdb.Exists(
			    new DatabaseEntry(BitConverter.GetBytes(100))));
		} catch (KeyEmptyException) {
			Assert.IsTrue(dbtype == DatabaseType.QUEUE ||
			    dbtype == DatabaseType.RECNO);
		}
	}

	// Close secondary database.
	secDB.Close();

	// Close primary database.
	primaryDB.Close();

	// Close foreign database.
	fdb.Close();
}
/*
 * Exercise bulk delete through a secondary database for the given
 * access method.  100 records (key == data == BitConverter bytes of
 * 1..100) are loaded into the primary, then:
 *   mulKey == true  - keys 81..100 are bulk-deleted with a
 *                     MultipleKeyDatabaseEntry (key/value pairs) and
 *                     the primary must end up with 80 records.
 *   mulKey == false - keys 50..59 are bulk-deleted with a
 *                     MultipleDatabaseEntry (keys only); for
 *                     Queue/Recno the remaining non-deleted keys are
 *                     then bulk-deleted via a recno-based buffer,
 *                     leaving 20 records.
 * Deleting through the secondary must delete the corresponding
 * primary records as well.
 */
private void DeleteMultipleAndMultipleKey(string dbFileName,
    string dbName, DatabaseType type, bool mulKey)
{
	List<DatabaseEntry> kList = new List<DatabaseEntry>();
	List<uint> rList = new List<uint>();
	List<KeyValuePair<DatabaseEntry, DatabaseEntry>> pList =
	    new List<KeyValuePair<DatabaseEntry, DatabaseEntry>>();
	DatabaseEntry key;
	Database db;
	SecondaryDatabase secDb;

	Configuration.ClearDir(testHome);

	// Open the primary database and its secondary, configured for
	// the requested access method.
	if (type == DatabaseType.BTREE) {
		BTreeDatabaseConfig dbConfig =
		    new BTreeDatabaseConfig();
		dbConfig.Creation = CreatePolicy.IF_NEEDED;
		db = BTreeDatabase.Open(
		    dbFileName, dbName, dbConfig);
		SecondaryBTreeDatabaseConfig secDbConfig =
		    new SecondaryBTreeDatabaseConfig(db, null);
		secDbConfig.Creation = CreatePolicy.IF_NEEDED;
		secDbConfig.Duplicates = DuplicatesPolicy.SORTED;
		secDbConfig.KeyGen =
		    new SecondaryKeyGenDelegate(SecondaryKeyGen);
		secDb = SecondaryBTreeDatabase.Open(
		    dbFileName, dbName + "_sec", secDbConfig);
	} else if (type == DatabaseType.HASH) {
		HashDatabaseConfig dbConfig =
		    new HashDatabaseConfig();
		dbConfig.Creation = CreatePolicy.IF_NEEDED;
		db = HashDatabase.Open(
		    dbFileName, dbName, dbConfig);
		SecondaryHashDatabaseConfig secDbConfig =
		    new SecondaryHashDatabaseConfig(db, null);
		secDbConfig.Creation = CreatePolicy.IF_NEEDED;
		secDbConfig.Duplicates = DuplicatesPolicy.SORTED;
		secDbConfig.KeyGen =
		    new SecondaryKeyGenDelegate(SecondaryKeyGen);
		secDb = SecondaryHashDatabase.Open(
		    dbFileName, dbName + "_sec", secDbConfig);
	} else if (type == DatabaseType.QUEUE) {
		// Queue databases do not support named subdatabases, so
		// the secondary lives in its own "_sec" file instead.
		QueueDatabaseConfig dbConfig =
		    new QueueDatabaseConfig();
		dbConfig.Creation = CreatePolicy.IF_NEEDED;
		dbConfig.Length = 4;
		db = QueueDatabase.Open(dbFileName, dbConfig);
		SecondaryQueueDatabaseConfig secDbConfig =
		    new SecondaryQueueDatabaseConfig(db, null);
		secDbConfig.Creation = CreatePolicy.IF_NEEDED;
		secDbConfig.Length = 4;
		secDbConfig.KeyGen =
		    new SecondaryKeyGenDelegate(SecondaryKeyGen);
		secDb = SecondaryQueueDatabase.Open(
		    dbFileName + "_sec", secDbConfig);
	} else if (type == DatabaseType.RECNO) {
		RecnoDatabaseConfig dbConfig =
		    new RecnoDatabaseConfig();
		dbConfig.Creation = CreatePolicy.IF_NEEDED;
		db = RecnoDatabase.Open(
		    dbFileName, dbName, dbConfig);
		SecondaryRecnoDatabaseConfig secDbConfig =
		    new SecondaryRecnoDatabaseConfig(db, null);
		secDbConfig.Creation = CreatePolicy.IF_NEEDED;
		secDbConfig.KeyGen =
		    new SecondaryKeyGenDelegate(SecondaryKeyGen);
		secDb = SecondaryRecnoDatabase.Open(
		    dbFileName, dbName + "_sec", secDbConfig);
	} else {
		throw new TestException();
	}

	/*
	 * Load 100 records and collect the deletion fixtures:
	 * kList gets keys 50..59, pList gets key/value pairs for
	 * 81..100, and (Queue/Recno only) rList gets every other
	 * record number.
	 */
	for (uint i = 1; i <= 100; i++) {
		key = new DatabaseEntry(BitConverter.GetBytes(i));
		if (i >= 50 && i < 60) {
			kList.Add(key);
		} else if (i > 80) {
			pList.Add(new KeyValuePair<
			    DatabaseEntry, DatabaseEntry>(key, key));
		} else if (type == DatabaseType.QUEUE ||
		    type == DatabaseType.RECNO) {
			rList.Add(i);
		}
		db.Put(key, key);
	}

	// Queue/Recno bulk buffers use the recno format.
	bool recnoFormat = (type == DatabaseType.QUEUE ||
	    type == DatabaseType.RECNO);

	if (mulKey) {
		// Create bulk buffer for key/value pairs.
		MultipleKeyDatabaseEntry pBuff =
		    new MultipleKeyDatabaseEntry(pList, recnoFormat);

		// Bulk delete with the key/value pair bulk buffer.
		secDb.Delete(pBuff);
		foreach (KeyValuePair<DatabaseEntry, DatabaseEntry>
		    pair in pList) {
			/*
			 * Every deleted pair must be gone from the
			 * primary.  Btree/Hash/Recno report this as
			 * NotFoundException, Queue as
			 * KeyEmptyException; the wrong exception type
			 * is rethrown (with "throw;" to preserve the
			 * stack trace) and fails the test.
			 */
			try {
				db.GetBoth(pair.Key, pair.Value);
				throw new TestException();
			} catch (NotFoundException) {
				if (type == DatabaseType.QUEUE) {
					throw;
				}
			} catch (KeyEmptyException) {
				if (type == DatabaseType.BTREE ||
				    type == DatabaseType.HASH ||
				    type == DatabaseType.RECNO) {
					throw;
				}
			}
		}
		/*
		 * Dump the database to verify that 80 records
		 * remain after bulk delete.
		 */
		Assert.AreEqual(80, db.Truncate());
	} else {
		// Create bulk buffer for keys only.
		MultipleDatabaseEntry kBuff =
		    new MultipleDatabaseEntry(kList, recnoFormat);

		/*
		 * Bulk delete in the secondary database with the key
		 * buffer.  The primary records matching the records
		 * deleted in the secondary database should be deleted
		 * as well.
		 */
		secDb.Delete(kBuff);
		foreach (DatabaseEntry dbt in kList) {
			try {
				db.Get(dbt);
				throw new TestException();
			} catch (NotFoundException) {
				// Queue/Recno should see KeyEmpty, not
				// NotFound, for a deleted slot.
				if (type == DatabaseType.QUEUE ||
				    type == DatabaseType.RECNO) {
					throw;
				}
			} catch (KeyEmptyException) {
				if (type == DatabaseType.BTREE ||
				    type == DatabaseType.HASH) {
					throw;
				}
			}
		}

		/*
		 * Bulk delete in the secondary database with a recno
		 * based key buffer; 20 records must remain.
		 */
		if (type == DatabaseType.QUEUE ||
		    type == DatabaseType.RECNO) {
			MultipleDatabaseEntry rBuff =
			    new MultipleDatabaseEntry(rList);
			secDb.Delete(rBuff);
			Assert.AreEqual(20, db.Truncate());
		}
	}

	secDb.Close();
	db.Close();
}