/// <summary>
/// PutNoDuplicate on a database configured with UNSORTED duplicates is
/// invalid and must raise a DatabaseException.
/// </summary>
public void TestPutNoDuplicateWithUnsortedDuplicate()
{
    testName = "TestPutNoDuplicateWithUnsortedDuplicate";
    SetUpTest(true);

    HashDatabaseConfig cfg = new HashDatabaseConfig();
    cfg.Creation = CreatePolicy.ALWAYS;
    cfg.Duplicates = DuplicatesPolicy.UNSORTED;
    cfg.ErrorPrefix = testName;
    HashDatabase db = HashDatabase.Open(
        testHome + "/" + testName + ".db", cfg);

    DatabaseEntry key =
        new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("1"));
    DatabaseEntry data =
        new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("1"));
    try
    {
        // Not allowed on an unsorted-duplicates database.
        db.PutNoDuplicate(key, data);
    }
    catch (DatabaseException)
    {
        throw new ExpectedTestException();
    }
    finally
    {
        db.Close();
    }
}
/// <summary>
/// Verify that a custom hash comparison delegate set on the config is
/// honored by HashDatabase.Compare.
/// </summary>
public void TestHashComparison()
{
    testName = "TestHashComparison";
    SetUpTest(true);

    HashDatabaseConfig cfg = new HashDatabaseConfig();
    cfg.Creation = CreatePolicy.IF_NEEDED;
    cfg.HashComparison = new EntryComparisonDelegate(EntryComparison);
    HashDatabase db = HashDatabase.Open(
        testHome + "/" + testName + ".db", cfg);

    /*
     * The comparison returns the lowest byte of the first entry
     * minus the lowest byte of the second entry.
     */
    int result = db.Compare(
        new DatabaseEntry(BitConverter.GetBytes(2)),
        new DatabaseEntry(BitConverter.GetBytes(2)));
    Assert.AreEqual(0, result);

    // Lowest byte of 256 is 0, so 0 - 1 gives a negative result.
    result = db.Compare(
        new DatabaseEntry(BitConverter.GetBytes(256)),
        new DatabaseEntry(BitConverter.GetBytes(1)));
    Assert.Greater(0, result);

    db.Close();
}
/// <summary>
/// Storing the same key/data pair twice with PutNoDuplicate in a
/// sorted-duplicates database must raise KeyExistException.
/// </summary>
public void TestKeyExistException()
{
    testName = "TestKeyExistException";
    SetUpTest(true);

    HashDatabaseConfig cfg = new HashDatabaseConfig();
    cfg.Creation = CreatePolicy.ALWAYS;
    cfg.Duplicates = DuplicatesPolicy.SORTED;
    HashDatabase db = HashDatabase.Open(
        testHome + "/" + testName + ".db", cfg);

    DatabaseEntry key =
        new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("1"));
    DatabaseEntry data =
        new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("1"));
    try
    {
        // Put the same record into the database twice.
        db.PutNoDuplicate(key, data);
        db.PutNoDuplicate(key, data);
    }
    catch (KeyExistException)
    {
        throw new ExpectedTestException();
    }
    finally
    {
        db.Close();
    }
}
/// <summary>
/// Opens (creating if needed) the backing hash database whose file path
/// is given by the "db_path" application setting.
/// </summary>
/// <exception cref="System.IO.FileNotFoundException">
/// Thrown when the "db_path" setting is missing or empty.
/// </exception>
protected HatUserFactory()
{
    // BUG FIX: AppSettings[...] returns null for a missing key instead of
    // throwing, so the original try/catch could never fire. Check the
    // value explicitly to preserve the intended FileNotFoundException.
    string path = ConfigurationSettings.AppSettings["db_path"];
    if (string.IsNullOrEmpty(path))
        throw new System.IO.FileNotFoundException("db_path setting not found");

    try
    {
        var cfg = new HashDatabaseConfig();
        cfg.Duplicates = DuplicatesPolicy.SORTED;
        cfg.ErrorPrefix = "HatUserFactoryError_";
        cfg.Creation = CreatePolicy.IF_NEEDED;
        cfg.CacheSize = new CacheInfo(0, 64 * 1024, 1);
        cfg.PageSize = 8 * 1024;
        // The sub-database is named after the concrete factory type.
        db = HashDatabase.Open(path, this.GetType().Name, cfg);
    }
    catch (DatabaseException ex)
    {
        // Log and surface the failure; the factory is unusable without db.
        Console.WriteLine(ex.Message);
        throw;
    }
}
/// <summary>
/// Configure a secondary hash database from the XML test fixture and
/// confirm the settings round-trip.
/// </summary>
public void TestConfig()
{
    testName = "TestConfig";
    SetUpTest(true);
    XmlElement xmlElem = Configuration.TestSetUp(
        testFixtureName, testName);

    // Open the primary hash database.
    HashDatabaseConfig primaryCfg = new HashDatabaseConfig();
    primaryCfg.Creation = CreatePolicy.IF_NEEDED;
    HashDatabase primaryDB = HashDatabase.Open(
        testHome + "/" + testName + ".db", primaryCfg);

    // Configure the secondary database and verify the settings.
    SecondaryHashDatabaseConfig secCfg =
        new SecondaryHashDatabaseConfig(primaryDB, null);
    Config(xmlElem, ref secCfg, true);
    Confirm(xmlElem, secCfg, true);

    // Close the primary hash database.
    primaryDB.Close();
}
/// <summary>
/// Loads 100 records: keys 0-49 store small integer pairs, iterations
/// 50-99 repeatedly store one large (256KB, zero-filled) entry. Uses
/// the transactional Put overload when a transaction is supplied.
/// </summary>
public void PutRecordCase1(HashDatabase db, Transaction txn)
{
    byte[] bigArray = new byte[262144];
    for (int i = 0; i < 100; i++)
    {
        DatabaseEntry k, d;
        if (i < 50)
        {
            k = new DatabaseEntry(BitConverter.GetBytes(i));
            d = new DatabaseEntry(BitConverter.GetBytes(i));
        }
        else
        {
            k = new DatabaseEntry(bigArray);
            d = new DatabaseEntry(bigArray);
        }

        if (txn == null)
            db.Put(k, d);
        else
            db.Put(k, d, txn);
    }
}
/// <summary>
/// Exercise Stats/FastStats on an empty, a populated, and a
/// partially-deleted hash database.
/// </summary>
public void TestStats()
{
    testName = "TestStats";
    SetUpTest(true);

    HashDatabaseConfig cfg = new HashDatabaseConfig();
    ConfigCase1(cfg);
    HashDatabase db = HashDatabase.Open(
        testHome + "/" + testName + ".db", cfg);

    // Both full and fast statistics on the fresh database.
    HashStats fullStats = db.Stats();
    HashStats fastStats = db.FastStats();
    ConfirmStatsPart1Case1(fullStats);
    ConfirmStatsPart1Case1(fastStats);

    // Put 100 records into the database.
    PutRecordCase1(db, null);
    fullStats = db.Stats();
    ConfirmStatsPart2Case1(fullStats);

    // Delete some data to get some free pages.
    byte[] bigArray = new byte[262144];
    db.Delete(new DatabaseEntry(bigArray));
    fullStats = db.Stats();
    ConfirmStatsPart3Case1(fullStats);

    db.Close();
}
/// <summary>
/// PutNoDuplicate stores distinct keys and they remain retrievable.
/// </summary>
public void TestPutNoDuplicate()
{
    testName = "TestPutNoDuplicate";
    SetUpTest(true);

    HashDatabaseConfig cfg = new HashDatabaseConfig();
    cfg.Creation = CreatePolicy.ALWAYS;
    cfg.Duplicates = DuplicatesPolicy.SORTED;
    cfg.TableSize = 20;
    HashDatabase db = HashDatabase.Open(
        testHome + "/" + testName + ".db", cfg);

    // Store ten records whose data equals their key.
    for (int i = 1; i <= 10; i++)
    {
        db.PutNoDuplicate(
            new DatabaseEntry(BitConverter.GetBytes(i)),
            new DatabaseEntry(BitConverter.GetBytes(i)));
    }

    // Spot-check one of the stored keys.
    Assert.IsTrue(db.Exists(
        new DatabaseEntry(BitConverter.GetBytes((int)5))));
    db.Close();
}
/*
 * Configure and open the hash databases backing the inverted index.
 */
public InvertedIndex(DocumentsCatalogue documentsCatalogue, DatabaseEnvironment env)
{
    this.documentsCatalogue = documentsCatalogue;
    this.env = env.env;

    /* Configure the databases: unique keys, create on first use,
     * free-threaded handles, CacheInfo(1, 0, 128) cache. */
    var hashDatabaseConfig = new HashDatabaseConfig()
    {
        Duplicates = DuplicatesPolicy.NONE,
        Creation = CreatePolicy.IF_NEEDED,
        FreeThreaded = true,
        CacheSize = new CacheInfo(1, 0, 128),
    };

    /* Create the databases if they do not already exist and open the files. */
    try
    {
        hashDatabase = HashDatabase.Open("inverted_index.db", hashDatabaseConfig);
        maxFreqDatabase = HashDatabase.Open("max_freq.db", hashDatabaseConfig);
    }
    catch (Exception e)
    {
        // BUG FIX: the original swallowed the failure and returned,
        // leaving both handles null and deferring an NRE to first use.
        // Log and rethrow so construction fails loudly instead.
        Console.WriteLine(e.Message);
        throw;
    }
}
/// <summary>
/// A secondary hash database opened with sorted duplicates reports
/// DuplicatesPolicy.SORTED through its Duplicates property.
/// </summary>
public void TestDuplicates()
{
    testName = "TestDuplicates";
    SetUpTest(true);
    string primaryFile = testHome + "/" + testName + ".db";
    string secondaryFile = testHome + "/" + testName + "_sec.db";

    // Open the primary hash database.
    HashDatabaseConfig primaryCfg = new HashDatabaseConfig();
    primaryCfg.Creation = CreatePolicy.ALWAYS;
    HashDatabase primaryDB = HashDatabase.Open(primaryFile, primaryCfg);

    // Open a secondary hash database with sorted duplicates.
    SecondaryHashDatabaseConfig secCfg =
        new SecondaryHashDatabaseConfig(null, null);
    secCfg.Primary = primaryDB;
    secCfg.Duplicates = DuplicatesPolicy.SORTED;
    secCfg.Creation = CreatePolicy.IF_NEEDED;
    SecondaryHashDatabase secDB =
        SecondaryHashDatabase.Open(secondaryFile, secCfg);

    // Confirm the duplicates policy on the opened secondary database.
    Assert.AreEqual(DuplicatesPolicy.SORTED, secDB.Duplicates);

    secDB.Close();
    primaryDB.Close();
}
/// <summary>
/// Open (creating if needed) a hash database at home/name.db and return
/// the database handle together with a fresh cursor on it.
/// </summary>
public void GetHashDBAndCursor(string home, string name,
    out HashDatabase db, out HashCursor cursor)
{
    HashDatabaseConfig cfg = new HashDatabaseConfig();
    cfg.Creation = CreatePolicy.IF_NEEDED;
    db = HashDatabase.Open(home + "/" + name + ".db", cfg);
    cursor = db.Cursor();
}
/// <summary>
/// Verify that assigning Msgfile routes PrintStats output to the given
/// file and that the produced file contains the expected header line.
/// </summary>
public void TestMessageFile()
{
    testName = "TestMessageFile";
    SetUpTest(true);

    // Configure and open an environment.
    DatabaseEnvironmentConfig envConfig =
        new DatabaseEnvironmentConfig();
    envConfig.Create = true;
    envConfig.UseMPool = true;
    DatabaseEnvironment env = DatabaseEnvironment.Open(
        testHome, envConfig);

    // Configure and open a database.
    HashDatabaseConfig DBConfig = new HashDatabaseConfig();
    DBConfig.Env = env;
    DBConfig.Creation = CreatePolicy.IF_NEEDED;
    string DBFileName = testName + ".db";
    HashDatabase db = HashDatabase.Open(DBFileName, DBConfig);

    // Confirm message file does not exist.
    string messageFile = testHome + "/" + "msgfile";
    Assert.AreEqual(false, File.Exists(messageFile));

    // Call set_msgfile() of db.
    db.Msgfile = messageFile;

    // Print db statistic to message file.
    db.PrintStats(true);

    // Confirm message file exists now.
    Assert.AreEqual(true, File.Exists(messageFile));

    // Detach the handle from the message file before reading it.
    db.Msgfile = "";
    string line = null;

    // Read the third line of message file.
    System.IO.StreamReader file = new System.IO.StreamReader(@"" + messageFile);
    line = file.ReadLine();
    line = file.ReadLine();
    line = file.ReadLine();

    // Confirm the message file is not empty.
    Assert.AreEqual(line, "DB handle information:");
    file.Close();

    // Close database and environment.
    db.Close();
    env.Close();
}
/// <summary>
/// Verify Cursor.Insert with InsertLocation.AFTER places the new
/// duplicate record immediately after the cursor's current record.
/// </summary>
public void TestInsertToLoc()
{
    HashDatabase db;
    HashDatabaseConfig dbConfig;
    HashCursor cursor;
    DatabaseEntry data;
    KeyValuePair<DatabaseEntry, DatabaseEntry> pair;
    string dbFileName;

    testName = "TestInsertToLoc";
    testHome = testFixtureHome + "/" + testName;
    dbFileName = testHome + "/" + testName + ".db";
    Configuration.ClearDir(testHome);

    // Open database and cursor.
    dbConfig = new HashDatabaseConfig();
    dbConfig.Creation = CreatePolicy.IF_NEEDED;

    /*
     * The database should be set to be unsorted to
     * insert before/after a certain record.
     */
    dbConfig.Duplicates = DuplicatesPolicy.UNSORTED;
    db = HashDatabase.Open(dbFileName, dbConfig);
    cursor = db.Cursor();

    // Add record("key", "data") into database.
    AddOneByCursor(cursor);

    /*
     * Insert the new record("key","data1") after the
     * record("key", "data").
     */
    data = new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("data1"));
    cursor.Insert(data, Cursor.InsertLocation.AFTER);

    /*
     * Move the cursor to the record("key", "data") and
     * confirm that the next record is the one just inserted.
     */
    pair = new KeyValuePair<DatabaseEntry, DatabaseEntry>(
        new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("key")),
        new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("data")));
    Assert.IsTrue(cursor.Move(pair, true));
    Assert.IsTrue(cursor.MoveNext());
    Assert.AreEqual(ASCIIEncoding.ASCII.GetBytes("key"),
        cursor.Current.Key.Data);
    Assert.AreEqual(ASCIIEncoding.ASCII.GetBytes("data1"),
        cursor.Current.Value.Data);

    cursor.Close();
    db.Close();
}
/// <summary>
/// Creates (opens) the database of the configured DBType using the
/// supplied configuration object.
/// </summary>
/// <param name="config">Type-specific database configuration; cast to
/// the concrete config class matching DBType.</param>
private void CreateDB(DatabaseConfig config)
{
    // NOTE(review): dbFile is computed from 'home' and then immediately
    // overwritten from the current directory, so the first Combine
    // result is discarded — confirm which location is intended.
    string dbFile = Path.Combine(home, dbName + dbFileEx);
    db_File = dbFile = dbName + dbFileEx;
    dbFile = Path.Combine(Environment.CurrentDirectory, db_File);
    switch (DBType)
    {
        case DBType.BTree:
        case DBType.Sequence:
            {
                BTreeDatabaseConfig dbcfg = config as BTreeDatabaseConfig;
                if (DBType == DBType.Sequence)
                {
                    // Sequences are backed by unique keys.
                    dbcfg.Duplicates = DuplicatesPolicy.NONE;
                }
                // BTree/Sequence open by relative file name plus db name.
                db = BTreeDatabase.Open(db_File, dbName, dbcfg);
                if (dbcfg.Duplicates != DuplicatesPolicy.SORTED)
                {
                    /* Configure and initialize sequence. */
                    seqConfig = new SequenceConfig
                    {
                        BackingDatabase = db,
                        Creation = CreatePolicy.IF_NEEDED,
                        Increment = true,
                        InitialValue = Int64.MaxValue,
                        key = new DatabaseEntry()
                    };
                    // Full Int64 range with wrap-around.
                    seqConfig.SetRange(Int64.MinValue, Int64.MaxValue);
                    seqConfig.Wrap = true;
                    DbtFromString(seqConfig.key, "excs_sequence");
                    seq = new Sequence(seqConfig);
                }
            }
            break;
        case DBType.Hash:
            db = HashDatabase.Open(dbFile, config as HashDatabaseConfig);
            break;
        case DBType.Recno:
            db = RecnoDatabase.Open(dbFile, config as RecnoDatabaseConfig);
            break;
        case DBType.Queue:
            db = QueueDatabase.Open(dbFile, config as QueueDatabaseConfig);
            break;
        default:
            // Fall back to BTree for unrecognized types.
            db = BTreeDatabase.Open(dbFile, config as BTreeDatabaseConfig);
            break;
    }
}
/// <summary>
/// Console demo: open/create a hash database, insert one key twice (the
/// second Put reports the outcome), then dump every key/data pair.
/// </summary>
static void Main(string[] args)
{
    try
    {
        var cfg = new HashDatabaseConfig();
        cfg.Duplicates = DuplicatesPolicy.UNSORTED;
        cfg.Creation = CreatePolicy.IF_NEEDED;
        cfg.CacheSize = new CacheInfo(0, 64 * 1024, 1);
        cfg.PageSize = 8 * 1024;
        Database db = HashDatabase.Open("d:\\test.db", "hat_db", cfg);
        Console.WriteLine("db opened");

        var key = new DatabaseEntry();
        var data = new DatabaseEntry();
        key.Data = System.Text.Encoding.ASCII.GetBytes("key1");
        data.Data = System.Text.Encoding.ASCII.GetBytes("val1");
        try
        {
            db.Put(key, data);
            db.Put(key, data);
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }

        using (var dbc = db.Cursor())
        {
            System.Text.ASCIIEncoding decode = new ASCIIEncoding();

            /* Walk through the database and print out key/data pairs. */
            Console.WriteLine("All key : data pairs:");
            foreach (KeyValuePair<DatabaseEntry, DatabaseEntry> p in dbc)
            {
                Console.WriteLine("{0}::{1}",
                    decode.GetString(p.Key.Data),
                    decode.GetString(p.Value.Data));
            }
        }

        db.Close();
        Console.WriteLine("db closed");
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
    }
    Console.ReadLine();
}
/// <summary>
/// Open a brand-new hash database configured from the XML fixture and
/// confirm the resulting handle matches that configuration.
/// </summary>
public void TestOpenNewHashDB()
{
    testName = "TestOpenNewHashDB";
    SetUpTest(true);

    XmlElement xmlElem = Configuration.TestSetUp(
        testFixtureName, testName);
    HashDatabaseConfig cfg = new HashDatabaseConfig();
    HashDatabaseConfigTest.Config(xmlElem, ref cfg, true);

    HashDatabase db = HashDatabase.Open(
        testHome + "/" + testName + ".db", cfg);
    Confirm(xmlElem, db, true);
    db.Close();
}
/// <summary>
/// Cursor.AddUnique on an already-present key/data pair must raise
/// KeyExistException in a sorted-duplicates database.
/// </summary>
public void TestAddUnique()
{
    testName = "TestAddUnique";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);

    /*
     * To put no duplicate data, the database should be
     * set to be sorted.
     */
    HashDatabaseConfig cfg = new HashDatabaseConfig();
    cfg.Creation = CreatePolicy.IF_NEEDED;
    cfg.Duplicates = DuplicatesPolicy.SORTED;
    HashDatabase db = HashDatabase.Open(
        testHome + "/" + testName + ".db", cfg);
    HashCursor cursor = db.Cursor();

    // Add record("key", "data") into database.
    AddOneByCursor(cursor);

    // Attempt to add the identical record; AddUnique must fail.
    KeyValuePair<DatabaseEntry, DatabaseEntry> pair =
        new KeyValuePair<DatabaseEntry, DatabaseEntry>(
            new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("key")),
            new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes("data")));
    try
    {
        cursor.AddUnique(pair);
    }
    catch (KeyExistException)
    {
    }
    finally
    {
        cursor.Close();
        db.Close();
    }
}
/// <summary>
/// Configure a custom hash function on a secondary hash database and
/// confirm the delegate is reachable and effective through the handle.
/// </summary>
public void TestHashFunction()
{
    testName = "TestHashFunction";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);

    // Open the primary hash database.
    HashDatabaseConfig primaryCfg = new HashDatabaseConfig();
    primaryCfg.Creation = CreatePolicy.IF_NEEDED;
    HashDatabase primaryDB = HashDatabase.Open(
        testHome + "/" + testName + ".db", primaryCfg);

    /*
     * Define hash function and open a secondary
     * hash database.
     */
    SecondaryHashDatabaseConfig secCfg =
        new SecondaryHashDatabaseConfig(primaryDB, null);
    secCfg.HashFunction = new HashFunctionDelegate(HashFunction);
    secCfg.Creation = CreatePolicy.IF_NEEDED;
    SecondaryHashDatabase secDB = SecondaryHashDatabase.Open(
        testHome + "/" + testName + "_sec.db", secCfg);

    /*
     * Invoke the hash function through the opened handle; it must
     * produce the value our delegate computes (0 for this input),
     * proving the configuration took effect.
     */
    uint hashed = secDB.HashFunction(BitConverter.GetBytes(1));
    Assert.AreEqual(0, hashed);

    // Close all.
    secDB.Close();
    primaryDB.Close();
}
/// <summary>
/// PutNoDuplicate of an already-present key inside a transaction must
/// raise KeyExistException.
/// </summary>
public void TestPutNoDuplicateWithTxn()
{
    testName = "TestPutNoDuplicateWithTxn";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);

    // Open a transactional environment.
    DatabaseEnvironmentConfig envConfig =
        new DatabaseEnvironmentConfig();
    envConfig.Create = true;
    envConfig.UseLogging = true;
    envConfig.UseMPool = true;
    envConfig.UseTxns = true;
    DatabaseEnvironment env = DatabaseEnvironment.Open(
        testHome, envConfig);

    // Open a hash database within a transaction.
    Transaction txn = env.BeginTransaction();
    HashDatabaseConfig cfg = new HashDatabaseConfig();
    cfg.Creation = CreatePolicy.IF_NEEDED;
    cfg.Duplicates = DuplicatesPolicy.SORTED;
    cfg.Env = env;
    HashDatabase db = HashDatabase.Open(testName + ".db", cfg, txn);

    DatabaseEntry entry =
        new DatabaseEntry(BitConverter.GetBytes((int)100));
    db.PutNoDuplicate(entry, entry, txn);
    try
    {
        // The second identical put must be rejected.
        db.PutNoDuplicate(entry, entry, txn);
    }
    catch (KeyExistException)
    {
        throw new ExpectedTestException();
    }
    finally
    {
        // Close all.
        db.Close();
        txn.Commit();
        env.Close();
    }
}
/// <summary>
/// A custom hash function set on the database config is exposed through
/// the handle; ours maps the input's lowest byte to 0.
/// </summary>
public void TestHashFunction()
{
    testName = "TestHashFunction";
    SetUpTest(true);

    HashDatabaseConfig cfg = new HashDatabaseConfig();
    cfg.Creation = CreatePolicy.IF_NEEDED;
    cfg.HashFunction = new HashFunctionDelegate(HashFunction);
    HashDatabase db = HashDatabase.Open(
        testHome + "/" + testName + ".db", cfg);

    // Hash function will change the lowest byte to 0.
    uint hashed = db.HashFunction(BitConverter.GetBytes(1));
    Assert.AreEqual(0, hashed);

    db.Close();
}
/// <summary>
/// Confirm that an opened hash database reflects the settings described
/// by the XML configuration element.
/// </summary>
public static void Confirm(XmlElement xmlElem,
    HashDatabase hashDB, bool compulsory)
{
    DatabaseTest.Confirm(xmlElem, hashDB, compulsory);

    Configuration.ConfirmCreatePolicy(xmlElem,
        "Creation", hashDB.Creation, compulsory);
    Configuration.ConfirmDuplicatesPolicy(xmlElem,
        "Duplicates", hashDB.Duplicates, compulsory);
    Configuration.ConfirmUint(xmlElem,
        "FillFactor", hashDB.FillFactor, compulsory);
    Configuration.ConfirmUint(xmlElem,
        "NumElements", hashDB.TableSize, compulsory);

    // The handle must identify itself as a hash database.
    Assert.AreEqual(DatabaseType.HASH, hashDB.Type);
    string typeString = hashDB.Type.ToString();
    Assert.IsNotNull(typeString);
}
/// <summary>
/// A comparison delegate set on a secondary hash database config is
/// used by SecondaryHashDatabase.Compare.
/// </summary>
public void TestCompare()
{
    testName = "TestCompare";
    testHome = testFixtureHome + "/" + testName;
    Configuration.ClearDir(testHome);

    // Open the primary hash database.
    HashDatabaseConfig primaryCfg = new HashDatabaseConfig();
    primaryCfg.Creation = CreatePolicy.ALWAYS;
    HashDatabase primaryDB = HashDatabase.Open(
        testHome + "/" + testName + ".db", primaryCfg);

    // Open a secondary hash database with a custom comparison.
    SecondaryHashDatabaseConfig secCfg =
        new SecondaryHashDatabaseConfig(null, null);
    secCfg.Creation = CreatePolicy.IF_NEEDED;
    secCfg.Primary = primaryDB;
    secCfg.Compare =
        new EntryComparisonDelegate(SecondaryEntryComparison);
    SecondaryHashDatabase secDB = SecondaryHashDatabase.Open(
        testHome + "/" + testName + "_sec.db", secCfg);

    /*
     * Run the configured compare function on two entries and
     * check that 257 compares greater than 255.
     */
    DatabaseEntry lhs = new DatabaseEntry(
        BitConverter.GetBytes((int)257));
    DatabaseEntry rhs = new DatabaseEntry(
        BitConverter.GetBytes((int)255));
    Assert.Less(0, secDB.Compare(lhs, rhs));

    secDB.Close();
    primaryDB.Close();
}
/// <summary>
/// Opens (creating if needed) the hash database backing this repository
/// under the directory named by the DATA_DIR environment variable.
/// </summary>
/// <param name="databaseName">Database file name (without extension) and logger category.</param>
/// <param name="tableSize">Estimated element count used to size the hash table.</param>
/// <param name="loggerService">Factory used to create the repository logger.</param>
/// <exception cref="InvalidOperationException">
/// Thrown when the DATA_DIR environment variable is not set.
/// </exception>
protected Repository(string databaseName, uint tableSize, ILoggerFactory loggerService)
{
    logger = loggerService.CreateLogger(databaseName);

    // BUG FIX: Path.Combine throws an opaque ArgumentNullException when
    // DATA_DIR is unset; fail early with an actionable message instead.
    path = Environment.GetEnvironmentVariable("DATA_DIR");
    if (string.IsNullOrEmpty(path))
        throw new InvalidOperationException(
            "DATA_DIR environment variable is not set");

    var cfg = new HashDatabaseConfig
    {
        Creation = CreatePolicy.IF_NEEDED,
        CacheSize = new CacheInfo(1, 0, 1),
        // Route BerkeleyDB error output into the application logger.
        ErrorFeedback = (prefix, message) =>
        {
            logger.LogCritical($"{prefix}: {message}");
        },
        ErrorPrefix = databaseName,
        Duplicates = DuplicatesPolicy.UNSORTED,
        TableSize = tableSize
    };
    db = HashDatabase.Open(Path.Combine(path, databaseName + ".db"), cfg);
}
/// <summary>
/// Initializes a new instance of the <see cref="AtomDictionary"/> class.
/// Opens the two hash databases mapping strings to integer identifiers
/// and back, and restores the next-identifier counter from disk.
/// </summary>
/// <param name="databaseName">Name of the TripleT database this dictionary belongs to.</param>
public AtomDictionary(string databaseName)
{
    //
    // names for each of the files involved
    var nameStr2Long = String.Format("{0}.dict.str.dat", databaseName);
    var nameLong2Str = String.Format("{0}.dict.int.dat", databaseName);
    m_fileNextValue = String.Format("{0}.dict.dat", databaseName);

    //
    // we manually keep a file containing the value for our auto-incrementing index integer
    // assigned to new string values inserted into the dictionary. BerkeleyDB does not
    // support such a feature...
    if (File.Exists(m_fileNextValue))
    {
        using (var sr = new BinaryReader(File.Open(m_fileNextValue, FileMode.Open, FileAccess.Read, FileShare.Read)))
        {
            m_next = sr.ReadInt64();
        }
    }
    else
    {
        // Fresh dictionary: identifiers start at 1.
        m_next = 1;
    }

    //
    // configuration for the dictionary databases. memory allocated for the caches is
    // hardcoded here.
    var config = new HashDatabaseConfig();
    config.Duplicates = DuplicatesPolicy.NONE;
    config.CacheSize = new CacheInfo(0, 256 * 1024 * 1024, 4);
    config.PageSize = 512;
    config.Creation = CreatePolicy.IF_NEEDED;

    //
    // opening the databases... both directions share the same config.
    m_dbStr2Long = HashDatabase.Open(nameStr2Long, config);
    m_dbLong2Str = HashDatabase.Open(nameLong2Str, config);
}
/// <summary>
/// A database created as HASH can be reopened through the generic
/// Database API and still reports DatabaseType.HASH.
/// </summary>
public void TestOpenExistingHashDB()
{
    testName = "TestOpenExistingHashDB";
    SetUpTest(true);
    string dbFileName = testHome + "/" + testName + ".db";

    // Create the hash database, then close it.
    HashDatabaseConfig hashCfg = new HashDatabaseConfig();
    hashCfg.Creation = CreatePolicy.ALWAYS;
    HashDatabase hashDB = HashDatabase.Open(dbFileName, hashCfg);
    hashDB.Close();

    // Reopen it through the base Database class.
    Database db = Database.Open(dbFileName, new DatabaseConfig());
    Assert.AreEqual(db.Type, DatabaseType.HASH);
    db.Close();
}
/*
 * Configure and open hash database for documents.
 * Each document has a unique ID and is stored in the database with hash key the document's ID and value the document.
 */
public DocumentsCatalogue(DatabaseEnvironment env)
{
    /* Configure the database. */
    var hashDatabaseConfig = new HashDatabaseConfig
    {
        Duplicates = DuplicatesPolicy.NONE,
        Creation = CreatePolicy.IF_NEEDED,
        FreeThreaded = true,
    };

    /* Create the databases if they do not already exist and open the files. */
    try
    {
        hashDatabase = HashDatabase.Open("documents_catalogue.db", hashDatabaseConfig);
        urlDatabase = HashDatabase.Open("visited_urls.db", hashDatabaseConfig);
    }
    catch (Exception e)
    {
        // BUG FIX: the original swallowed the failure and returned,
        // leaving both handles null and deferring an NRE to first use.
        // Log and rethrow so construction fails loudly instead.
        Console.WriteLine(e.Message);
        throw;
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="Index"/> class.
/// </summary>
/// <param name="databaseName">Name of the database this index belongs to.</param>
/// <param name="sBucket">The s bucket belonging to this database.</param>
/// <param name="pBucket">The p bucket belonging to this database.</param>
/// <param name="oBucket">The o bucket belonging to this database.</param>
public Index(string databaseName, Bucket sBucket, Bucket pBucket, Bucket oBucket)
{
    m_sBucket = sBucket;
    m_pBucket = pBucket;
    m_oBucket = oBucket;

    //
    // the index is stored in a BerkeleyDB hash database. configuration for this database
    // is hardcoded here: unique keys, CacheInfo(0, 256MB, 4) cache,
    // 512-byte pages, create on first use.
    var config = new HashDatabaseConfig
    {
        Duplicates = DuplicatesPolicy.NONE,
        CacheSize = new CacheInfo(0, 256 * 1024 * 1024, 4),
        PageSize = 512,
        Creation = CreatePolicy.IF_NEEDED,
    };

    //
    // open the index database
    var nameDb = String.Format("{0}.index.dat", databaseName);
    m_db = HashDatabase.Open(nameDb, config);
}
/// <summary>
/// Opens (creating if needed) the forward BTree index and the reverse
/// hash index for the registered indexer, rebuilding the index from the
/// graphs when a recompile is detected, otherwise marking compilation
/// as already complete.
/// </summary>
public void EnsureIndex()
{
    Name = registeredIndexer.IndexName;

    // Pick the per-yield-type config, falling back to the object config.
    config = environment.IndexDatabaseConfigForTypes.ContainsKey(registeredIndexer.YieldType)
        ? environment.IndexDatabaseConfigForTypes[registeredIndexer.YieldType]
        : environment.IndexDatabaseConfigForTypes[typeof(object)];

    //Before we open (with create if needed).
    var recompileRequired = RecompileIsRequired();

    index = BTreeDatabase.Open(
        getIndexFilename(), config);
    reverseIndex = HashDatabase.Open(
        getReverseIndexFilename(), environment.ReverseIndexDatabaseConfig);

    if (recompileRequired)
        BuildIndexFromGraphs();
    else
        indexCompiled.Set();
}
/// <summary>
/// Exercise Stats/FastStats on a transactional hash database, with or
/// without explicit isolation degrees, across empty, populated and
/// partially-deleted states.
/// </summary>
public void StatsInTxn(string home, string name, bool ifIsolation)
{
    DatabaseEnvironmentConfig envConfig = new DatabaseEnvironmentConfig();
    EnvConfigCase1(envConfig);
    DatabaseEnvironment env = DatabaseEnvironment.Open(home, envConfig);

    // Open the database inside its own transaction, then commit.
    Transaction openTxn = env.BeginTransaction();
    HashDatabaseConfig dbConfig = new HashDatabaseConfig();
    ConfigCase1(dbConfig);
    dbConfig.Env = env;
    HashDatabase db = HashDatabase.Open(name + ".db", dbConfig, openTxn);
    openTxn.Commit();

    Transaction statsTxn = env.BeginTransaction();
    HashStats stats;
    HashStats fastStats;
    if (ifIsolation == false)
    {
        stats = db.Stats(statsTxn);
        // BUG FIX: fast statistics were gathered with Stats(); call
        // FastStats() so the fast path is actually exercised.
        fastStats = db.FastStats(statsTxn);
    }
    else
    {
        stats = db.Stats(statsTxn, Isolation.DEGREE_ONE);
        fastStats = db.FastStats(statsTxn, Isolation.DEGREE_ONE);
    }
    ConfirmStatsPart1Case1(stats);

    // Put 100 records into the database.
    PutRecordCase1(db, statsTxn);
    if (ifIsolation == false)
        stats = db.Stats(statsTxn);
    else
        stats = db.Stats(statsTxn, Isolation.DEGREE_TWO);
    ConfirmStatsPart2Case1(stats);

    // Delete some data to get some free pages.
    byte[] bigArray = new byte[262144];
    db.Delete(new DatabaseEntry(bigArray), statsTxn);
    if (ifIsolation == false)
        stats = db.Stats(statsTxn);
    else
        stats = db.Stats(statsTxn, Isolation.DEGREE_THREE);
    ConfirmStatsPart3Case1(stats);

    statsTxn.Commit();
    db.Close();
    env.Close();
}
/// <summary>
/// Exercise foreign key constraints (ABORT / CASCADE / NULLIFY) for each
/// database type: open primary/foreign/secondary databases, load
/// referencing records, delete a referenced foreign key, and verify the
/// resulting state of all three databases for the given action.
/// </summary>
public void TestForeignKeyDelete(DatabaseType dbtype, ForeignKeyDeleteAction action)
{
    SetUpTest(true);
    string dbFileName = testHome + "/" + testName + ".db";
    string fdbFileName = testHome + "/" + testName + "foreign.db";
    string sdbFileName = testHome + "/" + testName + "sec.db";
    Database primaryDB, fdb;
    SecondaryDatabase secDB;

    // Open primary database.
    if (dbtype == DatabaseType.BTREE)
    {
        BTreeDatabaseConfig btConfig = new BTreeDatabaseConfig();
        btConfig.Creation = CreatePolicy.ALWAYS;
        primaryDB = BTreeDatabase.Open(dbFileName, btConfig);
        fdb = BTreeDatabase.Open(fdbFileName, btConfig);
    }
    else if (dbtype == DatabaseType.HASH)
    {
        HashDatabaseConfig hConfig = new HashDatabaseConfig();
        hConfig.Creation = CreatePolicy.ALWAYS;
        primaryDB = HashDatabase.Open(dbFileName, hConfig);
        fdb = HashDatabase.Open(fdbFileName, hConfig);
    }
    else if (dbtype == DatabaseType.QUEUE)
    {
        QueueDatabaseConfig qConfig = new QueueDatabaseConfig();
        qConfig.Creation = CreatePolicy.ALWAYS;
        // Queue records require a fixed length; 4 bytes fits an int.
        qConfig.Length = 4;
        primaryDB = QueueDatabase.Open(dbFileName, qConfig);
        fdb = QueueDatabase.Open(fdbFileName, qConfig);
    }
    else if (dbtype == DatabaseType.RECNO)
    {
        RecnoDatabaseConfig rConfig = new RecnoDatabaseConfig();
        rConfig.Creation = CreatePolicy.ALWAYS;
        primaryDB = RecnoDatabase.Open(dbFileName, rConfig);
        fdb = RecnoDatabase.Open(fdbFileName, rConfig);
    }
    else
    {
        throw new ArgumentException("Invalid DatabaseType");
    }

    // Open secondary database with the foreign key constraint; the
    // NULLIFY action additionally needs a nullifier delegate.
    if (dbtype == DatabaseType.BTREE)
    {
        SecondaryBTreeDatabaseConfig secbtConfig =
            new SecondaryBTreeDatabaseConfig(primaryDB,
            new SecondaryKeyGenDelegate(SecondaryKeyGen));
        secbtConfig.Creation = CreatePolicy.ALWAYS;
        secbtConfig.Duplicates = DuplicatesPolicy.SORTED;
        if (action == ForeignKeyDeleteAction.NULLIFY)
        {
            secbtConfig.SetForeignKeyConstraint(fdb, action,
                new ForeignKeyNullifyDelegate(Nullify));
        }
        else
        {
            secbtConfig.SetForeignKeyConstraint(fdb, action);
        }
        secDB = SecondaryBTreeDatabase.Open(sdbFileName, secbtConfig);
    }
    else if (dbtype == DatabaseType.HASH)
    {
        SecondaryHashDatabaseConfig sechConfig =
            new SecondaryHashDatabaseConfig(primaryDB,
            new SecondaryKeyGenDelegate(SecondaryKeyGen));
        sechConfig.Creation = CreatePolicy.ALWAYS;
        sechConfig.Duplicates = DuplicatesPolicy.SORTED;
        if (action == ForeignKeyDeleteAction.NULLIFY)
        {
            sechConfig.SetForeignKeyConstraint(fdb, action,
                new ForeignKeyNullifyDelegate(Nullify));
        }
        else
        {
            sechConfig.SetForeignKeyConstraint(fdb, action);
        }
        secDB = SecondaryHashDatabase.Open(sdbFileName, sechConfig);
    }
    else if (dbtype == DatabaseType.QUEUE)
    {
        SecondaryQueueDatabaseConfig secqConfig =
            new SecondaryQueueDatabaseConfig(primaryDB,
            new SecondaryKeyGenDelegate(SecondaryKeyGen));
        secqConfig.Creation = CreatePolicy.ALWAYS;
        secqConfig.Length = 4;
        if (action == ForeignKeyDeleteAction.NULLIFY)
        {
            secqConfig.SetForeignKeyConstraint(fdb, action,
                new ForeignKeyNullifyDelegate(Nullify));
        }
        else
        {
            secqConfig.SetForeignKeyConstraint(fdb, action);
        }
        secDB = SecondaryQueueDatabase.Open(sdbFileName, secqConfig);
    }
    else if (dbtype == DatabaseType.RECNO)
    {
        SecondaryRecnoDatabaseConfig secrConfig =
            new SecondaryRecnoDatabaseConfig(primaryDB,
            new SecondaryKeyGenDelegate(SecondaryKeyGen));
        secrConfig.Creation = CreatePolicy.ALWAYS;
        if (action == ForeignKeyDeleteAction.NULLIFY)
        {
            secrConfig.SetForeignKeyConstraint(fdb, action,
                new ForeignKeyNullifyDelegate(Nullify));
        }
        else
        {
            secrConfig.SetForeignKeyConstraint(fdb, action);
        }
        secDB = SecondaryRecnoDatabase.Open(sdbFileName, secrConfig);
    }
    else
    {
        throw new ArgumentException("Invalid DatabaseType");
    }

    /* Use integer keys for Queue/Recno support. */
    fdb.Put(new DatabaseEntry(BitConverter.GetBytes(100)),
        new DatabaseEntry(BitConverter.GetBytes(1001)));
    fdb.Put(new DatabaseEntry(BitConverter.GetBytes(200)),
        new DatabaseEntry(BitConverter.GetBytes(2002)));
    fdb.Put(new DatabaseEntry(BitConverter.GetBytes(300)),
        new DatabaseEntry(BitConverter.GetBytes(3003)));

    // Primary records whose data references foreign keys 100/200.
    primaryDB.Put(new DatabaseEntry(BitConverter.GetBytes(1)),
        new DatabaseEntry(BitConverter.GetBytes(100)));
    primaryDB.Put(new DatabaseEntry(BitConverter.GetBytes(2)),
        new DatabaseEntry(BitConverter.GetBytes(200)));
    if (dbtype == DatabaseType.BTREE || dbtype == DatabaseType.HASH)
    {
        primaryDB.Put(new DatabaseEntry(BitConverter.GetBytes(3)),
            new DatabaseEntry(BitConverter.GetBytes(100)));
    }

    // Delete a referenced foreign key; only ABORT is expected to throw.
    try
    {
        fdb.Delete(new DatabaseEntry(BitConverter.GetBytes(100)));
    }
    catch (ForeignConflictException)
    {
        Assert.AreEqual(action, ForeignKeyDeleteAction.ABORT);
    }

    if (action == ForeignKeyDeleteAction.ABORT)
    {
        // Nothing was deleted: all three databases keep their records.
        Assert.IsTrue(secDB.Exists(new DatabaseEntry(BitConverter.GetBytes(100))));
        Assert.IsTrue(primaryDB.Exists(new DatabaseEntry(BitConverter.GetBytes(1))));
        Assert.IsTrue(fdb.Exists(new DatabaseEntry(BitConverter.GetBytes(100))));
    }
    else if (action == ForeignKeyDeleteAction.CASCADE)
    {
        // The delete cascades through secondary and primary. On
        // Queue/Recno the deleted slots raise KeyEmptyException instead
        // of reporting absence.
        try
        {
            Assert.IsFalse(secDB.Exists(new DatabaseEntry(BitConverter.GetBytes(100))));
        }
        catch (KeyEmptyException)
        {
            Assert.IsTrue(dbtype == DatabaseType.QUEUE || dbtype == DatabaseType.RECNO);
        }
        try
        {
            Assert.IsFalse(primaryDB.Exists(new DatabaseEntry(BitConverter.GetBytes(1))));
        }
        catch (KeyEmptyException)
        {
            Assert.IsTrue(dbtype == DatabaseType.QUEUE || dbtype == DatabaseType.RECNO);
        }
        try
        {
            Assert.IsFalse(fdb.Exists(new DatabaseEntry(BitConverter.GetBytes(100))));
        }
        catch (KeyEmptyException)
        {
            Assert.IsTrue(dbtype == DatabaseType.QUEUE || dbtype == DatabaseType.RECNO);
        }
    }
    else if (action == ForeignKeyDeleteAction.NULLIFY)
    {
        // The secondary entry is removed but the primary record stays
        // (its foreign reference was nullified).
        try
        {
            Assert.IsFalse(secDB.Exists(new DatabaseEntry(BitConverter.GetBytes(100))));
        }
        catch (KeyEmptyException)
        {
            Assert.IsTrue(dbtype == DatabaseType.QUEUE || dbtype == DatabaseType.RECNO);
        }
        Assert.IsTrue(primaryDB.Exists(new DatabaseEntry(BitConverter.GetBytes(1))));
        try
        {
            Assert.IsFalse(fdb.Exists(new DatabaseEntry(BitConverter.GetBytes(100))));
        }
        catch (KeyEmptyException)
        {
            Assert.IsTrue(dbtype == DatabaseType.QUEUE || dbtype == DatabaseType.RECNO);
        }
    }

    // Close secondary database.
    secDB.Close();
    // Close primary database.
    primaryDB.Close();
    // Close foreign database
    fdb.Close();
}
/// <summary>
/// Fill a hash database, delete ranges of records, then Compact it and
/// verify the reported statistics are non-trivial and the file shrinks.
/// </summary>
public void TestCompactWithoutTxn()
{
    int i, nRecs;
    nRecs = 10000;
    testName = "TestCompactWithoutTxn";
    SetUpTest(true);
    string hashDBFileName = testHome + "/" + testName + ".db";

    HashDatabaseConfig hashDBConfig = new HashDatabaseConfig();
    hashDBConfig.Creation = CreatePolicy.ALWAYS;
    // The minimum page size
    hashDBConfig.PageSize = 512;
    hashDBConfig.HashComparison =
        new EntryComparisonDelegate(dbIntCompare);
    using (HashDatabase hashDB = HashDatabase.Open(
        hashDBFileName, hashDBConfig))
    {
        DatabaseEntry key;
        DatabaseEntry data;

        // Fill the database with entries from 0 to 9999
        for (i = 0; i < nRecs; i++)
        {
            key = new DatabaseEntry(BitConverter.GetBytes(i));
            data = new DatabaseEntry(BitConverter.GetBytes(i));
            hashDB.Put(key, data);
        }

        /*
         * Delete entries below 500, between 3000 and
         * 5000 and above 7000
         */
        for (i = 0; i < nRecs; i++)
        {
            if (i < 500 || i > 7000 || (i < 5000 && i > 3000))
            {
                key = new DatabaseEntry(BitConverter.GetBytes(i));
                hashDB.Delete(key);
            }
        }

        // Flush deletions to disk before measuring the file size.
        hashDB.Sync();
        long fileSize = new FileInfo(hashDBFileName).Length;

        // Compact database
        CompactConfig cCfg = new CompactConfig();
        cCfg.FillPercentage = 30;
        cCfg.Pages = 10;
        cCfg.Timeout = 1000;
        cCfg.TruncatePages = true;
        cCfg.start = new DatabaseEntry(BitConverter.GetBytes(1));
        cCfg.stop = new DatabaseEntry(BitConverter.GetBytes(7000));
        CompactData compactData = hashDB.Compact(cCfg);

        // Compaction must have done something: at least one of the
        // reported statistics is non-zero.
        Assert.IsFalse((compactData.Deadlocks == 0) &&
            (compactData.Levels == 0) &&
            (compactData.PagesExamined == 0) &&
            (compactData.PagesFreed == 0) &&
            (compactData.PagesTruncated == 0));

        // The compacted file must be smaller than before.
        hashDB.Sync();
        long compactedFileSize =
            new FileInfo(hashDBFileName).Length;
        Assert.Less(compactedFileSize, fileSize);
    }
}