/// <summary>
/// Default data processing entry point for <see cref="DataWriter"/>.
/// </summary>
/// <param name="timestamp">Timestamp of <paramref name="dataBlock"/>.</param>
/// <param name="dataBlock">Points values read at current timestamp.</param>
public void Write(DateTime timestamp, DataPoint[] dataBlock)
{
    if (m_settings.WriteToOpenHistorian)
    {
        // Write to openHistorian, reusing the cached key/value instances
        // to avoid a per-point allocation. NOTE(review): this assumes Write
        // is not called concurrently, since m_historianKey/m_historianValue
        // are shared fields -- confirm against caller.
        foreach (DataPoint point in dataBlock)
        {
            m_historianKey.Timestamp = point.Timestamp;
            m_historianKey.PointID = point.PointID;
            m_historianValue.Value1 = point.Value;
            m_historianValue.Value3 = point.Flags;
            m_historianArchive.Write(m_historianKey, m_historianValue);
        }
    }

    if (m_settings.WriteToBerkeleyDB)
    {
        // Write to Berkeley DB via a bulk buffer built over the pre-allocated
        // m_berkeleyDbPointList. Benchmarked as the fastest approach: faster
        // than allocating a fresh point list per call, and faster than single
        // Put calls per point (both previously tried here and removed).
        // Key layout: 8-byte timestamp ticks followed by the point ID bytes;
        // value is the raw point value bytes.
        Parallel.For(0, dataBlock.Length, i =>
        {
            m_berkeleyDbPointList[i].Key.Data = BitConverter.GetBytes(timestamp.Ticks).Concat(BitConverter.GetBytes(dataBlock[i].PointID)).ToArray();
            m_berkeleyDbPointList[i].Value.Data = BitConverter.GetBytes(dataBlock[i].Value);
        });

        // Only the first dataBlock.Length entries were populated this call.
        using (MultipleKeyDatabaseEntry buffer = new MultipleKeyDatabaseEntry(m_berkeleyDbPointList.Take(dataBlock.Length), false))
        {
            m_berkeleyDb.Put(buffer);
        }
    }
}
/// <summary>
/// Perform a bulk read of every record in the primary db inside a single
/// transaction.
/// </summary>
/// <returns>Number of key/data pairs read (or, when the database holds
/// duplicates, the number of data items read).</returns>
public int BulkRead()
{
    BTreeCursor cursor;
    int count = 0;
    Transaction txn = env.BeginTransaction();

    try
    {
        cursor = pdb.Cursor(txn);
    }
    catch (DatabaseException)
    {
        txn.Abort();
        throw; // was "throw e;" -- rethrow without resetting the stack trace
    }

    try
    {
        if (dups == 0)
        {
            /* Get all records in a single key/data buffer. */
            while (cursor.MoveNextMultipleKey())
            {
                MultipleKeyDatabaseEntry pairs = cursor.CurrentMultipleKey;
                foreach (KeyValuePair<DatabaseEntry, DatabaseEntry> p in pairs)
                    count++;
            }
        }
        else
        {
            /*
             * Get all key/data pairs in two buffers, one for all
             * keys, the other for all data.
             */
            while (cursor.MoveNextMultiple())
            {
                KeyValuePair<DatabaseEntry, MultipleDatabaseEntry> pairs = cursor.CurrentMultiple;
                foreach (DatabaseEntry d in pairs.Value)
                    count++;
            }
        }

        cursor.Close();
        txn.Commit();
        return count;
    }
    catch (DatabaseException)
    {
        cursor.Close();
        txn.Abort();
        throw; // was "throw e;" -- rethrow without resetting the stack trace
    }
}
/// <summary>
/// Reads back every record from the Berkeley DB archive, reporting progress
/// periodically, and returns the elapsed read time.
/// </summary>
/// <param name="database">Open Berkeley DB B-tree database to read.</param>
/// <param name="updateProgressBar">Callback invoked with the current percent complete.</param>
/// <returns>Elapsed time of the read-back, or zero ticks if the point list is not initialized.</returns>
public Ticks ReadBackBerkeleyDBData(BTreeDatabase database, Action<int> updateProgressBar)
{
    IEnumerable<ulong> points;

    if (m_settings.ReadFromCsv)
        points = m_indexToPointIDLookup.Skip(1); // First value is always 0 because the timestamp is the first column
    else
        points = m_points;

    if (points == null)
    {
        ShowMessage("Point list not initialized");
        return new Ticks(0);
    }

    int count = 0;
    ulong value;
    long timestamp;
    int messageInterval = m_settings.MessageInterval * points.Count();

    using (BTreeCursor cursor = database.Cursor())
    {
        DateTime startTime = DateTime.UtcNow;

        while (cursor.MoveNextMultipleKey())
        {
            using (MultipleKeyDatabaseEntry pairs = cursor.CurrentMultipleKey)
            {
                foreach (KeyValuePair<DatabaseEntry, DatabaseEntry> p in pairs)
                {
                    // Key layout: first 8 bytes are the timestamp ticks (point ID
                    // follows); value is an 8-byte measurement. "value" is decoded
                    // to exercise a full read-back even though it is unused.
                    timestamp = BitConverter.ToInt64(p.Key.Data, 0);
                    value = BitConverter.ToUInt64(p.Value.Data, 0);
                    p.Key.Dispose();
                    p.Value.Dispose();
                    count++;

                    // Guard: messageInterval can be 0 (MessageInterval == 0 or an
                    // empty point list); "count % 0" would throw DivideByZeroException.
                    if (messageInterval > 0 && count % messageInterval == 0)
                    {
                        // Progress is estimated from how far the current timestamp
                        // has advanced through the configured time range.
                        PercentComplete = (int)((1.0D - (new Ticks(m_endTime.Ticks - timestamp).ToSeconds() / m_timeRange)) * 100.0D);
                        ShowMessage($"{Environment.NewLine}{count} points read back so far, averaging {(count / (DateTime.UtcNow - startTime).TotalSeconds):N0} points per second.");
                        updateProgressBar(PercentComplete);
                    }
                }
            }
        }

        return DateTime.UtcNow - startTime;
    }
}
/// <summary>
/// Perform a bulk delete in the primary db of the first
/// <paramref name="value"/> records, inside a single transaction.
/// </summary>
/// <param name="value">Number of leading keys (0 .. value-1) to delete.</param>
public void BulkDelete(int value)
{
    Transaction txn = env.BeginTransaction();

    try
    {
        if (dups == 0)
        {
            /* Delete a set of key/data pairs. */
            List<KeyValuePair<DatabaseEntry, DatabaseEntry>> pList =
                new List<KeyValuePair<DatabaseEntry, DatabaseEntry>>();

            for (int i = 0; i < value; i++)
            {
                pList.Add(new KeyValuePair<DatabaseEntry, DatabaseEntry>(
                    new DatabaseEntry(BitConverter.GetBytes(i)),
                    new DatabaseEntry(getBytes(new DataVal(i)))));
            }

            MultipleKeyDatabaseEntry pairs = new MultipleKeyDatabaseEntry(pList, false);
            pdb.Delete(pairs, txn);
        }
        else
        {
            /* Delete a set of keys (all duplicates go with each key). */
            List<DatabaseEntry> kList = new List<DatabaseEntry>();

            for (int i = 0; i < value; i++)
                kList.Add(new DatabaseEntry(BitConverter.GetBytes(i)));

            MultipleDatabaseEntry keySet = new MultipleDatabaseEntry(kList, false);
            pdb.Delete(keySet, txn);
        }

        txn.Commit();
    }
    catch (DatabaseException)
    {
        txn.Abort();
        throw; // was "throw e;" -- rethrow without resetting the stack trace
    }
}
/// <summary>
/// Store multiple key/data pairs in the database in one bulk operation.
/// A successful bulk put is logically equivalent to iterating the buffer
/// and performing an individual Put for each key/data pair.
/// </summary>
/// <param name="key">Multiple key/data pairs to store in the database
/// </param>
public void Put(MultipleKeyDatabaseEntry key) => Put(key, null, null);
/// <summary>
/// Store the key/data pairs in the database, inserting each pair only if
/// its key is not already present in the database.
/// </summary>
/// <param name="key">Key/data pairs to store in the database</param>
/// <param name="txn">
/// If the operation is part of an application-specified transaction,
/// <paramref name="txn"/> is a Transaction object returned from
/// <see cref="DatabaseEnvironment.BeginTransaction"/>; if
/// the operation is part of a Berkeley DB Concurrent Data Store group,
/// <paramref name="txn"/> is a handle returned from
/// <see cref="DatabaseEnvironment.BeginCDSGroup"/>; otherwise null.
/// </param>
public void PutNoOverwrite(MultipleKeyDatabaseEntry key, Transaction txn)
    => Put(key, null, txn, DbConstants.DB_NOOVERWRITE);
/// <summary>
/// Store the key/data pairs in the database, inserting each pair only if
/// its key is not already present in the database. Non-transactional
/// convenience overload.
/// </summary>
/// <param name="key">Key/data pairs to store in the database</param>
public void PutNoOverwrite(MultipleKeyDatabaseEntry key)
    => PutNoOverwrite(key, null, null);
/// <summary>
/// Store multiple key/data pairs in the database in one bulk operation,
/// within the given transaction. A successful bulk put is logically
/// equivalent to iterating the buffer and performing an individual Put
/// for each key/data pair.
/// </summary>
/// <param name="key">Multiple key/data pairs to store in the database
/// </param>
/// <param name="txn">
/// If the operation is part of an application-specified transaction,
/// <paramref name="txn"/> is a Transaction object returned from
/// <see cref="DatabaseEnvironment.BeginTransaction"/>; if
/// the operation is part of a Berkeley DB Concurrent Data Store group,
/// <paramref name="txn"/> is a handle returned from
/// <see cref="DatabaseEnvironment.BeginCDSGroup"/>; otherwise null.
/// </param>
public void Put(MultipleKeyDatabaseEntry key, Transaction txn)
    => Put(key, null, txn, 0);
/// <summary>
/// Test helper: verifies bulk delete through a secondary database, either
/// with a key/value pair buffer (mulKey == true) or a key-only buffer
/// (mulKey == false), for each supported database type.
/// </summary>
private void DeleteMultipleAndMultipleKey(string dbFileName,
    string dbName, DatabaseType type, bool mulKey)
{
    List<DatabaseEntry> kList = new List<DatabaseEntry>();
    List<uint> rList = new List<uint>();
    List<KeyValuePair<DatabaseEntry, DatabaseEntry>> pList =
        new List<KeyValuePair<DatabaseEntry, DatabaseEntry>>();
    DatabaseEntry key;
    Database db;
    SecondaryDatabase secDb;

    Configuration.ClearDir(testHome);

    // Open the primary database and a secondary keyed by SecondaryKeyGen,
    // configured per database type.
    if (type == DatabaseType.BTREE)
    {
        BTreeDatabaseConfig dbConfig = new BTreeDatabaseConfig();
        dbConfig.Creation = CreatePolicy.IF_NEEDED;
        db = BTreeDatabase.Open(dbFileName, dbName, dbConfig);
        SecondaryBTreeDatabaseConfig secDbConfig =
            new SecondaryBTreeDatabaseConfig(db, null);
        secDbConfig.Creation = CreatePolicy.IF_NEEDED;
        secDbConfig.Duplicates = DuplicatesPolicy.SORTED;
        secDbConfig.KeyGen = new SecondaryKeyGenDelegate(SecondaryKeyGen);
        secDb = SecondaryBTreeDatabase.Open(dbFileName, dbName + "_sec", secDbConfig);
    }
    else if (type == DatabaseType.HASH)
    {
        HashDatabaseConfig dbConfig = new HashDatabaseConfig();
        dbConfig.Creation = CreatePolicy.IF_NEEDED;
        db = HashDatabase.Open(dbFileName, dbName, dbConfig);
        SecondaryHashDatabaseConfig secDbConfig =
            new SecondaryHashDatabaseConfig(db, null);
        secDbConfig.Creation = CreatePolicy.IF_NEEDED;
        secDbConfig.Duplicates = DuplicatesPolicy.SORTED;
        secDbConfig.KeyGen = new SecondaryKeyGenDelegate(SecondaryKeyGen);
        secDb = SecondaryHashDatabase.Open(dbFileName, dbName + "_sec", secDbConfig);
    }
    else if (type == DatabaseType.QUEUE)
    {
        QueueDatabaseConfig dbConfig = new QueueDatabaseConfig();
        dbConfig.Creation = CreatePolicy.IF_NEEDED;
        dbConfig.Length = 4; // fixed-length records required by queue
        db = QueueDatabase.Open(dbFileName, dbConfig);
        SecondaryQueueDatabaseConfig secDbConfig =
            new SecondaryQueueDatabaseConfig(db, null);
        secDbConfig.Creation = CreatePolicy.IF_NEEDED;
        secDbConfig.Length = 4;
        secDbConfig.KeyGen = new SecondaryKeyGenDelegate(SecondaryKeyGen);
        secDb = SecondaryQueueDatabase.Open(dbFileName + "_sec", secDbConfig);
    }
    else if (type == DatabaseType.RECNO)
    {
        RecnoDatabaseConfig dbConfig = new RecnoDatabaseConfig();
        dbConfig.Creation = CreatePolicy.IF_NEEDED;
        db = RecnoDatabase.Open(dbFileName, dbName, dbConfig);
        SecondaryRecnoDatabaseConfig secDbConfig =
            new SecondaryRecnoDatabaseConfig(db, null);
        secDbConfig.Creation = CreatePolicy.IF_NEEDED;
        secDbConfig.KeyGen = new SecondaryKeyGenDelegate(SecondaryKeyGen);
        secDb = SecondaryRecnoDatabase.Open(dbFileName, dbName + "_sec", secDbConfig);
    }
    else
        throw new TestException();

    // Insert keys 1..100 into the primary. Partition them for deletion:
    // 50..59 into the key-only list, 81..100 into the pair list, and for
    // record-number databases the rest into the recno list.
    for (uint i = 1; i <= 100; i++)
    {
        key = new DatabaseEntry(BitConverter.GetBytes(i));
        if (i >= 50 && i < 60)
            kList.Add(key);
        else if (i > 80)
            pList.Add(new KeyValuePair<DatabaseEntry, DatabaseEntry>(key, key));
        else if (type == DatabaseType.QUEUE || type == DatabaseType.RECNO)
            rList.Add(i);
        db.Put(key, key);
    }

    if (mulKey)
    {
        // Create bulk buffer for key/value pairs (recno layout for
        // QUEUE/RECNO, byte-key layout otherwise).
        MultipleKeyDatabaseEntry pBuff;
        if (type == DatabaseType.BTREE)
            pBuff = new MultipleKeyDatabaseEntry(pList, false);
        else if (type == DatabaseType.HASH)
            pBuff = new MultipleKeyDatabaseEntry(pList, false);
        else if (type == DatabaseType.QUEUE)
            pBuff = new MultipleKeyDatabaseEntry(pList, true);
        else
            pBuff = new MultipleKeyDatabaseEntry(pList, true);

        // Bulk delete with the key/value pair bulk buffer.
        secDb.Delete(pBuff);

        // Each deleted pair must now be absent from the primary; queue
        // databases signal deletion via KeyEmptyException, the others via
        // NotFoundException, so the "wrong" exception is rethrown.
        foreach (KeyValuePair<DatabaseEntry, DatabaseEntry> pair in pList)
        {
            try
            {
                db.GetBoth(pair.Key, pair.Value);
                throw new TestException();
            }
            catch (NotFoundException e1)
            {
                if (type == DatabaseType.QUEUE)
                    throw e1;
            }
            catch (KeyEmptyException e2)
            {
                if (type == DatabaseType.BTREE ||
                    type == DatabaseType.HASH ||
                    type == DatabaseType.RECNO)
                    throw e2;
            }
        }

        /*
         * Dump the database to verify that 80 records
         * remain after bulk delete.
         */
        Assert.AreEqual(80, db.Truncate());
    }
    else
    {
        // Create bulk buffer for key.
        MultipleDatabaseEntry kBuff;
        if (type == DatabaseType.BTREE)
            kBuff = new MultipleDatabaseEntry(kList, false);
        else if (type == DatabaseType.HASH)
            kBuff = new MultipleDatabaseEntry(kList, false);
        else if (type == DatabaseType.QUEUE)
            kBuff = new MultipleDatabaseEntry(kList, true);
        else
            kBuff = new MultipleDatabaseEntry(kList, true);

        /*
         * Bulk delete in the secondary database with the key buffer.
         * Primary records corresponding to the records deleted in the
         * secondary database should be deleted as well.
         */
        secDb.Delete(kBuff);

        foreach (DatabaseEntry dbt in kList)
        {
            try
            {
                db.Get(dbt);
                throw new TestException();
            }
            catch (NotFoundException e1)
            {
                if (type == DatabaseType.QUEUE ||
                    type == DatabaseType.RECNO)
                    throw e1;
            }
            catch (KeyEmptyException e2)
            {
                if (type == DatabaseType.BTREE ||
                    type == DatabaseType.HASH)
                    throw e2;
            }
        }

        /*
         * Bulk delete in secondary database with recno
         * based key buffer.
         */
        if (type == DatabaseType.QUEUE || type == DatabaseType.RECNO)
        {
            MultipleDatabaseEntry rBuff = new MultipleDatabaseEntry(rList);
            secDb.Delete(rBuff);
            // 10 keys were deleted via kBuff and 70 via rBuff: 20 remain.
            Assert.AreEqual(20, db.Truncate());
        }
    }

    secDb.Close();
    db.Close();
}
/// <summary>
/// Test helper: verifies bulk delete through a secondary database, either
/// with a key/value pair buffer (mulKey == true) or a key-only buffer
/// (mulKey == false), for each supported database type.
/// </summary>
private void DeleteMultipleAndMultipleKey(string dbFileName,
    string dbName, DatabaseType type, bool mulKey)
{
    List<DatabaseEntry> kList = new List<DatabaseEntry>();
    List<uint> rList = new List<uint>();
    List<KeyValuePair<DatabaseEntry, DatabaseEntry>> pList =
        new List<KeyValuePair<DatabaseEntry, DatabaseEntry>>();
    DatabaseEntry key;
    Database db;
    SecondaryDatabase secDb;

    Configuration.ClearDir(testHome);

    // Open the primary database and a secondary keyed by SecondaryKeyGen,
    // configured per database type.
    if (type == DatabaseType.BTREE)
    {
        BTreeDatabaseConfig dbConfig = new BTreeDatabaseConfig();
        dbConfig.Creation = CreatePolicy.IF_NEEDED;
        db = BTreeDatabase.Open(dbFileName, dbName, dbConfig);
        SecondaryBTreeDatabaseConfig secDbConfig =
            new SecondaryBTreeDatabaseConfig(db, null);
        secDbConfig.Creation = CreatePolicy.IF_NEEDED;
        secDbConfig.Duplicates = DuplicatesPolicy.SORTED;
        secDbConfig.KeyGen = new SecondaryKeyGenDelegate(SecondaryKeyGen);
        secDb = SecondaryBTreeDatabase.Open(dbFileName, dbName + "_sec", secDbConfig);
    }
    else if (type == DatabaseType.HASH)
    {
        HashDatabaseConfig dbConfig = new HashDatabaseConfig();
        dbConfig.Creation = CreatePolicy.IF_NEEDED;
        db = HashDatabase.Open(dbFileName, dbName, dbConfig);
        SecondaryHashDatabaseConfig secDbConfig =
            new SecondaryHashDatabaseConfig(db, null);
        secDbConfig.Creation = CreatePolicy.IF_NEEDED;
        secDbConfig.Duplicates = DuplicatesPolicy.SORTED;
        secDbConfig.KeyGen = new SecondaryKeyGenDelegate(SecondaryKeyGen);
        secDb = SecondaryHashDatabase.Open(dbFileName, dbName + "_sec", secDbConfig);
    }
    else if (type == DatabaseType.QUEUE)
    {
        QueueDatabaseConfig dbConfig = new QueueDatabaseConfig();
        dbConfig.Creation = CreatePolicy.IF_NEEDED;
        dbConfig.Length = 4; // fixed-length records required by queue
        db = QueueDatabase.Open(dbFileName, dbConfig);
        SecondaryQueueDatabaseConfig secDbConfig =
            new SecondaryQueueDatabaseConfig(db, null);
        secDbConfig.Creation = CreatePolicy.IF_NEEDED;
        secDbConfig.Length = 4;
        secDbConfig.KeyGen = new SecondaryKeyGenDelegate(SecondaryKeyGen);
        secDb = SecondaryQueueDatabase.Open(dbFileName + "_sec", secDbConfig);
    }
    else if (type == DatabaseType.RECNO)
    {
        RecnoDatabaseConfig dbConfig = new RecnoDatabaseConfig();
        dbConfig.Creation = CreatePolicy.IF_NEEDED;
        db = RecnoDatabase.Open(dbFileName, dbName, dbConfig);
        SecondaryRecnoDatabaseConfig secDbConfig =
            new SecondaryRecnoDatabaseConfig(db, null);
        secDbConfig.Creation = CreatePolicy.IF_NEEDED;
        secDbConfig.KeyGen = new SecondaryKeyGenDelegate(SecondaryKeyGen);
        secDb = SecondaryRecnoDatabase.Open(dbFileName, dbName + "_sec", secDbConfig);
    }
    else
    {
        throw new TestException();
    }

    // Insert keys 1..100 into the primary. Partition them for deletion:
    // 50..59 into the key-only list, 81..100 into the pair list, and for
    // record-number databases the rest into the recno list.
    for (uint i = 1; i <= 100; i++)
    {
        key = new DatabaseEntry(BitConverter.GetBytes(i));
        if (i >= 50 && i < 60)
        {
            kList.Add(key);
        }
        else if (i > 80)
        {
            pList.Add(new KeyValuePair<DatabaseEntry, DatabaseEntry>(key, key));
        }
        else if (type == DatabaseType.QUEUE || type == DatabaseType.RECNO)
        {
            rList.Add(i);
        }
        db.Put(key, key);
    }

    if (mulKey)
    {
        // Create bulk buffer for key/value pairs (recno layout for
        // QUEUE/RECNO, byte-key layout otherwise).
        MultipleKeyDatabaseEntry pBuff;
        if (type == DatabaseType.BTREE)
        {
            pBuff = new MultipleKeyDatabaseEntry(pList, false);
        }
        else if (type == DatabaseType.HASH)
        {
            pBuff = new MultipleKeyDatabaseEntry(pList, false);
        }
        else if (type == DatabaseType.QUEUE)
        {
            pBuff = new MultipleKeyDatabaseEntry(pList, true);
        }
        else
        {
            pBuff = new MultipleKeyDatabaseEntry(pList, true);
        }

        // Bulk delete with the key/value pair bulk buffer.
        secDb.Delete(pBuff);

        // Each deleted pair must now be absent from the primary; queue
        // databases signal deletion via KeyEmptyException, the others via
        // NotFoundException, so the "wrong" exception is rethrown.
        foreach (KeyValuePair<DatabaseEntry, DatabaseEntry> pair in pList)
        {
            try
            {
                db.GetBoth(pair.Key, pair.Value);
                throw new TestException();
            }
            catch (NotFoundException e1)
            {
                if (type == DatabaseType.QUEUE)
                {
                    throw e1;
                }
            }
            catch (KeyEmptyException e2)
            {
                if (type == DatabaseType.BTREE ||
                    type == DatabaseType.HASH ||
                    type == DatabaseType.RECNO)
                {
                    throw e2;
                }
            }
        }

        /*
         * Dump the database to verify that 80 records
         * remain after bulk delete.
         */
        Assert.AreEqual(80, db.Truncate());
    }
    else
    {
        // Create bulk buffer for key.
        MultipleDatabaseEntry kBuff;
        if (type == DatabaseType.BTREE)
        {
            kBuff = new MultipleDatabaseEntry(kList, false);
        }
        else if (type == DatabaseType.HASH)
        {
            kBuff = new MultipleDatabaseEntry(kList, false);
        }
        else if (type == DatabaseType.QUEUE)
        {
            kBuff = new MultipleDatabaseEntry(kList, true);
        }
        else
        {
            kBuff = new MultipleDatabaseEntry(kList, true);
        }

        /*
         * Bulk delete in the secondary database with the key buffer.
         * Primary records corresponding to the records deleted in the
         * secondary database should be deleted as well.
         */
        secDb.Delete(kBuff);

        foreach (DatabaseEntry dbt in kList)
        {
            try
            {
                db.Get(dbt);
                throw new TestException();
            }
            catch (NotFoundException e1)
            {
                if (type == DatabaseType.QUEUE ||
                    type == DatabaseType.RECNO)
                {
                    throw e1;
                }
            }
            catch (KeyEmptyException e2)
            {
                if (type == DatabaseType.BTREE ||
                    type == DatabaseType.HASH)
                {
                    throw e2;
                }
            }
        }

        /*
         * Bulk delete in secondary database with recno
         * based key buffer.
         */
        if (type == DatabaseType.QUEUE || type == DatabaseType.RECNO)
        {
            MultipleDatabaseEntry rBuff = new MultipleDatabaseEntry(rList);
            secDb.Delete(rBuff);
            // 10 keys were deleted via kBuff and 70 via rBuff: 20 remain.
            Assert.AreEqual(20, db.Truncate());
        }
    }

    secDb.Close();
    db.Close();
}
/// <summary>
/// Perform a bulk delete in the secondary db: deletes the secondary key
/// <paramref name="value"/> and every key prior to it (walking the bytes
/// of DataVal.tstring), inside a single transaction.
/// </summary>
/// <param name="value">Last secondary key byte (from DataVal.tstring) to delete.</param>
/// <param name="deletePair">1 to delete explicit key/data pairs; otherwise delete by key only.</param>
/// <returns>Expected number of primary records removed.</returns>
public int BulkSecondaryDelete(byte value, int deletePair)
{
    DatabaseEntry key, data;
    byte[] tstrBytes = ASCIIEncoding.ASCII.GetBytes(DataVal.tstring);
    int i = 0;
    int count = 0;
    Transaction txn = env.BeginTransaction();

    try
    {
        if (deletePair == 1)
        {
            /*
             * Delete the given key and all keys prior to it,
             * together with their duplicate data.
             */
            List<KeyValuePair<DatabaseEntry, DatabaseEntry>> pList =
                new List<KeyValuePair<DatabaseEntry, DatabaseEntry>>();

            do
            {
                int j = 0;
                int idx = 0;

                // Single-byte secondary key: one character of tstring.
                key = new DatabaseEntry();
                key.Data = new byte[1];
                key.Data[0] = tstrBytes[i];

                // Each secondary key owns num / tstring.Length full duplicates...
                while (j < this.num / DataVal.tstring.Length)
                {
                    idx = j * DataVal.tstring.Length + i;
                    data = new DatabaseEntry(getBytes(new DataVal(idx)));
                    pList.Add(new KeyValuePair<DatabaseEntry, DatabaseEntry>(key, data));
                    j++;
                }

                // ...plus one extra when i falls inside the remainder.
                if (i < this.num % DataVal.tstring.Length)
                {
                    idx = j * DataVal.tstring.Length + i;
                    data = new DatabaseEntry(getBytes(new DataVal(idx)));
                    pList.Add(new KeyValuePair<DatabaseEntry, DatabaseEntry>(key, data));
                    j++;
                }

                count += j;
            } while (value != tstrBytes[i++]);

            MultipleKeyDatabaseEntry pairs = new MultipleKeyDatabaseEntry(pList, false);
            sdb.Delete(pairs, txn);
        }
        else
        {
            List<DatabaseEntry> kList = new List<DatabaseEntry>();

            /* Delete the given key and all keys prior to it. */
            do
            {
                key = new DatabaseEntry();
                key.Data = new byte[1];
                key.Data[0] = tstrBytes[i];
                kList.Add(key);
            } while (value != tstrBytes[i++]);

            MultipleDatabaseEntry keySet = new MultipleDatabaseEntry(kList, false);
            sdb.Delete(keySet, txn);

            // Expected count mirrors the pair-building arithmetic above.
            count = this.num / DataVal.tstring.Length * i;
            if (i < this.num % DataVal.tstring.Length)
                count += i;
        }

        txn.Commit();
        return count;
    }
    catch (DatabaseException)
    {
        txn.Abort();
        throw; // was "throw e;" -- rethrow without resetting the stack trace
    }
}
/// <summary>
/// Verifies BTreeCursor.MoveNextMultipleKey: a full bulk read of 100
/// records, a bulk read with degree-one (uncommitted) isolation, and a
/// bulk read of 600 inserted duplicates via MoveNextDuplicateMultipleKey.
/// </summary>
public void TestMoveNextMultipleKey()
{
    testName = "TestMoveNextMultipleKey";
    SetUpTest(true);

    BTreeDatabase db;
    BTreeCursor cursor;
    DatabaseEnvironment env;
    Transaction txn;
    int cnt = 0;

    BTreeDatabaseConfig dbConfig = new BTreeDatabaseConfig();
    dbConfig.Creation = CreatePolicy.ALWAYS;
    dbConfig.Duplicates = DuplicatesPolicy.UNSORTED;
    GetMultipleDB(testHome, testName + ".db", dbConfig,
        out env, out txn, out db, out cursor);

    /*
     * Position the cursor to the first record, and bulk get
     * all other records.
     */
    Assert.True(cursor.MoveFirst());
    Assert.True(cursor.MoveNextMultipleKey());
    MultipleKeyDatabaseEntry pairs = cursor.CurrentMultipleKey;
    foreach (KeyValuePair<DatabaseEntry, DatabaseEntry> p in pairs)
    {
        cnt++;
    }
    Assert.AreEqual(cnt, 100);

    // Bulk read uncommitted records (degree-one isolation).
    cnt = 0;
    LockingInfo lockingInfo = new LockingInfo();
    Assert.True(cursor.MoveFirst(lockingInfo));
    lockingInfo.IsolationDegree = Isolation.DEGREE_ONE;
    Assert.True(cursor.MoveNextMultipleKey(lockingInfo));
    pairs = cursor.CurrentMultipleKey;
    foreach (KeyValuePair<DatabaseEntry, DatabaseEntry> p in pairs)
    {
        cnt++;
    }
    Assert.AreEqual(cnt, 100);
    cursor.Dispose();
    txn.Commit();

    /*
     * Insert duplicate records of the key in the last
     * record.
     */
    txn = env.BeginTransaction();
    cursor = db.Cursor(txn);
    Assert.IsTrue(cursor.MoveLast());
    DatabaseEntry key = cursor.Current.Key;
    // 600 duplicates: values 200..799 inserted after the current position.
    for (int i = 200; i < 800; i++)
    {
        cursor.Insert(new DatabaseEntry(BitConverter.GetBytes(i)),
            Cursor.InsertLocation.AFTER);
    }

    /*
     * Move to the first duplicate record, and bulk read
     * all duplicate ones. Every returned key must equal the
     * duplicated key.
     */
    cnt = 0;
    Assert.True(cursor.Move(key, true));
    while (cursor.MoveNextDuplicateMultipleKey((int)db.Pagesize))
    {
        pairs = cursor.CurrentMultipleKey;
        foreach (KeyValuePair<DatabaseEntry, DatabaseEntry> p in pairs)
        {
            Assert.AreEqual(key.Data, p.Key.Data);
            cnt++;
        }
    }
    Assert.AreEqual(cnt, 600);

    cursor.Close();
    db.Close();
    txn.Commit();
    env.Close();
}
/// <summary>
/// Perform a bulk delete in the secondary db: deletes the secondary key
/// <paramref name="value"/> and every key prior to it (walking the bytes
/// of DataVal.tstring), inside a single transaction.
/// </summary>
/// <param name="value">Last secondary key byte (from DataVal.tstring) to delete.</param>
/// <param name="deletePair">1 to delete explicit key/data pairs; otherwise delete by key only.</param>
/// <returns>Expected number of primary records removed.</returns>
public int BulkSecondaryDelete(byte value, int deletePair)
{
    DatabaseEntry key, data;
    byte[] tstrBytes = ASCIIEncoding.ASCII.GetBytes(DataVal.tstring);
    int i = 0;
    int count = 0;
    Transaction txn = env.BeginTransaction();

    try
    {
        if (deletePair == 1)
        {
            /*
             * Delete the given key and all keys prior to it,
             * together with their duplicate data.
             */
            List<KeyValuePair<DatabaseEntry, DatabaseEntry>> pList =
                new List<KeyValuePair<DatabaseEntry, DatabaseEntry>>();

            do
            {
                int j = 0;
                int idx = 0;

                // Single-byte secondary key: one character of tstring.
                key = new DatabaseEntry();
                key.Data = new byte[1];
                key.Data[0] = tstrBytes[i];

                // Each secondary key owns num / tstring.Length full duplicates...
                while (j < this.num / DataVal.tstring.Length)
                {
                    idx = j * DataVal.tstring.Length + i;
                    data = new DatabaseEntry(getBytes(new DataVal(idx)));
                    pList.Add(new KeyValuePair<DatabaseEntry, DatabaseEntry>(key, data));
                    j++;
                }

                // ...plus one extra when i falls inside the remainder.
                if (i < this.num % DataVal.tstring.Length)
                {
                    idx = j * DataVal.tstring.Length + i;
                    data = new DatabaseEntry(getBytes(new DataVal(idx)));
                    pList.Add(new KeyValuePair<DatabaseEntry, DatabaseEntry>(key, data));
                    j++;
                }

                count += j;
            } while (value != tstrBytes[i++]);

            MultipleKeyDatabaseEntry pairs = new MultipleKeyDatabaseEntry(pList, false);
            sdb.Delete(pairs, txn);
        }
        else
        {
            List<DatabaseEntry> kList = new List<DatabaseEntry>();

            /* Delete the given key and all keys prior to it. */
            do
            {
                key = new DatabaseEntry();
                key.Data = new byte[1];
                key.Data[0] = tstrBytes[i];
                kList.Add(key);
            } while (value != tstrBytes[i++]);

            MultipleDatabaseEntry keySet = new MultipleDatabaseEntry(kList, false);
            sdb.Delete(keySet, txn);

            // Expected count mirrors the pair-building arithmetic above.
            count = this.num / DataVal.tstring.Length * i;
            if (i < this.num % DataVal.tstring.Length)
                count += i;
        }

        txn.Commit();
        return count;
    }
    catch (DatabaseException)
    {
        txn.Abort();
        throw; // was "throw e;" -- rethrow without resetting the stack trace
    }
}
/// <summary>
/// Perform a bulk delete in the primary db of the first
/// <paramref name="value"/> records, inside a single transaction.
/// </summary>
/// <param name="value">Number of leading keys (0 .. value-1) to delete.</param>
public void BulkDelete(int value)
{
    Transaction txn = env.BeginTransaction();

    try
    {
        if (dups == 0)
        {
            /* Delete a set of key/data pairs. */
            List<KeyValuePair<DatabaseEntry, DatabaseEntry>> pList =
                new List<KeyValuePair<DatabaseEntry, DatabaseEntry>>();

            for (int i = 0; i < value; i++)
            {
                pList.Add(new KeyValuePair<DatabaseEntry, DatabaseEntry>(
                    new DatabaseEntry(BitConverter.GetBytes(i)),
                    new DatabaseEntry(getBytes(new DataVal(i)))));
            }

            MultipleKeyDatabaseEntry pairs = new MultipleKeyDatabaseEntry(pList, false);
            pdb.Delete(pairs, txn);
        }
        else
        {
            /* Delete a set of keys (all duplicates go with each key). */
            List<DatabaseEntry> kList = new List<DatabaseEntry>();

            for (int i = 0; i < value; i++)
                kList.Add(new DatabaseEntry(BitConverter.GetBytes(i)));

            MultipleDatabaseEntry keySet = new MultipleDatabaseEntry(kList, false);
            pdb.Delete(keySet, txn);
        }

        txn.Commit();
    }
    catch (DatabaseException)
    {
        txn.Abort();
        throw; // was "throw e;" -- rethrow without resetting the stack trace
    }
}
/// <summary>
/// Verifies MultipleKeyDatabaseEntry construction and bulk Put: builds a
/// 100-pair bulk buffer, checks enumeration round-trips the pairs, checks
/// a buffer copied from raw Data enumerates identically, then bulk inserts
/// (transactionally when env != null) and verifies the database contents.
/// </summary>
private void PutMultipleKey(DatabaseEnvironment env)
{
    List<KeyValuePair<DatabaseEntry, DatabaseEntry>> pList =
        new List<KeyValuePair<DatabaseEntry, DatabaseEntry>>();
    BTreeDatabase db;
    DatabaseEntry key, value;
    Transaction txn;
    string dbFileName = (env == null) ?
        testHome + "/" + testName + ".db" : testName + ".db";
    int i;

    BTreeDatabaseConfig dbConfig = new BTreeDatabaseConfig();
    dbConfig.Creation = CreatePolicy.IF_NEEDED;
    if (env != null)
    {
        // Environment-relative path; open the database transactionally.
        dbConfig.Env = env;
        txn = env.BeginTransaction();
        db = BTreeDatabase.Open(dbFileName, dbConfig, txn);
        txn.Commit();
    }
    else
        db = BTreeDatabase.Open(dbFileName, dbConfig);

    // Build 100 key/value pairs: int key, "data<i>" + 512 random chars.
    for (i = 0; i < 100; i++)
    {
        key = new DatabaseEntry(BitConverter.GetBytes(i));
        value = new DatabaseEntry(ASCIIEncoding.ASCII.GetBytes(
            "data" + i + Configuration.RandomString(512)));
        pList.Add(new KeyValuePair<DatabaseEntry, DatabaseEntry>(key, value));
    }

    // Create btree bulk buffer for key/value pairs and verify that
    // enumerating it yields the same pairs in the same order.
    MultipleKeyDatabaseEntry pairBuff =
        new MultipleKeyDatabaseEntry(pList, false);
    i = 0;
    foreach (KeyValuePair<DatabaseEntry, DatabaseEntry> pair in pairBuff)
    {
        Assert.AreEqual(pList[i].Key.Data, pair.Key.Data);
        Assert.AreEqual(pList[i].Value.Data, pair.Value.Data);
        i++;
    }
    Assert.AreEqual(100, i);

    /*
     * Create a bulk buffer from another key/value pairs bulk
     * buffer's raw data and verify it enumerates identically.
     */
    MultipleKeyDatabaseEntry pairBuff1 =
        new MultipleKeyDatabaseEntry(pairBuff.Data, false);
    Assert.AreEqual(false, pairBuff1.Recno);
    i = 0;
    foreach (KeyValuePair<DatabaseEntry, DatabaseEntry> pair in pairBuff1)
    {
        Assert.AreEqual(pList[i].Key.Data, pair.Key.Data);
        Assert.AreEqual(pList[i].Value.Data, pair.Value.Data);
        i++;
    }
    Assert.AreEqual(100, i);

    if (env == null)
    {
        // Bulk insert with key/value pair bulk buffer, then walk the
        // database with a cursor and compare every record against pList.
        db.Put(pairBuff);
        Cursor cursor = db.Cursor();
        Assert.IsTrue(cursor.MoveFirst());
        i = 0;
        Assert.AreEqual(pList[i].Key.Data, cursor.Current.Key.Data);
        Assert.AreEqual(pList[i].Value.Data, cursor.Current.Value.Data);
        while (cursor.MoveNext())
        {
            i++;
            Assert.AreEqual(pList[i].Key.Data, cursor.Current.Key.Data);
            Assert.AreEqual(pList[i].Value.Data, cursor.Current.Value.Data);
        }
        Assert.AreEqual(99, i);
        cursor.Close();

        /*
         * Dump all records from the database. The
         * number of records should be 100.
         */
        Assert.AreEqual(100, db.Truncate());

        // Bulk insert with copied key/value pair buffer and re-verify.
        db.Put(pairBuff1);
        cursor = db.Cursor();
        Assert.IsTrue(cursor.MoveFirst());
        i = 0;
        Assert.AreEqual(pList[i].Key.Data, cursor.Current.Key.Data);
        Assert.AreEqual(pList[i].Value.Data, cursor.Current.Value.Data);
        while (cursor.MoveNext())
        {
            i++;
            Assert.AreEqual(pList[i].Key.Data, cursor.Current.Key.Data);
            Assert.AreEqual(pList[i].Value.Data, cursor.Current.Value.Data);
        }
        Assert.AreEqual(99, i);
        cursor.Close();
        Assert.AreEqual(100, db.Truncate());
    }
    else
    {
        // Transactional bulk insert; truncate count verifies 100 records.
        txn = env.BeginTransaction();
        db.Put(pairBuff, txn);
        Assert.AreEqual(100, db.Truncate(txn));
        txn.Commit();
    }

    db.Close();
}