public void Delete_Query()
{
    using (var file = new TempFile())
    {
        var initial = new DateTime(2000, 01, 01);

        using (var db = new LiteEngine(file.Filename))
        {
            for (var i = 0; i < 5000; i++)
            {
                db.Insert("col", new BsonDocument { { "dt", initial.AddDays(i) } });
            }

            db.EnsureIndex("col", "dt");

            Assert.AreEqual(5000, db.Count("col"));

            // "dd" does not exist in any document, so the query matches nothing
            Assert.AreEqual(0, db.Count("col", Query.GT("dd", initial)));

            // deleting by the same non-matching query must remove zero documents
            var del = db.Delete("col", Query.GT("dd", initial));

            Assert.AreEqual(0, del);
            Assert.AreEqual(0, db.Count("col", Query.GT("dd", initial)));
        }
    }
}
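// For contrast, a minimal positive-path sketch (reusing the TempFile/LiteEngine
// helpers from the test above; the method name Delete_Query_Matching is
// hypothetical, not from the original suite). Here the query targets the
// indexed "dt" field that every document actually has, so Delete removes the
// 4999 documents strictly greater than the initial date.
public void Delete_Query_Matching()
{
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        var initial = new DateTime(2000, 01, 01);

        for (var i = 0; i < 5000; i++)
        {
            db.Insert("col", new BsonDocument { { "dt", initial.AddDays(i) } });
        }

        db.EnsureIndex("col", "dt");

        // every document except the first (dt == initial) matches GT(initial)
        var del = db.Delete("col", Query.GT("dt", initial));

        Assert.AreEqual(4999, del);
        Assert.AreEqual(1, db.Count("col"));
    }
}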
public void Execute(LiteEngine engine, StringScanner s, Display display, InputCommand input, Env env)
{
    var col = this.ReadCollection(engine, s);
    var query = this.ReadQuery(s);

    display.WriteResult(engine.Delete(col, query));
}
public void Simple_Performance_Runner()
{
    // just a simple example to test performance speed
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        var ti = new Stopwatch();
        var tx = new Stopwatch();
        var tu = new Stopwatch();
        var td = new Stopwatch();

        ti.Start();
        db.Insert("col", GetDocs(N1));
        ti.Stop();

        tx.Start();
        db.EnsureIndex("col", "name");
        tx.Stop();

        tu.Start();
        db.Update("col", GetDocs(N1));
        tu.Stop();

        // re-ensure the index (a no-op if it already exists) before measuring delete
        db.EnsureIndex("col", "name");

        td.Start();
        db.Delete("col", Query.All());
        td.Stop();

        Debug.WriteLine("Insert time: " + ti.ElapsedMilliseconds);
        Debug.WriteLine("EnsureIndex time: " + tx.ElapsedMilliseconds);
        Debug.WriteLine("Update time: " + tu.ElapsedMilliseconds);
        Debug.WriteLine("Delete time: " + td.ElapsedMilliseconds);
    }
}
public IEnumerable<BsonValue> Execute(StringScanner s, LiteEngine engine)
{
    var col = this.ReadCollection(engine, s);
    var query = this.ReadQuery(s, true);

    s.ThrowIfNotFinish();

    yield return engine.Delete(col, query);
}
public void Checkpoint_TransactionRecovery_Test()
{
    using (var file = new TempFile())
    {
        using (var db = new LiteEngine(new FileDiskService(file.Filename), autocommit: false))
        {
            var log = new StringBuilder();
            db.Log.Level = Logger.CACHE;
            db.Log.Logging += (s) => log.AppendLine(s);

            // initialize my "col" with 1000 docs without transaction
            db.Insert("col", GetDocs(1, 1000));

            // commit now to initialize a new transaction
            db.Commit();

            // insert a lot of docs inside a single collection (will trigger a checkpoint on disk)
            db.Insert("col", GetDocs(1001, N));

            // update all documents
            db.Update("col", GetDocs(1, N));

            // create new index
            db.EnsureIndex("col", "type");

            // check that the cache performed a checkpoint
            Assert.IsTrue(log.ToString().Contains("checkpoint"));

            // datafile must be big (because the checkpoint expands the file)
            Assert.IsTrue(file.Size > 30 * 1024 * 1024); // > 30MB

            // delete all docs with _id > 1000
            db.Delete("col", Query.GT("_id", 1000));

            db.DropIndex("col", "type");

            // let's rollback everything
            db.Rollback();

            // make sure the cache is empty
            Assert.AreEqual(0, db.CacheUsed);

            // datafile must return to its original size (less than 1.5MB for 1000 docs)
            Assert.IsTrue(file.Size < 1.5 * 1024 * 1024); // < 1.5MB

            // check that only the original 1000 docs exist
            Assert.AreEqual(1000, db.Count("col", Query.All()));
            Assert.AreEqual(1000, db.Count("col", null));

            // test indexes (must have only the _id index)
            Assert.AreEqual(1, db.GetIndexes("col").Count());
        }
    }
}
public void Engine_Delete_Test()
{
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        var doc1 = new BsonDocument { { "_id", 1 }, { "name", "John" } };
        var doc2 = new BsonDocument { { "_id", 2 }, { "name", "Doe" } };

        db.Insert("col", doc1);
        db.Insert("col", doc2);

        // delete both documents by _id range
        db.Delete("col", Query.GTE("_id", 1));

        // re-inserting doc1 must not throw a duplicate key error
        db.Insert("col", doc1);
    }
}
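// A minimal extension sketch of the same test (the assertions below are
// additions for illustration, not part of the original): Delete returns the
// number of removed documents, which can be cross-checked with Count.
public void Engine_Delete_Count_Sketch()
{
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        db.Insert("col", new BsonDocument { { "_id", 1 }, { "name", "John" } });
        db.Insert("col", new BsonDocument { { "_id", 2 }, { "name", "Doe" } });

        // both documents have _id >= 1, so both are removed
        var deleted = db.Delete("col", Query.GTE("_id", 1));

        Assert.AreEqual(2, deleted);
        Assert.AreEqual(0, db.Count("col", Query.All()));
    }
}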
public void Process_Insert_Delete()
{
    using (var file = new TempFile())
    {
        using (var dbA = new LiteEngine(file.Filename))
        using (var dbB = new LiteEngine(file.Filename))
        {
            dbA.EnsureIndex("col", "process", false);

            // insert 1000 docs with process=1
            var ta = Task.Factory.StartNew(() =>
            {
                for (var i = 0; i < 1000; i++)
                {
                    dbA.Insert("col", new BsonDocument { { "process", 1 } });
                }
            });

            // keep deleting everything from a second engine instance
            var tb = Task.Factory.StartNew(() =>
            {
                // wait until the insert task starts producing documents
                while (dbB.Count("col", Query.EQ("process", 1)) == 0)
                {
                    Task.Delay(50).Wait();
                }

                // keep deleting while documents remain
                while (dbB.Count("col", Query.EQ("process", 1)) > 0)
                {
                    dbB.Delete("col", Query.All());
                    Task.Delay(50).Wait();
                }
            });

            Task.WaitAll(ta, tb);

            Assert.AreEqual(0, dbA.Count("col", Query.EQ("process", 1)));
            Assert.AreEqual(0, dbB.Count("col", Query.EQ("process", 1)));
        }
    }
}
public void Performance_Test()
{
    // just a simple example to test performance speed
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        var ti = new Stopwatch();
        var tx = new Stopwatch();
        var tu = new Stopwatch();
        var td = new Stopwatch();

        ti.Start();
        db.Insert("col", GetDocs(N1));
        db.Commit();
        ti.Stop();

        tx.Start();
        db.EnsureIndex("col", "name");
        db.Commit();
        tx.Stop();

        tu.Start();
        db.Update("col", GetDocs(N1));
        db.Commit();
        tu.Stop();

        // re-ensure the index (a no-op if it already exists) before measuring delete
        db.EnsureIndex("col", "name");
        db.Commit();

        td.Start();
        db.Delete("col", Query.All());
        db.Commit();
        td.Stop();

        Debug.Print("Insert time: " + ti.ElapsedMilliseconds);
        Debug.Print("EnsureIndex time: " + tx.ElapsedMilliseconds);
        Debug.Print("Update time: " + tu.ElapsedMilliseconds);
        Debug.Print("Delete time: " + td.ElapsedMilliseconds);
    }
}
public void Delete()
{
    _db.Delete("col", LiteDB.Query.All());
}
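// For reference, a minimal sketch of the same "delete everything" call made
// through the document-level API instead of the engine (assuming a LiteDB
// v3-style LiteDatabase/LiteCollection, as used by the AutoId tests below;
// the method name is hypothetical):
public void DeleteAllViaCollection()
{
    using (var db = new LiteDatabase(new MemoryStream()))
    {
        var col = db.GetCollection<BsonDocument>("col");

        col.Insert(new BsonDocument { ["name"] = "One" });

        // Delete(Query) returns the number of removed documents
        var deleted = col.Delete(LiteDB.Query.All());
    }
}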
public void AutoId_No_Duplicate_After_Delete()
{
    // using strong type
    using (var db = new LiteDatabase(new MemoryStream()))
    {
        var col = db.GetCollection<EntityInt>("col1");

        var one = new EntityInt { Name = "One" };
        var two = new EntityInt { Name = "Two" };
        var three = new EntityInt { Name = "Three" };
        var four = new EntityInt { Name = "Four" };

        // insert
        col.Insert(one);
        col.Insert(two);

        Assert.AreEqual(1, one.Id);
        Assert.AreEqual(2, two.Id);

        // now delete the first 2 rows
        col.Delete(one.Id);
        col.Delete(two.Id);

        // and insert new documents
        col.Insert(new EntityInt[] { three, four });

        Assert.AreEqual(3, three.Id);
        Assert.AreEqual(4, four.Id);
    }

    // using bsondocument/engine
    using (var db = new LiteEngine(new MemoryStream()))
    {
        var one = new BsonDocument { ["Name"] = "One" };
        var two = new BsonDocument { ["Name"] = "Two" };
        var three = new BsonDocument { ["Name"] = "Three" };
        var four = new BsonDocument { ["Name"] = "Four" };

        db.Insert("col", one, BsonType.Int32);
        db.Insert("col", two, BsonType.Int32);

        Assert.AreEqual(1, one["_id"].AsInt32);
        Assert.AreEqual(2, two["_id"].AsInt32);

        // now delete the first 2 rows
        db.Delete("col", one["_id"].AsInt32);
        db.Delete("col", two["_id"].AsInt32);

        // and insert new documents
        db.Insert("col", new BsonDocument[] { three, four }, BsonType.Int32);

        Assert.AreEqual(3, three["_id"].AsInt32);
        Assert.AreEqual(4, four["_id"].AsInt32);
    }
}
public void ShrinkTest_Test()
{
    // shared assertions, re-run after each shrink/re-open
    Action<LiteEngine> DoTest = (db) =>
    {
        Assert.AreEqual(1, db.Count("col", null));
        Assert.AreEqual(99, db.UserVersion);
        Assert.IsNotNull(db.GetIndexes("col").FirstOrDefault(x => x.Field == "name"));
        Assert.IsTrue(db.GetIndexes("col").FirstOrDefault(x => x.Field == "name").Unique);
    };

    using (var file = new TempFile())
    {
        using (var db = new LiteEngine(file.Filename))
        {
            db.UserVersion = 99;
            db.EnsureIndex("col", "name", true);
            db.Insert("col", GetDocs(1, 30000));
            db.Delete("col", Query.GT("_id", 1)); // delete 29,999 docs

            Assert.AreEqual(1, db.Count("col", null));

            // file is still larger than 20MB (even with only 1 document)
            Assert.IsTrue(file.Size > 20 * 1024 * 1024);

            // reduce datafile
            db.Shrink();

            // now the file is smaller than 50KB
            Assert.IsTrue(file.Size < 50 * 1024);

            DoTest(db);
        }

        // re-open datafile to check that everything is still ok
        using (var db = new LiteEngine(file.Filename))
        {
            // still 1 doc and 1 unique "name" index
            DoTest(db);

            // shrink again, but now with a password
            var reduced = db.Shrink("abc123");

            // file stays the same size (but is now encrypted)
            Assert.AreEqual(0, reduced);

            // still 1 doc and 1 unique "name" index
            DoTest(db);
        }

        // re-open, again, but now with the password
        using (var db = new LiteEngine(file.Filename, "abc123"))
        {
            DoTest(db);

            // now, remove the password
            db.Shrink();

            // test again
            DoTest(db);
        }
    }
}
public void FreeSlot_Delete()
{
    using (var e = new LiteEngine())
    {
        e.BeginTrans();

        // get transaction/snapshot for "col1"
        var t = e.GetMonitor().GetTransaction(false, out var isNew);
        var s = t.CreateSnapshot(LockMode.Write, "col1", true);

        // first page
        e.Insert("col1", new BsonDocument[] { new BsonDocument { ["_id"] = 1, ["n"] = new byte[2000] } }, BsonAutoId.Int32);
        e.Insert("col1", new BsonDocument[] { new BsonDocument { ["_id"] = 2, ["n"] = new byte[2000] } }, BsonAutoId.Int32);
        e.Insert("col1", new BsonDocument[] { new BsonDocument { ["_id"] = 3, ["n"] = new byte[2000] } }, BsonAutoId.Int32);

        // second page
        e.Insert("col1", new BsonDocument[] { new BsonDocument { ["_id"] = 4, ["n"] = new byte[2000] } }, BsonAutoId.Int32);
        e.Insert("col1", new BsonDocument[] { new BsonDocument { ["_id"] = 5, ["n"] = new byte[2000] } }, BsonAutoId.Int32);
        e.Insert("col1", new BsonDocument[] { new BsonDocument { ["_id"] = 6, ["n"] = new byte[2000] } }, BsonAutoId.Int32);

        // get pages
        var colPage = s.CollectionPage;
        var indexPage = s.LocalPages.FirstOrDefault(x => x.PageType == PageType.Index);
        var dataPage1 = s.LocalPages.FirstOrDefault(x => x.PageType == PageType.Data);
        var dataPage2 = s.LocalPages.FirstOrDefault(x => x.PageType == PageType.Data && x.PageID != dataPage1.PageID);

        // test dataPage free space
        dataPage1.FreeBytes.Should().Be(2064);
        dataPage2.FreeBytes.Should().Be(2064);

        colPage.FreeDataPageList.Should().Equal(uint.MaxValue, uint.MaxValue, uint.MaxValue, uint.MaxValue, dataPage2.PageID);

        // delete some data
        e.Delete("col1", new BsonValue[] { 2 });

        // test the dataPage again
        dataPage1.FreeBytes.Should().Be(4092);

        colPage.FreeDataPageList.Should().Equal(uint.MaxValue, uint.MaxValue, uint.MaxValue, dataPage1.PageID, dataPage2.PageID);

        // clear the first page
        e.Delete("col1", new BsonValue[] { 1, 3 });

        // page 1 must now be an empty page
        var emptyPage = s.LocalPages.FirstOrDefault(x => x.PageID == dataPage1.PageID);

        emptyPage.PageType.Should().Be(PageType.Empty);
        emptyPage.ItemsCount.Should().Be(0);
        emptyPage.FreeBytes.Should().Be(8160);

        t.Pages.DeletedPages.Should().Be(1);
        t.Pages.FirstDeletedPageID.Should().Be(emptyPage.PageID);
        t.Pages.LastDeletedPageID.Should().Be(emptyPage.PageID);
    }
}
public void AutoId_No_Duplicate_After_Delete()
{
    // using strong type
    using (var db = new LiteDatabase(new MemoryStream()))
    {
        var col = db.GetCollection<EntityInt>("col1");

        col.EnsureIndex(x => x.Name);

        col.Insert(new EntityInt { Name = "One" });
        col.Insert(new EntityInt { Name = "Two" });

        var one = col.FindOne(x => x.Name == "One");
        var two = col.FindOne(x => x.Name == "Two");

        Assert.AreEqual(1, one.Id);
        Assert.AreEqual(2, two.Id);

        // now delete the first 2 rows
        col.Delete(one.Id);
        col.Delete(two.Id);

        // and insert new documents
        col.Insert(new EntityInt { Name = "Three" });
        col.Insert(new EntityInt { Name = "Four" });

        var three = col.FindOne(x => x.Name == "Three");
        var four = col.FindOne(x => x.Name == "Four");

        Assert.AreEqual(3, three.Id);
        Assert.AreEqual(4, four.Id);
    }

    // using bsondocument/engine
    using (var db = new LiteEngine(new MemoryStream()))
    {
        db.EnsureIndex("col", "Name");

        db.Insert("col", new BsonDocument { ["Name"] = "One" }, BsonType.Int32);
        db.Insert("col", new BsonDocument { ["Name"] = "Two" }, BsonType.Int32);

        var one = db.FindOne("col", Query.EQ("Name", "One"));
        var two = db.FindOne("col", Query.EQ("Name", "Two"));

        Assert.AreEqual(1, one["_id"].AsInt32);
        Assert.AreEqual(2, two["_id"].AsInt32);

        // now delete the first 2 rows
        db.Delete("col", one["_id"].AsInt32);
        db.Delete("col", two["_id"].AsInt32);

        // and insert new documents
        db.Insert("col", new BsonDocument { ["Name"] = "Three" }, BsonType.Int32);
        db.Insert("col", new BsonDocument { ["Name"] = "Four" }, BsonType.Int32);

        var three = db.FindOne("col", Query.EQ("Name", "Three"));
        var four = db.FindOne("col", Query.EQ("Name", "Four"));

        Assert.AreEqual(3, three["_id"].AsInt32);
        Assert.AreEqual(4, four["_id"].AsInt32);
    }
}