public void Index_Order()
{
    // Insert values out of order, then verify both scan directions of the index.
    using (var tmp = new TempFile())
    using (var db = new LiteEngine(tmp.Filename))
    {
        foreach (var t in new[] { "D", "A", "E", "C", "B" })
        {
            db.Insert("col", new BsonDocument { { "text", t } });
        }

        db.EnsureIndex("col", "text");

        var ascending = string.Join("", db.Find("col", Query.All("text")).Select(d => d["text"].AsString).ToArray());
        var descending = string.Join("", db.Find("col", Query.All("text", Query.Descending)).Select(d => d["text"].AsString).ToArray());

        Assert.AreEqual("ABCDE", ascending);
        Assert.AreEqual("EDCBA", descending);

        // exactly one index over "text" must exist
        Assert.AreEqual(1, db.GetIndexes("col").Count(i => i.Field == "text"));
    }
}
public void Loop_Test()
{
    using (var tmp = new TempFile())
    {
        // seed 4 documents, then dispose the engine to flush them to disk
        using (var db = new LiteEngine(tmp.Filename))
        {
            for (var n = 1; n <= 4; n++)
            {
                db.Insert("col", new BsonDocument { { "Number", n } });
            }
        }

        // re-open: update every document while enumerating the same collection
        using (var db = new LiteEngine(tmp.Filename))
        {
            foreach (var doc in db.Find("col", Query.All()))
            {
                doc["Name"] = "John";
                db.Update("col", doc);
            }

            db.EnsureIndex("col", "Name");

            var all = db.Find("col", Query.EQ("Name", "John"));

            Assert.AreEqual(4, all.Count());
        }
    }
}
public void Query_Test()
{
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        // _id ascends while name descends, so the two indexes disagree on order
        db.Insert("col", new BsonDocument[]
        {
            new BsonDocument { { "_id", 1 }, { "name", "e" } },
            new BsonDocument { { "_id", 2 }, { "name", "d" } },
            new BsonDocument { { "_id", 3 }, { "name", "c" } },
            new BsonDocument { { "_id", 4 }, { "name", "b" } },
            new BsonDocument { { "_id", 5 }, { "name", "a" } }
        });

        db.EnsureIndex("col", "name");

        // run the query against the index and render the matched keys as "a,b,c"
        Func<Query, string> result = q => string.Join(",", db.FindIndex("col", q).Select(v => v.ToString()));

        Assert.AreEqual("1", result(Query.EQ("_id", 1)));
        Assert.AreEqual("4,5", result(Query.GTE("_id", 4)));
        Assert.AreEqual("1", result(Query.LT("_id", 2)));
        Assert.AreEqual("a,b,d,e", result(Query.Not("name", "c")));
        Assert.AreEqual("2,4", result(Query.Where("_id", v => v.AsInt32 % 2 == 0)));
    }
}
public void Execute(LiteEngine engine, StringScanner s, Display display, InputCommand input, Env env)
{
    // Insert shell command: accepts a single JSON document or a JSON array of documents.
    var col = this.ReadCollection(engine, s);
    var json = JsonSerializer.Deserialize(s.ToString());

    if (json.IsArray)
    {
        // bulk insert: report how many documents were inserted
        var count = engine.Insert(col, json.AsArray.RawValue.Select(x => x.AsDocument));

        display.WriteResult(count);
    }
    else
    {
        // single insert: report the (possibly generated) _id
        engine.Insert(col, new BsonDocument[] { json.AsDocument });

        display.WriteResult(json.AsDocument["_id"]);
    }
}
public void AutoIndexEngine_Test()
{
    using (var db = new LiteEngine(new MemoryStream()))
    {
        var doc = new BsonDocument { ["name"] = "john doe", ["age"] = 40 };

        db.Insert("people", doc);

        // querying both fields makes the engine auto-create an index on each
        var found = db.FindOne("people", Query.And(
            Query.EQ("name", "john doe"),
            Query.EQ("age", 40)));

        Assert.AreEqual(doc["name"], found["name"]);

        var nameIndex = db.GetIndexes("people").FirstOrDefault(i => i.Field == "name");
        var ageIndex = db.GetIndexes("people").FirstOrDefault(i => i.Field == "age");

        // indexes are not unique (by default, when using LiteEngine)
        Assert.AreEqual(false, nameIndex.Unique);
        Assert.AreEqual(false, ageIndex.Unique);
    }
}
public void Engine_Delete_Test()
{
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        var first = new BsonDocument { { "_id", 1 }, { "name", "John" } };
        var second = new BsonDocument { { "_id", 2 }, { "name", "Doe" } };

        db.Insert("col", first);
        db.Insert("col", second);

        // delete everything, then re-insert _id=1 to prove the key was released
        db.Delete("col", Query.GTE("_id", 1));
        db.Insert("col", first);
    }
}
public void Change_Database_Culture()
{
    // Data written under one culture must still be found after the thread culture changes.
    using (var f = new TempFile())
    {
        var current = Thread.CurrentThread.CurrentCulture;

        Thread.CurrentThread.CurrentCulture = new CultureInfo("fi");

        // store in database using "fi" culture
        using (var e = new LiteEngine(f.Filename))
        {
            e.Insert("col1", data.Select(x => new BsonDocument { ["_id"] = x }), BsonAutoId.Int32);
        }

        // change current culture do "en-GB"
        Thread.CurrentThread.CurrentCulture = new CultureInfo("en-gb");

        using (var e = new LiteEngine(f.Filename))
        {
            foreach (var id in data)
            {
                var doc = e.Find("col1", BsonExpression.Create("_id = @0", id)).Single();

                doc["_id"].AsString.Should().Be(id);
            }
        }

        // back to local culture
        // FIX: the original assigned CurrentUICulture here, which never restored
        // CurrentCulture and leaked "en-gb" into every test run after this one
        Thread.CurrentThread.CurrentCulture = current;
    }
}
public void Thread_Transaction_Test()
{
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        // seed the single counter document
        db.Insert("col", new BsonDocument { { "_id", 1 }, { "count", 1 } });

        // 9999 parallel increments, serialized by locking the engine instance
        Parallel.For(1, 10000, i =>
        {
            lock (db)
            {
                var counter = db.Find("col", Query.EQ("_id", 1)).Single();

                counter["count"] = counter["count"].AsInt32 + 1;

                db.Update("col", counter);
            }
        });

        Assert.AreEqual(10000, db.Find("col", Query.EQ("_id", 1)).Single()["count"].AsInt32);
    }
}
public IEnumerable <BsonValue> Execute(StringScanner s, LiteEngine engine)
{
    // Shell "insert" command: inserts one JSON document or a JSON array into a
    // collection, with an optional "id:<type>" suffix selecting the auto-id strategy.
    var col = this.ReadCollection(engine, s);
    var value = JsonSerializer.Deserialize(s);

    // optional auto-id type token, e.g. "id:int", "id:long", "id:guid" (empty if absent)
    var sid = s.Scan(@"\s+_?id:(int32|int64|int|long|objectid|datetime|date|guid)", 1).Trim().ToLower();

    // default auto-id is ObjectId when no (or an unknown) token was scanned
    var autoId =
        sid == "int32" || sid == "int" ? BsonType.Int32 :
        sid == "int64" || sid == "long" ? BsonType.Int64 :
        sid == "date" || sid == "datetime" ? BsonType.DateTime :
        sid == "guid" ? BsonType.Guid : BsonType.ObjectId;

    // any unconsumed input is a syntax error
    s.ThrowIfNotFinish();

    if (value.IsArray)
    {
        // bulk insert: yield the number of inserted documents
        var count = engine.InsertBulk(col, value.AsArray.RawValue.Select(x => x.AsDocument), autoId: autoId);

        yield return(count);
    }
    else if (value.IsDocument)
    {
        // single insert: yield the (possibly generated) _id
        engine.Insert(col, new BsonDocument[] { value.AsDocument }, autoId);

        yield return(value.AsDocument["_id"]);
    }
    else
    {
        throw LiteException.SyntaxError(s, "Invalid JSON value (must be a document or an array)");
    }
}
public void Populate(IEnumerable <BsonDocument> docs)
{
    // create the index first so it is maintained incrementally during the load
    _engine.EnsureIndex("col", "age");

    // bulk data insert
    _engine.Insert("col", docs);
}
public void MemoryStream_Test()
{
    var mem = new MemoryStream();

    // write two documents, then dispose the engine so data reaches the stream
    using (var db = new LiteEngine(mem))
    {
        db.Insert("col", new BsonDocument { { "_id", 1 }, { "name", "John" } });
        db.Insert("col", new BsonDocument { { "_id", 2 }, { "name", "Doe" } });
    }

    // re-open over the same stream and read both documents back
    using (var db = new LiteEngine(mem))
    {
        var john = db.Find("col", Query.EQ("_id", 1)).FirstOrDefault();
        var doe = db.Find("col", Query.EQ("_id", 2)).FirstOrDefault();

        Assert.AreEqual("John", john["name"].AsString);
        Assert.AreEqual("Doe", doe["name"].AsString);
    }
}
public void Loop_With_Update()
{
    using (var tmp = new TempFile())
    {
        // initialize database with 5 documents (Int32 auto-id)
        using (var db = new LiteEngine(tmp.Filename))
        {
            for (var n = 1; n <= 5; n++)
            {
                db.Insert("col", new BsonDocument { { "Number", n } }, BsonType.Int32);
            }
        }

        using (var db = new LiteEngine(tmp.Filename))
        {
            foreach (var doc in db.Find("col", Query.All(), 0, 1000))
            {
                var id = doc["_id"];

                doc["Name"] = "John";

                // inside this update, locker must be in write
                db.Update("col", doc);
            }

            db.EnsureIndex("col", "Name");

            var all = db.Find("col", Query.EQ("Name", "John"));

            Assert.AreEqual(5, all.Count());
        }
    }
}
public IEnumerable <BsonValue> Execute(StringScanner s, LiteEngine engine)
{
    // Shell "select" command: projects documents through an expression, optionally
    // inserting the projected results into another collection ("into <name> id:<type>").
    var col = this.ReadCollection(engine, s);

    // try read any kind of expression
    var expression = BsonExpression.ReadExpression(s, false, false);

    // if not found a valid one, try read only as path (will add $. before)
    if (expression == null)
    {
        expression = BsonExpression.ReadExpression(s, true, true);
    }

    var query = Query.All();

    // support into new_collection
    var into = s.Scan(@"\s*into\s+([\w-]+)", 1);
    var autoId = BsonType.ObjectId;

    // checks for autoId (only meaningful together with "into")
    if (into.Length > 0)
    {
        var sid = s.Scan(@"\s+_?id:(int32|int64|int|long|objectid|datetime|date|guid)", 1).Trim().ToLower();

        autoId =
            sid == "int32" || sid == "int" ? BsonType.Int32 :
            sid == "int64" || sid == "long" ? BsonType.Int64 :
            sid == "date" || sid == "datetime" ? BsonType.DateTime :
            sid == "guid" ? BsonType.Guid : BsonType.ObjectId;
    }

    // optional "where" clause narrows the source documents
    if (s.Scan(@"\s*where\s*").Length > 0)
    {
        query = this.ReadQuery(s, true);
    }

    // optional skip/limit and include clauses
    var skipLimit = this.ReadSkipLimit(s);
    var includes = this.ReadIncludes(s);

    s.ThrowIfNotFinish();

    var docs = engine.Find(col, query, includes, skipLimit.Key, skipLimit.Value);

    if (into.Length > 0)
    {
        // insert into results to other collection collection
        var count = engine.Insert(into, this.Execute(docs, expression), autoId);

        // return inserted documents
        return(new BsonValue[] { count });
    }
    else
    {
        return(this.Execute(docs, expression).Select(x => x as BsonValue));
    }
}
public void Update_ExtendBlocks()
{
    // Exercises how Update grows and shrinks a document across data pages by
    // asserting the exact free-byte accounting on each touched page.
    // NOTE(review): the expected freeBytes values are tied to the engine's page
    // size and header layout — confirm against the page format before changing.
    using (var db = new LiteEngine())
    {
        var doc = new BsonDocument { ["_id"] = 1, ["d"] = new byte[1000] };

        db.Insert("col1", doc);

        // small (same page)
        doc["d"] = new byte[300];

        db.Update("col1", doc);

        var page3 = db.GetPageLog(3);

        page3["freeBytes"].AsInt32.Should().Be(7828);

        // big (same page)
        doc["d"] = new byte[2000];

        db.Update("col1", doc);

        page3 = db.GetPageLog(3);

        page3["freeBytes"].AsInt32.Should().Be(6128);

        // big (extend page) - document now spans pages 3, 4 and 5
        doc["d"] = new byte[20000];

        db.Update("col1", doc);

        page3 = db.GetPageLog(3);
        var page4 = db.GetPageLog(4);
        var page5 = db.GetPageLog(5);

        page3["freeBytes"].AsInt32.Should().Be(0);
        page4["freeBytes"].AsInt32.Should().Be(0);
        page5["freeBytes"].AsInt32.Should().Be(4428);

        // small (shrink page) - last page must be released again
        doc["d"] = new byte[10000];

        db.Update("col1", doc);

        page3 = db.GetPageLog(3);
        page4 = db.GetPageLog(4);
        page5 = db.GetPageLog(5);

        page3["freeBytes"].AsInt32.Should().Be(0);
        page4["freeBytes"].AsInt32.Should().Be(6278);
        page5["pageType"].AsString.Should().Be("Empty");
    }
}
public void Engine_Insert_Test()
{
    using (var file = new TempFile())
    {
        // insert and dispose to force a flush to disk
        using (var db = new LiteEngine(file.Filename))
        {
            db.Insert("col", new BsonDocument { { "_id", 1 }, { "name", "John" } });
            db.Insert("col", new BsonDocument { { "_id", 2 }, { "name", "Doe" } });
        }

        // re-open the datafile and verify both documents survived
        using (var db = new LiteEngine(file.Filename))
        {
            var john = db.Find("col", Query.EQ("_id", 1)).FirstOrDefault();
            var doe = db.Find("col", Query.EQ("_id", 2)).FirstOrDefault();

            Assert.AreEqual("John", john["name"].AsString);
            Assert.AreEqual("Doe", doe["name"].AsString);
        }
    }
}
public void Execute(LiteEngine engine, StringScanner s, Display display, InputCommand input, Env env)
{
    // Import shell command: read a JSON array from a file and bulk-insert its documents.
    var col = this.ReadCollection(engine, s);
    var filename = s.Scan(@".*");

    // FIX: open read-only and shared - FileMode.Open alone requests ReadWrite
    // access, which fails on read-only files and blocks concurrent readers
    using (var sr = new StreamReader(new FileStream(filename, System.IO.FileMode.Open, System.IO.FileAccess.Read, System.IO.FileShare.Read)))
    {
        var docs = JsonSerializer.DeserializeArray(sr);

        display.WriteResult(engine.Insert(col, docs.Select(x => x.AsDocument)));
    }
}
public IEnumerable <BsonValue> Execute(StringScanner s, LiteEngine engine)
{
    // Import shell command: read a JSON array from a file, bulk-insert its
    // documents and yield the inserted count.
    var col = this.ReadCollection(engine, s);
    var filename = s.Scan(@".*");

    // FIX: open read-only and shared - FileMode.Open alone requests ReadWrite
    // access, which fails on read-only files and blocks concurrent readers
    using (var sr = new StreamReader(new FileStream(filename, System.IO.FileMode.Open, System.IO.FileAccess.Read, System.IO.FileShare.Read)))
    {
        var docs = JsonSerializer.DeserializeArray(sr);

        yield return(engine.Insert(col, docs.Select(x => x.AsDocument)));
    }
}
public void Execute(LiteEngine engine, StringScanner s, Display display, InputCommand input, Env env)
{
    // Import shell command: deserialize a UTF-8 JSON array file and bulk-insert it.
    var col = this.ReadCollection(engine, s);
    var path = s.Scan(@".*");

    using (var reader = new StreamReader(path, Encoding.UTF8))
    {
        var docs = JsonSerializer.DeserializeArray(reader);
        var count = engine.Insert(col, docs.Select(d => d.AsDocument));

        display.WriteResult(count);
    }
}
public void Encrypted_Database()
{
    // End-to-end check of datafile encryption: plain files leak their content as
    // readable text, encrypted files do not, and a wrong password must fail fast.
    using (var encrypt = new TempFile())
    using (var plain = new TempFile())
    {
        // create a database with no password - plain data
        using (var db = new LiteEngine(plain.Filename))
        {
            db.Insert("col", new BsonDocument { { "name", "Mauricio David" } });
        }

        // read datafile to find "Mauricio" string
        Assert.IsTrue(plain.ReadAsText().Contains("Mauricio David"));

        // create a database with password
        using (var db = new LiteEngine(encrypt.Filename, "abc123"))
        {
            db.Insert("col", new BsonDocument { { "name", "Mauricio David" } });
        }

        // test if is possible find "Mauricio" string
        Assert.IsFalse(encrypt.ReadAsText().Contains("Mauricio David"));

        // try access using wrong password
        try
        {
            using (var db = new LiteEngine(encrypt.Filename, "abc1234"))
            {
                Assert.Fail(); // can't work
            }
        }
        catch (LiteException ex)
        {
            Assert.IsTrue(ex.ErrorCode == 123); // wrong password error code
        }

        // open encrypted db and read document
        using (var db = new LiteEngine(encrypt.Filename, "abc123"))
        {
            var doc = db.Find("col", Query.All()).First();

            Assert.AreEqual("Mauricio David", doc["name"].AsString);

            // let's remove password to work CheckIntegrety
            db.Shrink(null, null);
        }
    }
}
public void Thread_InsertQuery_Test()
{
    const int N = 3000;
    var running = true;

    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        db.Insert("col", new BsonDocument()); // insert basic document

        // writer task: N sequential inserts, then signal the reader to stop
        var writer = Task.Factory.StartNew(() =>
        {
            for (var i = 0; i < N; i++)
            {
                db.Insert("col", new BsonDocument { { "_id", i } });
            }

            running = false;
        });

        // reader task: keep scanning the whole collection while inserts run
        var reader = Task.Factory.StartNew(() =>
        {
            while (running)
            {
                db.Find("col", Query.All()).ToList();
            }
        });

        Task.WaitAll(writer, reader);

        Assert.AreEqual(N + 1, db.Count("col", Query.All()));
    }
}
private void CreateDatabase(LiteEngine engine)
{
    var doc = new BsonDocument { ["_id"] = 1, ["name"] = "Mauricio" };

    engine.Insert("mycol", new[] { doc }, BsonAutoId.Int32);

    // do checkpoint to use only data file
    engine.Checkpoint();
}
public void MemoryStream_Test()
{
    var mem = new MemoryStream();

    // phase 1: persist two documents into the in-memory stream
    using (var db = new LiteEngine(mem))
    {
        db.Insert("col", new BsonDocument { { "_id", 1 }, { "name", "John" } });
        db.Insert("col", new BsonDocument { { "_id", 2 }, { "name", "Doe" } });
    }

    // phase 2: a fresh engine over the same stream must see both
    using (var db = new LiteEngine(mem))
    {
        Assert.AreEqual("John", db.Find("col", Query.EQ("_id", 1)).FirstOrDefault()["name"].AsString);
        Assert.AreEqual("Doe", db.Find("col", Query.EQ("_id", 2)).FirstOrDefault()["name"].AsString);
    }
}
public void Checkpoint_Recovery_Test()
{
    // A failed bulk update that crossed a checkpoint must be fully rolled back:
    // all documents stay at type=1, in-memory and after re-opening the datafile.
    using (var file = new TempFile())
    {
        // init with N docs with type=1
        using (var db = new LiteEngine(file.Filename))
        {
            db.EnsureIndex("col", "type");
            db.Insert("col", GetDocs(1, N, type: 1));

            Assert.AreEqual(N, db.Count("col", Query.EQ("type", 1)));
        }

        // re-open and try update all docs to type=2
        using (var db = new LiteEngine(file.Filename))
        {
            var log = new StringBuilder();
            db.Log.Level = Logger.CACHE;
            db.Log.Logging += (s) => log.AppendLine(s);

            try
            {
                // try update all to "type=2"
                // but throws exception before finish
                db.Update("col", GetDocs(1, N, type: 2, throwAtEnd: true));
            }
            catch (Exception ex)
            {
                // only the deliberate mid-update exception is expected here
                if (!ex.Message.Contains("Try Recovery!"))
                {
                    Assert.Fail(ex.Message);
                }
            }

            // checks if cache had a checkpoint
            Assert.IsTrue(log.ToString().Contains("checkpoint"));

            // re-check if all docs will be type=1
            Assert.AreEqual(N, db.Count("col", Query.EQ("type", 1)));
            Assert.AreEqual(0, db.Count("col", Query.EQ("type", 2)));
        }

        // re-open datafile the be sure contains only type=1
        using (var db = new LiteEngine(file.Filename))
        {
            Assert.AreEqual(N, db.Count("col", Query.EQ("type", 1)));
            Assert.AreEqual(0, db.Count("col", Query.EQ("type", 2)));
        }
    }
}
public void Thread_InsertUpdate_Test()
{
    const int N = 3000;

    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        db.EnsureIndex("col", "updated");

        Assert.AreEqual(0, db.Count("col", Query.EQ("updated", true)));

        // writer: insert N bare documents
        var inserter = Task.Factory.StartNew(() =>
        {
            for (var i = 0; i < N; i++)
            {
                db.Insert("col", new BsonDocument { { "_id", i } });
            }
        });

        // updater: retry each _id until the matching insert has landed
        var updater = Task.Factory.StartNew(() =>
        {
            var i = 0;

            while (i < N)
            {
                var doc = new BsonDocument
                {
                    { "_id", i },
                    { "updated", true },
                    { "name", TempFile.LoremIpsum(5, 10, 1, 5, 1) }
                };

                if (db.Update("col", doc))
                {
                    i++;
                }
            }
        });

        Task.WaitAll(inserter, updater);

        Assert.AreEqual(N, db.Count("col", Query.EQ("updated", true)));
    }
}
public void DropCollection_Test()
{
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        // collection does not exist until the first insert creates it
        Assert.IsFalse(db.GetCollectionNames().Any(name => name == "col"));

        db.Insert("col", new BsonDocument { { "a", 1 } });

        Assert.IsTrue(db.GetCollectionNames().Any(name => name == "col"));

        // dropping removes it from the collection list again
        db.DropCollection("col");

        Assert.IsFalse(db.GetCollectionNames().Any(name => name == "col"));
    }
}
public void Engine_Insert_Test()
{
    using (var file = new TempFile())
    {
        // write two documents and close the engine so they hit the datafile
        using (var db = new LiteEngine(file.Filename))
        {
            db.Insert("col", new BsonDocument { { "_id", 1 }, { "name", "John" } });
            db.Insert("col", new BsonDocument { { "_id", 2 }, { "name", "Doe" } });
        }

        // a fresh engine over the same file must read both back
        using (var db = new LiteEngine(file.Filename))
        {
            Assert.AreEqual("John", db.Find("col", Query.EQ("_id", 1)).FirstOrDefault()["name"].AsString);
            Assert.AreEqual("Doe", db.Find("col", Query.EQ("_id", 2)).FirstOrDefault()["name"].AsString);
        }
    }
}
public void Concurrency_InsertQuery_Test()
{
    const int N = 3000;
    var running = true;

    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        db.Insert("col", new BsonDocument()); // insert basic document

        // insert N documents, then stop the concurrent reader
        var ta = Task.Factory.StartNew(() =>
        {
            for (var i = 0; i < N; i++)
            {
                db.Insert("col", new BsonDocument { { "_id", i } });
            }

            running = false;
        });

        // query while insert is in progress
        var tb = Task.Factory.StartNew(() =>
        {
            while (running)
            {
                db.Find("col", Query.All()).ToList();
            }
        });

        Task.WaitAll(ta, tb);

        // N inserted plus the initial seed document
        Assert.AreEqual(N + 1, db.Count("col", Query.All()));
    }
}
public void Engine_QueryUpdate_Test()
{
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        db.EnsureIndex("col", "name");

        // insert 4 documents
        for (var id = 1; id <= 4; id++)
        {
            db.Insert("col", new BsonDocument { { "_id", id } });
        }

        // query all documents and update name
        foreach (var doc in db.Find("col", Query.All()))
        {
            doc["name"] = "john";
            db.Update("col", doc);
        }

        // this simple test if same thread open a read mode and then open write lock mode
        Assert.AreEqual(4, db.Count("col", Query.EQ("name", "john")));
    }
}
public void Checkpoint_Insert_Test()
{
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        // capture cache-level log messages
        var log = new StringBuilder();
        db.Log.Level = Logger.CACHE;
        db.Log.Logging += (msg) => log.AppendLine(msg);

        // insert basic N documents
        db.Insert("col", GetDocs(1, N));

        // inserting N docs must have triggered at least one cache checkpoint
        Assert.IsTrue(log.ToString().Contains("checkpoint"));
    }
}
public void BulkInsert_Test()
{
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        // let's bulk 500.000 documents
        db.Insert("col", GetDocs(1, 500000));

        // and assert if all are inserted (based on collection header only)
        Assert.AreEqual(500000, db.Count("col"));

        // and now count all (forces a full index walk)
        Assert.AreEqual(500000, db.Count("col", Query.All()));
    }
}
public void DropCollection()
{
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        // no "col" collection before anything is inserted
        Assert.IsFalse(db.GetCollectionNames().Any(n => n == "col"));

        // first insert implicitly creates the collection
        db.Insert("col", new BsonDocument { { "a", 1 } });
        Assert.IsTrue(db.GetCollectionNames().Any(n => n == "col"));

        // drop removes it again
        db.DropCollection("col");
        Assert.IsFalse(db.GetCollectionNames().Any(n => n == "col"));
    }
}
public void Checkpoint_Recovery_Test()
{
    // A failed bulk update that crossed a checkpoint must be fully rolled back:
    // all documents stay at type=1, both in-memory and after re-opening the file.
    using (var file = new TempFile())
    {
        // init with N docs with type=1
        using (var db = new LiteEngine(file.Filename))
        {
            db.EnsureIndex("col", "type");
            db.Insert("col", GetDocs(1, N, type: 1));

            Assert.AreEqual(N, db.Count("col", Query.EQ("type", 1)));
        }

        // re-open and try update all docs to type=2
        using (var db = new LiteEngine(file.Filename))
        {
            var log = new StringBuilder();
            db.Log.Level = Logger.CACHE;
            db.Log.Logging += (s) => log.AppendLine(s);

            try
            {
                // try update all to "type=2"
                // but throws exception before finish
                db.Update("col", GetDocs(1, N, type: 2, throwAtEnd: true));
            }
            catch (Exception ex)
            {
                // only the deliberate mid-update exception is expected here
                if (!ex.Message.Contains("Try Recovery!")) Assert.Fail(ex.Message);
            }

            // checks if cache had a checkpoint
            Assert.IsTrue(log.ToString().Contains("checkpoint"));

            // re-check if all docs will be type=1
            Assert.AreEqual(N, db.Count("col", Query.EQ("type", 1)));
            Assert.AreEqual(0, db.Count("col", Query.EQ("type", 2)));
        }

        // re-open datafile the be sure contains only type=1
        using (var db = new LiteEngine(file.Filename))
        {
            Assert.AreEqual(N, db.Count("col", Query.EQ("type", 1)));
            Assert.AreEqual(0, db.Count("col", Query.EQ("type", 2)));
        }
    }
}
public void Encrypted_Test()
{
    // Datafile encryption round-trip: a plain file leaks its content as readable
    // text, an encrypted file does not, and a wrong password must fail to open.
    using (var encrypt = new TempFile())
    using (var plain = new TempFile())
    {
        // create a database with no password - plain data
        using (var db = new LiteEngine(plain.Filename))
        {
            db.Insert("col", new BsonDocument { { "name", "Mauricio David" } });
        }

        // read datafile to find "Mauricio" string
        Assert.IsTrue(plain.ReadAsText().Contains("Mauricio David"));

        // create a database with password
        using (var db = new LiteEngine(encrypt.Filename, "abc123"))
        {
            db.Insert("col", new BsonDocument { { "name", "Mauricio David" } });
        }

        // test if is possible find "Mauricio" string
        Assert.IsFalse(encrypt.ReadAsText().Contains("Mauricio David"));

        // try access using wrong password
        try
        {
            using (var db = new LiteEngine(encrypt.Filename, "abc1234"))
            {
                Assert.Fail(); // can't work
            }
        }
        catch (LiteException ex)
        {
            Assert.IsTrue(ex.ErrorCode == 123); // wrong password error code
        }

        // open encrypted db and read document
        using (var db = new LiteEngine(encrypt.Filename, "abc123"))
        {
            var doc = db.Find("col", Query.All()).First();

            Assert.AreEqual("Mauricio David", doc["name"].AsString);
        }
    }
}
/// <summary>
/// Simple test: insert new document and then update this document. Finish quering first 100 documents
/// </summary>
static void RunTask(LiteEngine db)
{
    for (var i = 0; i < 10; i++)
    {
        var doc = new BsonDocument() { ["name"] = "testing - " + Guid.NewGuid() };

        // insert with an Int32 auto-id, then rewrite the same document
        db.Insert("collection", doc, BsonType.Int32);

        doc["name"] = "changed name - " + Guid.NewGuid();

        db.Update("collection", doc);
    }

    // finish by materializing the first 100 documents
    db.Find("collection", Query.LTE("_id", 100)).ToArray();
}
// [TestMethod]
public void BigFile_Test()
{
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        // keep inserting batches until the datafile crosses 4GB
        while (file.Size < 4L * 1024 * 1024 * 1024)
        {
            db.Insert("col", GetDocs(5000));
        }

        // now lets read all docs - just to check if any exception is thrown
        foreach (var doc in db.Find("col", Query.All()))
        {
        }
    }
}
public void Concurrency_InsertUpdate_Test()
{
    const int N = 3000;

    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        db.EnsureIndex("col", "updated");

        Assert.AreEqual(0, db.Count("col", Query.EQ("updated", true)));

        // task A: insert N bare documents
        var ta = Task.Factory.StartNew(() =>
        {
            for (var i = 0; i < N; i++)
            {
                db.Insert("col", new BsonDocument { { "_id", i } });
            }
        });

        // task B: keep retrying each _id until its insert has landed
        var tb = Task.Factory.StartNew(() =>
        {
            var i = 0;

            while (i < N)
            {
                var doc = new BsonDocument
                {
                    { "_id", i },
                    { "updated", true },
                    { "name", TempFile.LoremIpsum(5, 10, 1, 5, 1) }
                };

                // Update returns false until the insert for this id exists
                if (db.Update("col", doc)) i++;
            }
        });

        Task.WaitAll(ta, tb);

        Assert.AreEqual(N, db.Count("col", Query.EQ("updated", true)));
    }
}
public void Process_Insert_Delete()
{
    using (var file = new TempFile())
    {
        // two engine instances over the same datafile simulate two processes
        using (var dbA = new LiteEngine(file.Filename))
        using (var dbB = new LiteEngine(file.Filename))
        {
            dbA.EnsureIndex("col", "process", false);

            // instance A: insert 1000 x process=1
            var inserter = Task.Factory.StartNew(() =>
            {
                for (var i = 0; i < 1000; i++)
                {
                    dbA.Insert("col", new BsonDocument { { "process", 1 } });
                }
            });

            // instance B: wait for the first insert, then delete until drained
            var deleter = Task.Factory.StartNew(() =>
            {
                // while before starts insert
                while (dbB.Count("col", Query.EQ("process", 1)) == 0)
                {
                    Task.Delay(50).Wait();
                }

                // while until has docs
                while (dbB.Count("col", Query.EQ("process", 1)) > 0)
                {
                    dbB.Delete("col", Query.All());
                    Task.Delay(50).Wait();
                }
            });

            Task.WaitAll(inserter, deleter);

            // both instances must agree that nothing is left
            Assert.AreEqual(0, dbA.Count("col", Query.EQ("process", 1)));
            Assert.AreEqual(0, dbB.Count("col", Query.EQ("process", 1)));
        }
    }
}
public void Checkpoint_Index_Test()
{
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        // insert basic N documents
        db.Insert("col", GetDocs(1, N));

        // start capturing cache log only after the data load
        var log = new StringBuilder();
        db.Log.Level = Logger.CACHE;
        db.Log.Logging += (msg) => log.AppendLine(msg);

        // create an index in col - building it over N docs must checkpoint
        db.EnsureIndex("col", "name");

        Assert.IsTrue(log.ToString().Contains("checkpoint"));
        Assert.AreEqual(N, db.Count("col", Query.All()));
    }
}
public void Process_Insert_Count()
{
    using (var file = new TempFile())
    {
        // two engine instances over the same datafile simulate two processes
        using (var dbA = new LiteEngine(file.Filename))
        using (var dbB = new LiteEngine(file.Filename))
        {
            dbA.EnsureIndex("col", "process", false);

            // instance A: insert 1000 x process=1
            var inserter = Task.Factory.StartNew(() =>
            {
                for (var i = 0; i < 1000; i++)
                {
                    dbA.Insert("col", new BsonDocument { { "process", 1 } });
                }
            });

            // instance B: keep querying until found 1000 docs
            var counter = Task.Factory.StartNew(() =>
            {
                var count = 0L;

                while (count < 1000)
                {
                    // force query all rows
                    count = dbB.Count("col", Query.EQ("process", 1));
                    Task.Delay(50).Wait();
                }
            });

            Task.WaitAll(inserter, counter);

            // both instances must see the full set
            Assert.AreEqual(1000, dbA.Count("col", Query.EQ("process", 1)));
            Assert.AreEqual(1000, dbB.Count("col", Query.EQ("process", 1)));
        }
    }
}
public void Process_Multi_Insert()
{
    using (var file = new TempFile())
    {
        // two engine instances over the same datafile simulate two processes
        using (var dbA = new LiteEngine(file.Filename))
        using (var dbB = new LiteEngine(file.Filename))
        {
            dbA.EnsureIndex("col", "process", false);

            // instance A: insert 1000 x process=1
            var ta = Task.Factory.StartNew(() =>
            {
                for (var i = 0; i < 1000; i++)
                {
                    dbA.Insert("col", new BsonDocument { { "process", 1 } });
                }
            });

            // instance B: insert 700 x process=2
            var tb = Task.Factory.StartNew(() =>
            {
                for (var i = 0; i < 700; i++)
                {
                    dbB.Insert("col", new BsonDocument { { "process", 2 } });
                }
            });

            Task.WaitAll(ta, tb);

            // both instances must see both result sets
            Assert.AreEqual(1000, dbA.Count("col", Query.EQ("process", 1)));
            Assert.AreEqual(700, dbA.Count("col", Query.EQ("process", 2)));
            Assert.AreEqual(1000, dbB.Count("col", Query.EQ("process", 1)));
            Assert.AreEqual(700, dbB.Count("col", Query.EQ("process", 2)));
        }
    }
}
public void Performance_Test()
{
    // just a simple example to test performance speed
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        var ti = new Stopwatch();
        var tx = new Stopwatch();
        var tu = new Stopwatch();
        var td = new Stopwatch();

        // measure bulk insert
        ti.Start(); db.Insert("col", GetDocs(N1)); db.Commit(); ti.Stop();

        // measure index creation
        tx.Start(); db.EnsureIndex("col", "name"); db.Commit(); tx.Stop();

        // measure update of every document
        tu.Start(); db.Update("col", GetDocs(N1)); db.Commit(); tu.Stop();

        db.EnsureIndex("col", "name"); db.Commit();

        // measure delete-all
        td.Start(); db.Delete("col", Query.All()); db.Commit(); td.Stop();

        Debug.WriteLine("Insert time: " + ti.ElapsedMilliseconds);
        Debug.WriteLine("EnsureIndex time: " + tx.ElapsedMilliseconds);
        Debug.WriteLine("Update time: " + tu.ElapsedMilliseconds);
        Debug.WriteLine("Delete time: " + td.ElapsedMilliseconds);
    }
}
public void Performance_Test()
{
    // just a simple example to test performance speed
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        var ti = new Stopwatch();
        var tx = new Stopwatch();
        var tu = new Stopwatch();
        var td = new Stopwatch();

        // measure bulk insert
        ti.Start();
        db.Insert("col", GetDocs(N1));
        db.Commit();
        ti.Stop();

        // measure index creation
        tx.Start();
        db.EnsureIndex("col", "name");
        db.Commit();
        tx.Stop();

        // measure update of every document
        tu.Start();
        db.Update("col", GetDocs(N1));
        db.Commit();
        tu.Stop();

        db.EnsureIndex("col", "name");
        db.Commit();

        // measure delete-all
        td.Start();
        db.Delete("col", Query.All());
        db.Commit();
        td.Stop();

        // FIX: use Debug.WriteLine instead of the legacy Debug.Print (VB-compat
        // alias with format-string semantics) - consistent with the sibling test
        Debug.WriteLine("Insert time: " + ti.ElapsedMilliseconds);
        Debug.WriteLine("EnsureIndex time: " + tx.ElapsedMilliseconds);
        Debug.WriteLine("Update time: " + tu.ElapsedMilliseconds);
        Debug.WriteLine("Delete time: " + td.ElapsedMilliseconds);
    }
}
public void ShrinkTest_Test()
{
    // do some tests: shared asserts that must hold before and after every shrink
    Action<LiteEngine> DoTest = (db) =>
    {
        Assert.AreEqual(1, db.Count("col", null));
        Assert.AreEqual(99, db.UserVersion);
        Assert.IsNotNull(db.GetIndexes("col").FirstOrDefault(x => x.Field == "name"));
        Assert.IsTrue(db.GetIndexes("col").FirstOrDefault(x => x.Field == "name").Unique);
    };

    using (var file = new TempFile())
    {
        using (var db = new LiteEngine(file.Filename))
        {
            db.UserVersion = 99;
            db.EnsureIndex("col", "name", true);
            db.Insert("col", GetDocs(1, 30000));
            db.Delete("col", Query.GT("_id", 1)); // delete 29.999 docs

            Assert.AreEqual(1, db.Count("col", null));

            // file still large than 20mb (even with only 1 document)
            Assert.IsTrue(file.Size > 20 * 1024 * 1024);

            // reduce datafile
            db.Shrink();

            // now file are small than 50kb
            Assert.IsTrue(file.Size < 50 * 1024);

            DoTest(db);
        }

        // re-open datafile to check if is ok
        using (var db = new LiteEngine(file.Filename))
        {
            // still 1 doc and 1 name unique index
            DoTest(db);

            // shrink again but now with password
            var reduced = db.Shrink("abc123");

            // file still same size (but now are encrypted)
            Assert.AreEqual(0, reduced);

            // still 1 doc and 1 name unique index
            DoTest(db);
        }

        // re-open, again, but now with password
        using (var db = new LiteEngine(file.Filename, "abc123"))
        {
            DoTest(db);

            // now, remove password
            db.Shrink();

            // test again
            DoTest(db);
        }
    }
}
public void Checkpoint_TransactionRecovery_Test()
{
    using (var file = new TempFile())
    {
        // autocommit off: everything below runs inside explicit transactions
        using (var db = new LiteEngine(new FileDiskService(file.Filename), autocommit: false ))
        {
            var log = new StringBuilder();
            db.Log.Level = Logger.CACHE;
            db.Log.Logging += (s) => log.AppendLine(s);

            // initialize my "col" with 1000 docs without transaction
            db.Insert("col", GetDocs(1, 1000));

            // commit now for intialize new transaction
            db.Commit();

            // insert a lot of docs inside a single collection (will do checkpoint in disk)
            db.Insert("col", GetDocs(1001, N));

            // update all documents
            db.Update("col", GetDocs(1, N));

            // create new index
            db.EnsureIndex("col", "type");

            // checks if cache had a checkpoint
            Assert.IsTrue(log.ToString().Contains("checkpoint"));

            // datafile must be big (because checkpoint expand file)
            Assert.IsTrue(file.Size > 30 * 1024 * 1024); // in MB

            // delete all docs > 1000
            db.Delete("col", Query.GT("_id", 1000));

            db.DropIndex("col", "type");

            // let's rollback everything
            db.Rollback();

            // be sure cache are empty
            Assert.AreEqual(0, db.CacheUsed);

            // datafile must returns to original size (less than 1.5MB for 1000 docs)
            Assert.IsTrue(file.Size < 1.5 * 1024 * 1024); // in MB

            // test in my only doc exits
            Assert.AreEqual(1000, db.Count("col", Query.All()));
            Assert.AreEqual(1000, db.Count("col", null));

            // test indexes (must have only _id index)
            Assert.AreEqual(1, db.GetIndexes("col").Count());
        }
    }
}
public void Concurrency_LockTransaction_Test()
{
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        // seed a single counter document at _id=1
        db.Insert("col", new BsonDocument { { "_id", 1 }, { "count", 1 } });

        // perform 9999 increments in parallel; the lock makes each
        // read-modify-write cycle atomic with respect to the others
        Parallel.For(1, 10000, i =>
        {
            lock (db)
            {
                var counter = db.Find("col", Query.EQ("_id", 1)).Single();

                counter["count"] = counter["count"].AsInt32 + 1;

                db.Update("col", counter);
            }
        });

        Assert.AreEqual(10000, db.Find("col", Query.EQ("_id", 1)).Single()["count"].AsInt32);
    }
}
public void Concurrency_Insert_Test()
{
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        db.EnsureIndex("col", "thread");

        // insert 5000 x thread=1
        var ta = Task.Factory.StartNew(() =>
        {
            for (var i = 0; i < 5000; i++)
            {
                db.Insert("col", new BsonDocument { { "thread", 1 } });
            }
        });

        // insert 4000 x thread=2
        var tb = Task.Factory.StartNew(() =>
        {
            for (var i = 0; i < 4000; i++)
            {
                db.Insert("col", new BsonDocument { { "thread", 2 } });
            }
        });

        Task.WaitAll(ta, tb);

        // no inserts may be lost from either writer
        Assert.AreEqual(5000, db.Count("col", Query.EQ("thread", 1)));
        Assert.AreEqual(4000, db.Count("col", Query.EQ("thread", 2)));
    }
}