public void Execute(LiteEngine engine, StringScanner s, Display display, InputCommand input, Env env)
{
    var col = this.ReadCollection(engine, s);
    var index = s.Scan(this.FieldPattern).Trim();

    display.WriteResult(engine.Max(col, index.Length == 0 ? "_id" : index));
}
public void Execute(LiteEngine engine, StringScanner s, Display d, InputCommand input, Env env)
{
    // scanning "off" disables logging; anything else enables full logging
    var enabled = !(s.Scan(@"off\s*").Length > 0);

    env.Log.Level = enabled ? Logger.FULL : Logger.NONE;
}
public void Query_Test()
{
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        db.Insert("col", new BsonDocument[]
        {
            new BsonDocument { { "_id", 1 }, { "name", "e" } },
            new BsonDocument { { "_id", 2 }, { "name", "d" } },
            new BsonDocument { { "_id", 3 }, { "name", "c" } },
            new BsonDocument { { "_id", 4 }, { "name", "b" } },
            new BsonDocument { { "_id", 5 }, { "name", "a" } }
        });

        db.EnsureIndex("col", "name");

        Func<Query, string> result = (q) => string.Join(",", db.FindIndex("col", q).Select(x => x.ToString()));

        Assert.AreEqual("1", result(Query.EQ("_id", 1)));
        Assert.AreEqual("4,5", result(Query.GTE("_id", 4)));
        Assert.AreEqual("1", result(Query.LT("_id", 2)));
        Assert.AreEqual("a,b,d,e", result(Query.Not("name", "c")));
        Assert.AreEqual("2,4", result(Query.Where("_id", (v) => v.AsInt32 % 2 == 0)));
    }
}
public void Execute(LiteEngine engine, StringScanner s, Display display, InputCommand input, Env env)
{
    var col = this.ReadCollection(engine, s);
    var newName = s.Scan(@"[\w-]+").ThrowIfEmpty("Invalid new collection name");

    display.WriteResult(engine.RenameCollection(col, newName));
}
public void Loop_Test()
{
    using (var tmp = new TempFile())
    {
        using (var db = new LiteEngine(tmp.Filename))
        {
            db.Insert("col", new BsonDocument { { "Number", 1 } });
            db.Insert("col", new BsonDocument { { "Number", 2 } });
            db.Insert("col", new BsonDocument { { "Number", 3 } });
            db.Insert("col", new BsonDocument { { "Number", 4 } });
        }

        using (var db = new LiteEngine(tmp.Filename))
        {
            foreach (var doc in db.Find("col", Query.All()))
            {
                doc["Name"] = "John";
                db.Update("col", doc);
            }

            db.EnsureIndex("col", "Name");

            var all = db.Find("col", Query.EQ("Name", "John"));

            Assert.AreEqual(4, all.Count());
        }
    }
}
public void Execute(LiteEngine engine, StringScanner s, Display display, InputCommand input, Env env)
{
    var fs = new LiteStorage(engine);
    var id = this.ReadId(s);

    display.WriteResult(fs.Delete(id));
}
public void Execute(LiteEngine engine, StringScanner s, Display display, InputCommand input, Env env)
{
    var col = this.ReadCollection(engine, s);
    var doc = JsonSerializer.Deserialize(s.ToString()).AsDocument;

    display.WriteResult(engine.Update(col, doc));
}
public void Index_Order()
{
    using (var tmp = new TempFile())
    using (var db = new LiteEngine(tmp.Filename))
    {
        db.Insert("col", new BsonDocument { { "text", "D" } });
        db.Insert("col", new BsonDocument { { "text", "A" } });
        db.Insert("col", new BsonDocument { { "text", "E" } });
        db.Insert("col", new BsonDocument { { "text", "C" } });
        db.Insert("col", new BsonDocument { { "text", "B" } });

        db.EnsureIndex("col", "text");

        var asc = string.Join("", db.Find("col", Query.All("text"))
            .Select(x => x["text"].AsString)
            .ToArray());

        var desc = string.Join("", db.Find("col", Query.All("text", Query.Descending))
            .Select(x => x["text"].AsString)
            .ToArray());

        Assert.AreEqual("ABCDE", asc);
        Assert.AreEqual("EDCBA", desc);

        var indexes = db.GetIndexes("col");

        Assert.AreEqual(1, indexes.Count(x => x.Field == "text"));
    }
}
public void Execute(LiteEngine engine, StringScanner s, Display display, InputCommand input, Env env)
{
    var fs = new LiteStorage(engine);
    var id = this.ReadId(s);
    var metadata = JsonSerializer.Deserialize(s.ToString()).AsDocument;

    fs.SetMetadata(id, metadata);
}
public void Execute(LiteEngine engine, StringScanner s, Display display, InputCommand input, Env env)
{
    var cols = engine.GetCollectionNames().OrderBy(x => x).ToArray();

    if (cols.Length > 0)
    {
        display.WriteLine(ConsoleColor.Cyan, string.Join(Environment.NewLine, cols));
    }
}
public void Execute(LiteEngine engine, StringScanner s, Display display, InputCommand input, Env env)
{
    var col = this.ReadCollection(engine, s);

    display.WriteResult(new BsonArray(engine.GetIndexes(col).Select(x => new BsonDocument
    {
        { "slot", x.Slot },
        { "field", x.Field },
        { "unique", x.Unique }
    })));
}
public void Execute(LiteEngine engine, StringScanner s, Display display, InputCommand input, Env env)
{
    if (engine == null) throw ShellExpcetion.NoDatabase();

    var filename = s.Scan(@".+").Trim();

    foreach (var line in File.ReadAllLines(filename))
    {
        input.Queue.Enqueue(line);
    }
}
public void Execute(LiteEngine engine, StringScanner s, Display display, InputCommand input, Env env) { var col = this.ReadCollection(engine, s); var filename = s.Scan(@".*"); using (var sr = new StreamReader(filename, Encoding.UTF8)) { var docs = JsonSerializer.DeserializeArray(sr); display.WriteResult(engine.Insert(col, docs.Select(x => x.AsDocument))); } }
public void Execute(LiteEngine engine, StringScanner s, Display display, InputCommand input, Env env) { var fs = new LiteStorage(engine); var id = this.ReadId(s); var filename = s.Scan(@"\s*.*").Trim(); if (!File.Exists(filename)) throw new IOException("File " + filename + " not found"); var file = fs.Upload(id, filename); display.WriteResult(file.AsDocument); }
public void Execute(LiteEngine engine, StringScanner s, Display display, InputCommand input, Env env) { var ver = s.Scan(@"\d*"); if (ver.Length > 0) { engine.UserVersion = Convert.ToUInt16(ver); } else { display.WriteLine(engine.UserVersion.ToString()); } }
public void DropCollection_Test()
{
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        Assert.IsFalse(db.GetCollectionNames().Any(x => x == "col"));

        db.Insert("col", new BsonDocument { { "a", 1 } });

        Assert.IsTrue(db.GetCollectionNames().Any(x => x == "col"));

        db.DropCollection("col");

        Assert.IsFalse(db.GetCollectionNames().Any(x => x == "col"));
    }
}
public void Execute(LiteEngine engine, StringScanner s, Display display, InputCommand input, Env env) { var fs = new LiteStorage(engine); var id = this.ReadId(s); var filename = s.Scan(@"\s*.*").Trim(); var file = fs.FindById(id); if (file != null) { file.SaveAs(filename); display.WriteResult(file.AsDocument); } }
public void Checkpoint_Insert_Test()
{
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        var log = new StringBuilder();
        db.Log.Level = Logger.CACHE;
        db.Log.Logging += (s) => log.AppendLine(s);

        // insert basic N documents
        db.Insert("col", GetDocs(1, N));

        Assert.IsTrue(log.ToString().Contains("checkpoint"));
    }
}
public void UserVersion_Test()
{
    using (var file = new TempFile())
    {
        using (var db = new LiteEngine(file.Filename))
        {
            Assert.AreEqual(0, db.UserVersion);
            db.UserVersion = 5;
        }

        using (var db = new LiteEngine(file.Filename))
        {
            Assert.AreEqual(5, db.UserVersion);
        }
    }
}
public void Engine_Delete_Test()
{
    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        var doc1 = new BsonDocument { { "_id", 1 }, { "name", "John" } };
        var doc2 = new BsonDocument { { "_id", 2 }, { "name", "Doe" } };

        db.Insert("col", doc1);
        db.Insert("col", doc2);

        db.Delete("col", Query.GTE("_id", 1));

        db.Insert("col", doc1);
    }
}
public void Execute(LiteEngine engine, StringScanner s, Display display, InputCommand input, Env env)
{
    var col = this.ReadCollection(engine, s);
    var value = JsonSerializer.Deserialize(s.ToString());

    if (value.IsArray)
    {
        display.WriteResult(engine.Insert(col, value.AsArray.RawValue.Select(x => x.AsDocument)));
    }
    else
    {
        engine.Insert(col, new BsonDocument[] { value.AsDocument });

        display.WriteResult(value.AsDocument["_id"]);
    }
}
public void Execute(LiteEngine engine, StringScanner s, Display display, InputCommand input, Env env)
{
    var connectionString = new ConnectionString(s.Scan(@".+").TrimToNull());

    env.Filename = connectionString.Filename;
    env.Password = connectionString.Password;
    env.Journal = connectionString.Journal;

    // create datafile if it does not exist yet
    if (!File.Exists(env.Filename))
    {
        using (var e = env.CreateEngine(DataAccess.Write)) { }
    }
}
public void Checkpoint_Recovery_Test()
{
    using (var file = new TempFile())
    {
        // init with N docs with type=1
        using (var db = new LiteEngine(file.Filename))
        {
            db.EnsureIndex("col", "type");
            db.Insert("col", GetDocs(1, N, type: 1));

            Assert.AreEqual(N, db.Count("col", Query.EQ("type", 1)));
        }

        // re-open and try to update all docs to type=2
        using (var db = new LiteEngine(file.Filename))
        {
            var log = new StringBuilder();
            db.Log.Level = Logger.CACHE;
            db.Log.Logging += (s) => log.AppendLine(s);

            try
            {
                // try to update all docs to "type=2",
                // but an exception is thrown before the update finishes
                db.Update("col", GetDocs(1, N, type: 2, throwAtEnd: true));
            }
            catch (Exception ex)
            {
                if (!ex.Message.Contains("Try Recovery!")) Assert.Fail(ex.Message);
            }

            // check that the cache reached a checkpoint
            Assert.IsTrue(log.ToString().Contains("checkpoint"));

            // re-check that all docs are still type=1
            Assert.AreEqual(N, db.Count("col", Query.EQ("type", 1)));
            Assert.AreEqual(0, db.Count("col", Query.EQ("type", 2)));
        }

        // re-open datafile to be sure it contains only type=1 docs
        using (var db = new LiteEngine(file.Filename))
        {
            Assert.AreEqual(N, db.Count("col", Query.EQ("type", 1)));
            Assert.AreEqual(0, db.Count("col", Query.EQ("type", 2)));
        }
    }
}
public void Encrypted_Test()
{
    using (var encrypt = new TempFile())
    using (var plain = new TempFile())
    {
        // create a database with no password - plain data
        using (var db = new LiteEngine(plain.Filename))
        {
            db.Insert("col", new BsonDocument { { "name", "Mauricio David" } });
        }

        // read datafile to find "Mauricio" string
        Assert.IsTrue(plain.ReadAsText().Contains("Mauricio David"));

        // create a database with password
        using (var db = new LiteEngine(encrypt.Filename, "abc123"))
        {
            db.Insert("col", new BsonDocument { { "name", "Mauricio David" } });
        }

        // the "Mauricio" string must not be readable in the encrypted datafile
        Assert.IsFalse(encrypt.ReadAsText().Contains("Mauricio David"));

        // try to access using wrong password
        try
        {
            using (var db = new LiteEngine(encrypt.Filename, "abc1234"))
            {
                Assert.Fail(); // must not reach here
            }
        }
        catch (LiteException ex)
        {
            Assert.IsTrue(ex.ErrorCode == 123); // wrong password
        }

        // open encrypted db and read document
        using (var db = new LiteEngine(encrypt.Filename, "abc123"))
        {
            var doc = db.Find("col", Query.All()).First();

            Assert.AreEqual("Mauricio David", doc["name"].AsString);
        }
    }
}
public void Concurrency_InsertUpdate_Test()
{
    const int N = 3000;

    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        db.EnsureIndex("col", "updated");

        Assert.AreEqual(0, db.Count("col", Query.EQ("updated", true)));

        // task A: insert N basic documents with _id = 0..N-1
        var ta = Task.Factory.StartNew(() =>
        {
            for (var i = 0; i < N; i++)
            {
                var doc = new BsonDocument { { "_id", i } };
                db.Insert("col", doc);
            }
        });

        // task B: update each document as soon as it has been inserted
        var tb = Task.Factory.StartNew(() =>
        {
            var i = 0;
            while (i < N)
            {
                var doc = new BsonDocument
                {
                    { "_id", i },
                    { "updated", true },
                    { "name", TempFile.LoremIpsum(5, 10, 1, 5, 1) }
                };

                if (db.Update("col", doc)) i++;
            }
        });

        Task.WaitAll(ta, tb);

        Assert.AreEqual(N, db.Count("col", Query.EQ("updated", true)));
    }
}
public void Storage_ReadWriteStream_Test()
{
    var HELLO1 = "Hello World LiteDB 1 ".PadRight(300000, '-') + "\nEND";
    var HELLO2 = "Hello World LiteDB 2 - END";

    using (var file = new TempFile())
    using (var db = new LiteEngine(file.Filename))
    {
        var sto = new LiteStorage(db);

        // insert HELLO1 file content
        using (var stream = sto.OpenWrite("f1", "f1.txt"))
        {
            using (var sw = new StreamWriter(stream))
            {
                sw.Write(HELLO1);
            }
        }

        using (var stream = sto.OpenRead("f1"))
        {
            var sr = new StreamReader(stream);
            var hello = sr.ReadToEnd();

            Assert.AreEqual(HELLO1, hello);
        }

        // update the same file id with HELLO2 content
        using (var stream = sto.OpenWrite("f1", "f1.txt"))
        {
            using (var sw = new StreamWriter(stream))
            {
                sw.Write(HELLO2);
            }
        }

        using (var stream = sto.OpenRead("f1"))
        {
            var sr = new StreamReader(stream);
            var hello = sr.ReadToEnd();

            Assert.AreEqual(HELLO2, hello);
        }
    }
}
public void Execute(LiteEngine engine, StringScanner s, Display display, InputCommand input, Env env)
{
    var fs = new LiteStorage(engine);

    if (s.HasTerminated)
    {
        var files = fs.FindAll().Select(x => x.AsDocument);

        display.WriteResult(new BsonArray(files));
    }
    else
    {
        var id = this.ReadId(s);
        var files = fs.Find(id).Select(x => x.AsDocument);

        display.WriteResult(new BsonArray(files));
    }
}
public void MemoryStream_Test()
{
    var mem = new MemoryStream();

    using (var db = new LiteEngine(mem))
    {
        db.Insert("col", new BsonDocument { { "_id", 1 }, { "name", "John" } });
        db.Insert("col", new BsonDocument { { "_id", 2 }, { "name", "Doe" } });
    }

    using (var db = new LiteEngine(mem))
    {
        var john = db.Find("col", Query.EQ("_id", 1)).FirstOrDefault();
        var doe = db.Find("col", Query.EQ("_id", 2)).FirstOrDefault();

        Assert.AreEqual("John", john["name"].AsString);
        Assert.AreEqual("Doe", doe["name"].AsString);
    }
}
public void Engine_Insert_Test()
{
    using (var file = new TempFile())
    {
        using (var db = new LiteEngine(file.Filename))
        {
            db.Insert("col", new BsonDocument { { "_id", 1 }, { "name", "John" } });
            db.Insert("col", new BsonDocument { { "_id", 2 }, { "name", "Doe" } });
        }

        using (var db = new LiteEngine(file.Filename))
        {
            var john = db.Find("col", Query.EQ("_id", 1)).FirstOrDefault();
            var doe = db.Find("col", Query.EQ("_id", 2)).FirstOrDefault();

            Assert.AreEqual("John", john["name"].AsString);
            Assert.AreEqual("Doe", doe["name"].AsString);
        }
    }
}
/// <summary> /// Read collection name from db.(collection).(command) /// </summary> public string ReadCollection(LiteEngine db, StringScanner s) { return(s.Scan(@"db\.([\w-]+)\.\w+\s*", 1)); }
public void FreeSlot_Insert()
{
    using (var e = new LiteEngine())
    {
        e.BeginTrans();

        // get transaction/snapshot over "col1"
        var t = e.GetMonitor().GetTransaction(false, false, out var isNew);
        var s = t.CreateSnapshot(LockMode.Write, "col1", true);

        e.Insert("col1", new BsonDocument[] { new BsonDocument { ["n"] = new byte[200] } }, BsonAutoId.Int32);

        // get pages
        var colPage = s.CollectionPage;
        var dataPage = s.LocalPages.FirstOrDefault(x => x.PageType == PageType.Data);
        var indexPage = s.LocalPages.FirstOrDefault(x => x.PageType == PageType.Index);

        // test dataPage free space
        dataPage.FreeBytes.Should().Be(7928);

        // page should be in slot #0 (7344 - 8160 free bytes)
        colPage.FreeDataPageList.Should().Equal(dataPage.PageID, uint.MaxValue, uint.MaxValue, uint.MaxValue, uint.MaxValue);

        // add 1 more document into the same page
        e.Insert("col1", new BsonDocument[] { new BsonDocument { ["n"] = new byte[600] } }, BsonAutoId.Int32);

        dataPage.FreeBytes.Should().Be(7296);

        // page should be moved into slot #1 (6120 - 7343 free bytes)
        colPage.FreeDataPageList.Should().Equal(uint.MaxValue, dataPage.PageID, uint.MaxValue, uint.MaxValue, uint.MaxValue);

        // add 1 big document to move this page into the last slot
        e.Insert("col1", new BsonDocument[] { new BsonDocument { ["n"] = new byte[6000] } }, BsonAutoId.Int32);

        dataPage.FreeBytes.Should().Be(1264);

        // now this page should be moved into the last slot (#4) - the next document will use another data page (even a very small one)
        colPage.FreeDataPageList.Should().Equal(uint.MaxValue, uint.MaxValue, uint.MaxValue, uint.MaxValue, dataPage.PageID);

        // add a very small document to test adding a new page
        e.Insert("col1", new BsonDocument[] { new BsonDocument { ["n"] = new byte[10] } }, BsonAutoId.Int32);

        // no changes in dataPage... but a new page was created
        dataPage.FreeBytes.Should().Be(1264);

        var dataPage2 = s.LocalPages.FirstOrDefault(x => x.PageType == PageType.Data && x.PageID != dataPage.PageID);

        dataPage2.FreeBytes.Should().Be(8118);

        // test slots (#0 for dataPage2 and #4 for dataPage)
        colPage.FreeDataPageList.Should().Equal(dataPage2.PageID, uint.MaxValue, uint.MaxValue, uint.MaxValue, dataPage.PageID);

        // add another big document into dataPage2 to put both pages in the same free slot (#4)
        e.Insert("col1", new BsonDocument[] { new BsonDocument { ["n"] = new byte[7000] } }, BsonAutoId.Int32);

        // now both pages are linked in the same slot #4 (the list starts with dataPage2)
        colPage.FreeDataPageList.Should().Equal(uint.MaxValue, uint.MaxValue, uint.MaxValue, uint.MaxValue, dataPage2.PageID);

        // dataPage2 links to dataPage
        dataPage2.NextPageID.Should().Be(dataPage.PageID);
        dataPage.PrevPageID.Should().Be(dataPage2.PageID);

        // and both ends of the list point to null
        dataPage2.PrevPageID.Should().Be(uint.MaxValue);
        dataPage.NextPageID.Should().Be(uint.MaxValue);

        // ColID tests
        dataPage.ColID.Should().Be(colPage.PageID);
        dataPage2.ColID.Should().Be(colPage.PageID);
        indexPage.ColID.Should().Be(colPage.PageID);
    }
}
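// Illustrative sketch (not part of the original tests): the slot numbers asserted above follow a
// "free bytes -> slot" mapping. The thresholds below are inferred from the ranges quoted in the
// comments (slot #0 covers 7344-8160 free bytes, slot #1 covers 6120-7343, slot #4 is "almost full")
// and from the asserted values; the real engine may compute them differently.
public static byte GuessFreeIndexSlot(int freeBytes)
{
    // roughly 90%, 75%, 60% and 30% of the 8160 usable bytes of a page
    if (freeBytes >= 7344) return 0;
    if (freeBytes >= 6120) return 1;
    if (freeBytes >= 4896) return 2;
    if (freeBytes >= 2448) return 3;
    return 4;
}

// e.g. GuessFreeIndexSlot(7928) == 0, GuessFreeIndexSlot(7296) == 1 and GuessFreeIndexSlot(1264) == 4,
// matching the FreeDataPageList assertions in FreeSlot_Insert above.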
public void FreeSlot_Delete()
{
    using (var e = new LiteEngine())
    {
        e.BeginTrans();

        // get transaction/snapshot over "col1"
        var t = e.GetMonitor().GetTransaction(false, false, out var isNew);
        var s = t.CreateSnapshot(LockMode.Write, "col1", true);

        // first page
        e.Insert("col1", new BsonDocument[] { new BsonDocument { ["_id"] = 1, ["n"] = new byte[2000] } }, BsonAutoId.Int32);
        e.Insert("col1", new BsonDocument[] { new BsonDocument { ["_id"] = 2, ["n"] = new byte[2000] } }, BsonAutoId.Int32);
        e.Insert("col1", new BsonDocument[] { new BsonDocument { ["_id"] = 3, ["n"] = new byte[2000] } }, BsonAutoId.Int32);

        // second page
        e.Insert("col1", new BsonDocument[] { new BsonDocument { ["_id"] = 4, ["n"] = new byte[2000] } }, BsonAutoId.Int32);
        e.Insert("col1", new BsonDocument[] { new BsonDocument { ["_id"] = 5, ["n"] = new byte[2000] } }, BsonAutoId.Int32);
        e.Insert("col1", new BsonDocument[] { new BsonDocument { ["_id"] = 6, ["n"] = new byte[2000] } }, BsonAutoId.Int32);

        // get pages
        var colPage = s.CollectionPage;
        var indexPage = s.LocalPages.FirstOrDefault(x => x.PageType == PageType.Index);
        var dataPage1 = s.LocalPages.FirstOrDefault(x => x.PageType == PageType.Data);
        var dataPage2 = s.LocalPages.FirstOrDefault(x => x.PageType == PageType.Data && x.PageID != dataPage1.PageID);

        // test dataPage free space
        dataPage1.FreeBytes.Should().Be(2064);
        dataPage2.FreeBytes.Should().Be(2064);

        colPage.FreeDataPageList.Should().Equal(uint.MaxValue, uint.MaxValue, uint.MaxValue, uint.MaxValue, dataPage2.PageID);

        // delete some data
        e.Delete("col1", new BsonValue[] { 2 });

        // test dataPage1 again
        dataPage1.FreeBytes.Should().Be(4092);

        colPage.FreeDataPageList.Should().Equal(uint.MaxValue, uint.MaxValue, uint.MaxValue, dataPage1.PageID, dataPage2.PageID);

        // clear first page
        e.Delete("col1", new BsonValue[] { 1, 3 });

        // dataPage1 must now be an empty page
        var emptyPage = s.LocalPages.FirstOrDefault(x => x.PageID == dataPage1.PageID);

        emptyPage.PageType.Should().Be(PageType.Empty);
        emptyPage.ItemsCount.Should().Be(0);
        emptyPage.FreeBytes.Should().Be(8160);

        t.Pages.DeletedPages.Should().Be(1);
        t.Pages.FirstDeletedPageID.Should().Be(emptyPage.PageID);
        t.Pages.LastDeletedPageID.Should().Be(emptyPage.PageID);
    }
}
public void Execute(LiteEngine engine, StringScanner s, Display display, InputCommand input, Env env)
{
    var col = this.ReadCollection(engine, s);
    var query = this.ReadQuery(s);

    display.WriteResult(engine.Delete(col, query));
}