Example #1
        public void Execute(LiteEngine db, StringScanner s, Display display)
        {
            var col   = this.ReadCollection(db, s);
            var query = s.Match("{") ? Query.All() : this.ReadQuery(s);
            var code  = DynamicCode.GetCode(s);

            var docs = col.Find(query).ToArray();

            try
            {
                db.BeginTrans();

                foreach (var doc in docs)
                {
                    code(doc["_id"].RawValue, doc, col, db);
                }

                db.Commit();

                display.WriteBson(docs.Length);
            }
            catch
            {
                db.Rollback();
                throw; // rethrow, preserving the original stack trace
            }
        }
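All of the commands on this page share the same pattern around the LiteEngine transaction API: BeginTrans, do the work, Commit on success, Rollback on failure. A minimal sketch of that pattern factored into a helper follows; RunInTransaction is hypothetical and not part of LiteDB, only the BeginTrans/Commit/Rollback calls come from the examples.

        // Hypothetical helper (not part of LiteDB) that factors out the
        // BeginTrans/Commit/Rollback pattern used throughout these examples.
        // Requires: using System; using LiteDB;
        public static T RunInTransaction<T>(LiteEngine db, Func<T> action)
        {
            db.BeginTrans();

            try
            {
                var result = action();
                db.Commit();
                return result;
            }
            catch
            {
                db.Rollback();
                throw;
            }
        }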
Example #2
        public void Execute(LiteEngine db, StringScanner s, Display display)
        {
            if (db == null)
            {
                throw new LiteException("No database");
            }

            var col      = this.ReadCollection(db, s);
            var filename = s.Scan(@".*");
            var json     = File.ReadAllText(filename, Encoding.UTF8);
            var docs     = JsonEx.DeserializeArray<BsonDocument>(json);
            var count    = 0;

            db.BeginTrans();

            foreach (var doc in docs)
            {
                count++;
                col.Insert(doc);
            }

            db.Commit();

            display.WriteBson(count);
        }
Example #3
        public void Execute(ref LiteEngine db, StringScanner s, Display display)
        {
            var col      = this.ReadCollection(db, s);
            var filename = s.Scan(@".*");
            var json     = File.ReadAllText(filename, Encoding.UTF8);
            var docs     = JsonEx.Deserialize(json);
            var count    = 0;

            db.BeginTrans();

            if (docs.IsArray)
            {
                foreach (var doc in docs.AsArray)
                {
                    count++;
                    col.Insert(new BsonDocument(doc));
                }
            }
            else
            {
                count = 1;
                col.Insert(new BsonDocument(docs));
            }

            db.Commit();

            display.WriteBson(count);
        }
Example #4
        public void Create_100k_Rows_DB()
        {
            using (var db = new LiteEngine(dbpath))
            {
                var c = db.GetCollection<PerfItem>("perf");
                //c.EnsureIndex("MyGuid", true);
                var id = 0;

                for (var j = 0; j < 3; j++)
                {
                    var d = DateTime.Now;
                    db.BeginTrans();

                    for (var i = 0; i < 10000; i++)
                    {
                        id++;

                        c.Insert(id, new PerfItem {
                            Id = id, MyGuid = Guid.NewGuid(), Nome = "Jose Silva " + id
                        });
                    }

                    db.Commit();
                    Debug.Print("Commits " + j + " in " + DateTime.Now.Subtract(d).TotalMilliseconds);
                }
            }
        }
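Example #4 depends on a PerfItem class that is not shown here. A plausible shape, inferred only from the properties the example assigns (Id, MyGuid, Nome), might be:

        // Plausible PerfItem POCO, inferred from the properties assigned above;
        // the real class may differ. Requires: using System;
        public class PerfItem
        {
            public int Id { get; set; }
            public Guid MyGuid { get; set; }
            public string Nome { get; set; }
        }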
Example #5
        public void Execute(ref LiteEngine db, StringScanner s, Display display)
        {
            if (db == null)
            {
                throw new ArgumentException("No database");
            }

            db.Commit();
        }
Example #6
        public void Checkpoint_TransactionRecovery_Test()
        {
            using (var file = new TempFile())
            {
                using (var db = new LiteEngine(new FileDiskService(file.Filename), autocommit: false))
                {
                    var log = new StringBuilder();
                    db.Log.Level    = Logger.CACHE;
                    db.Log.Logging += (s) => log.AppendLine(s);

                    // initialize my "col" with 1000 docs without a transaction
                    db.Insert("col", GetDocs(1, 1000));

                    // commit now to initialize a new transaction
                    db.Commit();

                    // insert a lot of docs into a single collection (this will trigger a checkpoint to disk)
                    db.Insert("col", GetDocs(1001, N));

                    // update all documents
                    db.Update("col", GetDocs(1, N));

                    // create new index
                    db.EnsureIndex("col", "type");

                    // check that the cache performed a checkpoint
                    Assert.IsTrue(log.ToString().Contains("checkpoint"));

                    // datafile must be big (because the checkpoint expands the file)
                    Assert.IsTrue(file.Size > 30 * 1024 * 1024); // > 30 MB

                    // delete all docs > 1000
                    db.Delete("col", Query.GT("_id", 1000));

                    db.DropIndex("col", "type");

                    // let's rollback everything
                    db.Rollback();

                    // be sure the cache is empty
                    Assert.AreEqual(0, db.CacheUsed);

                    // datafile must return to its original size (less than 1.5 MB for 1000 docs)
                    Assert.IsTrue(file.Size < 1.5 * 1024 * 1024); // < 1.5 MB

                    // check that only the original 1000 docs exist
                    Assert.AreEqual(1000, db.Count("col", Query.All()));
                    Assert.AreEqual(1000, db.Count("col", null));

                    // test indexes (must have only _id index)
                    Assert.AreEqual(1, db.GetIndexes("col").Count());
                }
            }
        }
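The checkpoint test above relies on a GetDocs(first, last) helper that is not shown. A minimal sketch, assuming each generated document only needs the two fields the test actually touches (the _id used by Query.GT and the type field created for the index):

        // Hypothetical GetDocs helper: yields simple documents with "_id" and
        // "type" fields; the real test fixture may generate richer documents.
        // Requires: using System.Collections.Generic; using LiteDB;
        private IEnumerable<BsonDocument> GetDocs(int first, int last)
        {
            for (var i = first; i <= last; i++)
            {
                var doc = new BsonDocument();
                doc["_id"] = i;
                doc["type"] = i % 2 == 0 ? "even" : "odd";
                yield return doc;
            }
        }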
Example #7
        public void Perf_Test()
        {
            var path = DB.Path(true, "test.db");

            using (var db = new LiteEngine("journal=true;filename=" + path))
            {
                db.BeginTrans();
                var col = db.GetCollection<Post>("posts");
                col.Insert(Post.GetData(20000));
                db.Commit();
            }
        }
Example #8
        public void Performance_Test()
        {
            // just a simple example to test performance speed
            using (var file = new TempFile())
                using (var db = new LiteEngine(file.Filename))
                {
                    var ti = new Stopwatch();
                    var tx = new Stopwatch();
                    var tu = new Stopwatch();
                    var td = new Stopwatch();

                    ti.Start();
                    db.Insert("col", GetDocs(N1));
                    db.Commit();
                    ti.Stop();

                    tx.Start();
                    db.EnsureIndex("col", "name");
                    db.Commit();
                    tx.Stop();

                    tu.Start();
                    db.Update("col", GetDocs(N1));
                    db.Commit();
                    tu.Stop();

                    db.EnsureIndex("col", "name");
                    db.Commit();

                    td.Start();
                    db.Delete("col", Query.All());
                    db.Commit();
                    td.Stop();

                    Debug.WriteLine("Insert time: " + ti.ElapsedMilliseconds);
                    Debug.WriteLine("EnsureIndex time: " + tx.ElapsedMilliseconds);
                    Debug.WriteLine("Update time: " + tu.ElapsedMilliseconds);
                    Debug.WriteLine("Delete time: " + td.ElapsedMilliseconds);
                }
        }
Example #9
        public void Performance_Test()
        {
            // just a simple example to test performance speed
            using (var file = new TempFile())
            using (var db = new LiteEngine(file.Filename))
            {
                var ti = new Stopwatch();
                var tx = new Stopwatch();
                var tu = new Stopwatch();
                var td = new Stopwatch();

                ti.Start();
                db.Insert("col", GetDocs(N1));
                db.Commit();
                ti.Stop();

                tx.Start();
                db.EnsureIndex("col", "name");
                db.Commit();
                tx.Stop();

                tu.Start();
                db.Update("col", GetDocs(N1));
                db.Commit();
                tu.Stop();

                db.EnsureIndex("col", "name");
                db.Commit();

                td.Start();
                db.Delete("col", Query.All());
                db.Commit();
                td.Stop();

                Debug.Print("Insert time: " + ti.ElapsedMilliseconds);
                Debug.Print("EnsureIndex time: " + tx.ElapsedMilliseconds);
                Debug.Print("Update time: " + tu.ElapsedMilliseconds);
                Debug.Print("Delete time: " + td.ElapsedMilliseconds);
            }
        }
Example #10
        public void Files_Store()
        {
            using (var db = new LiteEngine(dbpath))
            {
                var c = db.GetCollection("customer");

                db.BeginTrans();

                for (var i = 1; i <= 500; i++)
                {
                    var d = new BsonDocument();
                    d["Name"] = "San Jose";

                    c.Insert(i, d);
                }
                for (var i = 1; i <= 500; i++)
                {
                    c.Delete(i);
                }

                db.Commit();


                Dump.Pages(db, "before");

                var meta = new Dictionary<string, string>();
                meta["my-data"] = "Google LiteDB";

                db.Storage.Upload("my/foto1.jpg", new MemoryStream(new byte[5000]), meta);

                Dump.Pages(db, "after file");

                var f = db.Storage.FindByKey("my/foto1.jpg");

                Assert.AreEqual(5000, f.Length);
                Assert.AreEqual("Google LiteDB", f.Metadata["my-data"]);

                var mem = new MemoryStream();

                f.OpenRead(db).CopyTo(mem);

                // file real size after read all bytes
                Assert.AreEqual(5000, mem.Length);

                // all bytes are 0
                Assert.AreEqual(5000, mem.ToArray().Count(x => x == 0));

                db.Storage.Delete("my/foto1.jpg");

                Dump.Pages(db, "deleted file");
            }
        }
Example #11
        public void Page_PrevNext_Test()
        {
            using (var db = new LiteEngine(DB.Path()))
            {
                var k = 1;

                db.BeginTrans();

                this.PopulateCollection("my_collection_1", db, k);
                //this.PopulateCollection("my_collection_2", db, k);
                //this.PopulateCollection("my_collection_3", db, k);
                //this.PopulateCollection("my_collection_4", db, k);
                //this.PopulateCollection("my_collection_3", db, 5);

                db.Commit();

                this.Verify("my_collection_1", db, k);
                //this.Verify("my_collection_2", db, k);
                //this.Verify("my_collection_3", db, k);
                //this.Verify("my_collection_4", db, k);

                Dump.Pages(db);

                db.GetCollection("my_collection_1").Delete(Query.All());
                //db.GetCollection("my_collection_2").Delete(Query.All());
                //db.GetCollection("my_collection_3").Delete(Query.All());
                //db.GetCollection("my_collection_4").Delete(Query.All());

                Dump.Pages(db, "After clear");

                db.FileStorage.Upload("my/foto1.jpg", new MemoryStream(new byte[1024 * 50]));
            }

            using (var db = new LiteEngine(DB.Path(false)))
            {
                Dump.Pages(db, "After File");
            }
        }
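Example #11 calls PopulateCollection and Verify helpers that are not shown. Purely illustrative stand-ins, assuming they insert k simple documents and then check that all of them can be read back:

        // Illustrative stand-ins for the helpers used above; the real ones may
        // insert different documents or verify page linkage in more detail.
        // Requires: using System.Linq; using LiteDB;
        private void PopulateCollection(string name, LiteEngine db, int k)
        {
            var col = db.GetCollection(name);

            for (var i = 1; i <= k; i++)
            {
                var doc = new BsonDocument();
                doc["name"] = "doc " + i;
                col.Insert(i, doc);
            }
        }

        private void Verify(string name, LiteEngine db, int k)
        {
            var col = db.GetCollection(name);

            Assert.AreEqual(k, col.Find(Query.All()).Count());
        }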
Example #12
 public void Execute(LiteEngine engine, StringScanner s, Display display, InputCommand input, Env env)
 {
     engine.Commit();
 }
Example #13
 public void Execute(LiteEngine db, StringScanner s, Display display)
 {
     db.Commit();
 }