public BsonValue Execute(LiteDatabase db, StringScanner s)
{
    // Shell command: run a dynamic code snippet against every document matched
    // by the (optional) query, all inside a single explicit transaction.
    var col = this.ReadCollection(db, s);

    // A leading "{" means the code block starts immediately (no query),
    // so the snippet runs over all documents in the collection.
    var query = s.Match("{") ? Query.Query.All() : this.ReadQuery(s);
    var code = DynamicCode.GetCode(s);

    // Materialize the result set up front so the enumeration is not
    // affected by whatever mutations the snippet performs.
    var docs = col.Find(query).ToArray();

    try
    {
        db.BeginTrans();

        foreach (var doc in docs)
        {
            code(doc["_id"], doc, col, db);
        }

        db.Commit();

        // Result of the command: number of documents processed.
        return docs.Length;
    }
    catch
    {
        db.Rollback();

        // FIX: bare "throw" preserves the original stack trace;
        // the previous "throw ex;" reset it.
        throw;
    }
}
public async Task Transaction_Avoid_Dirty_Read()
{
    // Isolation test: documents inserted inside task A's open transaction must
    // not be visible to task B (which runs outside a transaction) until A commits.
    var data1 = DataGen.Person(1, 100).ToArray();
    var data2 = DataGen.Person(101, 200).ToArray();

    using (var db = new LiteDatabase(new MemoryStream()))
    {
        var person = db.GetCollection<Person>();

        // init person collection with 100 document
        person.Insert(data1);

        // The two semaphores enforce a deterministic interleaving between the tasks.
        var taskASemaphore = new SemaphoreSlim(0, 1);
        var taskBSemaphore = new SemaphoreSlim(0, 1);

        // task A will open transaction and will insert +100 documents
        // but will commit only 1s later - this plus +100 document must be visible only inside task A
        var ta = Task.Run(() =>
        {
            db.BeginTrans();

            person.Insert(data2);

            // Let B take its "dirty" count, then wait to be resumed.
            taskBSemaphore.Release();
            taskASemaphore.Wait();

            // Inside the transaction both batches are visible.
            var count = person.Count();
            count.Should().Be(data1.Length + data2.Length);

            db.Commit();

            taskBSemaphore.Release();
        });

        // task B counts outside any transaction: before A commits it must see
        // only the original 100 documents; after A commits, all 200.
        var tb = Task.Run(() =>
        {
            taskBSemaphore.Wait();

            var count = person.Count();

            // read 100 documents
            count.Should().Be(data1.Length);

            taskASemaphore.Release();
            taskBSemaphore.Wait();

            // read 200 documents
            count = person.Count();
            count.Should().Be(data1.Length + data2.Length);
        });

        await Task.WhenAll(ta, tb);
    }
}
public void RegistreerProcessStart(ProcessState state)
{
    // Persist a new process-state record in its own short-lived database session.
    using (var db = new LiteDatabase(_databaseFile))
    {
        var processStates = db.GetCollection<ProcessState>("processtate");
        processStates.Insert(state);
        db.Commit();
    }
}
public void RegistreerArbeidsverhouding(Arbeidsverhouding arbeidsverhouding)
{
    // Persist a new employment-relation ("arbeidsverhouding") record.
    using (var db = new LiteDatabase(_databaseFile))
    {
        var arbeidsverhoudingen = db.GetCollection<Arbeidsverhouding>("arbeidsverhouding");
        arbeidsverhoudingen.Insert(arbeidsverhouding);
        db.Commit();
    }
}
public void RegistreerWerkgever(Werkgever werkgever)
{
    // Persist a new employer ("werkgever") record.
    using (var db = new LiteDatabase(_databaseFile))
    {
        var werkgevers = db.GetCollection<Werkgever>("werkgever");
        werkgevers.Insert(werkgever);
        db.Commit();
    }
}
public void RegistreerDeelnemer(Deelnemer deelnemer)
{
    // Persist a new participant ("deelnemer") record.
    using (var db = new LiteDatabase(_databaseFile))
    {
        var deelnemers = db.GetCollection<Deelnemer>("deelnemer");
        deelnemers.Insert(deelnemer);
        db.Commit();
    }
}
public Dictionary<string, DirectoryMeta> SearchFoldersByTag(StoreBanner banner)
{
    // Looks up all project directories tagged with the given store banner and
    // caches the result in the Folders property, keyed by directory path.
    using (var db = new LiteDatabase(Settings.Default.ConnectionString))
    {
        Folders = db.GetCollection<DirectoryMeta>("projdirs")
            .Include<StoreBanner>(x => x.StoreBanner)
            .Query()
            .Where(x => x.StoreBanner.BANNER_CODE == banner.BANNER_CODE)
            .ToList()
            .ToDictionary(folder => folder.DirectoryPath);

        // FIX: removed the trailing db.Commit() - this method is read-only and
        // never opens a transaction, so the call was a misleading no-op.
    }

    return Folders;
}
public Dictionary<string, StoreBanner> GetStoreBanners()
{
    // Loads every store banner into the Banners cache, keyed by banner code.
    using (var db = new LiteDatabase(Settings.Default.ConnectionString))
    {
        Banners = db.GetCollection<StoreBanner>("storebanners")
            .FindAll()
            .ToDictionary(banner => banner.BANNER_CODE);

        // FIX: removed the trailing db.Commit() - this method only reads and
        // never opens a transaction, so the call was a misleading no-op.
    }

    return Banners;
}
public void Transaction_Avoid_Drity_Read()
{
    // Delay-based isolation test: documents inserted inside task A's open
    // transaction must not be visible to task B until task A commits.
    // (Method name typo is preserved - it is the test's public identifier.)
    var data1 = DataGen.Person(1, 100).ToArray();
    var data2 = DataGen.Person(101, 200).ToArray();

    using (var db = new LiteDatabase(new MemoryStream()))
    {
        var person = db.GetCollection<Person>();

        // init person collection with 100 document
        person.Insert(data1);

        // task A will open transaction and will insert +100 documents
        // but will commit only 1s later - this plus +100 document must be visible only inside task A
        var ta = new Task(() =>
        {
            db.BeginTrans();

            person.Insert(data2);

            Task.Delay(1000).Wait();

            // Inside the transaction both batches are visible.
            var count = person.Count();

            Assert.AreEqual(data1.Length + data2.Length, count);

            db.Commit();
        });

        // task B will not open a transaction and waits 250ms before counting -
        // at that point task A has already inserted +100 documents, but they are
        // not visible here (not committed yet); after task A finishes, all 200
        // documents become visible.
        var tb = new Task(() =>
        {
            Task.Delay(250).Wait();

            var count = person.Count();

            // read 100 documents
            Assert.AreEqual(data1.Length, count);

            ta.Wait();

            // read 200 documents
            count = person.Count();
            Assert.AreEqual(data1.Length + data2.Length, count);
        });

        ta.Start();
        tb.Start();

        Task.WaitAll(ta, tb);
    }
}
public async Task Transaction_Read_Version()
{
    // Snapshot test: a transaction must keep seeing the read-version it started
    // with, even after another transaction commits new documents.
    var data1 = DataGen.Person(1, 100).ToArray();
    var data2 = DataGen.Person(101, 200).ToArray();

    using (var db = new LiteDatabase(new MemoryStream()))
    {
        var person = db.GetCollection<Person>();

        // init person collection with 100 document
        person.Insert(data1);

        // The two semaphores enforce a deterministic interleaving between the tasks.
        var taskASemaphore = new SemaphoreSlim(0, 1);
        var taskBSemaphore = new SemaphoreSlim(0, 1);

        // task A will insert more 100 documents but will commit only 1s later
        var ta = Task.Run(() =>
        {
            db.BeginTrans();

            person.Insert(data2);

            taskBSemaphore.Release();
            taskASemaphore.Wait();

            db.Commit();

            taskBSemaphore.Release();
        });

        // task B will open transaction too and will count 100 original documents only
        // but now, will wait task A finish - but is in transaction and must see only initial version
        var tb = Task.Run(() =>
        {
            db.BeginTrans();

            taskBSemaphore.Wait();

            var count = person.Count();

            // read 100 documents
            count.Should().Be(data1.Length);

            taskASemaphore.Release();
            taskBSemaphore.Wait();

            // keep reading 100 documents because B is still in the same transaction
            count = person.Count();
            count.Should().Be(data1.Length);
        });

        await Task.WhenAll(ta, tb);
    }
}
public async Task Transaction_Write_Lock_Timeout()
{
    // Lock-timeout test: a write blocked by another transaction's lock must
    // fail with LOCK_TIMEOUT once the 1-second timeout pragma elapses.
    var data1 = DataGen.Person(1, 100).ToArray();
    var data2 = DataGen.Person(101, 200).ToArray();

    using (var db = new LiteDatabase("filename=:memory:"))
    {
        // small timeout
        db.Pragma(Pragmas.TIMEOUT, 1);

        var person = db.GetCollection<Person>();

        // init person collection with 100 document
        person.Insert(data1);

        // Semaphores enforce a deterministic interleaving between the tasks.
        var taskASemaphore = new SemaphoreSlim(0, 1);
        var taskBSemaphore = new SemaphoreSlim(0, 1);

        // task A will open transaction and will insert +100 documents
        // but will commit only 2s later
        var ta = Task.Run(() =>
        {
            db.BeginTrans();

            person.Insert(data2);

            taskBSemaphore.Release();
            taskASemaphore.Wait();

            var count = person.Count();
            count.Should().Be(data1.Length + data2.Length);

            db.Commit();
        });

        // task B will try delete all documents but will be locked during 1 second
        var tb = Task.Run(() =>
        {
            taskBSemaphore.Wait();

            db.BeginTrans();

            // The delete cannot acquire the write lock while A's transaction is
            // open, so it must throw LOCK_TIMEOUT.
            person
                .Invoking(personCol => personCol.DeleteMany("1 = 1"))
                .Should()
                .Throw<LiteException>()
                .Where(ex => ex.ErrorCode == LiteException.LOCK_TIMEOUT);

            taskASemaphore.Release();
        });

        await Task.WhenAll(ta, tb);
    }
}
public void DeleteData()
{
    // Drop this data set's collection, if a database exists at all.
    if (!ExistsData())
    {
        return;
    }

    var dbName = DatabaseName();

    using (var db = new LiteDatabase(Path.Combine(pathData, dbName + ".db")))
    {
        db.DropCollection(nameCollection());
        db.Commit();
    }
}
public void Transaction_Write_Lock_Timeout()
{
    // Delay-based lock-timeout test: task B's DeleteMany must fail with
    // LOCK_TIMEOUT while task A holds the write lock for ~4 seconds
    // (the connection string sets a 1-second timeout).
    var data1 = DataGen.Person(1, 100).ToArray();
    var data2 = DataGen.Person(101, 200).ToArray();

    using (var db = new LiteDatabase("filename=:memory:;timeout=1"))
    {
        var person = db.GetCollection<Person>();

        // init person collection with 100 document
        person.Insert(data1);

        // task A will open transaction and will insert +100 documents
        // but will commit only 2s later
        var ta = new Task(() =>
        {
            db.BeginTrans();

            person.Insert(data2);

            Task.Delay(4000).Wait();

            var count = person.Count();

            Assert.AreEqual(data1.Length + data2.Length, count);

            db.Commit();
        });

        // task B will try delete all documents but will be locked during 1 second
        var tb = new Task(() =>
        {
            Task.Delay(250).Wait();

            db.BeginTrans();

            try
            {
                person.DeleteMany("1 = 1");

                Assert.Fail("Must be locked");
            }
            catch (LiteException ex) when (ex.ErrorCode == LiteException.LOCK_TIMEOUT)
            {
                // expected: the delete could not acquire the write lock in time
            }
        });

        ta.Start();
        tb.Start();

        Task.WaitAll(ta, tb);
    }
}
public void Transaction_Read_Version()
{
    // Delay-based snapshot test: a transaction keeps seeing the read-version
    // it started with, even after another transaction commits.
    var data1 = DataGen.Person(1, 100).ToArray();
    var data2 = DataGen.Person(101, 200).ToArray();

    using (var db = new LiteDatabase(new MemoryStream()))
    {
        var person = db.GetCollection<Person>();

        // init person collection with 100 document
        person.Insert(data1);

        // task A will insert more 100 documents but will commit only 1s later
        var ta = new Task(() =>
        {
            db.BeginTrans();

            person.Insert(data2);

            Task.Delay(1000).Wait();

            db.Commit();
        });

        // task B will open transaction too and will count 100 original documents only
        // but now, will wait task A finish - but is in transaction and must see only initial version
        var tb = new Task(() =>
        {
            db.BeginTrans();

            Task.Delay(250).Wait();

            var count = person.Count();

            // read 100 documents
            Assert.AreEqual(data1.Length, count);

            ta.Wait();

            // keep reading 100 documents because B is still in the same transaction
            count = person.Count();
            Assert.AreEqual(data1.Length, count);
        });

        ta.Start();
        tb.Start();

        Task.WaitAll(ta, tb);
    }
}
public void UpdateProcessState(ProcessState state)
{
    // Copies the mutable fields of the given state onto the stored record
    // with the same id and persists the result.
    using (var db = new LiteDatabase(_databaseFile))
    {
        var col = db.GetCollection<ProcessState>("processtate");

        ProcessState storedState = col.FindById(state.Id);

        // FIX: FindById returns null for an unknown id; fail with a clear
        // message instead of a NullReferenceException on the assignments below.
        if (storedState == null)
        {
            throw new InvalidOperationException($"ProcessState with id '{state.Id}' was not found.");
        }

        storedState.DeelnemerNummer = state.DeelnemerNummer;
        storedState.WerkgeverNummer = state.WerkgeverNummer;
        storedState.Status = state.Status;
        storedState.Foutmelding = state.Foutmelding;

        col.Update(storedState);
        db.Commit();
    }
}
public void AddBanner(StoreBanner storeBanner)
{
    IsApplicationUpdate = true;

    using (var db = new LiteDatabase(Settings.Default.ConnectionString))
    {
        var collection = db.GetCollection<StoreBanner>("storebanners");

        // Insert only when neither the banner code nor the name is taken yet.
        var alreadyRegistered = collection.Exists(x =>
            x.BANNER_CODE == storeBanner.BANNER_CODE || x.BANNER_NAME == storeBanner.BANNER_NAME);

        if (!alreadyRegistered)
        {
            collection.Insert(storeBanner);
        }

        db.Commit();

        // Refresh the in-memory cache keyed by banner code.
        Banners = collection.FindAll().ToDictionary(banner => banner.BANNER_CODE);
    }
}
public void Test_Transaction_States()
{
    // Exercises the boolean results of the BeginTrans/Commit/Rollback
    // state machine on a single connection.
    var firstBatch = DataGen.Person(1, 10).ToArray();
    var secondBatch = DataGen.Person(11, 20).ToArray();

    using (var db = new LiteDatabase(new MemoryStream()))
    {
        var person = db.GetCollection<Person>();

        // The first BeginTrans opens a transaction...
        Assert.IsTrue(db.BeginTrans());

        // ...a second call on the same thread reuses it and reports false.
        Assert.IsFalse(db.BeginTrans());

        person.Insert(firstBatch);

        // Exactly one pending transaction can be committed.
        Assert.IsTrue(db.Commit());

        // A second Commit has nothing to commit.
        Assert.IsFalse(db.Commit());

        // And there is nothing to roll back either.
        Assert.IsFalse(db.Rollback());

        Assert.IsTrue(db.BeginTrans());

        // Rolling back an empty transaction still succeeds.
        Assert.IsTrue(db.Rollback());

        // Outside a transaction the insert auto-commits.
        person.Insert(secondBatch);

        Assert.AreEqual(20, person.Count());
    }
}
public static bool Setup(string filename = "")
{
    // (Re)opens the database, ensures all indexes exist, and starts the
    // background writer exactly once. Always reports true; initialization
    // failures are logged and treated as best-effort.
    if (!string.IsNullOrEmpty(filename))
    {
        Filename = filename;
    }

    if (db != null)
    {
        CloseDatabase();
    }

    try
    {
        db = new LiteDatabase(Filename);

        db.BeginTrans();

        var col = db.GetCollection<WriteObject>("WriteObjects");
        col.EnsureIndex(x => x.ColObj.Identity);
        col.EnsureIndex(x => x.RowKey);
        col.EnsureIndex(x => x.ColObj.ResultType);
        col.EnsureIndex(x => x.RunId);

        var cr = db.GetCollection<CompareResult>("CompareResults");
        cr.EnsureIndex(x => x.BaseRunId);
        cr.EnsureIndex(x => x.CompareRunId);
        cr.EnsureIndex(x => x.ResultType);

        db.Commit();
    }
    catch (Exception e)
    {
        // Best-effort: a failed init is logged but does not abort setup.
        Log.Debug(e, "Initializing database.");
    }

    if (!WriterStarted)
    {
        // FIX: the original cast an async lambda to Action and invoked it,
        // producing an async-void call whose exceptions are unobservable.
        // Start the writer as a plain fire-and-forget background task instead.
        _ = Task.Run(() => KeepSleepAndFlushQueue());

        WriterStarted = true;
    }

    return true;
}
public void RegistreerDeelnemerVerhuizing(Verhuizing verhuizing)
{
    // Applies a relocation ("verhuizing") to the stored participant record
    // identified by verhuizing.Id.
    using (var db = new LiteDatabase(_databaseFile))
    {
        var col = db.GetCollection<Deelnemer>("deelnemer");

        Deelnemer deelnemer = col.FindById(verhuizing.Id);

        // FIX: FindById returns null for an unknown id; fail with a clear
        // message instead of a NullReferenceException on the assignments below.
        if (deelnemer == null)
        {
            throw new InvalidOperationException($"Deelnemer with id '{verhuizing.Id}' was not found.");
        }

        deelnemer.WoonAdresStraat = verhuizing.Straat;
        deelnemer.WoonAdresHuisnummer = verhuizing.Huisnummer;
        deelnemer.WoonAdresHuisnummerToevoeging = verhuizing.HuisnummerToevoeging;
        deelnemer.WoonAdresPostcode = verhuizing.Postcode;
        deelnemer.WoonAdresPlaats = verhuizing.Plaats;

        col.Update(deelnemer);
        db.Commit();
    }
}
public void InsertCommits(params Comment[] comments)
{
    // Persist each comment (and any attached files) one at a time,
    // committing the connection after every insert.
    var commentDB = new CommentTable(connection);

    foreach (var comment in comments)
    {
        if (comment.Files != null)
        {
            InsertAdditionFiles(comment.Files.ToArray());
        }

        commentDB.Insert(comment);
        connection.Commit();
    }
}
public void Upsert(SubscribedFileItem item)
{
    // Replace-then-insert upsert, keyed by (RootHash, Registrant),
    // performed atomically inside a transaction.
    lock (_lockObject)
    {
        var itemEntity = SubscribedFileItemEntity.Import(item);
        var col = this.GetCollection();

        _database.BeginTrans();

        try
        {
            col.DeleteMany(n => n.RootHash == itemEntity.RootHash && n.Registrant == itemEntity.Registrant);
            col.Insert(itemEntity);

            _database.Commit();
        }
        catch
        {
            // FIX: roll back on failure so the transaction is not left open
            // (previously an exception between BeginTrans and Commit leaked it).
            _database.Rollback();
            throw;
        }
    }
}
public void Upsert(UploadingFileItem item)
{
    // Replace-then-insert upsert, keyed by FilePath,
    // performed atomically inside a transaction.
    lock (_lockObject)
    {
        var itemEntity = UploadingFileItemEntity.Import(item);
        var col = this.GetCollection();

        _database.BeginTrans();

        try
        {
            col.DeleteMany(n => n.FilePath == item.FilePath);
            col.Insert(itemEntity);

            _database.Commit();
        }
        catch
        {
            // FIX: roll back on failure so the transaction is not left open
            // (previously an exception between BeginTrans and Commit leaked it).
            _database.Rollback();
            throw;
        }
    }
}
public void Upsert(DownloadingFileItem item)
{
    // Replace-then-insert upsert, keyed by the seed's RootHash,
    // performed atomically inside a transaction.
    lock (_lockObject)
    {
        var itemEntity = DownloadingFileItemEntity.Import(item);
        var col = this.GetCollection();

        _database.BeginTrans();

        try
        {
            col.DeleteMany(n => n.Seed!.RootHash == itemEntity.Seed!.RootHash);
            col.Insert(itemEntity);

            _database.Commit();
        }
        catch
        {
            // FIX: roll back on failure so the transaction is not left open
            // (previously an exception between BeginTrans and Commit leaked it).
            _database.Rollback();
            throw;
        }
    }
}
async Task<string> CreateDatabase()
{
    // Ensure the database file for this data set exists; returns its name.
    var dbName = DatabaseName();

    if (!ExistsData())
    {
        var databasePath = Path.Combine(pathData, dbName + ".db");

        // Opening the database (and committing) creates the file on disk.
        using (var db = new LiteDatabase(databasePath))
        {
            db.Commit();
        }
    }

    return dbName;
}
public Dictionary<string, DirectoryMeta> GetFoldersData()
{
    // Loads all project directories (with their StoreBanner reference) into
    // the Folders cache and prunes database records whose directory no longer
    // exists on disk.
    using (var db = new LiteDatabase(Settings.Default.ConnectionString))
    {
        Folders = db.GetCollection<DirectoryMeta>("projdirs").Include<StoreBanner>(x => x.StoreBanner).FindAll().ToDictionary(folder => folder.DirectoryPath);

        IsApplicationUpdate = true;

        foreach (var folder in Folders)
        {
            // Remove stale records for directories deleted outside the app.
            if (!folder.Value.GetDirectoryInfo().Exists)
            {
                db.GetCollection<DirectoryMeta>("projdirs").Delete(folder.Value.Id);
            }
        }

        // NOTE(review): records deleted above are still present in the
        // returned Folders dictionary - confirm whether callers expect the
        // pruned view instead.
        db.Commit();
    }

    return(Folders);
}
private void ExecuteWriter(Action<ILiteCollection<T>> writing)
{
    // Runs the given write action against the T collection.
    //
    // FIX: BeginTrans returns false when a transaction is already open on this
    // thread; the original code then silently skipped the write entirely.
    // Now the action always runs, and we only commit/rollback the transaction
    // we actually opened ourselves (an outer owner finishes its own).
    var ownsTransaction = db.BeginTrans();

    try
    {
        var col = db.GetCollection<T>(typeof(T).Name);

        writing(col);

        if (ownsTransaction)
        {
            db.Commit();
        }
    }
    catch
    {
        if (ownsTransaction)
        {
            db.Rollback();
        }

        throw;
    }
}
public DirectoryMeta CheckAndInsertUpdateData(DirectoryMeta data)
{
    IsApplicationUpdate = true;

    using (var db = new LiteDatabase(Settings.Default.ConnectionString))
    {
        var collection = db.GetCollection<DirectoryMeta>("projdirs");

        var pathKnown = collection.Exists(x => x.DirectoryPath == data.DirectoryPath);

        if (!pathKnown)
        {
            // First time we see this directory: insert it.
            collection.Insert(data);
        }
        else if (!collection.Exists(x => x.DirectoryPath == data.DirectoryPath && x.StoreBanner.BANNER_CODE == data.StoreBanner.BANNER_CODE))
        {
            // Known directory whose banner changed: update the record.
            collection.Update(data);
        }

        db.Commit();

        // Refresh the cached view, including the banner reference.
        Folders = collection.Include<StoreBanner>(x => x.StoreBanner).FindAll().ToDictionary(folder => folder.DirectoryPath);
    }

    return Folders[data.DirectoryPath];
}
public async ValueTask InsertAsync(ThumbnailCache entity)
{
    // Persists a thumbnail cache entry into LiteDB storage, keyed by the
    // source file path plus the thumbnail size/resize/format settings.
    // NOTE(review): the Task.Delay(1) appears to be a force-async-yield hack -
    // confirm whether it is still required.
    await Task.Delay(1).ConfigureAwait(false);

    using (await _asyncLock.LockAsync())
    {
        var id = new ThumbnailCacheIdEntity()
        {
            FilePath = NestedPathEntity.Import(entity.FileMeta.Path),
            ThumbnailWidth = (int)entity.ThumbnailMeta.Width,
            ThumbnailHeight = (int)entity.ThumbnailMeta.Height,
            ThumbnailResizeType = entity.ThumbnailMeta.ResizeType,
            ThumbnailFormatType = entity.ThumbnailMeta.FormatType,
        };

        var storage = this.GetStorage();

        // BeginTrans returns false when this thread already has an open
        // transaction - treated as a programming error here.
        if (!_database.BeginTrans())
        {
            _logger.Error("current thread already in a transaction");
            throw new Exception();
        }

        try
        {
            // Serialize the entity into the storage stream under the given id.
            using (var outStream = storage.OpenWrite(id, "-"))
            {
                RocketMessage.ToStream(entity, outStream);
            }

            if (!_database.Commit())
            {
                _logger.Error("failed to commit");
                throw new Exception();
            }
        }
        catch (Exception e)
        {
            // NOTE(review): failures are logged and swallowed after rollback,
            // so callers observe success even when the write failed - confirm
            // this is intentional.
            _logger.Debug(e);

            _database.Rollback();
        }
    }
}
public StoreBanner CheckAndInsertUpdateData(StoreBanner data)
{
    IsApplicationUpdate = true;

    using (var db = new LiteDatabase(Settings.Default.ConnectionString))
    {
        var collection = db.GetCollection<StoreBanner>("storebanners");

        var bannerKnown = collection.Exists(x =>
            x.BANNER_CODE == data.BANNER_CODE || x.BANNER_NAME == data.BANNER_NAME);

        if (!bannerKnown)
        {
            // Neither the code nor the name exists yet: brand-new banner.
            collection.Insert(data);
        }
        else if (!collection.Exists(x => x.BANNER_CODE == data.BANNER_CODE && x.BANNER_NAME == data.BANNER_NAME && x.CLIENT == data.CLIENT))
        {
            // Existing banner whose name/client data changed: update it.
            collection.Update(data);
        }

        db.Commit();

        // Refresh the in-memory cache keyed by banner code.
        Banners = collection.FindAll().ToDictionary(banner => banner.BANNER_CODE);
    }

    return Banners[data.BANNER_CODE];
}
public void SaveNonVolatileVars()
{
    // Persists the algo's non-volatile state: its own state document, per-symbol
    // data, and any changed active/closed operations.
    lock (DbLock)
    {
        // save my internal state into a single well-known document
        BsonDocument state = new BsonDocument();
        state["_id"] = "TradingAlgoState";
        state["State"] = Db.Mapper.Serialize(State);

        // save derived-class state
        state["DerivedClassState"] = Db.Mapper.Serialize(GetState());

        // save module states
        state["Sentry"] = Db.Mapper.Serialize(Sentry.GetState());
        state["Allocator"] = Db.Mapper.Serialize(Allocator.GetState());
        state["Executor"] = Db.Mapper.Serialize(Executor.GetState());
        state["RiskManager"] = Db.Mapper.Serialize(RiskManager.GetState());

        Db.GetCollection("State").Upsert(state);

        foreach (var symData in SymbolsData.Values)
        {
            Db.GetCollection<SymbolData>("SymbolsData").Upsert(symData);
        }

        // Only the operation upserts below run inside an explicit transaction;
        // the state/symbol upserts above commit individually.
        Db.BeginTrans();

        // only persist operations that actually changed since the last save
        foreach (var op in ActiveOperations.Where(op => op.IsChanged))
        {
            DbActiveOperations.Upsert(op);
        }

        foreach (var op in ClosedOperations.Where(op => op.IsChanged))
        {
            DbClosedOperations.Upsert(op);
        }

        Db.Commit();

        Db.Checkpoint();
    }
}
public async Task<long> StoreDataAsNew(T[] data)
{
    // Rebuilds the data set from scratch and returns the elapsed milliseconds.
    var stopwatch = Stopwatch.StartNew();

    DeleteData();
    await CreateDatabase();

    var dbName = DatabaseName();
    var databasePath = Path.Combine(pathData, dbName + ".db");

    using (var db = new LiteDatabase(databasePath))
    {
        var collection = db.GetCollection<T>(nameCollection());

        collection.InsertBulk(data);
        collection.EnsureIndex(x => x.ID);

        db.Commit();
    }

    stopwatch.Stop();

    return stopwatch.ElapsedMilliseconds;
}
public BsonValue Execute(LiteDatabase db, StringScanner s)
{
    // Shell "commit" command: commits the current transaction and yields no
    // result value. The scanner parameter is unused - the command takes no
    // arguments.
    db.Commit();
    return BsonValue.Null;
}