/// <summary>
/// Creates a backup of the database by closing it, reading the raw file
/// bytes, and re-opening it. Returns a completed task with the file content,
/// or with null when the file is missing or the read fails.
/// </summary>
public Task<byte[]> BackupDatabase()
{
    // Find database file to make backup
    if (!File.Exists(DatabaseFileName))
    {
        // FIX: the original returned a bare null, i.e. a null Task —
        // callers awaiting it would throw NullReferenceException.
        // Wrap the "no backup" result in a completed task instead.
        return Task.FromResult<byte[]>(null);
    }

    try
    {
        // Commit all changes and dispose database instance to free database file lock
        _liteDatabase.Checkpoint();
        _liteDatabase.Dispose();

        // Return database content
        return Task.FromResult(File.ReadAllBytes(DatabaseFileName));
    }
    catch (Exception)
    {
        // Best-effort backup: signal failure with a null payload rather than
        // propagating I/O exceptions. (FIX: same null-Task bug as above;
        // also removed the unused exception variable.)
        return Task.FromResult<byte[]>(null);
    }
    finally
    {
        // Always re-open the database, whether the backup succeeded or not.
        InitLiteDb();
    }
}
/// <summary>
/// Inserts a node into the sounds collection and immediately flushes the
/// write-ahead log. Always returns a completed task.
/// </summary>
public Task Insert(SoundboxNode file)
{
    var sounds = GetSoundsCollection();
    sounds.Insert(file);

    //TODO interface commit
    Database.Checkpoint();

    return Task.FromResult(true);
}
/// <summary>
/// Inserts a station unless one with the same StationCode already exists,
/// then flushes the write-ahead log.
/// </summary>
public void InsertStation(Station station)
{
    // Guard clause: skip duplicates keyed on the station code.
    var alreadyStored = Collection.Exists(s => s.StationCode.Equals(station.StationCode));
    if (alreadyStored)
    {
        return;
    }

    Collection.Insert(station);
    _db.Checkpoint();
}
/// <summary>
/// Shuts the component down: runs the base shutdown, then flushes, disposes
/// and clears the running-values database if it is still open.
/// </summary>
public override void Shutdown()
{
    base.Shutdown();

    // Nothing to release if the database was never opened (or already closed).
    if (_dbRunningValues == null)
    {
        return;
    }

    _dbRunningValues.Checkpoint();
    _dbRunningValues.Dispose();
    _dbRunningValues = null;
}
/// <summary>
/// Registers the requesting user for the most recently created giveaway and
/// returns a confirmation message (Italian, user-facing).
/// </summary>
public string GetMessageEvent(CommandSource source)
{
    var settings = new EngineSettings { Filename = "giveaway.db" };
    var db = new LiteEngine(settings);

    // FIX: the original leaked the LiteDatabase (and its file lock) whenever
    // an exception was thrown before the explicit Dispose call.
    using (var liteDb = new LiteDatabase(db))
    {
        var giveaway = liteDb.GetCollection<Giveaway>().FindAll().LastOrDefault();

        // FIX: guard against the case where no giveaway exists yet —
        // previously giveaway.Id threw NullReferenceException.
        if (giveaway == null)
        {
            return $"{source.User} non c'è nessun giveaway attivo.";
        }

        var giveawayuser = new GiveawayUsers();
        giveawayuser.Username = source.User;
        giveawayuser.GiveawayId = giveaway.Id;

        liteDb.GetCollection<GiveawayUsers>().Insert(giveawayuser);
        liteDb.Checkpoint();
    }

    return $"{source.User} ti sei iscritto al prossimo giveaway.";
}
/// <summary>
/// Handles the giveaway command: "winner" draws and announces a winner;
/// any other argument creates a new giveaway with that name.
/// Returns the user-facing (Italian) response message.
/// </summary>
public string GetMessageEvent(CommandSource source)
{
    var message = string.Empty;
    var name = source.Message.Split(" ");

    // FIX: the original indexed name[1] unconditionally, throwing
    // IndexOutOfRangeException when the command had no argument.
    if (name.Length < 2)
    {
        return message;
    }

    if (name[1] == "winner")
    {
        var winner = DrawWinner();
        message = $"Il vincitore del giveaway è: {winner}";
        return message;
    }

    var giveaway = new Giveaway();
    giveaway.Name = name[1];

    var settings = new EngineSettings { Filename = "giveaway.db" };
    var db = new LiteEngine(settings);

    // FIX: dispose the database even when Insert/Checkpoint throws
    // (the original leaked the file handle on any failure path).
    using (var liteDb = new LiteDatabase(db))
    {
        liteDb.GetCollection<Giveaway>().Insert(giveaway);
        liteDb.Checkpoint();
    }

    message = $"{source.User} hai creato il giveaway: {name[1]}";
    return message;
}
/// <summary>
/// Verifies that a transaction slot is released when a write fails to start:
/// while one thread is blocked inside the engine (holding the write lock),
/// concurrent inserts must fail with a timeout LiteException — twice, which
/// proves the first failed attempt did not leak its transaction slot.
/// </summary>
public void Test_Transaction_ReleaseWhenFailToStart()
{
    var blockingStream = new BlockingStream();
    var db = new LiteDatabase(blockingStream) { Timeout = TimeSpan.FromSeconds(1) };
    Thread lockerThread = null;
    try
    {
        // Locker thread: perform one insert, then flip the stream into
        // blocking mode so the following Checkpoint hangs mid-write.
        lockerThread = new Thread(() =>
        {
            db.GetCollection<Person>().Insert(new Person());
            blockingStream.ShouldBlock = true;
            db.Checkpoint();
            db.Dispose();
        });
        lockerThread.Start();

        // Wait (up to 1s) until the locker thread is actually blocked.
        blockingStream.Blocked.WaitOne(1000).Should().BeTrue();

        // Two consecutive attempts must both time out: the second assert is
        // the real check that the first failure released its slot.
        Assert.Throws<LiteException>(() => db.GetCollection<Person>().Insert(new Person())).Message.Should().Contain("timeout");
        Assert.Throws<LiteException>(() => db.GetCollection<Person>().Insert(new Person())).Message.Should().Contain("timeout");
    }
    finally
    {
        // Unblock the stream so the locker thread can finish, then join it.
        blockingStream.ShouldUnblock.Set();
        lockerThread?.Join();
    }
}
/// <summary>
/// Opens the station database, binds the collection, enforces a unique index
/// on the station code, and flushes the write-ahead log.
/// </summary>
private void InitDatabase()
{
    Trace.WriteLine("Init Database.");

    _db = new LiteDatabase(dbFileFullPath);
    _collection = _db.GetCollection<Station>(collectionName);

    // Unique index: one document per station code.
    _collection.EnsureIndex(station => station.StationCode, true);

    _db.Checkpoint();
}
/// <summary>
/// Shuts the component down: base shutdown first, then flush, dispose and
/// clear the database handle (no-op when it is already null).
/// </summary>
public override void Shutdown()
{
    base.Shutdown();

    if (_database != null)
    {
        _database.Checkpoint();
        _database.Dispose();
        _database = null;
    }
}
/// <summary>
/// Benchmark setup for the compound-index variant: opens the database,
/// creates one compound index over the two boolean fields, seeds the
/// dataset and checkpoints.
/// </summary>
public void GlobalSetupCompoundIndexVariant()
{
    DatabaseInstance = new LiteDatabase(ConnectionString());
    _fileMetaCollection = DatabaseInstance.GetCollection<FileMetaBase>();

    // Compound index expression over both query fields.
    var indexExpression = $"$.{nameof(FileMetaBase.IsFavorite)};$.{nameof(FileMetaBase.ShouldBeShown)}";
    _fileMetaCollection.EnsureIndex(COMPOUND_INDEX_NAME, indexExpression);

    // executed once per each N value
    var documents = FileMetaGenerator<FileMetaBase>.GenerateList(DatasetSize);
    _fileMetaCollection.Insert(documents);
    DatabaseInstance.Checkpoint();
}
/// <summary>
/// Window close handler: persists the day's clock-in records, flushes the
/// log and releases the database.
/// </summary>
private void Window_Closed(object sender, EventArgs e)
{
    Journee.Pointages = Pointages;

    var journeeCollection = db.GetCollection<Journee>(POINTAGES_DATA_HANDLER);
    journeeCollection.Update(Journee);

    db.Checkpoint();
    db.Dispose();
}
/// <summary>
/// Benchmark setup: deletes any previous datafile, opens a fresh database,
/// seeds the dataset and checkpoints.
/// </summary>
public void GlobalSetup()
{
    // Start from a clean datafile for every run.
    File.Delete(DatabasePath);

    DatabaseInstance = new LiteDatabase(ConnectionString());
    _fileMetaCollection = DatabaseInstance.GetCollection<FileMetaBase>();

    // executed once per each N value
    var documents = FileMetaGenerator<FileMetaBase>.GenerateList(DatasetSize);
    _fileMetaCollection.Insert(documents);
    DatabaseInstance.Checkpoint();
}
/// <summary>
/// Micro-benchmark: inserts one million documents, then times Checkpoint and
/// EnsureIndex separately, printing elapsed times to the console.
/// </summary>
static void Main(string[] args)
{
    Console.WriteLine("LITE DB v5");
    Console.WriteLine("===========================================================");

    // Remove any leftover datafile and write-ahead log from a previous run.
    File.Delete(@"d:\test-1m.db");
    File.Delete(@"d:\test-1m-log.db");

    var databaseFile = @"d:\test-1m.db";
    var stopwatch = new Stopwatch();

    using (var db = new LiteDatabase(databaseFile))
    {
        var events = db.GetCollection<Event>("event");

        // Build the full batch up front so timing covers storage work only.
        var batch = new List<Event>();
        for (var n = 0; n < 1_000_000; n++)
        {
            batch.Add(new Event() { Data = "the quick brown fox jumps over the lazy dog", DateTime = DateTime.Now });
        }

        stopwatch.Restart();
        events.Insert(batch);
        Console.WriteLine("Insert: " + stopwatch.Elapsed);

        stopwatch.Restart();
        db.Checkpoint();
        Console.WriteLine("Checkpoint: " + stopwatch.Elapsed);

        stopwatch.Restart();
        events.EnsureIndex(e => e.DateTime);
        Console.WriteLine("EnsureIndex: " + stopwatch.Elapsed);
    }

    Console.WriteLine(" ===========================================================");
    Console.WriteLine("End");
    Console.ReadKey();
}
/// <summary>
/// Verifies that Rebuild shrinks a large datafile (29k inserted, all but one
/// deleted) and preserves both the remaining document and the user version,
/// across a close/re-open cycle.
/// </summary>
public void Rebuild_Large_Files()
{
    // Shared invariant: exactly one document left, user version intact.
    void AssertState(LiteDatabase db, LiteCollection<Zip> col)
    {
        Assert.Equal(1, col.Count());
        Assert.Equal(99, db.UserVersion);
    }

    using (var tempFile = new TempFile())
    {
        using (var database = new LiteDatabase(tempFile.Filename))
        {
            var zips = database.GetCollection<Zip>();

            database.UserVersion = 99;
            zips.EnsureIndex("city", false);

            var inserted = zips.Insert(DataGen.Zip()); // 29.353 docs
            var deleted = zips.DeleteMany(x => x.Id != "01001"); // delete 29.352 docs

            Assert.Equal(29353, inserted);
            Assert.Equal(29352, deleted);
            Assert.Equal(1, zips.Count());

            // must checkpoint
            database.Checkpoint();

            // file still large than 5mb (even with only 1 document)
            Assert.True(tempFile.Size > 5 * 1024 * 1024);

            // reduce datafile
            var reduced = database.Rebuild();

            // now file are small than 50kb
            Assert.True(tempFile.Size < 50 * 1024);

            AssertState(database, zips);
        }

        // re-open and rebuild again
        using (var database = new LiteDatabase(tempFile.Filename))
        {
            var zips = database.GetCollection<Zip>();

            AssertState(database, zips);
            database.Rebuild();
            AssertState(database, zips);
        }
    }
}
/// <summary>
/// Benchmark setup for the exclusion variant: fresh datafile, one simple
/// index on the visibility flag, seeded dataset, checkpoint.
/// </summary>
public void GlobalIndexSetup()
{
    // Start from a clean datafile for every run.
    File.Delete(DatabasePath);

    DatabaseInstance = new LiteDatabase(ConnectionString());
    _fileMetaExclusionCollection = DatabaseInstance.GetCollection<FileMetaWithExclusion>();

    // Index the field the benchmark queries filter on.
    _fileMetaExclusionCollection.EnsureIndex(meta => meta.ShouldBeShown);

    // executed once per each N value
    var documents = FileMetaGenerator<FileMetaWithExclusion>.GenerateList(DatasetSize);
    _fileMetaExclusionCollection.Insert(documents);
    DatabaseInstance.Checkpoint();
}
/// <summary>
/// Re-initializes the database: flushes the write-ahead log, rebuilds the
/// datafile to reclaim space, and (re)ensures the cache-key index.
/// </summary>
/// <remarks>
/// NOTE(review): this locks on the shared LiteDatabase/collection instances
/// themselves; that only synchronizes with code using the same objects as
/// lock targets. A dedicated private lock object would be safer — confirm
/// other callers before changing.
/// </remarks>
private void InitDb()
{
    lock (_litedb)
    {
        _litedb.Checkpoint();
        // Rebuild compacts the datafile; can be expensive on large files.
        _litedb.Rebuild();
        lock (_cache)
        {
            // Keep lookups by cachekey indexed after the rebuild.
            _cache.EnsureIndex(c => c.cachekey);
        }
    }
}
/// <summary>
/// Closes the shared database handle once the last user has released it:
/// when the usage counter is zero and the handle is still open, flushes the
/// log, disposes the instance and clears the field.
/// </summary>
/// <remarks>
/// NOTE(review): lock(this) is fragile — any external code that locks on
/// this instance can contend or deadlock with this method. A private lock
/// object is preferred; left unchanged here because other members of this
/// class may rely on the same lock target. Verify before refactoring.
/// </remarks>
public void CloseDb()
{
    lock (this)
    {
        // Only tear down when nobody is using the handle and it is open.
        if (counter == 0 && dba != null)
        {
            dba.Checkpoint();
            dba.Dispose();
            dba = null;
        }
    }
}
/// <summary>
/// Persists the algorithm's non-volatile state: the aggregate state document,
/// per-symbol data, and all changed operations (inside one transaction),
/// finishing with a checkpoint. The whole save runs under DbLock.
/// </summary>
public void SaveNonVolatileVars()
{
    lock (DbLock)
    {
        // Aggregate state document, stored under a well-known _id.
        var stateDoc = new BsonDocument();
        stateDoc["_id"] = "TradingAlgoState";
        stateDoc["State"] = Db.Mapper.Serialize(State);

        // State of the derived class.
        stateDoc["DerivedClassState"] = Db.Mapper.Serialize(GetState());

        // States of the individual modules.
        stateDoc["Sentry"] = Db.Mapper.Serialize(Sentry.GetState());
        stateDoc["Allocator"] = Db.Mapper.Serialize(Allocator.GetState());
        stateDoc["Executor"] = Db.Mapper.Serialize(Executor.GetState());
        stateDoc["RiskManager"] = Db.Mapper.Serialize(RiskManager.GetState());

        Db.GetCollection("State").Upsert(stateDoc);

        // Per-symbol data (collection handle hoisted out of the loop).
        var symbolCollection = Db.GetCollection<SymbolData>("SymbolsData");
        foreach (var symData in SymbolsData.Values)
        {
            symbolCollection.Upsert(symData);
        }

        // Persist only the operations flagged as changed, in one transaction.
        Db.BeginTrans();
        foreach (var op in ActiveOperations.Where(op => op.IsChanged))
        {
            DbActiveOperations.Upsert(op);
        }
        foreach (var op in ClosedOperations.Where(op => op.IsChanged))
        {
            DbClosedOperations.Upsert(op);
        }
        Db.Commit();

        Db.Checkpoint();
    }
}
/// <summary>
/// Benchmark setup for the simple-index baseline: fresh datafile, two
/// single-field indexes, seeded dataset, checkpoint.
/// </summary>
public void GlobalSetupSimpleIndexBaseline()
{
    // Start from a clean datafile for every run.
    File.Delete(DatabasePath);

    DatabaseInstance = new LiteDatabase(ConnectionString());
    _fileMetaCollection = DatabaseInstance.GetCollection<FileMetaBase>();

    // Two simple indexes — the baseline the compound variant is compared to.
    _fileMetaCollection.EnsureIndex(meta => meta.ShouldBeShown);
    _fileMetaCollection.EnsureIndex(meta => meta.IsFavorite);

    // executed once per each N value
    var documents = FileMetaGenerator<FileMetaBase>.GenerateList(DatasetSize);
    _fileMetaCollection.Insert(documents);
    DatabaseInstance.Checkpoint();
}
/// <summary>
/// Opens (creating if necessary) the per-client database stored under
/// %APPDATA%\caaa\&lt;cid&gt;.db using a shared connection, then checkpoints.
/// </summary>
public db_factory(string cid)
{
    var appData = Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData);
    var folder = Path.Combine(appData, "caaa");
    Directory.CreateDirectory(folder);

    var file = Path.Combine(folder, cid + ".db");

    // Shared mode so multiple processes can open the same datafile.
    var connection = new ConnectionString()
    {
        Connection = ConnectionType.Shared,
        Filename = file
    };
    lite = new LiteDatabase(connection);
    lite.Checkpoint();
}
/// <summary>
/// Closes the static database handle: flushes the log, disposes the instance
/// and clears the reference. Failures are logged, never thrown.
/// </summary>
public static void Close()
{
    try
    {
        LiteDatabase.Checkpoint();
        LiteDatabase.Dispose();
        LiteDatabase = null;

        Log.Info(Language.DatabaseClosed);
    }
    catch (Exception ex)
    {
        // Localized error message with the exception appended.
        Log.Error(string.Format(Language.DatabaseCloseError, ex));
    }
}
/// <summary>
/// Closes the static database handle: flushes the log, disposes the instance
/// and clears the reference. Failures are logged, never thrown.
/// </summary>
public static void Close()
{
    try
    {
        LiteDatabase.Checkpoint();
        LiteDatabase.Dispose();
        LiteDatabase = null;

        Log.Info("Database closed!");
    }
    catch (Exception ex)
    {
        Log.Error($"Error when try to close database:\n {ex}");
    }
}
/// <summary>
/// Rebuilds the symbol-history metadata from the *.bin2 chunk files on disk:
/// groups chunks by history key, records first/last tick times, then wipes
/// and re-upserts the metadata collection (checkpointing around the rewrite).
/// </summary>
private void Update2()
{
    var metaByKey = new Dictionary<string, SymbolHistoryMetaDataInternal>();
    var files = Directory.GetFiles(DataDir, "*.bin2");

    foreach (var filePath in files)
    {
        var chunkId = HistoryChunkId.Parse(filePath);

        // Get or create the metadata record for this history key.
        if (!metaByKey.TryGetValue(chunkId.HistoryId.Key, out var symData))
        {
            symData = new SymbolHistoryMetaDataInternal(chunkId.HistoryId);
            metaByKey.Add(chunkId.HistoryId.Key, symData);
        }
        symData.Chunks.Add(chunkId);

        // update first and last tick time
        // NOTE(review): both ticks go through UpdateLastBar — confirm that is
        // intended and not a missing "UpdateFirstBar" counterpart.
        HistoryChunk chunk = HistoryChunk.Load(filePath);
        if (chunk.Ticks.Count > 0)
        {
            symData.UpdateLastBar(chunk.Ticks.First());
            symData.UpdateLastBar(chunk.Ticks.Last());
        }
    }

    // reinsert everything
    DbSymbolsMetaData.DeleteAll();
    Db.Checkpoint();

    foreach (var meta in metaByKey.Values)
    {
        DbSymbolsMetaData.Upsert(meta);
    }
    Db.Checkpoint();
}
/// <summary>
/// Constructs the server: opens the post database (flushing its log), loads
/// the HTML templates, configures JSON serialization and stores the auth key.
/// </summary>
public LPServer(string dbPath, string auth)
{
    db = new LiteDatabase(dbPath);
    db.Checkpoint();
    posts = db.GetCollection<LitePost>("posts");

    // Pre-render the page templates.
    LoadIndexTemplate();
    LoadPostTemplate();

    // camelCase property names, case-insensitive reads.
    options = new JsonSerializerOptions
    {
        PropertyNamingPolicy = new CamelBack(),
        PropertyNameCaseInsensitive = true,
    };

    this.auth = auth;
}
/// <summary>
/// Verifies that UserVersion defaults to 0, and that a value written before
/// a checkpoint survives closing and re-opening the datafile.
/// </summary>
public void UserVersion_Get_Set()
{
    using (var tempFile = new TempFile())
    {
        // Write the version and checkpoint so it reaches the datafile.
        using (var database = new LiteDatabase(tempFile.Filename))
        {
            Assert.AreEqual(0, database.UserVersion);
            database.UserVersion = 5;
            database.Checkpoint();
        }

        // Re-open: the persisted version must survive the round trip.
        using (var database = new LiteDatabase(tempFile.Filename))
        {
            Assert.AreEqual(5, database.UserVersion);
        }
    }
}
/// <summary>
/// Flushes all queued entity changes (adds, updates, removes) to storage in
/// one pass, then checkpoints the database. Re-entrant calls while a save is
/// already in progress are silently skipped. Always returns a completed task.
/// </summary>
public Task SaveChanges()
{
    // Single-flight guard: proceed only if we atomically flip 0 -> 1.
    if (Interlocked.CompareExchange(ref _saveInProgress, 1, 0) == 0)
    {
        try
        {
            // Reconcile removals first: an entity queued for add and then
            // removed is dropped from both queues (net no-op), and any
            // pending update for a removed entity is discarded.
            // ToList() snapshots protect against concurrent mutation while
            // we enumerate.
            foreach (var remove in _entitiesToRemove.ToList())
            {
                foreach (var removeItem in remove.Value.ToList())
                {
                    if (_entitiesToAdd.TryGetValue(remove.Key, out var toAddList))
                    {
                        if (toAddList?.Contains(removeItem) == true)
                        {
                            remove.Value.Remove(removeItem);
                            toAddList.Remove(removeItem);
                        }
                    }

                    if (_entitiesToUpdate.TryGetValue(remove.Key, out var removeList))
                    {
                        removeList.Remove(removeItem);
                    }
                }
            }

            // Apply the queues: upsert additions, update modifications, and
            // delete removals — failing loudly if a delete finds no target.
            ProcessAction(_entitiesToAdd, (x, y) => y.Upsert(_mapper.ToDocument(x)));
            ProcessAction(_entitiesToUpdate, (x, y) => y.Update(_mapper.ToDocument(x)));
            ProcessAction(_entitiesToRemove, (x, y) =>
            {
                if (!y.Delete(x.Id))
                {
                    throw new InvalidOperationException(
                        "Failed to remove entity from storage " + _mapper.ToDocument(x));
                }
            });

            // Flush the write-ahead log into the main datafile.
            _database.Checkpoint();
        }
        finally
        {
            // Release the guard even on failure so a later save can run.
            _saveInProgress = 0;
        }
    }

    return Task.CompletedTask;
}
/// <summary>
/// Verifies that UserVersion defaults to 0, and that a value written before
/// a checkpoint survives closing and re-opening the datafile.
/// </summary>
public void UserVersion_Get_Set()
{
    using (var tempFile = new TempFile())
    {
        // Write the version and checkpoint so it reaches the datafile.
        using (var database = new LiteDatabase(tempFile.Filename))
        {
            database.UserVersion.Should().Be(0);
            database.UserVersion = 5;
            database.Checkpoint();
        }

        // Re-open: the persisted version must survive the round trip.
        using (var database = new LiteDatabase(tempFile.Filename))
        {
            database.UserVersion.Should().Be(5);
        }
    }
}
/// <summary>
/// Benchmark setup for the date-range queries: fresh datafile, indexes on
/// the three filtered fields, seeded dataset, checkpoint, and a fixed
/// DateTime constraint so every iteration compares the same instant.
/// </summary>
public void GlobalSetup()
{
    // Start from a clean datafile for every run.
    File.Delete(DatabasePath);

    DatabaseInstance = new LiteDatabase(ConnectionString());
    _fileMetaCollection = DatabaseInstance.GetCollection<FileMetaBase>();

    // Index the three fields the benchmark queries filter on.
    _fileMetaCollection.EnsureIndex(meta => meta.ValidFrom);
    _fileMetaCollection.EnsureIndex(meta => meta.ValidTo);
    _fileMetaCollection.EnsureIndex(meta => meta.ShouldBeShown);

    // executed once per each N value
    var documents = FileMetaGenerator<FileMetaBase>.GenerateList(DatasetSize);
    _fileMetaCollection.Insert(documents);
    DatabaseInstance.Checkpoint();

    // Capture the constraint once, in both CLR and BSON form.
    _dateTimeConstraint = DateTime.Now;
    _dateTimeConstraintBsonValue = new BsonValue(_dateTimeConstraint);
}
/// <summary>
/// Verifies that rebuilding after dropping the only collection reclaims all
/// pages except the 8KB header page.
/// </summary>
public void Rebuild_After_DropCollection()
{
    using (var tempFile = new TempFile())
    using (var database = new LiteDatabase(tempFile.Filename))
    {
        var zips = database.GetCollection<Zip>("zip");
        zips.Insert(DataGen.Zip());

        database.DropCollection("zip");
        database.Checkpoint();

        // full disk usage
        var sizeBefore = tempFile.Size;

        var freed = database.Rebuild();

        // only header page
        Assert.Equal(8192, sizeBefore - freed);
    }
}
/// <summary>
/// Concurrency smoke test: upserts 1000 books into the stream-backed
/// database from many threads at once, then checkpoints before disposal.
/// </summary>
private static void TestWriteDatabase(Stream stream)
{
    Console.WriteLine($"[{DateTime.Now.ToString()}] Start writing");

    using (var db = new LiteDatabase(stream))
    {
        var books = db.GetCollection<Book>();

        // Parallel upserts stress the engine's internal locking.
        ParallelEnumerable.Range(1, 1000).ForAll(i =>
        {
            var book = new Book
            {
                Id = i,
                Title = "fake title " + i,
                Author = "fake author " + i,
                Description = $"fake description {i} fake description end"
            };
            books.Upsert(book);
        });

        // flush
        db.Checkpoint();
    }

    Console.WriteLine($"[{DateTime.Now.ToString()}] Finish writing");
}