// Stores a new income record in the ingresos collection.
public static void InsertIngreso(OIngreso ingreso) => collIngresos.Insert(ingreso);
// Adds a single word entry to the backing words collection.
public void Insert(Word w)
{
    words.Insert(w);
}
/// <summary>
/// Insert a new entity to this collection. Document Id must be a new value in
/// the collection. Returns the document Id wrapped in a completed task (the
/// underlying insert is synchronous).
/// </summary>
public Task <BsonValue> InsertAsync(City item) => Task.FromResult(cities.Insert(item));
/// <summary>
/// Inserts <paramref name="item"/> into the backing collection and returns it.
/// </summary>
/// <param name="item">Entity to persist.</param>
/// <returns>The same entity instance that was inserted.</returns>
public virtual T CreateItem(T item)
{
    // The generated document id was captured into an unused local in the
    // original; callers only receive the entity itself, so the id is dropped.
    _collection.Insert(item);
    return item;
}
/// <summary>
/// Processes an incoming block: stores it, repairs gaps by queueing missing
/// ancestors, and handles chain re-organisations by rolling back transaction
/// inclusions and orphan flags back to the forking block.
/// </summary>
/// <param name="block">The newly received block.</param>
private void HandleBlock(Block block)
{
    // Already stored this block? Then there is nothing to do.
    var blockSearch = _blocks.FindOne(x => x.BlockHash == block.BlockHash);
    if (blockSearch != null)
    {
        return;
    }

    // Is this the first block we have ever stored?
    blockSearch = _blocks.FindOne(x => x.IsChainTip);
    if (blockSearch == null)
    {
        // Yes; store it without a PrevHash check. Height comes from JSON RPC.
        var blockHeight = Program.RPCClient.GetBlockHeight(block.BlockHash);
        block.Height = blockHeight;
        // Mark as best known chain tip.
        _chainTipHash = block.BlockHash;
        block.IsChainTip = true;
        _blocks.Insert(block);
        return;
    }

    // Do we have the previous block in the chain?
    blockSearch = _blocks.FindOne(x => x.BlockHash == block.Header.PrevBlockHash);
    if (blockSearch == null)
    {
        // We are missing the prev block; this shouldn't happen...
        // Walk backwards collecting missing blocks until we reach one we have.
        var missingBlocks = new List <Block> { block };
        var prevBlockHash = block.Header.PrevBlockHash;
        while (!_blocks.Exists(x => x.BlockHash == prevBlockHash))
        {
            // BUGFIX: fetch the current missing ancestor (prevBlockHash). The
            // original fetched block.Header.PrevBlockHash on every iteration,
            // so it looped forever whenever more than one ancestor was missing.
            var prevBlockData = Program.RPCClient.GetBlockData(prevBlockHash);
            var prevBlock = new Block(ByteToHex.StringToByteArray(prevBlockData));
            missingBlocks.Add(prevBlock);
            prevBlockHash = prevBlock.Header.PrevBlockHash;
        }

        // missingBlocks is newest-first; queue them oldest-first so they are
        // processed in chain order. This block is discarded (not saved) here,
        // as it will be re-processed in order from the queue.
        missingBlocks.Reverse();
        foreach (var missingBlock in missingBlocks)
        {
            Program.Database.EnqueueTask(new DatabaseWrite(missingBlock), 0);
        }
        return;
    }

    // We have the previous block, so this block's height is prev height + 1.
    block.Height = blockSearch.Height + 1;
    var chainTipBlock = _blocks.FindOne(x => x.IsChainTip);

    // If the prevHash block is not our chaintip, there was a re-org:
    // invalidate transaction inclusions back to the forking block.
    if (_chainTipHash != block.Header.PrevBlockHash)
    {
        var orphanedBlock = chainTipBlock;
        var newChainBlock = block;
        var orphanedBlocks = new List <Block> { orphanedBlock };
        var newChainBlocks = new List <Block> { newChainBlock };

        // Step backwards on each chain in turn until the two sides of the
        // fork are at the same height.
        while (orphanedBlock.Height > newChainBlock.Height)
        {
            orphanedBlock = _blocks.FindOne(x => x.BlockHash == orphanedBlock.Header.PrevBlockHash);
            orphanedBlocks.Add(orphanedBlock);
        }
        while (orphanedBlock.Height < newChainBlock.Height)
        {
            newChainBlock = _blocks.FindOne(x => x.BlockHash == newChainBlock.Header.PrevBlockHash);
            newChainBlocks.Add(newChainBlock);
        }

        // Both chains are now at the same height; step both back together
        // until they share a parent — that parent is the forking block.
        while (orphanedBlock.Header.PrevBlockHash != newChainBlock.Header.PrevBlockHash)
        {
            orphanedBlock = _blocks.FindOne(x => x.BlockHash == orphanedBlock.Header.PrevBlockHash);
            orphanedBlocks.Add(orphanedBlock);
            newChainBlock = _blocks.FindOne(x => x.BlockHash == newChainBlock.Header.PrevBlockHash);
            newChainBlocks.Add(newChainBlock);
        }

        // Roll back transaction inclusions above the fork point.
        var transactions = _transactions.Find(x => x.IncludedAtBlockHeight >= orphanedBlock.Height);
        foreach (var transaction in transactions)
        {
            transaction.IncludedAtBlockHeight = 0;
            transaction.IncludedInBlockHex = "";
            // BUGFIX: persist the rollback; mutating the detached entity alone
            // does not write anything back to the database.
            _transactions.Update(transaction);
        }

        // Mark all blocks on the orphaned side as orphaned, and vice-versa
        // (clearing the flag on the new side is needed for re-re-orgs).
        foreach (var blk in orphanedBlocks)
        {
            blk.Orphaned = true;
            // BUGFIX: persist the flag change (see above).
            _blocks.Update(blk);
        }
        foreach (var blk in newChainBlocks)
        {
            blk.Orphaned = false;
            _blocks.Update(blk);
        }

        // Re-scan transactions in higher blocks, skipping this block itself
        // (its transactions will be queued behind this insert). For most
        // re-orgs this won't actually have anything to process.
        foreach (var blk in newChainBlocks.Where(x => x.BlockHash != block.BlockHash))
        {
            // BUGFIX: scan the transactions of the block being re-scanned
            // (blk); the original iterated block.Transactions every time.
            foreach (var transaction in blk.Transactions)
            {
                SubscriptionCheck.CheckForSubscription(transaction);
            }
        }

        // Re-org handled; this block is the new chaintip, so fall through to
        // the normal insert below.
    }

    // Regular block insert: we have the previous block, and it is (now) our
    // last known chaintip.
    chainTipBlock.IsChainTip = false;
    _blocks.Update(chainTipBlock);
    block.IsChainTip = true;
    _chainTipHash = block.BlockHash;
    _blocks.Insert(block);
}
/// <summary>
/// Adds a car to the collection. LiteDB's Insert is synchronous, so the
/// original's await of Task.FromResult was pure overhead; the insert runs
/// inline and a completed task is returned.
/// </summary>
/// <param name="item">Car to persist.</param>
public Task AddItemAsync(Car item)
{
    cars.Insert(item);
    return Task.CompletedTask;
}
// Inserts an item into the collection whose name is resolved by the
// collection-discovery logic for type T.
public void Insert <T>(T item)
{
    var collectionName = collectionDiscoveryLogic.GetName <T>();
    db.GetCollection <T>(collectionName).Insert(item);
}
// Test setup: creates a fresh in-memory database, binds the "person"
// collection, and seeds it with the local person instance.
public void Init()
{
    db = new LiteDatabase(":memory:");
    collection = db.GetCollection <Person>("person");
    collection.Insert(local);
}
// Traces the call via DLiteUtil.Whisper, then delegates the id-keyed insert
// to the wrapped collection.
public void Insert(BsonValue id, T entity)
{
    object[] callArgs = { id, entity };
    DLiteUtil.Whisper(Name, "Insert", callArgs);
    LiteCollection.Insert(id, entity);
}
/// <summary>
/// Insert the provided EmbedEntry into the database. The entry's Id is set to
/// a hash of its slug and guild before insertion.
/// </summary>
/// <param name="db">Collection the entry is stored in.</param>
/// <param name="entry">Entry to persist.</param>
/// <returns>The document id of the inserted entry.</returns>
public static BsonValue InsertEntry(this LiteCollection <EmbedEntry> db, EmbedEntry entry)
{
    entry.Id = Hash(entry.Slug, entry.Guild);
    return db.Insert(entry);
}
// Re-resolves the customer collection from the context before storing the object.
public void Insert(Customer obj)
{
    collection = _context.GetCollection <Customer>(nameOfCollection);
    collection.Insert(obj);
}
/// <summary>
/// Stress-test driver: seeds the "logs" collection with a main-thread record
/// plus one record per simulated thread, runs all TaskInsert workers in
/// parallel, then stamps the main record with the total elapsed time.
/// Removed: the unused flagJoin local and the commented-out manual-Thread
/// variant it belonged to.
/// </summary>
private void MainProcess()
{
    int totalThreads = 1000;

    // Seed the main-thread log row (this also creates the database file
    // before the parallel workers open it).
    using (var db = new LiteDatabase(_fdb))
    {
        var col = db.GetCollection <Log>("logs");
        var context = new Log { Name = "Thread_Main", CreatedDate = DateTime.Now, StartedOn = DateTime.Now };
        col.Insert(context);
    }

    // Pre-insert one Log row per worker and build the matching action list.
    List <Action> actionsList = new List <Action>();
    for (int i = 0; i < totalThreads; i++)
    {
        int threadId = (i + 1);
        using (var db = new LiteDatabase(_fdb))
        {
            LiteCollection <Log> col = db.GetCollection <Log>("logs");
            Log context = new Log { Name = String.Concat("Thread_", threadId), CreatedDate = DateTime.Now };
            col.Insert(context);
        }
        void action() => TaskInsert(threadId);
        actionsList.Add(action);
    }

    // Run every worker concurrently; ForEach blocks until all complete.
    Parallel.ForEach(actionsList, (o => o()));

    // Close out the main-thread record with the total elapsed time.
    using (var db = new LiteDatabase(_fdb))
    {
        LiteCollection <Log> col = db.GetCollection <Log>("logs");
        Log result = col.FindOne(x => x.Name.Equals("Thread_Main"));
        if (result != null)
        {
            result.FinishedOn = DateTime.Now;
            result.ModifiedDate = DateTime.Now;
            result.TotalMiliSeconds = (int)(DateTime.Now - result.StartedOn.Value).TotalMilliseconds;
            col.Update(result);
        }
    }
}
/// <summary>
/// Per-worker body of the stress test: finds this worker's Log row, stamps
/// its start time, inserts a State document, then stamps the finish time and
/// durations. Each database operation deliberately opens its own
/// LiteDatabase connection to exercise concurrent open/close.
/// </summary>
/// <param name="threadId">Boxed int identifying the worker's Log row.</param>
public void TaskInsert(object threadId)
{
    string threadName = String.Concat("Thread_", (int)threadId);
    string id = Guid.NewGuid().ToString();
    string code = id.Substring(0, 4);
    int totalBytes = 0;          // placeholder payload size (randomisation disabled)
    int sleepingMiliSeconds = 0; // placeholder sleep (randomisation disabled)

    try
    {
        Log logResult = new Log();

        // Find this worker's Log row.
        using (var db = new LiteDatabase(_fdb))
        {
            LiteCollection <Log> col = db.GetCollection <Log>("logs");
            logResult = col.FindOne(x => x.Name.Equals(threadName));
        }

        // Stamp the start time on the row.
        using (var db = new LiteDatabase(_fdb))
        {
            if (logResult != null && logResult.Id > 0)
            {
                LiteCollection <Log> col = db.GetCollection <Log>("logs");
                logResult.StartedOn = DateTime.Now;
                logResult.SleepingMiliseconds = sleepingMiliSeconds;
                logResult.TotaBytes = totalBytes;
                col.Update(logResult);
            }
        }

        // Insert a State row, ensure its Name index, and issue a query.
        using (var db = new LiteDatabase(_fdb))
        {
            LiteCollection <State> col = db.GetCollection <State>("states");
            State context = new State { Name = id, Code = code };
            col.Insert(context);
            col.EnsureIndex(x => x.Name);
            // NOTE(review): Find returns a deferred enumerable that is never
            // enumerated here, so this query performs no actual work — kept
            // to preserve the original's behavior.
            var list = col.Find(x => x.Name.StartsWith(code));
        }

        // Stamp the finish time and computed durations.
        using (var db = new LiteDatabase(_fdb))
        {
            if (logResult != null && logResult.Id > 0)
            {
                LiteCollection <Log> col = db.GetCollection <Log>("logs");
                logResult.FinishedOn = DateTime.Now;
                logResult.ModifiedDate = DateTime.Now;
                // BUGFIX: the original also tested StartedOn.Value != null,
                // but .Value is a non-nullable DateTime, so that comparison
                // was always true (and a compiler warning). One null check
                // on the nullable itself is sufficient.
                if (logResult.StartedOn != null)
                {
                    logResult.TotalMiliSeconds = (int)(DateTime.Now - logResult.StartedOn.Value).TotalMilliseconds;
                    logResult.RealJobMiliSeconds = (int)(DateTime.Now - logResult.StartedOn.Value).TotalMilliseconds - sleepingMiliSeconds;
                }
                else
                {
                    logResult.TotalMiliSeconds = null;
                    logResult.RealJobMiliSeconds = null;
                }
                col.Update(logResult);
            }
        }
    }
    catch (Exception ex)
    {
        // Report any failure to the Windows event log; the stress test keeps going.
        using (EventLog eventLog = new EventLog("Application"))
        {
            eventLog.Source = "Application";
            eventLog.Log = "Application";
            eventLog.WriteEntry(String.Concat("Ex: ", ex.Message, "\r\n StackTrace: ", ex.StackTrace), EventLogEntryType.Error, 4001);
        }
    }
}
/// <summary>
/// Resets the player collection to a fixed seed of five well-known players.
/// </summary>
/// <param name="collection">Target LiteDB player collection.</param>
private static void InitPlayers(LiteCollection <Player> collection)
{
    // Delete previous data. Query.All() (as the other Save* helpers in this
    // file use) replaces the original FindAll()+Delete loop, which deleted
    // from the collection while still enumerating it.
    collection.Delete(Query.All());

    var player1 = new Player
    {
        Id = 52,
        Firstname = "Novak",
        Lastname = "Djokovic",
        Shortname = "N.DJO",
        Sex = "M",
        Country = new Country { Picture = "https://i.eurosport.com/_iss_/geo/country/flag/medium/6944.png", Code = "SRB" },
        Picture = "https://i.eurosport.com/_iss_/person/pp_clubteam/large/565920.jpg",
        Data = new Data { Rank = 2, Points = 2542, Weight = 80000, Height = 188, Age = 31, Last = new List <long> { 1, 1, 1, 1, 1 } }
    };
    var player2 = new Player
    {
        Id = 95,
        Firstname = "Venus",
        Lastname = "Williams",
        Shortname = "V.WIL",
        Sex = "F",
        Country = new Country { Picture = "https://i.eurosport.com/_iss_/person/pp_clubteam/large/136449.jpg", Code = "USA" },
        Picture = "https://i.eurosport.com/_iss_/person/pp_clubteam/large/136450.jpg",
        Data = new Data { Rank = 52, Points = 1105, Weight = 74000, Height = 185, Age = 38, Last = new List <long> { 0, 1, 0, 0, 1 } }
    };
    var player3 = new Player
    {
        Id = 65,
        Firstname = "Stan",
        Lastname = "Wawrinka",
        Shortname = "S.WAW",
        Sex = "M",
        Country = new Country { Picture = "https://i.eurosport.com/_iss_/geo/country/flag/large/2213.png", Code = "SUI" },
        Picture = "https://i.eurosport.com/_iss_/person/pp_clubteam/large/325225.jpg",
        Data = new Data { Rank = 21, Points = 1784, Weight = 81000, Height = 183, Age = 33, Last = new List <long> { 1, 1, 1, 0, 1 } }
    };
    var player4 = new Player
    {
        Id = 102,
        Firstname = "Serena",
        Lastname = "Williams",
        Shortname = "S.WIL",
        Sex = "F",
        Country = new Country { Picture = "https://i.eurosport.com/_iss_/geo/country/flag/medium/2209.png", Code = "USA" },
        Picture = "https://i.eurosport.com/_iss_/person/pp_clubteam/large/136450.jpg",
        Data = new Data { Rank = 10, Points = 3521, Weight = 72000, Height = 175, Age = 37, Last = new List <long> { 0, 1, 1, 1, 0 } }
    };
    var player5 = new Player
    {
        Id = 17,
        Firstname = "Rafael",
        Lastname = "Nadal",
        Shortname = "R.NAD",
        Sex = "M",
        Country = new Country { Picture = "https://i.eurosport.com/_iss_/geo/country/flag/large/2203.png", Code = "ESP" },
        Picture = "https://i.eurosport.com/_iss_/person/pp_clubteam/large/435121.jpg",
        Data = new Data { Rank = 1, Points = 1982, Weight = 85000, Height = 185, Age = 33, Last = new List <long> { 1, 0, 0, 0, 1 } }
    };

    collection.Insert(player1);
    collection.Insert(player2);
    collection.Insert(player3);
    collection.Insert(player4);
    collection.Insert(player5);
}
// Persists a single data unit in the backing collection.
public void Create(CoreDataUnit dataUnit) => _collection.Insert(dataUnit);
/// <summary>
/// Applies the COM port/baud selections from the UI to the program settings
/// and, when a port is selected, persists them as the last-used COM settings.
/// </summary>
private void ApplyComSettings()
{
    string portName = null;
    int baudRate = (int)SerialHandler.default_settings.BaudRate;
    COMSettings comSettings = new COMSettings();

    if (UIcomSourcesList.SelectedIndex >= 0)
    {
        portName = portNames[UIcomSourcesList.SelectedIndex];
        Program.settings.portName = portName;
        Program.settings.portDescription = UIcomSourcesList.Items[UIcomSourcesList.SelectedIndex].ToString();
    }

    if (UIbaudRateInputList.SelectedIndex >= 0)
    {
        baudRate = (int)UIbaudRateInputList.Items[UIbaudRateInputList.SelectedIndex];
        comSettings.baud_rate = baudRate;
    }

    // Fix this: framing and newline are currently hard-coded.
    comSettings.config = "8N1";
    comSettings.newline = "\r\n";
    Program.settings.comSettings = comSettings;

    if (portName != null)
    {
        // Save last COM settings to the database.
        string path = Program.settings.DatabasePath;
        using (LiteDatabase db = new LiteDatabase(@path))
        {
            LiteCollection <DB_ComSettingsItem> collection = db.GetCollection <DB_ComSettingsItem>(Program.settings.COMSettingsDB);
            DB_ComSettingsItem store = new DB_ComSettingsItem
            {
                baud_rate = baudRate,
                Parity = (int)SerialHandler.default_settings.Parity,
                DataBits = (int)SerialHandler.default_settings.DataBits,
                StopBits = (int)SerialHandler.default_settings.StopBits,
                NewLine = @SerialHandler.default_settings.NewLine,
                Config = "8N1",
                PortDescription = Program.settings.portDescription,
                PortName = Program.settings.portName
            };

            // PERF: the original enumerated FindAll() twice (Count() > 0,
            // then First()); a single FirstOrDefault() does the same job.
            DB_ComSettingsItem existing = collection.FindAll().FirstOrDefault();
            if (existing != null)
            {
                // Update the existing item, preserving its id.
                store.Id = existing.Id;
                collection.Update(store);
            }
            else
            {
                // Add item.
                collection.Insert(store);
            }
        }
    }
}
// Applies the create event to the database by inserting its Customer payload.
public void ApplyCreate(IResourceContext resource, CreateEventArgs ev)
{
    var customer = (Customer)ev.Data;
    customers.Insert(customer);
}
// Records a login event for the given user at the given timestamp.
public void insertNewLogin(long time, string username)
{
    var entry = newLoginEntry(time, username);
    loginColl.Insert(entry);
}
// Bulk-inserts every media file in the supplied collection into the tracks store.
public void Insert(ObservableRangeCollection <Mediafile> fileCol) => tracks.Insert(fileCol);
/// <summary>
/// Fetches a character from the given character.pf2.tools URL and adds it to the database.
/// </summary>
/// <param name="url">Url of the character</param>
/// <param name="context">Command context identifying the owning user.</param>
/// <returns>Parsed and updated character</returns>
/// <exception cref="Exception">
/// Thrown when the url is not a valid character.pf2.tools url, or when the
/// remote id does not resolve to a public character.
/// </exception>
public async Task <Character> NewCharacter(string url, SocketCommandContext context)
{
    var regex = new Regex(@"(\w*\W*)?\?(\w*)\-?");
    if (!regex.IsMatch(url))
    {
        // BUGFIX: user-facing typo "Makesure" corrected to "Make sure".
        throw new Exception("This is not a valid character.pf2.tools url. Make sure you copy the full url!");
    }

    var match = regex.Match(url);
    var id = match.Groups[2].Value;
    Character character = new Character()
    {
        Owner = context.User.Id,
        RemoteId = id
    };

    HttpResponseMessage response = await Client.GetAsync(Api + id);
    response.EnsureSuccessStatusCode();
    string responsebody = await response.Content.ReadAsStringAsync();
    var json = JObject.Parse(responsebody);
    if (json.ContainsKey("error"))
    {
        throw new Exception("This is not a valid character ID or the character is not set to public.");
    }

    character.LastUpdated = DateTime.Now;
    character.Type = Enum.Parse <SheetType>(((string)json["data"]["type"]).Uppercase());
    character.Name = (string)json["data"]["name"] ?? "Unnamed Character";

    // The first custom note tagged "character" / "companion" whose body is an
    // image URL becomes the character / familiar portrait.
    var notes = json["data"]["customnotes"];
    if (notes != null && notes.HasValues)
    {
        foreach (var n in notes.Where(x => (string)x["uiid"] == "character"))
        {
            if (((string)n["body"]).IsImageUrl())
            {
                character.ImageUrl = (string)n["body"];
                break;
            }
        }
        foreach (var n in notes.Where(x => (string)x["uiid"] == "companion"))
        {
            if (((string)n["body"]).IsImageUrl())
            {
                character.FamImg = (string)n["body"];
                break;
            }
        }
    }

    // BUGFIX: guard the familiars lookup. The original indexed
    // json["data"]["familiars"][0] unconditionally, which faults when the
    // "familiars" token is missing or empty.
    var familiars = json["data"]["familiars"];
    var familiar = (familiars != null && familiars.HasValues) ? familiars[0] : null;
    if (familiar != null && familiar["name"] != null)
    {
        character.Familiar = (string)familiar["name"];
    }
    else
    {
        character.Familiar = null;
    }

    collection.Insert(character);
    collection.EnsureIndex("character", "LOWER($.Name)");
    collection.EnsureIndex(x => x.RemoteId);
    collection.EnsureIndex(x => x.Type);
    collection.EnsureIndex(x => x.Owner);
    return collection.FindOne(x => x.RemoteId == id);
}
/// <summary>
/// Offloads the synchronous insert to the thread pool.
/// NOTE(review): the token only prevents the work item from starting; it does
/// not cancel an insert that is already running.
/// </summary>
public virtual Task InsertAsync(T document, CancellationToken token = default) =>
    Task.Run(() => _collection.Insert(document), token);
// Replaces the entire machines collection with the supplied list:
// wipes all existing documents, then bulk-inserts the new ones.
public static void SaveMachines(List <Machine> machines)
{
    machinesCollection.Delete(Query.All());
    machinesCollection.Insert(machines);
}
// Inserts a single document into the supplied collection.
public void InsertDocument <T>(LiteCollection <T> collection, T data) => collection.Insert(data);
// Replaces whatever MameInfo is stored with the supplied instance:
// clears the collection, then inserts the new record.
public static void SaveMameInfo(MameInfo mameInfo)
{
    mameInfoCollection.Delete(Query.All());
    mameInfoCollection.Insert(mameInfo);
}
// Stores a recipe in the backing collection.
public void Add(Recipe recipe)
{
    liteCollection.Insert(recipe);
}
// Inserts the note and returns its generated document id as an int.
public int Create(Note obj)
{
    BsonValue newId = col.Insert(obj);
    return newId.AsInt32;
}
// Persists a block in the blocks collection.
public void AddBlock(Block block) => _blocks.Insert(block);
// Creates and stores a new issue with the given title and content.
public void CreateIssue(string title, string content)
{
    var issue = new Issue { Title = title, Content = content };
    collection.Insert(issue);
}
// Stores a single LSPM data record in the backing collection.
public void insert(LSPMData dataInsert) => data.Insert(dataInsert);
// Stores a new expense record in the gastos collection.
public static void InsertGasto(OGasto gasto) => collGastos.Insert(gasto);