// Loads DLC rows into the given collection according to the load policy:
// either wipes and reloads everything, or merges by UniqueKey (inserting new
// rows and/or updating existing ones). Returns a (currently always empty)
// list of messages.
private List<string> LoadDlcListAfterFiltering(LiteCollection<T> dbDlc, List<T> toLoad, DlcLoadPolicy loadPolicy)
{
    // Single timestamp so every row touched by this load shares the same audit time.
    // NOTE(review): DateTime.Now is local time — confirm UTC isn't expected here.
    var timeStamp = DateTime.Now;
    //this might get trickier once we have mapping tables in place
    if (loadPolicy.ClearExistingBeforeLoad)
    {
        // Full reload: drop all existing rows, stamp and bulk-insert the new set.
        dbDlc.Delete(x => true);
        foreach (var item in toLoad.OfType<BaseEntity>())
        {
            item.CreateDateTime = timeStamp;
            item.UpdatedDateTime = timeStamp;
        }
        dbDlc.InsertBulk(toLoad);
    }
    else
    {
        // Merge mode: partition incoming rows by whether their UniqueKey is
        // already present in the database.
        var inDb = dbDlc.FindAll();
        var existingByUniqueKey = inDb.ToDictionary(x => x.UniqueKey);
        // Deferred query — evaluated only if UpdateExisting is enabled below.
        var dlcThatMatchesExisting = toLoad.Where(x => existingByUniqueKey.ContainsKey(x.UniqueKey));
        var newRows = toLoad.Where(x => !existingByUniqueKey.ContainsKey(x.UniqueKey)).ToList();
        if (loadPolicy.AddNewItems)
        {
            foreach (var item in newRows.OfType<BaseEntity>())
            {
                item.CreateDateTime = timeStamp;
                item.UpdatedDateTime = timeStamp;
            }
            dbDlc.InsertBulk(newRows);
        }
        if (loadPolicy.UpdateExisting)
        {
            foreach (var dlcMatch in dlcThatMatchesExisting)
            {
                var existingItem = existingByUniqueKey[dlcMatch.UniqueKey];
                // _funcUpdateRow copies incoming values onto the existing row
                // and reports whether anything actually changed; only changed
                // rows get their UpdatedDateTime bumped and are persisted.
                if (_funcUpdateRow(existingItem, dlcMatch))
                {
                    var baseEntity = existingItem as BaseEntity;
                    if (baseEntity != null)
                    {
                        baseEntity.UpdatedDateTime = timeStamp;
                    }
                    dbDlc.Update(existingItem);
                }
            }
        }
    }
    return (new List<string>());
}
/// <summary>
/// Bulk-inserts the given records into the current collection on a
/// background thread-pool thread.
/// </summary>
/// <param name="records">The records to persist.</param>
/// <returns>A task that completes once the bulk insert has finished.</returns>
public Task InsertRecords(IEnumerable<IDbRecord> records) =>
    Task.Run(() => { currentCollection.InsertBulk(records); });
/// <inheritdoc/>
public override void ForkTxNonces(Guid sourceChainId, Guid destinationChainId)
{
    // Copy every tx-nonce document from the source chain into the destination.
    LiteCollection<BsonDocument> source = TxNonceCollection(sourceChainId);
    LiteCollection<BsonDocument> destination = TxNonceCollection(destinationChainId);
    destination.InsertBulk(source.FindAll());
}
public override void ForkBlockIndexes(
    Guid sourceChainId,
    Guid destinationChainId,
    BlockHash branchpoint)
{
    LiteCollection<HashDoc> source = IndexCollection(sourceChainId);

    // An empty source index means the chain was never created.
    if (!source.Exists(_ => true))
    {
        throw new ChainIdNotFoundException(
            sourceChainId,
            $"No such chain ID: {sourceChainId}."
        );
    }

    LiteCollection<HashDoc> destination = IndexCollection(destinationChainId);
    BlockHash? genesis = IterateIndexes(sourceChainId, 0, 1)
        .Cast<BlockHash?>()
        .FirstOrDefault();

    // Forking at (or before) the genesis block leaves nothing to copy.
    if (genesis is null || branchpoint.Equals(genesis))
    {
        return;
    }

    // Replace the destination index with everything up to (excluding) the
    // branch point, then record the branch point itself.
    destination.Delete(Query.All());
    destination.InsertBulk(
        source.FindAll().TakeWhile(doc => !doc.Hash.Equals(branchpoint)));
    AppendIndex(destinationChainId, branchpoint);
}
/// <inheritdoc/>
public override void ForkStateReferences<T>(
    Guid sourceChainId,
    Guid destinationChainId,
    Block<T> branchPoint)
{
    LiteCollection<StateRefDoc> source =
        _db.GetCollection<StateRefDoc>(StateRefId(sourceChainId));
    LiteCollection<StateRefDoc> destination =
        _db.GetCollection<StateRefDoc>(StateRefId(destinationChainId));

    // Copy the state references whose block index lies in (0, branchPoint.Index].
    Query range = Query.And(
        Query.GT("BlockIndex", 0),
        Query.LTE("BlockIndex", branchPoint.Index)
    );
    destination.InsertBulk(source.Find(range));

    // Nothing copied and no index rows means the source chain never existed.
    if (!destination.Exists(_ => true) && CountIndex(sourceChainId) < 1)
    {
        throw new ChainIdNotFoundException(
            sourceChainId,
            "The source chain to be forked does not exist."
        );
    }

    destination.EnsureIndex("AddressString");
    destination.EnsureIndex("BlockIndex");
    _lastStateRefCaches.Remove(destinationChainId);
}
/// <summary>
/// Imports a CSV file into the DB, the CSV file should be formatted as
///
/// Artist, ReleaseName, ReleaseYear
///
/// Blank lines and rows with fewer than three fields are skipped, so a
/// trailing newline or a malformed row no longer throws
/// IndexOutOfRangeException mid-import.
/// </summary>
/// <param name="path">The path to a .csv file containing appropriate data.</param>
public void Import(string path)
{
    var newEntries = new List<Record>();
    using (var reader = new StreamReader(path))
    {
        while (!reader.EndOfStream)
        {
            var line = reader.ReadLine();
            if (string.IsNullOrWhiteSpace(line))
            {
                continue; // tolerate blank lines / trailing newline at EOF
            }

            var values = line.Split(',');
            if (values.Length < 3)
            {
                continue; // malformed row: not enough columns
            }

            newEntries.Add(new Record
            {
                Artist = values[0],
                ReleaseName = values[1],
                ReleaseYear = values[2]
            });
        }
    }

    // Fire-and-forget persistence; the in-memory list is updated immediately
    // so the UI reflects the import without waiting for the DB write.
    Task.Run(() => { _Records.InsertBulk(newEntries); });
    Records.AddRange(newEntries);
    RaisePropertyChanged(nameof(Records));
}
/// <inheritdoc/>
public override void ForkStateReferences<T>(
    Guid sourceChainId,
    Guid destinationChainId,
    Block<T> branchPoint)
{
    LiteCollection<StateRefDoc> source =
        _liteDb.GetCollection<StateRefDoc>(StateRefId(sourceChainId));
    LiteCollection<StateRefDoc> destination =
        _liteDb.GetCollection<StateRefDoc>(StateRefId(destinationChainId));

    // Copy every state reference up to and including the branch point.
    destination.InsertBulk(
        source.Find(Query.LTE("BlockIndex", branchPoint.Index)));

    // Nothing copied and no index rows means the source chain never existed.
    if (!destination.Exists(_ => true) && CountIndex(sourceChainId) < 1)
    {
        throw new ChainIdNotFoundException(
            sourceChainId,
            "The source chain to be forked does not exist."
        );
    }

    destination.EnsureIndex(nameof(StateRefDoc.StateKey));
    destination.EnsureIndex(nameof(StateRefDoc.BlockIndex));
    _lastStateRefCaches.Remove(destinationChainId);
}
/// <summary>
/// Checks whether the requested booking window is free: availability means no
/// stored booking matches the same start/end dates with at least the
/// requested passenger count.
/// </summary>
/// <param name="booking">The booking request to test.</param>
/// <returns>
/// A response with IsAvailable set, or null when the lookup failed.
/// </returns>
public async Task<BookingResponse> IsAvailable(Booking booking)
{
    BookingResponse bookingResponse = null;
    try
    {
        #region Code to create sample data
        // Reset and (re)seed the sample collection on every call.
        SampleData.Delete(d => d.NoOfPax >= 0);
        if (SampleData.Count() == 0)
        {
            SampleData.InsertBulk(CreateBookingData());
        }
        #endregion

        var result = SampleData.Find(b =>
            b.StartDate == booking.StartDate &&
            b.EndDate == booking.EndDate &&
            b.NoOfPax >= int.Parse(booking.NoOfPax));

        bookingResponse = new BookingResponse
        {
            // Available only when no existing booking matched.
            // (Replaces the redundant `Count() == 0 ? false : true` ternary.)
            IsAvailable = !result.Any()
        };
    }
    catch (Exception)
    {
        // Intentionally swallowed (kept from original design): a failed
        // lookup — including int.Parse on NoOfPax — yields a null response.
        // TODO: log the exception so failures are not invisible.
    }

    return await Task.FromResult(bookingResponse);
}
/// <summary>
/// Maps the given candles to their storage adapters and bulk-inserts them
/// into the period-specific candle table for this backtest.
/// </summary>
public async Task SaveBacktestCandlesBulk(List<Candle> candles, BacktestOptions backtestOptions)
{
    var adapters = Mapping.Mapper.Map<List<CandleAdapter>>(candles);
    var table = DataStoreBacktest
        .GetInstance(GetDatabase(backtestOptions))
        .GetTable<CandleAdapter>("Candle_" + backtestOptions.CandlePeriod);
    table.EnsureIndex("Timestamp");
    table.InsertBulk(adapters);
}
/// <summary>
/// Bulk-inserts the given accounts; a no-op when the database has not been
/// opened yet.
/// </summary>
public static void InsertList(List<Account> accountList)
{
    if (db != null)
    {
        accounts.InsertBulk(accountList);
    }
}
/// <summary>
/// Write batch of records to database.
/// </summary>
/// <typeparam name="T">Type of records being written.</typeparam>
/// <param name="collectionName">Name of the target collection.</param>
/// <param name="list">Records to insert.</param>
/// <returns>Number of documents inserted.</returns>
public int WriteBulk<T>(string collectionName, List<T> list)
{
    // Route LiteDB's internal logging through our own logger at full verbosity.
    var dbLogger = new Logger(Logger.FULL, (o) => { _logger.Debug(o); });
    using (var db = new LiteDatabase("Filename=LogParser.db; Mode=Exclusive;", null, dbLogger))
    {
        return db.GetCollection<T>(collectionName).InsertBulk(list);
    }
}
/// <inheritdoc/>
public override void ForkBlockIndexes(
    Guid sourceChainId,
    Guid destinationChainId,
    HashDigest<SHA256> branchPoint)
{
    LiteCollection<HashDoc> source = IndexCollection(sourceChainId);
    LiteCollection<HashDoc> destination = IndexCollection(destinationChainId);

    // Copy indexes up to (but excluding) the branch point, then record the
    // branch point itself on the destination chain.
    IEnumerable<HashDoc> upToBranch =
        source.FindAll().TakeWhile(doc => !doc.Hash.Equals(branchPoint));
    destination.InsertBulk(upToBranch);
    AppendIndex(destinationChainId, branchPoint);
}
/// <summary>
/// Adds a batch of items to the queue. See <see cref="Enqueue(T)"/> for adding a single item.
/// </summary>
/// <param name="items">The items to enqueue, in order.</param>
public void Enqueue(IEnumerable<T> items)
{
    // Materialize before the bulk insert so the wrapping happens once.
    var entries = items.Select(item => new QueueEntry<T>(item)).ToList();
    _collection.InsertBulk(entries);
}
/// <summary>
/// Replaces every stored model with the supplied results, first assigning a
/// fresh ObjectId to any result that does not have one yet.
/// </summary>
public void UpdateAllResult(List<MyModel> results)
{
    foreach (var model in results.Where(r => string.IsNullOrEmpty(r.Id)))
    {
        model.Id = ObjectId.NewObjectId().ToString();
    }

    // Wipe-and-reload: clear all existing rows, then bulk-insert the new set.
    _models.Delete(m => true);
    _models.InsertBulk(results);
}
/// <summary>
/// Initializes the repository and seeds the learning-data collection when it
/// is empty. Seeding is best-effort: any failure is swallowed so it cannot
/// prevent startup.
/// </summary>
private void SeedRepository()
{
    try
    {
        InitRepository();
        if (_learningDataCollection.Count() == 0)
        {
            _learningDataCollection.InsertBulk(_seeder.ReadLearningData());
        }
    }
    catch (Exception)
    {
        // Intentionally swallowed (kept from the original design); the unused
        // `ex` variable was removed to silence CS0168.
        // TODO: log so seeding failures are not completely invisible.
    }
}
public override void ForkTxNonces(Guid sourceChainId, Guid destinationChainId)
{
    LiteCollection<BsonDocument> source = TxNonceCollection(sourceChainId);

    // An empty source collection means the chain was never created.
    if (!source.Exists(_ => true))
    {
        throw new ChainIdNotFoundException(
            sourceChainId,
            $"No such chain ID: {sourceChainId}."
        );
    }

    TxNonceCollection(destinationChainId).InsertBulk(source.FindAll());
}
/// <summary>
/// Ensures the site-item collection is populated, seeding it when empty.
/// </summary>
/// <returns>The number of rows inserted; 0 when data already existed.</returns>
public async Task<int> Initialize()
{
    return await Task.Run(() =>
    {
        using (var db = new LiteDatabase(configuration.ConnectionString))
        {
            var collection = db.GetCollection<SiteItem>();
            if (collection.FindAll().Any())
            {
                return 0;
            }

            return collection.InsertBulk(GetItems());
        }
    });
}
/// <summary>
/// Replaces the stored signals for every strategy present in the batch, then
/// bulk-inserts the new signals into the period-specific signal table.
/// </summary>
public async Task SaveBacktestTradeSignalsBulk(List<TradeSignal> signals, BacktestOptions backtestOptions)
{
    var items = Mapping.Mapper.Map<List<TradeSignalAdapter>>(signals);
    LiteCollection<TradeSignalAdapter> itemCollection =
        DataStoreBacktest.GetInstance(GetDatabase(backtestOptions))
            .GetTable<TradeSignalAdapter>("Signals_" + backtestOptions.CandlePeriod);

    // Delete once per distinct strategy; the original issued the same delete
    // repeatedly for every item sharing a strategy name.
    foreach (var strategyName in items.Select(i => i.StrategyName).Distinct().ToList())
    {
        itemCollection.Delete(i => i.StrategyName == strategyName);
    }

    itemCollection.EnsureIndex("Timestamp");
    itemCollection.InsertBulk(items);
}
/// <inheritdoc/>
public override void ForkBlockIndexes(
    Guid sourceChainId,
    Guid destinationChainId,
    HashDigest<SHA256> branchPoint)
{
    LiteCollection<HashDoc> source = IndexCollection(sourceChainId);
    LiteCollection<HashDoc> destination = IndexCollection(destinationChainId);

    // Throws InvalidOperationException when the source chain has no index,
    // matching the original .First() behavior.
    var genesisHash = IterateIndexes(sourceChainId, 0, 1).First();

    // Copy the entries strictly between genesis and the branch point;
    // Skip(1) drops the genesis entry itself.
    destination.InsertBulk(
        source.FindAll()
            .TakeWhile(doc => !doc.Hash.Equals(branchPoint))
            .Skip(1));

    // Only append the branch point when it is not the genesis block.
    if (!branchPoint.Equals(genesisHash))
    {
        AppendIndex(destinationChainId, branchPoint);
    }
}
/// <inheritdoc/>
public override void StoreStateReference<T>(
    string @namespace,
    IImmutableSet<Address> addresses,
    Block<T> block)
{
    LiteCollection<StateRefDoc> coll =
        _db.GetCollection<StateRefDoc>(StateRefId(@namespace));

    // One document per address, all pointing at the same block.
    IEnumerable<StateRefDoc> docs = addresses.Select(addr => new StateRefDoc
    {
        Address = addr,
        BlockIndex = block.Index,
        BlockHash = block.Hash,
    });
    coll.InsertBulk(docs);

    coll.EnsureIndex("AddressString");
    coll.EnsureIndex("BlockIndex");
}
/// <inheritdoc/>
public override void ForkStateReferences<T>(
    Guid sourceChainId,
    Guid destinationChainId,
    Block<T> branchPoint)
{
    LiteCollection<StateRefDoc> source =
        _db.GetCollection<StateRefDoc>(StateRefId(sourceChainId));
    LiteCollection<StateRefDoc> destination =
        _db.GetCollection<StateRefDoc>(StateRefId(destinationChainId));

    // Copy every state reference up to and including the branch point.
    destination.InsertBulk(
        source.Find(Query.LTE("BlockIndex", branchPoint.Index)));

    // Nothing copied means the source chain never existed.
    if (destination.Count() < 1)
    {
        throw new ChainIdNotFoundException(
            sourceChainId,
            "The source chain to be forked does not exist."
        );
    }
}
/// <inheritdoc/>
public override void StoreStateReference(
    Guid chainId,
    IImmutableSet<string> keys,
    HashDigest<SHA256> blockHash,
    long blockIndex)
{
    string collId = StateRefId(chainId);
    LiteCollection<StateRefDoc> coll = _liteDb.GetCollection<StateRefDoc>(collId);

    // Build one doc per state key; skip docs whose Id already exists so
    // re-storing the same block is idempotent.
    IEnumerable<StateRefDoc> stateRefDocs = keys
        .Select(key => new StateRefDoc
        {
            StateKey = key,
            BlockIndex = blockIndex,
            BlockHash = blockHash,
        })
        .Where(doc => !coll.Exists(d => d.Id == doc.Id));

    coll.InsertBulk(stateRefDocs);
    coll.EnsureIndex(nameof(StateRefDoc.StateKey));
    coll.EnsureIndex(nameof(StateRefDoc.BlockIndex));

    // Keep the per-chain "latest state reference" cache in sync: for each key
    // remember the reference with the highest block index seen so far.
    if (!_lastStateRefCaches.ContainsKey(chainId))
    {
        _lastStateRefCaches[chainId] =
            new LruCache<string, Tuple<HashDigest<SHA256>, long>>();
    }

    LruCache<string, Tuple<HashDigest<SHA256>, long>> stateRefCache =
        _lastStateRefCaches[chainId];

    foreach (string key in keys)
    {
        _logger.Debug($"Try to set cache {key}");
        // Only overwrite the cached entry when this block is newer.
        if (!stateRefCache.TryGetValue(key, out Tuple<HashDigest<SHA256>, long> cache)
            || cache.Item2 < blockIndex)
        {
            stateRefCache[key] =
                new Tuple<HashDigest<SHA256>, long>(blockHash, blockIndex);
        }
    }
}
/// <inheritdoc/>
public override void StoreStateReference(
    Guid chainId,
    IImmutableSet<Address> addresses,
    HashDigest<SHA256> blockHash,
    long blockIndex)
{
    string collId = StateRefId(chainId);
    LiteCollection<StateRefDoc> coll = _db.GetCollection<StateRefDoc>(collId);

    // Build one doc per address; skip docs whose Id already exists so
    // re-storing the same block is idempotent.
    IEnumerable<StateRefDoc> stateRefDocs = addresses
        .Select(addr => new StateRefDoc
        {
            Address = addr,
            BlockIndex = blockIndex,
            BlockHash = blockHash,
        })
        .Where(doc => !coll.Exists(d => d.Id == doc.Id));

    coll.InsertBulk(stateRefDocs);
    coll.EnsureIndex("AddressString");
    coll.EnsureIndex("BlockIndex");

    // Keep the per-chain "latest state reference" cache in sync: for each
    // address remember the reference with the highest block index seen so far.
    if (!_lastStateRefCaches.ContainsKey(chainId))
    {
        _lastStateRefCaches[chainId] =
            new LruCache<Address, Tuple<HashDigest<SHA256>, long>>();
    }

    LruCache<Address, Tuple<HashDigest<SHA256>, long>> stateRefCache =
        _lastStateRefCaches[chainId];

    foreach (Address address in addresses)
    {
        _logger.Debug($"Try to set cache {address}");
        // Only overwrite the cached entry when this block is newer.
        if (!stateRefCache.TryGetValue(address, out Tuple<HashDigest<SHA256>, long> cache)
            || cache.Item2 < blockIndex)
        {
            stateRefCache[address] =
                new Tuple<HashDigest<SHA256>, long>(blockHash, blockIndex);
        }
    }
}
// Rebuilds the JMDict database from scratch: clears all four collections,
// then re-inserts deduplicated senses, entries, and lookup key-value rows.
private void FillDatabase(IReadOnlyDictionary<string, List<JMDictEntry>> root)
{
    // Full wipe; this method regenerates all stored data.
    senses.Delete(_ => true);
    entries.Delete(_ => true);
    kvps.Delete(_ => true);
    version.Delete(_ => true);

    // Deduplicate senses and assign ids starting at 2 (i + 2).
    // NOTE(review): presumably 2 keeps ids clear of reserved values — confirm.
    var sensesDict = root.Values
        .SelectMany(e => e.SelectMany(p => p.Senses))
        .Distinct()
        .Select((s, i) => (s, i + 2))
        .ToDictionary(kvp => kvp.Item1, kvp => DbSense.From(kvp.Item1, kvp.Item2));

    // Deduplicate entries the same way, resolving each sense through sensesDict.
    var entriesDict = root.Values
        .SelectMany(e => e)
        .Distinct()
        .Select((e, i) => (e, i + 2))
        .ToDictionary(
            kvp => kvp.Item1,
            kvp => DbDictEntry.From(kvp.Item1, s => sensesDict[s], kvp.Item2));

    // Key: converted key-value doc; Value: the original (key, entries) pair.
    // NOTE(review): the InsertBulk below rebuilds the docs from kvp.Value, so
    // the keys of this dictionary appear unused — looks redundant; confirm.
    var kvpsDict = root
        .ToDictionary(kvp => DbDictEntryKeyValue.From(
            new KeyValuePair<string, IEnumerable<JMDictEntry>>(kvp.Key, kvp.Value),
            e => entriesDict[e]));

    senses.InsertBulk(sensesDict.Values);
    entries.InsertBulk(entriesDict.Values);
    kvps.InsertBulk(kvpsDict.Select(kvp => DbDictEntryKeyValue.From(
        new KeyValuePair<string, IEnumerable<JMDictEntry>>(kvp.Value.Key, kvp.Value.Value),
        e => entriesDict[e])));
    kvps.EnsureIndex(x => x.LookupKey);

    // Record the schema version; original file size/hash are unknown here.
    version.Insert(new DbDictVersion
    {
        DbVersion = Version,
        OriginalFileSize = -1,
        OriginalFileHash = Array.Empty<byte>()
    });
}
/// <inheritdoc/>
public override void ForkStateReferences<T>(
    string srcNamespace,
    string destNamespace,
    Block<T> branchPoint,
    IImmutableSet<Address> addressesToStrip)
{
    LiteCollection<StateRefDoc> source =
        _db.GetCollection<StateRefDoc>(StateRefId(srcNamespace));
    LiteCollection<StateRefDoc> destination =
        _db.GetCollection<StateRefDoc>(StateRefId(destNamespace));

    // Copy every state reference up to and including the branch point.
    destination.InsertBulk(
        source.Find(Query.LTE("BlockIndex", branchPoint.Index)));

    // An empty copy combined with addresses to strip indicates the source
    // namespace never existed.
    if (destination.Count() < 1 && addressesToStrip.Any())
    {
        throw new NamespaceNotFoundException(
            srcNamespace,
            "The source namespace to be forked does not exist."
        );
    }
}
/// <summary>
/// Generates every pairwise combination of stock creatures found in the
/// stock directory and bulk-inserts the results into the collection.
/// </summary>
private void CreateCreatures(LiteCollection<Creature> collection)
{
    // Derive bare stock names from the .lua files in the stock directory.
    var stockNames = Directory.GetFiles(DirectoryConstants.StockDirectory)
        .Select(s => s.Replace(".lua", "")
            .Replace(DirectoryConstants.StockDirectory, ""))
        .ToList();

    CreatureCombiner creatureCombiner = new CreatureCombiner(stockNames);

    // Use the List.Count property instead of re-invoking LINQ's Count()
    // extension in every loop-bound check, as the original did.
    int count = stockNames.Count;

    // Iterate unordered pairs (i < j) so each combination is produced once.
    for (int i = 0; i < count; i++)
    {
        for (int j = i + 1; j < count; j++)
        {
            List<Creature> creatures = creatureCombiner
                .CreateAllPossibleCreatures(
                    StockNames.ProperStockNames[stockNames[i]],
                    StockNames.ProperStockNames[stockNames[j]]);
            collection.InsertBulk(creatures);
        }
    }
}
/// <inheritdoc/>
public override void StoreStateReference(
    Guid chainId,
    IImmutableSet<Address> addresses,
    HashDigest<SHA256> hash,
    long index)
{
    LiteCollection<StateRefDoc> coll =
        _db.GetCollection<StateRefDoc>(StateRefId(chainId));

    // One doc per address; skip docs whose Id is already stored so the
    // operation stays idempotent.
    var newDocs = addresses
        .Select(addr => new StateRefDoc
        {
            Address = addr,
            BlockIndex = index,
            BlockHash = hash,
        })
        .Where(doc => !coll.Exists(d => d.Id == doc.Id));

    coll.InsertBulk(newDocs);
    coll.EnsureIndex("AddressString");
    coll.EnsureIndex("BlockIndex");
}
/// <inheritdoc cref="BaseStore.ForkBlockIndexes(Guid, Guid, BlockHash)"/>
public override void ForkBlockIndexes(
    Guid sourceChainId,
    Guid destinationChainId,
    BlockHash branchpoint)
{
    LiteCollection<HashDoc> source = IndexCollection(sourceChainId);
    LiteCollection<HashDoc> destination = IndexCollection(destinationChainId);

    BlockHash? genesis = IterateIndexes(sourceChainId, 0, 1)
        .Cast<BlockHash?>()
        .FirstOrDefault();

    // Forking at (or before) the genesis block leaves nothing to copy.
    if (genesis is null || branchpoint.Equals(genesis))
    {
        return;
    }

    // Copy the entries strictly between genesis and the branch point
    // (Skip(1) drops the genesis entry), then append the branch point.
    destination.InsertBulk(
        source.FindAll()
            .TakeWhile(doc => !doc.Hash.Equals(branchpoint))
            .Skip(1));
    AppendIndex(destinationChainId, branchpoint);
}
/// <summary>
/// Appends the models to the in-memory cache (when one exists) and
/// bulk-inserts them into the backing collection.
/// </summary>
public void AddRange(List<T> models)
{
    if (_cache != null)
    {
        _cache.AddRange(models);
    }

    _lite.InsertBulk(models);
}
// Rebuilds the name-dictionary database from the parsed XML root: clears all
// collections, materializes translations and entries with sequential ids,
// builds the lookup-key mapping, then bulk-inserts everything.
private void FillDatabase(JMNedictRoot root)
{
    // Full wipe; this method regenerates all stored data.
    kvps.Delete(_ => true);
    entries.Delete(_ => true);
    trans.Delete(_ => true);
    version.Delete(_ => true);
    kvps.EnsureIndex(x => x.LookupKey);

    // Map each XML translation element to its DB model, keeping only
    // English (or language-less) detail texts.
    var transDict = root.Entries
        .SelectMany(e => e.TranslationalEquivalents
            .Select(tr => (xmlModel: tr, dbModel: new DbNeTranslation()
            {
                Type = (tr.Types ?? Array.Empty<string>())
                    .Select(t => JnedictTypeUtils.FromDescription(t))
                    .Values()
                    .ToList(),
                Detail = (tr.Translation ?? Array.Empty<NeTranslation>())
                    .Where(t => t.Lang == null || t.Lang == "eng")
                    .Select(t => t.Text)
                    .ToList()
            })))
        .ToDictionary(kvp => kvp.xmlModel, kvp => kvp.dbModel);

    {
        int id = 1; // to not make it start from 0
        foreach (var t in transDict)
        {
            t.Value.Id = id++;
        }
    }

    // Map each XML entry to its DB model, resolving translations through
    // transDict built above.
    var entriesDict = root.Entries
        .Select((e, id) => (xmlModel: e, dbModel: new DbNeEntry
        {
            Id = id + 1, // to not make it start from 0
            SequenceNumber = e.SequenceNumber,
            Kanji = (e.KanjiElements ?? Array.Empty<KanjiElement>())
                .Select(k => k.Key)
                .ToList(),
            Reading = (e.ReadingElements ?? Array.Empty<ReadingElement>())
                .Select(r => r.Reb)
                .ToList(),
            Translation = (e.TranslationalEquivalents ?? Array.Empty<NeTranslationalEquivalent>())
                .Select(t => transDict[t]).ToList()
        }))
        .ToDictionary(kvp => kvp.xmlModel, kvp => kvp.dbModel);

    // Every kanji form and reading becomes a lookup key pointing at the
    // DB entries it can resolve to.
    var kvpsDict = new Dictionary<string, List<DbNeEntry>>();
    var kvpsEn = root.Entries
        .SelectMany(e => (e.KanjiElements?.Select(k => k.Key) ?? Enumerable.Empty<string>())
            .Concat(e.ReadingElements.Select(r => r.Reb))
            .Select(k => (key: k, value: e)));
    foreach (var (key, value) in kvpsEn)
    {
        if (!kvpsDict.ContainsKey(key))
        {
            kvpsDict[key] = new List<DbNeEntry>();
        }

        kvpsDict[key].Add(entriesDict[value]);
    }

    trans.InsertBulk(transDict.Values);
    entries.InsertBulk(entriesDict.Values);
    kvps.InsertBulk(kvpsDict.Select(kvp => new DbNeDictKeyValue
    {
        LookupKey = kvp.Key,
        Entries = kvp.Value
    }));

    // Record the schema version; original file size/hash are unknown here.
    version.Insert(new DbDictVersion
    {
        DbVersion = Version,
        OriginalFileSize = -1,
        OriginalFileHash = Array.Empty<byte>()
    });
}