public async Task BatchInserts(List<Profile> profiles)
{
    BatchWrite<Profile> batchWrite = _context.CreateBatchWrite<Profile>();
    batchWrite.AddPutItems(profiles);
    await batchWrite.ExecuteAsync();
    Console.WriteLine("Items inserted successfully");
}
private async Task<bool> AddManyItems(List<T> items)
{
    // Fire-and-forget: the table tracker update is intentionally not awaited.
    _ = UpdateTableTracker();

    BatchWrite<T> batch = _context.CreateBatchWrite<T>();
    batch.AddPutItems(items);
    await batch.ExecuteAsync();
    return true;
}
public async Task AddBulk(IEnumerable<CarDTO> cars)
{
    // Assign the ids manually before mapping; materialize the sequence first so the
    // assignments are not lost if the source enumerable is lazily re-evaluated.
    List<CarDTO> carList = cars.ToList();
    foreach (CarDTO car in carList)
    {
        car.Id = Guid.NewGuid().ToString();
    }

    // Start the batch
    BatchWrite<Car> bulkBatch = dbContext.CreateBatchWrite<Car>();
    bulkBatch.AddPutItems(mapper.Map<IEnumerable<Car>>(carList));
    await bulkBatch.ExecuteAsync();
}
public async Task AddItems<T>(IEnumerable<T> items)
{
    IDynamoDBContext context = new DynamoDBContext(client, new DynamoDBContextConfig { ConsistentRead = true });
    BatchWrite<T> batch = context.CreateBatchWrite<T>();
    batch.AddPutItems(items);
    await batch.ExecuteAsync();
}
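A minimal call site for the generic helper above might look like the following sketch; `client` is assumed to be an already-configured `IAmazonDynamoDB` instance, and `Profile` (with hypothetical `Id`/`Name` properties) stands in for any `[DynamoDBTable]`-mapped class.

// Usage sketch only: Profile's Id/Name properties are placeholders, not taken from the snippets above.
List<Profile> profiles = new List<Profile>
{
    new Profile { Id = Guid.NewGuid().ToString(), Name = "Alice" },
    new Profile { Id = Guid.NewGuid().ToString(), Name = "Bob" }
};
await AddItems(profiles);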
public BatchWrite<TEntity> GetBatchWrite(List<TEntity> entities, DynamoDbBatchOperator batchOperator)
{
    using DynamoDBContext context = GetContext();
    BatchWrite<TEntity> batch = context.CreateBatchWrite<TEntity>();

    switch (batchOperator)
    {
        case DynamoDbBatchOperator.Delete:
            batch.AddDeleteItems(entities);
            break;
        case DynamoDbBatchOperator.Put:
            batch.AddPutItems(entities);
            break;
    }

    return batch;
}
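`DynamoDbBatchOperator` is not part of the AWS SDK; it appears to be an application-level enum. A minimal definition consistent with the switch above, together with how a caller might execute the returned batch, could look like this sketch (the `Order`, `orders`, and `repository` names are placeholders):

// Hypothetical enum covering only the members the switch handles.
public enum DynamoDbBatchOperator
{
    Put,
    Delete
}

// The batch is only prepared by GetBatchWrite; the caller executes it, e.g.:
// BatchWrite<Order> batch = repository.GetBatchWrite(orders, DynamoDbBatchOperator.Put);
// await batch.ExecuteAsync();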
public async Task InsertNewItems(List<Profile> profiles)
{
    if (profiles != null && profiles.Count > 0)
    {
        try
        {
            BatchWrite<Profile> batchWrite = _context.CreateBatchWrite<Profile>();
            batchWrite.AddPutItems(profiles);
            Console.WriteLine("Inserting list of profiles...");
            await batchWrite.ExecuteAsync();
            Console.WriteLine("Done");
        }
        catch (Exception ex)
        {
            Console.WriteLine("[!] Insert data failed");
            Console.WriteLine(ex);
        }
    }
}
// Note: the BatchWrite used below is the disposable batch returned by Nethermind's
// IChainLevelInfoRepository.StartBatch(), not the DynamoDB BatchWrite from the snippets above.
private void RunBloomMigration(CancellationToken token)
{
    BlockHeader GetMissingBlockHeader(long i)
    {
        if (_logger.IsWarn) _logger.Warn(GetLogMessage("warning", $"Header for block {i} not found. Logs will not be searchable for this block."));
        return EmptyHeader;
    }

    if (_api.BloomStorage == null) throw new StepDependencyException(nameof(_api.BloomStorage));
    if (_api.BlockTree == null) throw new StepDependencyException(nameof(_api.BlockTree));
    if (_api.ChainLevelInfoRepository == null) throw new StepDependencyException(nameof(_api.ChainLevelInfoRepository));

    IBlockTree blockTree = _api.BlockTree;
    IBloomStorage storage = _api.BloomStorage;
    long to = MinBlockNumber;
    long synced = storage.MigratedBlockNumber + 1;
    long from = synced;
    _migrateCount = to + 1;
    _averages = _api.BloomStorage.Averages.ToArray();
    IChainLevelInfoRepository? chainLevelInfoRepository = _api.ChainLevelInfoRepository;

    _progress.Update(synced);

    if (_logger.IsInfo) _logger.Info(GetLogMessage("started"));

    using (Timer timer = new Timer(1000) { Enabled = true })
    {
        timer.Elapsed += (_, _) =>
        {
            if (_logger.IsInfo) _logger.Info(GetLogMessage("in progress"));
        };

        try
        {
            storage.Migrate(GetHeadersForMigration());
        }
        finally
        {
            _progress.MarkEnd();
            _stopwatch?.Stop();
        }

        IEnumerable<BlockHeader> GetHeadersForMigration()
        {
            bool TryGetMainChainBlockHashFromLevel(long number, out Keccak? blockHash)
            {
                using BatchWrite batch = chainLevelInfoRepository.StartBatch();
                ChainLevelInfo? level = chainLevelInfoRepository.LoadLevel(number);
                if (level != null)
                {
                    if (!level.HasBlockOnMainChain)
                    {
                        if (level.BlockInfos.Length > 0)
                        {
                            level.HasBlockOnMainChain = true;
                            chainLevelInfoRepository.PersistLevel(number, level, batch);
                        }
                    }

                    blockHash = level.MainChainBlock?.BlockHash;
                    return blockHash != null;
                }
                else
                {
                    blockHash = null;
                    return false;
                }
            }

            for (long i = from; i <= to; i++)
            {
                if (token.IsCancellationRequested)
                {
                    timer.Stop();
                    if (_logger.IsInfo) _logger.Info(GetLogMessage("cancelled"));
                    yield break;
                }

                if (TryGetMainChainBlockHashFromLevel(i, out Keccak? blockHash))
                {
                    BlockHeader? header = blockTree.FindHeader(blockHash!, BlockTreeLookupOptions.None);
                    yield return header ?? GetMissingBlockHeader(i);
                }
                else
                {
                    yield return GetMissingBlockHeader(i);
                }

                _progress.Update(++synced);
            }
        }
    }

    if (!token.IsCancellationRequested)
    {
        if (_logger.IsInfo) _logger.Info(GetLogMessage("finished"));
    }
}
private async Task LoadData(IEnumerable<DashboardEventRaw> data, string source)
{
    if (data == null)
    {
        this._context.LogError("Null was provided to LoadData, no data to load.");
        return;
    }

    BatchWrite<DashboardEventParsed> batch = ddbContext.CreateBatchWrite<DashboardEventParsed>();
    int misses = 0;

    foreach (DashboardEventRaw item in data)
    {
        try
        {
            DashboardEventParsed parsed = DashboardEventParsed.FromRawEvent(item);
            batch.AddPutItem(parsed);

            if (parsed.Timeline.StartTimeWasFoundInDescription == false)
            {
                this._context.LogError($"Did not find start/end in description: {item.Description}");
                misses += 1;
            }
        }
        catch (Exception e)
        {
            this._context.LogError(e);
        }
    }

    try
    {
        await batch.ExecuteAsync();
    }
    catch (Exception e)
    {
        this._context.LogError(e);
    }

    // Report the number of events whose start/end time could not be extracted.
    await cwClient.PutMetricDataAsync(new PutMetricDataRequest
    {
        MetricData = new List<MetricDatum>
        {
            new MetricDatum
            {
                Value = misses,
                MetricName = Environment.GetEnvironmentVariable("ExtractionFailureMetricName"),
                TimestampUtc = DateTime.UtcNow,
                Unit = StandardUnit.Count,
                Dimensions = new List<Dimension>
                {
                    new Dimension { Name = "source", Value = source }
                }
            }
        },
        Namespace = "SHD"
    });
}
private void RunMigration(CancellationToken token)
{
    Block GetMissingBlock(long i, Keccak? blockHash)
    {
        if (_logger.IsWarn) _logger.Warn(GetLogMessage("warning", $"Block {i} not found. Logs will not be searchable for this block."));
        EmptyBlock.Header.Number = i;
        EmptyBlock.Header.Hash = blockHash;
        return EmptyBlock;
    }

    long synced = 1;
    IDb receiptsDb = _dbProvider.ReceiptsDb;

    _progress.Reset(synced);

    if (_logger.IsInfo) _logger.Info(GetLogMessage("started"));

    using (Timer timer = new Timer(1000) { Enabled = true })
    {
        timer.Elapsed += (o, e) =>
        {
            if (_logger.IsInfo) _logger.Info(GetLogMessage("in progress"));
        };

        try
        {
            foreach (Block block in GetBlockBodiesForMigration())
            {
                TxReceipt?[] receipts = _receiptStorage.Get(block);
                TxReceipt[] notNullReceipts = receipts.Length == 0
                    ? receipts
                    : receipts.Where(r => r != null).ToArray();

                // If notNullReceipts.Length is 0 while receipts.Length is not, all receipts are missing - they have not been processed yet.
                if (receipts.Length == 0 || notNullReceipts.Length != 0)
                {
                    _receiptStorage.Insert(block, notNullReceipts);
                    _receiptStorage.MigratedBlockNumber = block.Number;

                    for (int i = 0; i < notNullReceipts.Length; i++)
                    {
                        receiptsDb.Delete(notNullReceipts[i].TxHash!);
                    }

                    if (notNullReceipts.Length != receipts.Length)
                    {
                        if (_logger.IsWarn) _logger.Warn(GetLogMessage("warning", $"Block {block.ToString(Block.Format.FullHashAndNumber)} is missing {receipts.Length - notNullReceipts.Length} of {receipts.Length} receipts!"));
                    }
                }
                else if (block.Number <= _blockTree.Head?.Number)
                {
                    if (_logger.IsWarn) _logger.Warn(GetLogMessage("warning", $"Block {block.ToString(Block.Format.FullHashAndNumber)} is missing {receipts.Length - notNullReceipts.Length} of {receipts.Length} receipts!"));
                }
            }
        }
        finally
        {
            _progress.MarkEnd();
            _stopwatch?.Stop();
        }

        IEnumerable<Block> GetBlockBodiesForMigration()
        {
            bool TryGetMainChainBlockHashFromLevel(long number, out Keccak? blockHash)
            {
                using BatchWrite batch = _chainLevelInfoRepository.StartBatch();
                ChainLevelInfo? level = _chainLevelInfoRepository.LoadLevel(number);
                if (level != null)
                {
                    if (!level.HasBlockOnMainChain)
                    {
                        if (level.BlockInfos.Length > 0)
                        {
                            level.HasBlockOnMainChain = true;
                            _chainLevelInfoRepository.PersistLevel(number, level, batch);
                        }
                    }

                    blockHash = level.MainChainBlock?.BlockHash;
                    return blockHash != null;
                }
                else
                {
                    blockHash = null;
                    return false;
                }
            }

            for (long i = _toBlock - 1; i > 0; i--)
            {
                if (token.IsCancellationRequested)
                {
                    timer.Stop();
                    if (_logger.IsInfo) _logger.Info(GetLogMessage("cancelled"));
                    yield break;
                }

                if (TryGetMainChainBlockHashFromLevel(i, out Keccak? blockHash))
                {
                    Block? block = _blockTree.FindBlock(blockHash, BlockTreeLookupOptions.None);
                    yield return block ?? GetMissingBlock(i, blockHash);
                }

                _progress.Update(++synced);
            }
        }
    }

    if (!token.IsCancellationRequested)
    {
        if (_logger.IsInfo) _logger.Info(GetLogMessage("finished"));
    }
}