/* error-prone: all methods that load a level, change it and then persist need to execute everything under a lock */
private void PersistLevel(long number, ChainLevelInfo level)
{
    // _blockInfoCache.Set(number, level);
    _blockInfoDb.Set(number, Rlp.Encode(level).Bytes);
}
/* error-prone: all methods that load a level, change it and then persist need to execute everything under a lock */
private void PersistLevel(BigInteger number, ChainLevelInfo level)
{
    _blockInfoCache.Set(number, level);
    _blockInfoDb.Set(number, Rlp.Encode(level).Bytes);
}
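The comment above flags a concurrency hazard rather than a bug in PersistLevel itself: any load-modify-persist sequence on a level must run under one lock. A minimal sketch of the expected calling pattern, assuming the _blockInfoLock (ReaderWriterLockSlim) and LoadLevel members that appear in the other snippets in this listing; the method name is hypothetical.

// Hypothetical caller illustrating the load-modify-persist pattern the comment asks for.
// Assumes the _blockInfoLock and LoadLevel members shown in the surrounding examples.
private void MarkLevelOffMainChain(long number)
{
    _blockInfoLock.EnterWriteLock();
    try
    {
        ChainLevelInfo level = LoadLevel(number);   // load
        if (level == null) { return; }

        level.HasBlockOnMainChain = false;          // change
        PersistLevel(number, level);                // persist, still under the same lock
    }
    finally
    {
        _blockInfoLock.ExitWriteLock();
    }
}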
public Task<LevelVisitOutcome> VisitLevelStart(ChainLevelInfo chainLevelInfo, CancellationToken cancellationToken) => Task.FromResult(LevelVisitOutcome.None);
public Task<LevelVisitOutcome> VisitLevelEnd(ChainLevelInfo chainLevelInfo, long levelNumber, CancellationToken cancellationToken) => Task.FromResult(LevelVisitOutcome.None);
public void UpdateMainChain(Block[] processedBlocks)
{
    if (processedBlocks.Length == 0) { return; }

    bool ascendingOrder = true;
    if (processedBlocks.Length > 1)
    {
        if (processedBlocks[processedBlocks.Length - 1].Number < processedBlocks[0].Number) { ascendingOrder = false; }
    }

#if DEBUG
    for (int i = 0; i < processedBlocks.Length; i++)
    {
        if (i != 0)
        {
            if (ascendingOrder && processedBlocks[i].Number != processedBlocks[i - 1].Number + 1) { throw new InvalidOperationException("Update main chain invoked with gaps"); }

            if (!ascendingOrder && processedBlocks[i - 1].Number != processedBlocks[i].Number + 1) { throw new InvalidOperationException("Update main chain invoked with gaps"); }
        }
    }
#endif

    UInt256 lastNumber = ascendingOrder ? processedBlocks[processedBlocks.Length - 1].Number : processedBlocks[0].Number;
    UInt256 previousHeadNumber = Head?.Number ?? UInt256.Zero;
    try
    {
        _blockInfoLock.EnterWriteLock();
        if (previousHeadNumber > lastNumber)
        {
            for (UInt256 i = 0; i < UInt256.Subtract(previousHeadNumber, lastNumber); i++)
            {
                UInt256 levelNumber = previousHeadNumber - i;
                ChainLevelInfo level = LoadLevel(levelNumber);
                level.HasBlockOnMainChain = false;
                PersistLevel(levelNumber, level);
            }
        }

        for (int i = 0; i < processedBlocks.Length; i++)
        {
            _blockCache.Set(processedBlocks[i].Hash, processedBlocks[i]);
            MoveToMain(processedBlocks[i]);
        }
    }
    finally
    {
        _blockInfoLock.ExitWriteLock();
    }
}
public Task<LevelVisitOutcome> VisitLevelStart(ChainLevelInfo chainLevelInfo, long levelNumber, CancellationToken cancellationToken)
{
    _currentLevel = chainLevelInfo;
    return Task.FromResult(LevelVisitOutcome.None);
}
public Task<LevelVisitOutcome> VisitLevelStart(ChainLevelInfo chainLevelInfo, CancellationToken cancellationToken) { throw new System.NotImplementedException(); }
public bool CanAcceptNewBlocks { get; private set; } = true; // no need to sync it at the moment

public async Task LoadBlocksFromDb(
    CancellationToken cancellationToken,
    UInt256? startBlockNumber = null,
    int batchSize = DbLoadBatchSize,
    int maxBlocksToLoad = int.MaxValue)
{
    CanAcceptNewBlocks = false;
    if (startBlockNumber == null)
    {
        startBlockNumber = Head?.Number ?? 0;
    }
    else
    {
        Head = startBlockNumber == 0 ? null : FindBlock(startBlockNumber.Value - 1)?.Header;
    }

    BigInteger blocksToLoad = BigInteger.Min(FindNumberOfBlocksToLoadFromDb(), maxBlocksToLoad);
    if (blocksToLoad == 0)
    {
        if (_logger.IsInfo) { _logger.Info("Found no blocks to load from DB."); }
    }
    else
    {
        if (_logger.IsInfo) { _logger.Info($"Found {blocksToLoad} blocks to load from DB starting from current head block {Head?.ToString(BlockHeader.Format.Short)}."); }
    }

    UInt256 blockNumber = startBlockNumber.Value;
    for (int i = 0; i < blocksToLoad; i++)
    {
        if (cancellationToken.IsCancellationRequested) { break; }

        ChainLevelInfo level = LoadLevel(blockNumber);
        BigInteger maxDifficultySoFar = 0;
        BlockInfo maxDifficultyBlock = null;
        for (int blockIndex = 0; blockIndex < level.BlockInfos.Length; blockIndex++)
        {
            if (level.BlockInfos[blockIndex].TotalDifficulty > maxDifficultySoFar)
            {
                maxDifficultyBlock = level.BlockInfos[blockIndex];
                maxDifficultySoFar = maxDifficultyBlock.TotalDifficulty;
            }
        }

        if (maxDifficultyBlock == null)
        {
            throw new InvalidOperationException($"Expected at least one block at level {blockNumber}");
        }

        Block block = FindBlock(maxDifficultyBlock.BlockHash, false);
        if (block == null)
        {
            if (_logger.IsError) { _logger.Error($"Could not find block {maxDifficultyBlock.BlockHash}. DB load cancelled."); }

            _dbBatchProcessed?.SetResult(null);
            break;
        }

        BestSuggested = block.Header;
        NewBestSuggestedBlock?.Invoke(this, new BlockEventArgs(block));

        if (i % batchSize == batchSize - 1 && !(i == blocksToLoad - 1))
        {
            if (_logger.IsInfo) { _logger.Info($"Loaded {i + 1} out of {blocksToLoad} blocks from DB into processing queue, waiting for processor before loading more."); }

            _dbBatchProcessed = new TaskCompletionSource<object>();
            using (cancellationToken.Register(() => _dbBatchProcessed.SetCanceled()))
            {
                _currentDbLoadBatchEnd = blockNumber;
                await _dbBatchProcessed.Task;
            }
        }

        blockNumber++;
    }

    if (cancellationToken.IsCancellationRequested) { _logger.Info($"Canceled loading blocks from DB at block {blockNumber}"); }

    if (_logger.IsInfo) { _logger.Info($"Completed loading blocks from DB at block {blockNumber}"); }

    CanAcceptNewBlocks = true;
}
Task<LevelVisitOutcome> IBlockTreeVisitor.VisitLevelEnd(ChainLevelInfo chainLevelInfo, long levelNumber, CancellationToken cancellationToken)
{
    return Task.FromResult(LevelVisitOutcome.None);
}
private void RunBloomMigration(CancellationToken token)
{
    BlockHeader GetMissingBlockHeader(long i)
    {
        if (_logger.IsWarn) { _logger.Warn(GetLogMessage("warning", $"Header for block {i} not found. Logs will not be searchable for this block.")); }

        return EmptyHeader;
    }

    if (_api.BloomStorage == null) { throw new StepDependencyException(nameof(_api.BloomStorage)); }
    if (_api.BlockTree == null) { throw new StepDependencyException(nameof(_api.BlockTree)); }
    if (_api.ChainLevelInfoRepository == null) { throw new StepDependencyException(nameof(_api.ChainLevelInfoRepository)); }

    IBlockTree blockTree = _api.BlockTree;
    IBloomStorage storage = _api.BloomStorage;
    long to = MinBlockNumber;
    long synced = storage.MigratedBlockNumber + 1;
    long from = synced;
    _migrateCount = to + 1;
    _averages = _api.BloomStorage.Averages.ToArray();
    IChainLevelInfoRepository? chainLevelInfoRepository = _api.ChainLevelInfoRepository;

    _progress.Update(synced);

    if (_logger.IsInfo) { _logger.Info(GetLogMessage("started")); }

    using (Timer timer = new Timer(1000) { Enabled = true })
    {
        timer.Elapsed += (o, e) =>
        {
            if (_logger.IsInfo) { _logger.Info(GetLogMessage("in progress")); }
        };

        try
        {
            storage.Migrate(GetHeadersForMigration());
        }
        finally
        {
            _progress.MarkEnd();
            _stopwatch?.Stop();
        }

        IEnumerable<BlockHeader> GetHeadersForMigration()
        {
            bool TryGetMainChainBlockHashFromLevel(long number, out Keccak? blockHash)
            {
                using BatchWrite batch = chainLevelInfoRepository.StartBatch();
                ChainLevelInfo level = chainLevelInfoRepository.LoadLevel(number);
                if (level != null)
                {
                    if (!level.HasBlockOnMainChain)
                    {
                        if (level.BlockInfos.Length > 0)
                        {
                            level.HasBlockOnMainChain = true;
                            chainLevelInfoRepository.PersistLevel(number, level, batch);
                        }
                    }

                    blockHash = level.MainChainBlock?.BlockHash;
                    return blockHash != null;
                }
                else
                {
                    blockHash = null;
                    return false;
                }
            }

            for (long i = from; i <= to; i++)
            {
                if (token.IsCancellationRequested)
                {
                    timer.Stop();
                    if (_logger.IsInfo) { _logger.Info(GetLogMessage("cancelled")); }

                    yield break;
                }

                if (TryGetMainChainBlockHashFromLevel(i, out Keccak? blockHash))
                {
                    BlockHeader header = blockTree.FindHeader(blockHash, BlockTreeLookupOptions.None);
                    yield return header ?? GetMissingBlockHeader(i);
                }
                else
                {
                    yield return GetMissingBlockHeader(i);
                }

                _progress.Update(++synced);
            }
        }
    }

    if (!token.IsCancellationRequested)
    {
        if (_logger.IsInfo) { _logger.Info(GetLogMessage("finished")); }
    }
}
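RunBloomMigration only receives a CancellationToken, so scheduling and cancellation are the caller's responsibility. A hedged sketch of such a call site; the Task.Run wrapper and every name other than RunBloomMigration are assumptions, not taken from the migration code.

// Hypothetical call site: run the migration in the background and cancel it on shutdown.
private readonly CancellationTokenSource _migrationCts = new CancellationTokenSource();

private Task StartBloomMigrationSketch()
{
    return Task.Run(() => RunBloomMigration(_migrationCts.Token), _migrationCts.Token);
}

private void StopBloomMigrationSketch()
{
    _migrationCts.Cancel();
}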
public void MoveToMain(Block block)
{
    ChainLevelInfo level = LoadLevel(block.Number);
    MoveToMain(level, block);
}
private void DeleteBlocks(Keccak deletePointer)
{
    BlockHeader deleteHeader = FindHeader(deletePointer, BlockTreeLookupOptions.TotalDifficultyNotNeeded);

    long currentNumber = deleteHeader.Number;
    Keccak currentHash = deleteHeader.Hash;
    Keccak nextHash = null;
    ChainLevelInfo nextLevel = null;

    using var batch = _chainLevelInfoRepository.StartBatch();
    while (true)
    {
        ChainLevelInfo currentLevel = nextLevel ?? LoadLevel(currentNumber);
        nextLevel = LoadLevel(currentNumber + 1);
        bool shouldRemoveLevel = false;
        if (currentLevel != null) // preparing update of the level (removal of the invalid branch block)
        {
            if (currentLevel.BlockInfos.Length == 1)
            {
                shouldRemoveLevel = true;
            }
            else
            {
                for (int i = 0; i < currentLevel.BlockInfos.Length; i++)
                {
                    if (currentLevel.BlockInfos[i].BlockHash == currentHash)
                    {
                        currentLevel.BlockInfos = currentLevel.BlockInfos.Where(bi => bi.BlockHash != currentHash).ToArray();
                        break;
                    }
                }
            }
        }

        // just finding what the next descendant will be
        if (nextLevel != null)
        {
            nextHash = FindChild(nextLevel, currentHash);
        }

        UpdateDeletePointer(nextHash);

        if (shouldRemoveLevel)
        {
            BestKnownNumber = Math.Min(BestKnownNumber, currentNumber - 1);
            _chainLevelInfoRepository.Delete(currentNumber, batch);
        }
        else
        {
            _chainLevelInfoRepository.PersistLevel(currentNumber, currentLevel, batch);
        }

        if (_logger.IsInfo) { _logger.Info($"Deleting invalid block {currentHash} at level {currentNumber}"); }

        _blockCache.Delete(currentHash);
        _blockDb.Delete(currentHash);
        _headerCache.Delete(currentHash);
        _headerDb.Delete(currentHash);

        if (nextHash == null) { break; }

        currentNumber++;
        currentHash = nextHash;
        nextHash = null;
    }
}
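FindChild is not shown in this listing, but the older CleanInvalidBlocks variant further down performs the equivalent lookup inline: scan the next level for a block whose parent is the current hash. A hedged sketch of that lookup as a helper; the name and exact signature are assumptions.

// Hedged sketch based on the inline descendant search in CleanInvalidBlocks below;
// the real FindChild may differ in signature and edge-case handling.
private Keccak FindChildSketch(ChainLevelInfo level, Keccak parentHash)
{
    for (int i = 0; i < level.BlockInfos.Length; i++)
    {
        BlockHeader potentialChild = FindHeader(level.BlockInfos[i].BlockHash, BlockTreeLookupOptions.None);
        if (potentialChild?.ParentHash == parentHash)
        {
            return potentialChild.Hash;
        }
    }

    return null;
}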
private void LoadBestKnown()
{
    long headNumber = Head?.Number ?? _syncConfig.PivotNumberParsed;
    long left = Math.Max(_syncConfig.PivotNumberParsed, headNumber);
    long right = headNumber + BestKnownSearchLimit;

    bool LevelExists(long blockNumber)
    {
        return LoadLevel(blockNumber) != null;
    }

    bool HeaderExists(long blockNumber)
    {
        ChainLevelInfo level = LoadLevel(blockNumber);
        if (level == null) { return false; }

        foreach (BlockInfo blockInfo in level.BlockInfos)
        {
            if (FindHeader(blockInfo.BlockHash, BlockTreeLookupOptions.None) != null) { return true; }
        }

        return false;
    }

    bool BodyExists(long blockNumber)
    {
        ChainLevelInfo level = LoadLevel(blockNumber);
        if (level == null) { return false; }

        foreach (BlockInfo blockInfo in level.BlockInfos)
        {
            if (FindBlock(blockInfo.BlockHash, BlockTreeLookupOptions.None) != null) { return true; }
        }

        return false;
    }

    BestKnownNumber = BinarySearchBlockNumber(left, right, LevelExists) ?? 0;
    long bestSuggestedHeaderNumber = BinarySearchBlockNumber(left, right, HeaderExists) ?? 0;
    long bestSuggestedBodyNumber = BinarySearchBlockNumber(left, right, BodyExists) ?? 0;

    if (BestKnownNumber < 0 || bestSuggestedHeaderNumber < 0 || bestSuggestedBodyNumber < 0 || bestSuggestedHeaderNumber < bestSuggestedBodyNumber)
    {
        throw new InvalidDataException($"Invalid initial block tree state loaded - best known: {BestKnownNumber}|best header: {bestSuggestedHeaderNumber}|best body: {bestSuggestedBodyNumber}|");
    }

    BestSuggestedHeader = FindHeader(bestSuggestedHeaderNumber, BlockTreeLookupOptions.None);
    var bestSuggestedBodyHeader = FindHeader(bestSuggestedBodyNumber, BlockTreeLookupOptions.None);
    BestSuggestedBody = bestSuggestedBodyHeader == null ? null : FindBlock(bestSuggestedBodyHeader.Hash, BlockTreeLookupOptions.None);
}
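LoadBestKnown delegates to BinarySearchBlockNumber, whose body is not included in this listing. A minimal sketch of what such a search looks like, assuming the predicate is monotonic (true up to some number, false afterwards); the name and implementation here are hypothetical.

// Hypothetical sketch, not the actual BlockTree implementation: returns the highest
// number in [left, right] for which isApplicable is true, or null when none is.
private static long? BinarySearchBlockNumberSketch(long left, long right, Func<long, bool> isApplicable)
{
    long? result = null;
    while (left <= right)
    {
        long middle = left + (right - left) / 2;
        if (isApplicable(middle))
        {
            result = middle;    // candidate found, look for a higher one
            left = middle + 1;
        }
        else
        {
            right = middle - 1;
        }
    }

    return result;
}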
public ChainLevelForRpc(ChainLevelInfo chainLevelInfo)
{
    HasBlockOnMainChain = chainLevelInfo.HasBlockOnMainChain;
    BlockInfos = chainLevelInfo.BlockInfos.Select(bi => new BlockInfoForRpc(bi)).ToArray();
}
private void CleanInvalidBlocks(Keccak deletePointer)
{
    BlockHeader deleteHeader = FindHeader(deletePointer);
    long currentNumber = deleteHeader.Number;
    Keccak currentHash = deleteHeader.Hash;
    Keccak nextHash = null;
    ChainLevelInfo nextLevel = null;

    while (true)
    {
        ChainLevelInfo currentLevel = nextLevel ?? LoadLevel(currentNumber);
        nextLevel = LoadLevel(currentNumber + 1);
        bool shouldRemoveLevel = false;
        if (currentLevel != null) // preparing update of the level (removal of the invalid branch block)
        {
            if (currentLevel.BlockInfos.Length == 1)
            {
                shouldRemoveLevel = true;
            }
            else
            {
                for (int i = 0; i < currentLevel.BlockInfos.Length; i++)
                {
                    if (currentLevel.BlockInfos[i].BlockHash == currentHash)
                    {
                        currentLevel.BlockInfos = currentLevel.BlockInfos.Where(bi => bi.BlockHash != currentHash).ToArray();
                        break;
                    }
                }
            }
        }

        if (nextLevel != null) // just finding what the next descendant will be
        {
            if (nextLevel.BlockInfos.Length == 1)
            {
                nextHash = nextLevel.BlockInfos[0].BlockHash;
            }
            else
            {
                for (int i = 0; i < nextLevel.BlockInfos.Length; i++)
                {
                    BlockHeader potentialDescendant = FindHeader(nextLevel.BlockInfos[i].BlockHash);
                    if (potentialDescendant.ParentHash == currentHash)
                    {
                        nextHash = potentialDescendant.Hash;
                        break;
                    }
                }
            }

            UpdateDeletePointer(nextHash);
        }
        else
        {
            UpdateDeletePointer(null);
        }

        try
        {
            _blockInfoLock.EnterWriteLock();
            if (shouldRemoveLevel)
            {
                BestKnownNumber = Math.Min(BestKnownNumber, currentNumber - 1);
                _blockInfoCache.Delete(currentNumber);
                _blockInfoDb.Delete(currentNumber);
            }
            else
            {
                PersistLevel(currentNumber, currentLevel);
            }
        }
        finally
        {
            _blockInfoLock.ExitWriteLock();
        }

        if (_logger.IsInfo) { _logger.Info($"Deleting invalid block {currentHash} at level {currentNumber}"); }

        _blockCache.Delete(currentHash);
        _blockDb.Delete(currentHash);
        _headerCache.Delete(currentHash);
        _headerDb.Delete(currentHash);

        if (nextHash == null) { break; }

        currentNumber++;
        currentHash = nextHash;
        nextHash = null;
    }
}
public Rlp Encode(ChainLevelInfo item, RlpBehaviors rlpBehaviors = RlpBehaviors.None) { throw new NotImplementedException(); }
public void UpdateMainChain(Block[] processedBlocks)
{
    if (processedBlocks.Length == 0) { return; }

    bool ascendingOrder = true;
    if (processedBlocks.Length > 1)
    {
        if (processedBlocks[processedBlocks.Length - 1].Number < processedBlocks[0].Number) { ascendingOrder = false; }
    }

#if DEBUG
    for (int i = 0; i < processedBlocks.Length; i++)
    {
        if (i != 0)
        {
            if (ascendingOrder && processedBlocks[i].Number != processedBlocks[i - 1].Number + 1) { throw new InvalidOperationException("Update main chain invoked with gaps"); }

            if (!ascendingOrder && processedBlocks[i - 1].Number != processedBlocks[i].Number + 1) { throw new InvalidOperationException("Update main chain invoked with gaps"); }
        }
    }
#endif

    long lastNumber = ascendingOrder ? processedBlocks[processedBlocks.Length - 1].Number : processedBlocks[0].Number;
    long previousHeadNumber = Head?.Number ?? 0L;
    try
    {
        _blockInfoLock.EnterWriteLock();
        if (previousHeadNumber > lastNumber)
        {
            for (long i = 0; i < previousHeadNumber - lastNumber; i++)
            {
                long levelNumber = previousHeadNumber - i;
                ChainLevelInfo level = LoadLevel(levelNumber);
                level.HasBlockOnMainChain = false;
                PersistLevel(levelNumber, level);
            }
        }

        for (int i = 0; i < processedBlocks.Length; i++)
        {
            Block block = processedBlocks[i];
            if (ShouldCache(block.Number))
            {
                _blockCache.Set(block.Hash, processedBlocks[i]);
                _headerCache.Set(block.Hash, block.Header);
            }

            MoveToMain(processedBlocks[i]);
        }
    }
    finally
    {
        _blockInfoLock.ExitWriteLock();
    }
}
public int GetLength(ChainLevelInfo item, RlpBehaviors rlpBehaviors) { throw new NotImplementedException(); }
public async Task Accept(IBlockTreeVisitor visitor, CancellationToken cancellationToken)
{
    if (visitor.PreventsAcceptingNewBlocks)
    {
        BlockAcceptingNewBlocks();
    }

    try
    {
        long levelNumber = visitor.StartLevelInclusive;
        long blocksToVisit = visitor.EndLevelExclusive - visitor.StartLevelInclusive;
        for (long i = 0; i < blocksToVisit; i++)
        {
            if (cancellationToken.IsCancellationRequested) { break; }

            ChainLevelInfo level = LoadLevel(levelNumber);
            LevelVisitOutcome visitOutcome = await visitor.VisitLevelStart(level, levelNumber, cancellationToken);
            if ((visitOutcome & LevelVisitOutcome.DeleteLevel) == LevelVisitOutcome.DeleteLevel)
            {
                _chainLevelInfoRepository.Delete(levelNumber);
                level = null;
            }

            if ((visitOutcome & LevelVisitOutcome.StopVisiting) == LevelVisitOutcome.StopVisiting) { break; }

            int numberOfBlocksAtThisLevel = level?.BlockInfos.Length ?? 0;
            for (int blockIndex = 0; blockIndex < numberOfBlocksAtThisLevel; blockIndex++)
            {
                // if we delete blocks during the process then the number of blocks at this level will be falling and we need to adjust the index
                Keccak hash = level!.BlockInfos[blockIndex - (numberOfBlocksAtThisLevel - level.BlockInfos.Length)].BlockHash;
                Block block = FindBlock(hash, BlockTreeLookupOptions.None);
                if (block == null)
                {
                    BlockHeader header = FindHeader(hash, BlockTreeLookupOptions.None);
                    if (header == null)
                    {
                        if (await VisitMissing(visitor, hash, cancellationToken)) { break; }
                    }
                    else
                    {
                        if (await VisitHeader(visitor, header, cancellationToken)) { break; }
                    }
                }
                else
                {
                    if (await VisitBlock(visitor, block, cancellationToken)) { break; }
                }
            }

            visitOutcome = await visitor.VisitLevelEnd(level, levelNumber, cancellationToken);
            if ((visitOutcome & LevelVisitOutcome.DeleteLevel) == LevelVisitOutcome.DeleteLevel)
            {
                _chainLevelInfoRepository.Delete(levelNumber);
            }

            levelNumber++;
        }

        RecalculateTreeLevels();

        string resultWord = cancellationToken.IsCancellationRequested ? "Canceled" : "Completed";
        if (_logger.IsDebug) { _logger.Debug($"{resultWord} visiting blocks in DB at level {levelNumber} - best known {BestKnownNumber}"); }
    }
    finally
    {
        if (visitor.PreventsAcceptingNewBlocks)
        {
            ReleaseAcceptingNewBlocks();
        }
    }
}
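The index adjustment in the inner loop above is subtle: a visitor may delete block infos while the level is being walked, so level.BlockInfos shrinks and the loop compensates by shifting the index back by the number of removed entries. A standalone illustration of that arithmetic with hypothetical names:

// Illustration only: pick the element the loop is "logically" at after some entries
// were removed from the array during iteration.
static T PickAdjusted<T>(T[] current, int lengthBeforeLoop, int blockIndex)
{
    int removedSoFar = lengthBeforeLoop - current.Length;
    return current[blockIndex - removedSoFar];
}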
public bool CanAcceptNewBlocks { get; private set; } = true; // no need to sync it at the moment

public async Task LoadBlocksFromDb(
    CancellationToken cancellationToken,
    UInt256? startBlockNumber = null,
    int batchSize = DbLoadBatchSize,
    int maxBlocksToLoad = int.MaxValue)
{
    try
    {
        CanAcceptNewBlocks = false;

        byte[] deletePointer = _blockInfoDb.Get(DeletePointerAddressInDb);
        if (deletePointer != null)
        {
            Keccak deletePointerHash = new Keccak(deletePointer);
            if (_logger.IsInfo) { _logger.Info($"Cleaning invalid blocks starting from {deletePointer}"); }

            CleanInvalidBlocks(deletePointerHash);
        }

        if (startBlockNumber == null)
        {
            startBlockNumber = Head?.Number ?? 0;
        }
        else
        {
            Head = startBlockNumber == 0 ? null : FindBlock(startBlockNumber.Value - 1)?.Header;
        }

        BigInteger blocksToLoad = BigInteger.Min(FindNumberOfBlocksToLoadFromDb(), maxBlocksToLoad);
        if (blocksToLoad == 0)
        {
            if (_logger.IsInfo) { _logger.Info("Found no blocks to load from DB."); }
        }
        else
        {
            if (_logger.IsInfo) { _logger.Info($"Found {blocksToLoad} blocks to load from DB starting from current head block {Head?.ToString(BlockHeader.Format.Short)}."); }
        }

        UInt256 blockNumber = startBlockNumber.Value;
        for (int i = 0; i < blocksToLoad; i++)
        {
            if (cancellationToken.IsCancellationRequested) { break; }

            ChainLevelInfo level = LoadLevel(blockNumber);
            if (level == null) { break; }

            BigInteger maxDifficultySoFar = 0;
            BlockInfo maxDifficultyBlock = null;
            for (int blockIndex = 0; blockIndex < level.BlockInfos.Length; blockIndex++)
            {
                if (level.BlockInfos[blockIndex].TotalDifficulty > maxDifficultySoFar)
                {
                    maxDifficultyBlock = level.BlockInfos[blockIndex];
                    maxDifficultySoFar = maxDifficultyBlock.TotalDifficulty;
                }
            }

            level = null;
            // ReSharper disable once ConditionIsAlwaysTrueOrFalse
            if (level != null)
            // ReSharper disable once HeuristicUnreachableCode
            {
                // ReSharper disable once HeuristicUnreachableCode
                throw new InvalidOperationException("just be aware that this level can be deleted by another thread after here");
            }

            if (maxDifficultyBlock == null)
            {
                throw new InvalidOperationException($"Expected at least one block at level {blockNumber}");
            }

            Block block = FindBlock(maxDifficultyBlock.BlockHash, false);
            if (block == null)
            {
                if (_logger.IsError) { _logger.Error($"Could not find block {maxDifficultyBlock.BlockHash}. DB load cancelled."); }

                _dbBatchProcessed?.SetResult(null);
                break;
            }

            BestSuggested = block.Header;
            NewBestSuggestedBlock?.Invoke(this, new BlockEventArgs(block));

            if (i % batchSize == batchSize - 1 && !(i == blocksToLoad - 1) && (Head.Number + (UInt256)batchSize) < blockNumber)
            {
                if (_logger.IsInfo) { _logger.Info($"Loaded {i + 1} out of {blocksToLoad} blocks from DB into processing queue, waiting for processor before loading more."); }

                _dbBatchProcessed = new TaskCompletionSource<object>();
                using (cancellationToken.Register(() => _dbBatchProcessed.SetCanceled()))
                {
                    _currentDbLoadBatchEnd = blockNumber - (UInt256)batchSize;
                    await _dbBatchProcessed.Task;
                }
            }

            blockNumber++;
        }

        if (cancellationToken.IsCancellationRequested) { _logger.Info($"Canceled loading blocks from DB at block {blockNumber}"); }

        if (_logger.IsInfo) { _logger.Info($"Completed loading blocks from DB at block {blockNumber}"); }
    }
    finally
    {
        CanAcceptNewBlocks = true;
    }
}
public bool CanAcceptNewBlocks { get; private set; } = true; // no need to sync it at the moment

public async Task LoadBlocksFromDb(
    CancellationToken cancellationToken,
    long? startBlockNumber = null,
    int batchSize = DbLoadBatchSize,
    int maxBlocksToLoad = int.MaxValue)
{
    try
    {
        CanAcceptNewBlocks = false;

        byte[] deletePointer = _blockInfoDb.Get(DeletePointerAddressInDb);
        if (deletePointer != null)
        {
            Keccak deletePointerHash = new Keccak(deletePointer);
            if (_logger.IsInfo) { _logger.Info($"Cleaning invalid blocks starting from {deletePointer}"); }

            CleanInvalidBlocks(deletePointerHash);
        }

        if (startBlockNumber == null)
        {
            startBlockNumber = Head?.Number ?? 0;
        }
        else
        {
            Head = startBlockNumber == 0 ? null : FindBlock(startBlockNumber.Value - 1)?.Header;
        }

        long blocksToLoad = Math.Min(FindNumberOfBlocksToLoadFromDb(), maxBlocksToLoad);
        if (blocksToLoad == 0)
        {
            if (_logger.IsInfo) { _logger.Info("Found no blocks to load from DB"); }
        }
        else
        {
            if (_logger.IsInfo) { _logger.Info($"Found {blocksToLoad} blocks to load from DB starting from current head block {Head?.ToString(BlockHeader.Format.Short)}"); }

            long blockNumber = startBlockNumber.Value;
            for (long i = 0; i < blocksToLoad; i++)
            {
                if (cancellationToken.IsCancellationRequested) { break; }

                ChainLevelInfo level = LoadLevel(blockNumber);
                if (level == null)
                {
                    _logger.Warn($"Missing level - {blockNumber}");
                    break;
                }

                BigInteger maxDifficultySoFar = 0;
                BlockInfo maxDifficultyBlock = null;
                for (int blockIndex = 0; blockIndex < level.BlockInfos.Length; blockIndex++)
                {
                    if (level.BlockInfos[blockIndex].TotalDifficulty > maxDifficultySoFar)
                    {
                        maxDifficultyBlock = level.BlockInfos[blockIndex];
                        maxDifficultySoFar = maxDifficultyBlock.TotalDifficulty;
                    }
                }

                level = null;
                // ReSharper disable once ConditionIsAlwaysTrueOrFalse
                if (level != null)
                // ReSharper disable once HeuristicUnreachableCode
                {
                    // ReSharper disable once HeuristicUnreachableCode
                    throw new InvalidOperationException("just be aware that this level can be deleted by another thread after here");
                }

                if (maxDifficultyBlock == null)
                {
                    throw new InvalidOperationException($"Expected at least one block at level {blockNumber}");
                }

                Block block = FindBlock(maxDifficultyBlock.BlockHash, false);
                if (block == null)
                {
                    BlockHeader header = FindHeader(maxDifficultyBlock.BlockHash, false);
                    if (header == null)
                    {
                        _blockInfoDb.Delete(blockNumber);
                        BestKnownNumber = blockNumber - 1; // TODO: check if it is the last one
                        break;
                    }

                    BestSuggested = header;

                    if (i < blocksToLoad - 1024)
                    {
                        long jumpSize = blocksToLoad - 1024 - 1;
                        if (_logger.IsInfo) { _logger.Info($"Switching to fast sync headers load - jumping from {i} to {i + jumpSize}."); }

                        blockNumber += jumpSize;
                        i += jumpSize;
                    }

                    // copy paste from below less batching
                    if (i % batchSize == batchSize - 1 && i != blocksToLoad - 1 && Head.Number + batchSize < blockNumber)
                    {
                        if (_logger.IsInfo) { _logger.Info($"Loaded {i + 1} out of {blocksToLoad} headers from DB."); }
                    }
                }
                else
                {
                    BestSuggested = block.Header;
                    BestSuggestedFullBlock = block.Header;
                    NewBestSuggestedBlock?.Invoke(this, new BlockEventArgs(block));

                    if (i % batchSize == batchSize - 1 && i != blocksToLoad - 1 && Head.Number + batchSize < blockNumber)
                    {
                        if (_logger.IsInfo) { _logger.Info($"Loaded {i + 1} out of {blocksToLoad} blocks from DB into processing queue, waiting for processor before loading more."); }

                        _dbBatchProcessed = new TaskCompletionSource<object>();
                        using (cancellationToken.Register(() => _dbBatchProcessed.SetCanceled()))
                        {
                            _currentDbLoadBatchEnd = blockNumber - batchSize;
                            await _dbBatchProcessed.Task;
                        }
                    }
                }

                blockNumber++;
            }

            if (cancellationToken.IsCancellationRequested) { _logger.Info($"Canceled loading blocks from DB at block {blockNumber}"); }

            if (_logger.IsInfo) { _logger.Info($"Completed loading blocks from DB at block {blockNumber} - best known {BestKnownNumber}"); }
        }
    }
    finally
    {
        CanAcceptNewBlocks = true;
    }
}
private void RunMigration(CancellationToken token)
{
    Block GetMissingBlock(long i, Keccak? blockHash)
    {
        if (_logger.IsWarn) { _logger.Warn(GetLogMessage("warning", $"Block {i} not found. Logs will not be searchable for this block.")); }

        EmptyBlock.Header.Number = i;
        EmptyBlock.Header.Hash = blockHash;
        return EmptyBlock;
    }

    long synced = 1;
    IDb receiptsDb = _dbProvider.ReceiptsDb;

    _progress.Reset(synced);

    if (_logger.IsInfo) { _logger.Info(GetLogMessage("started")); }

    using (Timer timer = new Timer(1000) { Enabled = true })
    {
        timer.Elapsed += (o, e) =>
        {
            if (_logger.IsInfo) { _logger.Info(GetLogMessage("in progress")); }
        };

        try
        {
            foreach (Block block in GetBlockBodiesForMigration())
            {
                TxReceipt?[] receipts = _receiptStorage.Get(block);
                TxReceipt[] notNullReceipts = receipts.Length == 0 ? receipts : receipts.Where(r => r != null).ToArray();

                if (receipts.Length == 0 || notNullReceipts.Length != 0) // if notNullReceipts.Length is 0 and receipts are not 0 - we are missing all receipts, they are not processed yet.
                {
                    _receiptStorage.Insert(block, notNullReceipts);
                    _receiptStorage.MigratedBlockNumber = block.Number;

                    for (int i = 0; i < notNullReceipts.Length; i++)
                    {
                        receiptsDb.Delete(notNullReceipts[i].TxHash!);
                    }

                    if (notNullReceipts.Length != receipts.Length)
                    {
                        if (_logger.IsWarn) { _logger.Warn(GetLogMessage("warning", $"Block {block.ToString(Block.Format.FullHashAndNumber)} is missing {receipts.Length - notNullReceipts.Length} of {receipts.Length} receipts!")); }
                    }
                }
                else if (block.Number <= _blockTree.Head?.Number)
                {
                    if (_logger.IsWarn) { _logger.Warn(GetLogMessage("warning", $"Block {block.ToString(Block.Format.FullHashAndNumber)} is missing {receipts.Length - notNullReceipts.Length} of {receipts.Length} receipts!")); }
                }
            }
        }
        finally
        {
            _progress.MarkEnd();
            _stopwatch?.Stop();
        }

        IEnumerable<Block> GetBlockBodiesForMigration()
        {
            bool TryGetMainChainBlockHashFromLevel(long number, out Keccak? blockHash)
            {
                using BatchWrite batch = _chainLevelInfoRepository.StartBatch();
                ChainLevelInfo level = _chainLevelInfoRepository.LoadLevel(number);
                if (level != null)
                {
                    if (!level.HasBlockOnMainChain)
                    {
                        if (level.BlockInfos.Length > 0)
                        {
                            level.HasBlockOnMainChain = true;
                            _chainLevelInfoRepository.PersistLevel(number, level, batch);
                        }
                    }

                    blockHash = level.MainChainBlock?.BlockHash;
                    return blockHash != null;
                }
                else
                {
                    blockHash = null;
                    return false;
                }
            }

            for (long i = _toBlock - 1; i > 0; i--)
            {
                if (token.IsCancellationRequested)
                {
                    timer.Stop();
                    if (_logger.IsInfo) { _logger.Info(GetLogMessage("cancelled")); }

                    yield break;
                }

                if (TryGetMainChainBlockHashFromLevel(i, out Keccak? blockHash))
                {
                    Block block = _blockTree.FindBlock(blockHash, BlockTreeLookupOptions.None);
                    yield return block ?? GetMissingBlock(i, blockHash);
                }

                _progress.Update(++synced);
            }
        }
    }

    if (!token.IsCancellationRequested)
    {
        if (_logger.IsInfo) { _logger.Info(GetLogMessage("finished")); }
    }
}
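The receipts guard above packs three cases into one condition; a hedged restatement as a standalone helper, for illustration only (the helper name is an assumption):

// Hypothetical restatement of the guard used above:
// - empty receipts array                 -> nothing expected, safe to migrate
// - at least one non-null receipt        -> migrate what is present
// - receipts expected but all still null -> block not processed yet, skip it
static bool ShouldMigrateReceipts(TxReceipt?[] receipts, TxReceipt[] notNullReceipts)
{
    return receipts.Length == 0 || notNullReceipts.Length != 0;
}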