/// <summary>
/// Wraps an inner <see cref="IBlockTree"/> and republishes its block notifications
/// through this wrapper's own events, so subscribers of the wrapper observe the
/// inner tree's <c>NewHeadBlock</c>, <c>NewBestSuggestedBlock</c> and
/// <c>BlockAddedToMain</c> events.
/// </summary>
/// <param name="blockTree">The underlying block tree whose events are forwarded.</param>
public UnprocessedBlockTreeWrapper(IBlockTree blockTree)
{
    _blockTree = blockTree;

    // Forward each inner-tree event to the wrapper's corresponding event,
    // preserving the original sender and event args.
    _blockTree.NewHeadBlock += (s, e) => NewHeadBlock?.Invoke(s, e);
    _blockTree.NewBestSuggestedBlock += (s, e) => NewBestSuggestedBlock?.Invoke(s, e);
    _blockTree.BlockAddedToMain += (s, e) => BlockAddedToMain?.Invoke(s, e);
}
/// <summary>
/// Suggests a block for inclusion in the tree: validates it against known blocks,
/// persists its RLP encoding, records its chain-level info and promotes it to best
/// suggested when its total difficulty exceeds the current best.
/// </summary>
/// <param name="block">The candidate block.</param>
/// <returns>Added, AlreadyKnown or UnknownParent.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when the tree is not accepting blocks, or when genesis is added twice.
/// </exception>
public AddBlockResult SuggestBlock(Block block)
{
    if (!CanAcceptNewBlocks)
    {
        throw new InvalidOperationException($"{nameof(BlockTree)} not ready to accept new blocks.");
    }

    bool isGenesis = block.Number == 0;
    if (isGenesis)
    {
        if (BestSuggested != null)
        {
            // TODO: make sure it cannot happen
            throw new InvalidOperationException("Genesis block should be added only once");
        }
    }
    else
    {
        if (IsKnownBlock(block.Hash))
        {
            if (_logger.IsTrace)
            {
                _logger.Trace($"Block {block.Hash} already known.");
            }

            return AddBlockResult.AlreadyKnown;
        }

        if (!IsKnownBlock(block.Header.ParentHash))
        {
            if (_logger.IsTrace)
            {
                _logger.Trace($"Could not find parent ({block.Header.ParentHash}) of block {block.Hash}");
            }

            return AddBlockResult.UnknownParent;
        }
    }

    _blockDb.Set(block.Hash, Rlp.Encode(block).Bytes);
    //_blockCache.Set(block.Hash, block);

    // TODO: when reviewing the entire data chain need to look at the transactional storing of level and block
    SetTotalDifficulty(block);
    SetTotalTransactions(block);

    BlockInfo info = new BlockInfo(block.Hash, block.TotalDifficulty.Value, block.TotalTransactions.Value);
    UpdateLevel(block.Number, info);

    // Highest cumulative difficulty wins the "best suggested" slot.
    bool isNewBest = block.TotalDifficulty > (BestSuggested?.TotalDifficulty ?? 0);
    if (isNewBest)
    {
        BestSuggested = block.Header;
        NewBestSuggestedBlock?.Invoke(this, new BlockEventArgs(block));
    }

    return AddBlockResult.Added;
}
/// <summary>
/// Presents a single block to the visitor and applies the returned outcome flags:
/// Suggest makes the block the new best suggested (bypassing the standard suggest
/// path) and raises <c>NewBestSuggestedBlock</c>; StopVisiting tells the caller
/// to end the traversal.
/// </summary>
/// <returns>True when the traversal should stop; otherwise false.</returns>
private async Task <bool> VisitBlock(IBlockTreeVisitor visitor, Block block, CancellationToken cancellationToken)
{
    BlockVisitOutcome outcome = await visitor.VisitBlock(block, cancellationToken);

    if (outcome.HasFlag(BlockVisitOutcome.Suggest))
    {
        // remnant after previous approach - we want to skip standard suggest processing and just invoke processor
        BestSuggestedHeader = block.Header;
        BestSuggestedBody = block;
        NewBestSuggestedBlock?.Invoke(this, new BlockEventArgs(block));
    }

    return outcome.HasFlag(BlockVisitOutcome.StopVisiting);
}
/// <summary>
/// Shared implementation behind block/header suggestion: validates the candidate,
/// persists its body/header RLP and chain-level info, and promotes it to best
/// suggested when its total difficulty exceeds the current best.
/// </summary>
/// <param name="block">Full block body, or null when only a header is being suggested.</param>
/// <param name="header">Header of the suggested block (always present).</param>
/// <param name="shouldProcess">When false, the block is stored but not handed to processing
/// (no NewBestSuggestedBlock event); the flag is also passed (inverted) to level creation.</param>
/// <returns>Added, AlreadyKnown, UnknownParent, InvalidBlock or CannotAccept.</returns>
private AddBlockResult Suggest(Block block, BlockHeader header, bool shouldProcess = true)
{
#if DEBUG
    /* this is just to make sure that we do not fall into this trap when creating tests */
    if (header.StateRoot == null && !header.IsGenesis)
    {
        throw new InvalidDataException($"State root is null in {header.ToString(BlockHeader.Format.Short)}");
    }
#endif

    if (!CanAcceptNewBlocks)
    {
        return (AddBlockResult.CannotAccept);
    }

    // Reject blocks previously marked invalid at this height.
    HashSet<Keccak> invalidBlocksWithThisNumber = _invalidBlocks.Get(header.Number);
    if (invalidBlocksWithThisNumber?.Contains(header.Hash) ?? false)
    {
        return (AddBlockResult.InvalidBlock);
    }

    bool isKnown = IsKnownBlock(header.Number, header.Hash);
    // A known block only short-circuits as AlreadyKnown when it is not ahead of the
    // best suggested header; otherwise it falls through so state can still be updated.
    if (isKnown && (BestSuggestedHeader?.Number ?? 0) >= header.Number)
    {
        if (_logger.IsTrace)
        {
            _logger.Trace($"Block {header.Hash} already known.");
        }

        return (AddBlockResult.AlreadyKnown);
    }

    // Non-genesis blocks must attach to a known parent one level below.
    if (!header.IsGenesis && !IsKnownBlock(header.Number - 1, header.ParentHash))
    {
        if (_logger.IsTrace)
        {
            _logger.Trace($"Could not find parent ({header.ParentHash}) of block {header.Hash}");
        }

        return (AddBlockResult.UnknownParent);
    }

    SetTotalDifficulty(header);

    // Persist the body only when one was supplied and the block is new.
    if (block != null && !isKnown)
    {
        Rlp newRlp = _blockDecoder.Encode(block);
        _blockDb.Set(block.Hash, newRlp.Bytes);
    }

    // Persist header RLP and chain-level info for new blocks.
    if (!isKnown)
    {
        Rlp newRlp = _headerDecoder.Encode(header);
        _headerDb.Set(header.Hash, newRlp.Bytes);
        BlockInfo blockInfo = new BlockInfo(header.Hash, header.TotalDifficulty ?? 0);
        UpdateOrCreateLevel(header.Number, blockInfo, !shouldProcess);
    }

    // Genesis always becomes best; otherwise highest total difficulty wins.
    if (header.IsGenesis || header.TotalDifficulty > (BestSuggestedHeader?.TotalDifficulty ?? 0))
    {
        if (header.IsGenesis)
        {
            Genesis = header;
        }

        BestSuggestedHeader = header;
        if (block != null && shouldProcess)
        {
            BestSuggestedBody = block;
            NewBestSuggestedBlock?.Invoke(this, new BlockEventArgs(block));
        }
    }

    return (AddBlockResult.Added);
}
/// <summary>
/// Shared implementation behind block/header suggestion: validates the candidate,
/// persists its RLP-encoded body and header, records its chain-level info under the
/// write lock, and promotes it to best suggested when its total difficulty exceeds
/// the current best.
/// </summary>
/// <param name="block">Full block body, or null when only a header is suggested.</param>
/// <param name="header">Header of the suggested block (always present).</param>
/// <returns>Added, AlreadyKnown, UnknownParent, InvalidBlock or CannotAccept.</returns>
/// <exception cref="InvalidOperationException">Thrown when genesis is added twice.</exception>
private AddBlockResult Suggest(Block block, BlockHeader header)
{
#if DEBUG
    /* this is just to make sure that we do not fall into this trap when creating tests */
    if (header.StateRoot == null && !header.IsGenesis)
    {
        throw new InvalidDataException($"State root is null in {header.ToString(BlockHeader.Format.Short)}");
    }
#endif

    if (!CanAcceptNewBlocks)
    {
        return AddBlockResult.CannotAccept;
    }

    // Fix: single TryGetValue lookup instead of ContainsKey followed by the
    // indexer, which performed the same hash lookup twice.
    if (_invalidBlocks.TryGetValue(header.Number, out var invalidBlocksWithThisNumber)
        && invalidBlocksWithThisNumber.Contains(header.Hash))
    {
        return AddBlockResult.InvalidBlock;
    }

    if (header.Number == 0)
    {
        if (BestSuggested != null)
        {
            throw new InvalidOperationException("Genesis block should be added only once");
        }
    }
    else if (IsKnownBlock(header.Number, header.Hash))
    {
        if (_logger.IsTrace)
        {
            _logger.Trace($"Block {header.Hash} already known.");
        }

        return AddBlockResult.AlreadyKnown;
    }
    else if (!IsKnownBlock(header.Number - 1, header.ParentHash))
    {
        if (_logger.IsTrace)
        {
            _logger.Trace($"Could not find parent ({header.ParentHash}) of block {header.Hash}");
        }

        return AddBlockResult.UnknownParent;
    }

    SetTotalDifficulty(header);

    // Persist the body (when present) and the header as RLP via pooled streams.
    if (block != null)
    {
        using (MemoryStream stream = Rlp.BorrowStream())
        {
            Rlp.Encode(stream, block);
            byte[] newRlp = stream.ToArray();
            _blockDb.Set(block.Hash, newRlp);
        }
    }

    using (MemoryStream stream = Rlp.BorrowStream())
    {
        Rlp.Encode(stream, header);
        byte[] newRlp = stream.ToArray();
        _headerDb.Set(header.Hash, newRlp);
    }

    // Record the block at its chain level under the write lock.
    BlockInfo blockInfo = new BlockInfo(header.Hash, header.TotalDifficulty ?? 0);
    try
    {
        _blockInfoLock.EnterWriteLock();
        UpdateOrCreateLevel(header.Number, blockInfo);
    }
    finally
    {
        _blockInfoLock.ExitWriteLock();
    }

    // Genesis always becomes best; otherwise highest total difficulty wins.
    if (header.IsGenesis || header.TotalDifficulty > (BestSuggested?.TotalDifficulty ?? 0))
    {
        BestSuggested = header;
        if (block != null)
        {
            BestSuggestedFullBlock = block.Header;
            NewBestSuggestedBlock?.Invoke(this, new BlockEventArgs(block));
        }
    }

    return AddBlockResult.Added;
}
} = true; // no need to sync it at the moment

/// <summary>
/// Reloads blocks persisted in the DB into the processing pipeline after a restart.
/// First cleans invalid blocks past the stored delete pointer, then walks chain levels
/// from the start block, at each level picking the block with the highest total
/// difficulty and suggesting it for processing in batches, awaiting the processor
/// between batches. When a block body is missing it falls back to header-only loading
/// and may jump ahead (fast sync headers). Block suggestion is suspended for the
/// duration of the load.
/// </summary>
/// <param name="cancellationToken">Checked between blocks; also cancels the batch wait.</param>
/// <param name="startBlockNumber">Level to resume from; null resumes from the current head.</param>
/// <param name="batchSize">Blocks handed to the processor before awaiting it.</param>
/// <param name="maxBlocksToLoad">Upper bound on the number of blocks loaded.</param>
public async Task LoadBlocksFromDb(
    CancellationToken cancellationToken,
    long? startBlockNumber = null,
    int batchSize = DbLoadBatchSize,
    int maxBlocksToLoad = int.MaxValue)
{
    try
    {
        CanAcceptNewBlocks = false;

        // Resume any interrupted invalid-block cleanup recorded in the DB.
        byte[] deletePointer = _blockInfoDb.Get(DeletePointerAddressInDb);
        if (deletePointer != null)
        {
            Keccak deletePointerHash = new Keccak(deletePointer);
            if (_logger.IsInfo)
            {
                _logger.Info($"Cleaning invalid blocks starting from {deletePointer}");
            }

            CleanInvalidBlocks(deletePointerHash);
        }

        if (startBlockNumber == null)
        {
            startBlockNumber = Head?.Number ?? 0;
        }
        else
        {
            // Explicit start: rewind head to the parent of the start block.
            Head = startBlockNumber == 0 ? null : FindBlock(startBlockNumber.Value - 1)?.Header;
        }

        long blocksToLoad = Math.Min(FindNumberOfBlocksToLoadFromDb(), maxBlocksToLoad);
        if (blocksToLoad == 0)
        {
            if (_logger.IsInfo)
            {
                _logger.Info("Found no blocks to load from DB");
            }
        }
        else
        {
            if (_logger.IsInfo)
            {
                _logger.Info($"Found {blocksToLoad} blocks to load from DB starting from current head block {Head?.ToString(BlockHeader.Format.Short)}");
            }

            long blockNumber = startBlockNumber.Value;
            for (long i = 0; i < blocksToLoad; i++)
            {
                if (cancellationToken.IsCancellationRequested)
                {
                    break;
                }

                ChainLevelInfo level = LoadLevel(blockNumber);
                if (level == null)
                {
                    _logger.Warn($"Missing level - {blockNumber}");
                    break;
                }

                // Pick the block with the highest total difficulty at this level.
                BigInteger maxDifficultySoFar = 0;
                BlockInfo maxDifficultyBlock = null;
                for (int blockIndex = 0; blockIndex < level.BlockInfos.Length; blockIndex++)
                {
                    if (level.BlockInfos[blockIndex].TotalDifficulty > maxDifficultySoFar)
                    {
                        maxDifficultyBlock = level.BlockInfos[blockIndex];
                        maxDifficultySoFar = maxDifficultyBlock.TotalDifficulty;
                    }
                }

                // Deliberate remnant: nulls the reference so the level cannot be used
                // below (another thread may delete it), as the message explains.
                level = null;
                // ReSharper disable once ConditionIsAlwaysTrueOrFalse
                if (level != null)
                // ReSharper disable once HeuristicUnreachableCode
                {
                    // ReSharper disable once HeuristicUnreachableCode
                    throw new InvalidOperationException("just be aware that this level can be deleted by another thread after here");
                }

                if (maxDifficultyBlock == null)
                {
                    throw new InvalidOperationException($"Expected at least one block at level {blockNumber}");
                }

                Block block = FindBlock(maxDifficultyBlock.BlockHash, false);
                if (block == null)
                {
                    // No body stored - try header-only loading (fast sync case).
                    BlockHeader header = FindHeader(maxDifficultyBlock.BlockHash, false);
                    if (header == null)
                    {
                        _blockInfoDb.Delete(blockNumber);
                        BestKnownNumber = blockNumber - 1; // TODO: check if it is the last one
                        break;
                    }

                    BestSuggested = header;
                    if (i < blocksToLoad - 1024)
                    {
                        long jumpSize = blocksToLoad - 1024 - 1;
                        if (_logger.IsInfo)
                        {
                            _logger.Info($"Switching to fast sync headers load - jumping from {i} to {i + jumpSize}.");
                        }

                        blockNumber += jumpSize;
                        i += jumpSize;
                    }

                    // copy paste from below less batching
                    if (i % batchSize == batchSize - 1 && i != blocksToLoad - 1 && Head.Number + batchSize < blockNumber)
                    {
                        if (_logger.IsInfo)
                        {
                            _logger.Info($"Loaded {i + 1} out of {blocksToLoad} headers from DB.");
                        }
                    }
                }
                else
                {
                    BestSuggested = block.Header;
                    BestSuggestedFullBlock = block.Header;
                    NewBestSuggestedBlock?.Invoke(this, new BlockEventArgs(block));

                    // Batch boundary: wait for the processor to catch up before loading more.
                    if (i % batchSize == batchSize - 1 && i != blocksToLoad - 1 && Head.Number + batchSize < blockNumber)
                    {
                        if (_logger.IsInfo)
                        {
                            _logger.Info($"Loaded {i + 1} out of {blocksToLoad} blocks from DB into processing queue, waiting for processor before loading more.");
                        }

                        _dbBatchProcessed = new TaskCompletionSource<object>();
                        using (cancellationToken.Register(() => _dbBatchProcessed.SetCanceled()))
                        {
                            _currentDbLoadBatchEnd = blockNumber - batchSize;
                            await _dbBatchProcessed.Task;
                        }
                    }
                }

                blockNumber++;
            }

            if (cancellationToken.IsCancellationRequested)
            {
                _logger.Info($"Canceled loading blocks from DB at block {blockNumber}");
            }

            if (_logger.IsInfo)
            {
                _logger.Info($"Completed loading blocks from DB at block {blockNumber} - best known {BestKnownNumber}");
            }
        }
    }
    finally
    {
        // Re-enable block suggestion no matter how the load ended.
        CanAcceptNewBlocks = true;
    }
}
/// <summary>
/// Suggests a block for inclusion in the tree: validates it against invalid/known
/// blocks, persists its RLP encoding, records its chain-level info under the write
/// lock, and promotes it to best suggested when its total difficulty exceeds the
/// current best.
/// </summary>
/// <param name="block">The candidate block.</param>
/// <returns>Added, AlreadyKnown, UnknownParent, InvalidBlock or CannotAccept.</returns>
/// <exception cref="InvalidOperationException">Thrown when genesis is added twice.</exception>
public AddBlockResult SuggestBlock(Block block)
{
#if DEBUG
    /* this is just to make sure that we do not fall into this trap when creating tests */
    if (block.StateRoot == null && !block.IsGenesis)
    {
        throw new InvalidDataException($"State root is null in {block.ToString(Block.Format.Short)}");
    }
#endif

    if (!CanAcceptNewBlocks)
    {
        return AddBlockResult.CannotAccept;
    }

    // Fix: single TryGetValue lookup instead of ContainsKey followed by the
    // indexer, which performed the same hash lookup twice.
    if (_invalidBlocks.TryGetValue(block.Number, out var invalidBlocksWithThisNumber)
        && invalidBlocksWithThisNumber.Contains(block.Hash))
    {
        return AddBlockResult.InvalidBlock;
    }

    if (block.Number == 0)
    {
        if (BestSuggested != null)
        {
            throw new InvalidOperationException("Genesis block should be added only once"); // TODO: make sure it cannot happen
        }
    }
    else if (IsKnownBlock(block.Number, block.Hash))
    {
        if (_logger.IsTrace)
        {
            _logger.Trace($"Block {block.Hash} already known.");
        }

        return AddBlockResult.AlreadyKnown;
    }
    else if (!IsKnownBlock(block.Number - 1, block.Header.ParentHash))
    {
        if (_logger.IsTrace)
        {
            _logger.Trace($"Could not find parent ({block.Header.ParentHash}) of block {block.Hash}");
        }

        return AddBlockResult.UnknownParent;
    }

    _blockDb.Set(block.Hash, Rlp.Encode(block).Bytes);
    // _blockCache.Set(block.Hash, block);

    // TODO: when reviewing the entire data chain need to look at the transactional storing of level and block
    SetTotalDifficulty(block);
    SetTotalTransactions(block);
    BlockInfo blockInfo = new BlockInfo(block.Hash, block.TotalDifficulty.Value, block.TotalTransactions.Value);

    // Record the block at its chain level under the write lock.
    try
    {
        _blockInfoLock.EnterWriteLock();
        UpdateOrCreateLevel(block.Number, blockInfo);
    }
    finally
    {
        _blockInfoLock.ExitWriteLock();
    }

    // Genesis always becomes best; otherwise highest total difficulty wins.
    if (block.IsGenesis || block.TotalDifficulty > (BestSuggested?.TotalDifficulty ?? 0))
    {
        BestSuggested = block.Header;
        NewBestSuggestedBlock?.Invoke(this, new BlockEventArgs(block));
    }

    return AddBlockResult.Added;
}
} = true; // no need to sync it at the moment

/// <summary>
/// Reloads blocks persisted in the DB into the processing pipeline after a restart.
/// First cleans invalid blocks past the stored delete pointer, then walks chain levels
/// from the start block, at each level picking the block with the highest total
/// difficulty and suggesting it for processing in batches, awaiting the processor
/// between batches. Block suggestion is suspended for the duration of the load.
/// </summary>
/// <param name="cancellationToken">Checked between blocks; also cancels the batch wait.</param>
/// <param name="startBlockNumber">Level to resume from; null resumes from the current head.</param>
/// <param name="batchSize">Blocks handed to the processor before awaiting it.</param>
/// <param name="maxBlocksToLoad">Upper bound on the number of blocks loaded.</param>
public async Task LoadBlocksFromDb(
    CancellationToken cancellationToken,
    UInt256? startBlockNumber = null,
    int batchSize = DbLoadBatchSize,
    int maxBlocksToLoad = int.MaxValue)
{
    try
    {
        CanAcceptNewBlocks = false;

        // Resume any interrupted invalid-block cleanup recorded in the DB.
        byte[] deletePointer = _blockInfoDb.Get(DeletePointerAddressInDb);
        if (deletePointer != null)
        {
            Keccak deletePointerHash = new Keccak(deletePointer);
            if (_logger.IsInfo)
            {
                _logger.Info($"Cleaning invalid blocks starting from {deletePointer}");
            }

            CleanInvalidBlocks(deletePointerHash);
        }

        if (startBlockNumber == null)
        {
            startBlockNumber = Head?.Number ?? 0;
        }
        else
        {
            // Explicit start: rewind head to the parent of the start block.
            Head = startBlockNumber == 0 ? null : FindBlock(startBlockNumber.Value - 1)?.Header;
        }

        BigInteger blocksToLoad = BigInteger.Min(FindNumberOfBlocksToLoadFromDb(), maxBlocksToLoad);
        if (blocksToLoad == 0)
        {
            if (_logger.IsInfo)
            {
                _logger.Info("Found no blocks to load from DB.");
            }
        }
        else
        {
            if (_logger.IsInfo)
            {
                _logger.Info($"Found {blocksToLoad} blocks to load from DB starting from current head block {Head?.ToString(BlockHeader.Format.Short)}.");
            }
        }

        // Loop body is skipped entirely when blocksToLoad == 0.
        UInt256 blockNumber = startBlockNumber.Value;
        for (int i = 0; i < blocksToLoad; i++)
        {
            if (cancellationToken.IsCancellationRequested)
            {
                break;
            }

            ChainLevelInfo level = LoadLevel(blockNumber);
            if (level == null)
            {
                break;
            }

            // Pick the block with the highest total difficulty at this level.
            BigInteger maxDifficultySoFar = 0;
            BlockInfo maxDifficultyBlock = null;
            for (int blockIndex = 0; blockIndex < level.BlockInfos.Length; blockIndex++)
            {
                if (level.BlockInfos[blockIndex].TotalDifficulty > maxDifficultySoFar)
                {
                    maxDifficultyBlock = level.BlockInfos[blockIndex];
                    maxDifficultySoFar = maxDifficultyBlock.TotalDifficulty;
                }
            }

            // Deliberate remnant: nulls the reference so the level cannot be used
            // below (another thread may delete it), as the message explains.
            level = null;
            // ReSharper disable once ConditionIsAlwaysTrueOrFalse
            if (level != null)
            // ReSharper disable once HeuristicUnreachableCode
            {
                // ReSharper disable once HeuristicUnreachableCode
                throw new InvalidOperationException("just be aware that this level can be deleted by another thread after here");
            }

            if (maxDifficultyBlock == null)
            {
                throw new InvalidOperationException($"Expected at least one block at level {blockNumber}");
            }

            Block block = FindBlock(maxDifficultyBlock.BlockHash, false);
            if (block == null)
            {
                if (_logger.IsError)
                {
                    _logger.Error($"Could not find block {maxDifficultyBlock.BlockHash}. DB load cancelled.");
                }

                _dbBatchProcessed?.SetResult(null);
                break;
            }

            BestSuggested = block.Header;
            NewBestSuggestedBlock?.Invoke(this, new BlockEventArgs(block));

            // Batch boundary: wait for the processor to catch up before loading more.
            if (i % batchSize == batchSize - 1 && !(i == blocksToLoad - 1) && (Head.Number + (UInt256)batchSize) < blockNumber)
            {
                if (_logger.IsInfo)
                {
                    _logger.Info($"Loaded {i + 1} out of {blocksToLoad} blocks from DB into processing queue, waiting for processor before loading more.");
                }

                _dbBatchProcessed = new TaskCompletionSource<object>();
                using (cancellationToken.Register(() => _dbBatchProcessed.SetCanceled()))
                {
                    _currentDbLoadBatchEnd = blockNumber - (UInt256)batchSize;
                    await _dbBatchProcessed.Task;
                }
            }

            blockNumber++;
        }

        if (cancellationToken.IsCancellationRequested)
        {
            _logger.Info($"Canceled loading blocks from DB at block {blockNumber}");
        }

        if (_logger.IsInfo)
        {
            _logger.Info($"Completed loading blocks from DB at block {blockNumber}");
        }
    }
    finally
    {
        // Re-enable block suggestion no matter how the load ended.
        CanAcceptNewBlocks = true;
    }
}
} = true; // no need to sync it at the moment

/// <summary>
/// Reloads blocks persisted in the DB into the processing pipeline after a restart:
/// walks chain levels from the start block, at each level picking the block with the
/// highest total difficulty and suggesting it for processing in batches, awaiting the
/// processor between batches. Block suggestion is suspended for the duration of the
/// load and is restored even when the load fails.
/// </summary>
/// <param name="cancellationToken">Checked between blocks; also cancels the batch wait.</param>
/// <param name="startBlockNumber">Level to resume from; null resumes from the current head.</param>
/// <param name="batchSize">Blocks handed to the processor before awaiting it.</param>
/// <param name="maxBlocksToLoad">Upper bound on the number of blocks loaded.</param>
public async Task LoadBlocksFromDb(
    CancellationToken cancellationToken,
    UInt256? startBlockNumber = null,
    int batchSize = DbLoadBatchSize,
    int maxBlocksToLoad = int.MaxValue)
{
    CanAcceptNewBlocks = false;
    try
    {
        if (startBlockNumber == null)
        {
            startBlockNumber = Head?.Number ?? 0;
        }
        else
        {
            // Explicit start: rewind head to the parent of the start block.
            Head = startBlockNumber == 0 ? null : FindBlock(startBlockNumber.Value - 1)?.Header;
        }

        BigInteger blocksToLoad = BigInteger.Min(FindNumberOfBlocksToLoadFromDb(), maxBlocksToLoad);
        if (blocksToLoad == 0)
        {
            if (_logger.IsInfo)
            {
                _logger.Info("Found no blocks to load from DB.");
            }
        }
        else
        {
            if (_logger.IsInfo)
            {
                _logger.Info($"Found {blocksToLoad} blocks to load from DB starting from current head block {Head?.ToString(BlockHeader.Format.Short)}.");
            }
        }

        UInt256 blockNumber = startBlockNumber.Value;
        for (int i = 0; i < blocksToLoad; i++)
        {
            if (cancellationToken.IsCancellationRequested)
            {
                break;
            }

            // NOTE(review): level is dereferenced without a null check below - a missing
            // level throws NullReferenceException here; later revisions break out instead.
            ChainLevelInfo level = LoadLevel(blockNumber);

            // Pick the block with the highest total difficulty at this level.
            BigInteger maxDifficultySoFar = 0;
            BlockInfo maxDifficultyBlock = null;
            for (int blockIndex = 0; blockIndex < level.BlockInfos.Length; blockIndex++)
            {
                if (level.BlockInfos[blockIndex].TotalDifficulty > maxDifficultySoFar)
                {
                    maxDifficultyBlock = level.BlockInfos[blockIndex];
                    maxDifficultySoFar = maxDifficultyBlock.TotalDifficulty;
                }
            }

            if (maxDifficultyBlock == null)
            {
                throw new InvalidOperationException($"Expected at least one block at level {blockNumber}");
            }

            Block block = FindBlock(maxDifficultyBlock.BlockHash, false);
            if (block == null)
            {
                if (_logger.IsError)
                {
                    _logger.Error($"Could not find block {maxDifficultyBlock.BlockHash}. DB load cancelled.");
                }

                _dbBatchProcessed?.SetResult(null);
                break;
            }

            BestSuggested = block.Header;
            NewBestSuggestedBlock?.Invoke(this, new BlockEventArgs(block));

            // Batch boundary: wait for the processor to catch up before loading more.
            if (i % batchSize == batchSize - 1 && !(i == blocksToLoad - 1))
            {
                if (_logger.IsInfo)
                {
                    _logger.Info($"Loaded {i + 1} out of {blocksToLoad} blocks from DB into processing queue, waiting for processor before loading more.");
                }

                _dbBatchProcessed = new TaskCompletionSource<object>();
                using (cancellationToken.Register(() => _dbBatchProcessed.SetCanceled()))
                {
                    _currentDbLoadBatchEnd = blockNumber;
                    await _dbBatchProcessed.Task;
                }
            }

            blockNumber++;
        }

        if (cancellationToken.IsCancellationRequested)
        {
            _logger.Info($"Canceled loading blocks from DB at block {blockNumber}");
        }

        if (_logger.IsInfo)
        {
            _logger.Info($"Completed loading blocks from DB at block {blockNumber}");
        }
    }
    finally
    {
        // Fix: originally CanAcceptNewBlocks was only restored on the success path,
        // so any exception thrown during the load left the tree permanently refusing
        // new blocks. try/finally guarantees restoration (matching later revisions
        // of this method).
        CanAcceptNewBlocks = true;
    }
}