/// <summary>
/// Replays <paramref name="block"/> with a Geth-style tracer attached and returns the
/// per-transaction traces. When <paramref name="txHash"/> is supplied only that transaction
/// is traced; otherwise every transaction in the block is.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when the block is null, its parent is unknown/invalid, or the parent is not on the main chain.
/// </exception>
private GethLikeTxTrace[] TraceBlock(Block? block, GethTraceOptions options, CancellationToken cancellationToken, Keccak? txHash = null)
{
    if (block == null)
    {
        throw new InvalidOperationException("Only canonical, historical blocks supported");
    }

    if (!block.IsGenesis)
    {
        // A non-genesis block must sit on top of a known, canonical parent.
        BlockHeader? parent = _blockTree.FindParentHeader(block.Header, BlockTreeLookupOptions.None);
        if (parent?.Hash is null)
        {
            throw new InvalidOperationException("Cannot trace blocks with invalid parents");
        }

        if (!_blockTree.IsMainChain(parent.Hash))
        {
            throw new InvalidOperationException("Cannot trace orphaned blocks");
        }
    }

    GethLikeBlockTracer blockTracer = txHash == null
        ? new GethLikeBlockTracer(options)
        : new GethLikeBlockTracer(txHash, options);

    _processor.Process(block, ProcessingOptions.Trace, blockTracer.WithCancellation(cancellationToken));

    return blockTracer.BuildResult().ToArray();
}
// Invoked before blocks are (re)processed. If the first block in the batch was already
// processed (i.e. we are rerunning blocks, e.g. after a reorg), roll back the finalization
// flags that those blocks could have contributed to.
private void OnBlocksProcessing(object? sender, BlocksProcessingEventArgs e)
{
    // Clears the IsFinalized flag for a single block and persists its chain level in the batch.
    void UnFinalizeBlock(BlockHeader blockHeader, BatchWrite batch)
    {
        var (chainLevel, blockInfo) = GetBlockInfo(blockHeader);
        blockInfo.IsFinalized = false;
        _chainLevelInfoRepository.PersistLevel(blockHeader.Number, chainLevel, batch);
    }

    // rerunning block
    BlockHeader header = e.Blocks.First().Header;
    if (_blockTree.WasProcessed(header.Number, header.Hash))
    {
        using (var batch = _chainLevelInfoRepository.StartBatch())
        {
            // need to un-finalize blocks
            // Walk back over the window of ancestors (one less than the sealer count required
            // for finalization) - any of them could have been finalized by the rerun blocks.
            var minSealersForFinalization = GetMinSealersForFinalization(header.Number);
            for (int i = 1; i < minSealersForFinalization; i++)
            {
                // NOTE(review): if FindParentHeader returns null here, the next iteration passes
                // a null header into it - confirm callers guarantee enough known ancestors.
                header = _blockTree.FindParentHeader(header, BlockTreeLookupOptions.TotalDifficultyNotNeeded);
                if (header != null)
                {
                    UnFinalizeBlock(header, batch);
                }
            }

            // Also un-finalize the blocks that are about to be rerun themselves.
            for (int i = 0; i < e.Blocks.Count; i++)
            {
                UnFinalizeBlock(e.Blocks[i].Header, batch);
            }
        }
    }
}
// These validations cannot be run in AuraSealValidator because they are dependent on state.
private void ValidateAuRa(Block block)
{
    if (block.IsGenesis)
    {
        return;
    }

    var parent = _blockTree.FindParentHeader(block.Header, BlockTreeLookupOptions.None);
    ValidateGasLimit(block.Header, parent);
    ValidateTxs(block, parent);
}
// Refreshes the current AuRa validator before a block is processed. This is needed when
// producing a block or when the block is not consecutive with the last processed one,
// because the validator in force may differ from the one used previously.
public void OnBlockProcessingStart(Block block, ProcessingOptions options = ProcessingOptions.None)
{
    if (!block.IsGenesis)
    {
        // A validator change is effective once finalization has reached the block that scheduled it.
        bool ValidatorWasAlreadyFinalized(KeyValuePair<long, AuRaParameters.Validator> validatorInfo) => _blockFinalizationManager.LastFinalizedBlockLevel >= validatorInfo.Key;

        bool isProducingBlock = options.ContainsFlag(ProcessingOptions.ProducingBlock);
        long previousBlockNumber = block.Number - 1;
        bool isNotConsecutive = previousBlockNumber != _lastProcessedBlock;

        if (isProducingBlock || isNotConsecutive)
        {
            if (TryGetLastValidator(previousBlockNumber, out var validatorInfo))
            {
                var parentHeader = _blockTree.FindParentHeader(block.Header, BlockTreeLookupOptions.None);
                if (validatorInfo.Value.ValidatorType.CanChangeImmediately() || ValidatorWasAlreadyFinalized(validatorInfo))
                {
                    // The change is already in force - activate it directly.
                    SetCurrentValidator(validatorInfo, parentHeader);
                }
                else if (!isProducingBlock)
                {
                    // The latest validator change is not finalized yet: fall back to the previous
                    // validator (if one exists) and only switch once a finalization level is known.
                    bool canSetValidatorAsCurrent = !TryGetLastValidator(validatorInfo.Key - 1, out var previousValidatorInfo);
                    long? finalizedAtBlockNumber = null;
                    if (!canSetValidatorAsCurrent)
                    {
                        SetCurrentValidator(previousValidatorInfo, parentHeader);
                        finalizedAtBlockNumber = _blockFinalizationManager.GetFinalizationLevel(validatorInfo.Key);
                        canSetValidatorAsCurrent = finalizedAtBlockNumber != null;
                    }

                    if (canSetValidatorAsCurrent)
                    {
                        SetCurrentValidator(finalizedAtBlockNumber ?? validatorInfo.Key, validatorInfo.Value, parentHeader);
                    }
                }
            }
        }
    }

    // Forward the notification to the currently active validator.
    _currentValidator?.OnBlockProcessingStart(block, options);
}
/// <summary>
/// Replays every transaction of <paramref name="block"/> with a Geth-style tracer and returns the traces.
/// Processing runs with ForceProcessing | WithRollback | ReadOnlyChain | NoValidation so the
/// canonical state is left untouched.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when the block is null, its parent is unknown/invalid, or the parent is not on the main chain.
/// </exception>
private GethLikeTxTrace[] TraceBlock(Block block, GethTraceOptions options)
{
    if (block == null)
    {
        throw new InvalidOperationException("Only canonical, historical blocks supported");
    }

    if (block.Number != 0)
    {
        BlockHeader parent = _blockTree.FindParentHeader(block.Header, BlockTreeLookupOptions.None);
        // Fix: FindParentHeader can return null (or a header without a hash) for an unknown
        // parent; previously parent.Hash was dereferenced unconditionally, throwing a
        // NullReferenceException instead of a meaningful error (the sibling overload guards this).
        if (parent?.Hash == null)
        {
            throw new InvalidOperationException("Cannot trace blocks with invalid parents");
        }

        if (!_blockTree.IsMainChain(parent.Hash))
        {
            throw new InvalidOperationException("Cannot trace orphaned blocks");
        }
    }

    GethLikeBlockTracer listener = new GethLikeBlockTracer(options);
    _processor.Process(block, ProcessingOptions.ForceProcessing | ProcessingOptions.WithRollback | ProcessingOptions.ReadOnlyChain | ProcessingOptions.NoValidation, listener);
    return listener.BuildResult().ToArray();
}
// Returns true only when the block has a parent hash, the parent header is known, and the
// parent's state root is present in the state db - i.e. a suggested block could actually be
// processed. Also resets the _firstBlockVisited flag as a side effect.
private bool CanSuggestBlocks(Block block)
{
    _firstBlockVisited = false;

    if (block?.ParentHash == null)
    {
        return false;
    }

    BlockHeader? parent = _blockTree.FindParentHeader(block.Header, BlockTreeLookupOptions.TotalDifficultyNotNeeded);
    if (parent == null || parent.StateRoot == null)
    {
        return false;
    }

    return _stateDb.Get(parent.StateRoot) != null;
}
// Checks whether <paramref name="uncle"/> is kin of <paramref name="header"/> within
// <paramref name="relationshipLevel"/> generations: not the direct parent itself, within the
// allowed ancestor window, and sharing a grandparent somewhere along the ancestry walk.
private bool IsKin(BlockHeader header, BlockHeader uncle, int relationshipLevel)
{
    if (relationshipLevel == 0)
    {
        return false;
    }

    // Clamp the lookback window so it never reaches past the genesis block.
    if (relationshipLevel > header.Number)
    {
        return IsKin(header, uncle, (int)header.Number);
    }

    // The uncle is older than the window we are allowed to inspect.
    if (uncle.Number < header.Number - relationshipLevel)
    {
        return false;
    }

    BlockHeader parent = _blockTree.FindParentHeader(header, BlockTreeLookupOptions.TotalDifficultyNotNeeded);
    if (parent == null || parent.Hash == uncle.Hash)
    {
        return false;
    }

    // Siblings share a parent hash; otherwise keep walking up the ancestry.
    return parent.ParentHash == uncle.ParentHash || IsKin(parent, uncle, relationshipLevel - 1);
}
// Clique block producer timer tick: decides whether the locally scheduled block should be
// suggested to the block tree (once its timestamp plus the per-signer wiggle delay has passed)
// or dropped because a heavier head appeared in the meantime. Re-arms the timer on every path.
private void TimerOnElapsed(object sender, ElapsedEventArgs e)
{
    try
    {
        if (_blockTree.Head == null)
        {
            // No head yet - just re-arm the timer and wait.
            _timer.Enabled = true;
            return;
        }

        Block? scheduledBlock = _scheduledBlock;
        if (scheduledBlock == null)
        {
            // Nothing scheduled; if the block period has elapsed since the head's timestamp,
            // signal that a new block should be produced on top of the current head.
            if (_blockTree.Head.Timestamp + _config.BlockPeriod < _timestamper.UnixTime.Seconds)
            {
                _signalsQueue.Add(_blockTree.FindBlock(_blockTree.Head.Hash, BlockTreeLookupOptions.None));
            }

            _timer.Enabled = true;
            return;
        }

        string turnDescription = scheduledBlock.IsInTurn() ? "IN TURN" : "OUT OF TURN";
        int wiggle = _wiggle.WiggleFor(scheduledBlock.Header);
        // Only publish once the block timestamp (ms) plus the wiggle delay has passed.
        if (scheduledBlock.Timestamp * 1000 + (UInt256)wiggle < _timestamper.UnixTime.Milliseconds)
        {
            if (scheduledBlock.TotalDifficulty > _blockTree.Head.TotalDifficulty)
            {
                // Re-check the field so we do not suggest a block that was swapped out meanwhile.
                if (ReferenceEquals(scheduledBlock, _scheduledBlock))
                {
                    BlockHeader parent = _blockTree.FindParentHeader(scheduledBlock.Header, BlockTreeLookupOptions.TotalDifficultyNotNeeded);
                    Address parentSigner = _snapshotManager.GetBlockSealer(parent);
                    string parentTurnDescription = parent.IsInTurn() ? "IN TURN" : "OUT OF TURN";
                    string parentDetails = $"{parentTurnDescription} {parent.TimestampDate:HH:mm:ss} {parent.ToString(BlockHeader.Format.Short)} sealed by {KnownAddresses.GetDescription(parentSigner)}";
                    if (_logger.IsInfo)
                    {
                        _logger.Info(
                            $"Suggesting own {turnDescription} {_scheduledBlock.TimestampDate:HH:mm:ss} {scheduledBlock.ToString(Block.Format.HashNumberDiffAndTx)} based on {parentDetails} after the delay of {wiggle}");
                    }

                    _blockTree.SuggestBlock(scheduledBlock);
                }
            }
            else
            {
                // A heavier chain took over - our scheduled block lost the race.
                if (_logger.IsInfo)
                {
                    _logger.Info(
                        $"Dropping a losing block {scheduledBlock.ToString(Block.Format.HashNumberDiffAndTx)}");
                }
            }

            // Clear the slot only if it still holds the block we just handled.
            if (ReferenceEquals(scheduledBlock, _scheduledBlock))
            {
                _scheduledBlock = null;
            }
        }
        else
        {
            if (_logger.IsTrace)
            {
                _logger.Trace($"Not yet {scheduledBlock.ToString(Block.Format.HashNumberDiffAndTx)}");
            }
        }

        _timer.Enabled = true;
    }
    catch (Exception exception)
    {
        if (_logger.IsError)
        {
            _logger.Error("Clique block producer failure", exception);
        }
    }
}
// Refreshes a sync peer's head info (number, hash, total difficulty) by requesting its head
// header, racing the request against an init timeout. Updates node stats and disconnects the
// peer on timeout, fault, or a null response.
private async Task ExecuteRefreshTask(RefreshTotalDiffTask refreshTotalDiffTask, CancellationToken token)
{
    ISyncPeer syncPeer = refreshTotalDiffTask.SyncPeer;
    if (_logger.IsTrace)
    {
        _logger.Trace($"Requesting head block info from {syncPeer.Node:s}");
    }

    var getHeadHeaderTask = syncPeer.GetHeadBlockHeader(refreshTotalDiffTask.BlockHash ?? syncPeer.HeadHash, token);
    // The delay task implements the timeout; delaySource lets us cancel it once the header arrives.
    CancellationTokenSource delaySource = new CancellationTokenSource();
    CancellationTokenSource linkedSource = CancellationTokenSource.CreateLinkedTokenSource(delaySource.Token, token);
    Task delayTask = Task.Delay(InitTimeout, linkedSource.Token);
    Task firstToComplete = await Task.WhenAny(getHeadHeaderTask, delayTask);
    await firstToComplete.ContinueWith(
        t =>
        {
            try
            {
                if (firstToComplete == delayTask)
                {
                    // The timeout won the race.
                    if (_logger.IsDebug)
                    {
                        _logger.Debug($"InitPeerInfo timed out for node: {syncPeer.Node:c}");
                    }

                    _stats.ReportSyncEvent(syncPeer.Node, syncPeer.IsInitialized ? NodeStatsEventType.SyncFailed : NodeStatsEventType.SyncInitFailed);
                    syncPeer.Disconnect(DisconnectReason.DisconnectRequested, "refresh peer info fault - timeout");
                }
                else if (firstToComplete.IsFaulted)
                {
                    if (_logger.IsDebug)
                    {
                        _logger.Debug($"InitPeerInfo failed for node: {syncPeer.Node:c}{Environment.NewLine}{t.Exception}");
                    }

                    _stats.ReportSyncEvent(syncPeer.Node, syncPeer.IsInitialized ? NodeStatsEventType.SyncFailed : NodeStatsEventType.SyncInitFailed);
                    // NOTE(review): this is the faulted branch but the disconnect reason still says
                    // "timeout" - looks copy-pasted from the branch above; confirm intended text.
                    syncPeer.Disconnect(DisconnectReason.DisconnectRequested, "refresh peer info fault - timeout");
                }
                else if (firstToComplete.IsCanceled)
                {
                    if (_logger.IsTrace)
                    {
                        _logger.Trace($"InitPeerInfo canceled for node: {syncPeer.Node:c}{Environment.NewLine}{t.Exception}");
                    }

                    _stats.ReportSyncEvent(syncPeer.Node, syncPeer.IsInitialized ? NodeStatsEventType.SyncCancelled : NodeStatsEventType.SyncInitCancelled);
                    token.ThrowIfCancellationRequested();
                }
                else
                {
                    // Header received in time - stop the timeout delay task.
                    delaySource.Cancel();
                    BlockHeader header = getHeadHeaderTask.Result;
                    if (header == null)
                    {
                        if (_logger.IsDebug)
                        {
                            _logger.Debug($"InitPeerInfo failed for node: {syncPeer.Node:c}{Environment.NewLine}{t.Exception}");
                        }

                        _stats.ReportSyncEvent(syncPeer.Node, syncPeer.IsInitialized ? NodeStatsEventType.SyncFailed : NodeStatsEventType.SyncInitFailed);
                        syncPeer.Disconnect(DisconnectReason.DisconnectRequested, "refresh peer info fault - null response");
                        return;
                    }

                    if (_logger.IsTrace)
                    {
                        _logger.Trace($"Received head block info from {syncPeer.Node:c} with head block numer {header.Number}");
                    }

                    if (!syncPeer.IsInitialized)
                    {
                        _stats.ReportSyncEvent(syncPeer.Node, NodeStatsEventType.SyncInitCompleted);
                    }

                    if (_logger.IsTrace)
                    {
                        _logger.Trace($"REFRESH Updating header of {syncPeer} from {syncPeer.HeadNumber} to {header.Number}");
                    }

                    // Prefer a total difficulty computed from our locally known parent; only ever
                    // move the peer's recorded head forward, never backwards.
                    BlockHeader parent = _blockTree.FindParentHeader(header, BlockTreeLookupOptions.None);
                    if (parent != null)
                    {
                        UInt256 newTotalDifficulty = (parent.TotalDifficulty ?? UInt256.Zero) + header.Difficulty;
                        if (newTotalDifficulty >= syncPeer.TotalDifficulty)
                        {
                            syncPeer.TotalDifficulty = newTotalDifficulty;
                            syncPeer.HeadNumber = header.Number;
                            syncPeer.HeadHash = header.Hash;
                        }
                    }
                    else if (header.Number > syncPeer.HeadNumber)
                    {
                        // Parent unknown locally - fall back to trusting the advertised number.
                        syncPeer.HeadNumber = header.Number;
                        syncPeer.HeadHash = header.Hash;
                    }

                    syncPeer.IsInitialized = true;
                    SignalPeersChanged();
                }
            }
            finally
            {
                // Dispose both sources regardless of which branch ran.
                linkedSource.Dispose();
                delaySource.Dispose();
            }
        }, token);
}
// Loads or refreshes the contract-based validator set before a block is processed, and keeps
// the pending validator-set change (InitiateChange) bookkeeping consistent across reorgs,
// skipped blocks (fast sync) and block production.
public override void OnBlockProcessingStart(Block block, ProcessingOptions options = ProcessingOptions.None)
{
    if (block.IsGenesis)
    {
        return;
    }

    var isProducingBlock = options.IsProducingBlock();
    var isProcessingBlock = !isProducingBlock;
    var isInitBlock = InitBlockNumber == block.Number;
    var headNumber = _blockTree.Head?.Number ?? -2; // -2, so genesis.Number - 1 > -2.
    // True when this block is not right on top of the current head (e.g. after fast sync).
    var skippingBlocks = block.Number - 1 > headNumber;
    var shouldLoadValidators = Validators == null || skippingBlocks || isProducingBlock;
    var mainChainProcessing = !ForSealing && isProcessingBlock;

    if (shouldLoadValidators)
    {
        // On the init block or after skipped blocks the store may be stale - query the contract
        // at the parent header; otherwise the persisted validator set is trusted.
        Validators = isInitBlock || skippingBlocks
            ? LoadValidatorsFromContract(_blockTree.FindParentHeader(block.Header, BlockTreeLookupOptions.None))
            : ValidatorStore.GetValidators();

        if (mainChainProcessing)
        {
            if (_logger.IsInfo)
            {
                _logger.Info($"{(isInitBlock ? "Initial" : "Current")} contract validators ({Validators.Length}): [{string.Join<Address>(", ", Validators)}].");
            }
        }
    }

    if (isInitBlock)
    {
        if (mainChainProcessing)
        {
            ValidatorStore.SetValidators(InitBlockNumber, Validators);
        }

        InitiateChange(block, Validators.ToArray(), isProcessingBlock, true);
    }
    else
    {
        if (isProcessingBlock)
        {
            bool reorganisationHappened = block.Number <= _lastProcessedBlockNumber;
            if (reorganisationHappened)
            {
                // If the reorg reaches behind the block that initiated the pending change, the
                // pending change no longer applies; otherwise reload it from the db.
                var reorganisationToBlockBeforePendingValidatorsInitChange = block.Number <= CurrentPendingValidators?.BlockNumber;
                SetPendingValidators(reorganisationToBlockBeforePendingValidatorsInitChange ? null : LoadPendingValidators(), reorganisationToBlockBeforePendingValidatorsInitChange);
            }
            else if (block.Number > _lastProcessedBlockNumber + 1) // blocks skipped, like fast sync
            {
                SetPendingValidators(TryGetInitChangeFromPastBlocks(block.ParentHash), true);
            }
        }
        else
        {
            // if we are not processing blocks we are not on consecutive blocks.
            // We need to initialize pending validators from db on each block being produced.
            SetPendingValidators(LoadPendingValidators());
        }
    }

    base.OnBlockProcessingStart(block, options);
    FinalizePendingValidatorsIfNeeded(block.Header, isProcessingBlock);
    _lastProcessedBlockNumber = block.Number;
}
// Prepares the next bodies batch for fast-blocks sync: retries a pending batch if one exists,
// otherwise walks headers backwards from the lowest requested body hash to build a new request.
// Returns a null batch when there is nothing to do yet.
public override Task<BodiesSyncBatch> PrepareRequest()
{
    HandleDependentBatches();

    if (_pending.TryDequeue(out BodiesSyncBatch batch))
    {
        batch.MarkRetry();
    }
    else if (ShouldBuildANewBatch())
    {
        long? lowestInsertedHeader = _blockTree.LowestInsertedHeader?.Number;
        long? lowestInsertedBody = _blockTree.LowestInsertedBody?.Number;
        // Throttle: do not let body downloads run more than 1024 * 32 blocks behind headers.
        if (lowestInsertedHeader != 1 && (lowestInsertedHeader ?? _pivotNumber) > (lowestInsertedBody ?? _pivotNumber) - 1024 * 32)
        {
            return (Task.FromResult((BodiesSyncBatch)null));
        }

        Keccak hash = _lowestRequestedBodyHash;
        BlockHeader header = _blockTree.FindHeader(hash, BlockTreeLookupOptions.TotalDifficultyNotNeeded);
        if (header == null)
        {
            return (Task.FromResult((BodiesSyncBatch)null));
        }

        if (_lowestRequestedBodyHash != _pivotHash)
        {
            // This header's body was already requested - continue from its parent (stop at genesis).
            if (header.ParentHash == _blockTree.Genesis.Hash)
            {
                return (Task.FromResult((BodiesSyncBatch)null));
            }

            header = _blockTree.FindParentHeader(header, BlockTreeLookupOptions.TotalDifficultyNotNeeded);
            if (header == null)
            {
                return (Task.FromResult((BodiesSyncBatch)null));
            }
        }

        int requestSize = (int)Math.Min(header.Number, _bodiesRequestSize);
        batch = new BodiesSyncBatch();
        batch.Request = new Keccak[requestSize];
        batch.Headers = new BlockHeader[requestSize];
        batch.MinNumber = header.Number;

        // Fill the arrays from the back so entries end up ordered by ascending block number.
        int collectedRequests = 0;
        while (collectedRequests < requestSize)
        {
            int i = requestSize - collectedRequests - 1;
            // while (header != null && !header.HasBody)
            // {
            //     header = _blockTree.FindHeader(header.ParentHash);
            // }

            if (header == null)
            {
                break;
            }

            batch.Headers[i] = header;
            collectedRequests++;
            _lowestRequestedBodyHash = batch.Request[i] = header.Hash;

            header = _blockTree.FindHeader(header.ParentHash, BlockTreeLookupOptions.TotalDifficultyNotNeeded);
        }

        if (collectedRequests == 0)
        {
            return (Task.FromResult((BodiesSyncBatch)null));
        }

        //only for the final one
        // Shrink the arrays when fewer requests than requestSize were collected.
        if (collectedRequests < requestSize)
        {
            BlockHeader[] currentHeaders = batch.Headers;
            Keccak[] currentRequests = batch.Request;
            batch.Request = new Keccak[collectedRequests];
            batch.Headers = new BlockHeader[collectedRequests];
            Array.Copy(currentHeaders, requestSize - collectedRequests, batch.Headers, 0, collectedRequests);
            Array.Copy(currentRequests, requestSize - collectedRequests, batch.Request, 0, collectedRequests);
        }
    }

    if (batch != null)
    {
        _sent.TryAdd(batch, _dummyObject);
        // Prioritize batches close to the lowest inserted body so insertion stays sequential.
        if ((_blockTree.LowestInsertedBody?.Number ?? 0) - batch.Headers[0].Number < FastBlocksPriorities.ForBodies)
        {
            batch.Prioritized = true;
        }

        LogStateOnPrepare();
    }

    return (Task.FromResult(batch));
}
/// <summary>
/// Validates all the header elements (usually in relation to parent). Difficulty calculation is validated in <see cref="ISealValidator"/>
/// </summary>
/// <param name="header">Block header to validate</param>
/// <param name="isOmmer"><value>True</value> if the <paramref name="header"/> is an ommer, otherwise <value>False</value></param>
/// <returns><value>True</value> if <paramref name="header"/> is valid, otherwise <value>False</value></returns>
public bool Validate(BlockHeader header, bool isOmmer = false) =>
    Validate(header, _blockTree.FindParentHeader(header, BlockTreeLookupOptions.TotalDifficultyNotNeeded), isOmmer);
// Downloads blocks from the given peer while it advertises a heavier chain than our best
// suggested header: requests headers, then bodies, validates each block and suggests it to the
// block tree. Returns the number of blocks synced in this call.
public async Task<long> DownloadBlocks(PeerInfo bestPeer, int newBlocksToSkip, CancellationToken cancellation, bool shouldProcess = true)
{
    if (bestPeer == null)
    {
        string message = $"Not expecting best peer to be null inside the {nameof(BlockDownloader)}";
        _logger.Error(message);
        throw new ArgumentNullException(message);
    }

    int blocksSynced = 0;
    int ancestorLookupLevel = 0;

    long currentNumber = Math.Max(0, Math.Min(_blockTree.BestKnownNumber, bestPeer.HeadNumber - 1));
    while (bestPeer.TotalDifficulty > (_blockTree.BestSuggestedHeader?.TotalDifficulty ?? 0) && currentNumber <= bestPeer.HeadNumber)
    {
        if (_logger.IsDebug)
        {
            _logger.Debug($"Continue full sync with {bestPeer} (our best {_blockTree.BestKnownNumber})");
        }

        // Give up on finding a common ancestor beyond the maximum reorg depth.
        if (ancestorLookupLevel > MaxReorganizationLength)
        {
            if (_logger.IsWarn)
            {
                _logger.Warn($"Could not find common ancestor with {bestPeer}");
            }

            throw new EthSynchronizationException("Peer with inconsistent chain in sync");
        }

        long blocksLeft = bestPeer.HeadNumber - currentNumber - newBlocksToSkip;
        int blocksToRequest = (int)BigInteger.Min(blocksLeft + 1, _syncBatchSize.Current);
        if (blocksToRequest <= 1)
        {
            break;
        }

        if (_logger.IsTrace)
        {
            _logger.Trace($"Full sync request {currentNumber}+{blocksToRequest} to peer {bestPeer} with {bestPeer.HeadNumber} blocks. Got {currentNumber} and asking for {blocksToRequest} more.");
        }

        var headers = await RequestHeaders(bestPeer, cancellation, currentNumber, blocksToRequest);
        // headers[0] is the block we already know; bodies are requested for headers[1..].
        List<Keccak> hashes = new List<Keccak>();
        Dictionary<Keccak, BlockHeader> headersByHash = new Dictionary<Keccak, BlockHeader>();
        for (int i = 1; i < headers.Length; i++)
        {
            if (headers[i] == null)
            {
                break;
            }

            hashes.Add(headers[i].Hash);
            headersByHash[headers[i].Hash] = headers[i];
        }

        Task<BlockBody[]> bodiesTask = bestPeer.SyncPeer.GetBlocks(hashes.ToArray(), cancellation);
        await bodiesTask.ContinueWith(t =>
        {
            if (t.IsFaulted)
            {
                _sinceLastTimeout = 0;
                if (t.Exception?.InnerException is TimeoutException || (t.Exception?.InnerExceptions.Any(x => x is TimeoutException) ?? false) || (t.Exception?.InnerExceptions.Any(x => x.InnerException is TimeoutException) ?? false))
                {
                    // NOTE(review): guarded by IsTrace but calls _logger.Error - looks like a
                    // copy-paste slip; confirm whether IsError/Error or IsTrace/Trace was intended.
                    if (_logger.IsTrace)
                    {
                        _logger.Error("Failed to retrieve bodies when synchronizing (Timeout)", bodiesTask.Exception);
                    }

                    // Shrink the batch size after a timeout to reduce pressure on the peer.
                    _syncBatchSize.Shrink();
                }
                else
                {
                    if (_logger.IsError)
                    {
                        _logger.Error("Failed to retrieve bodies when synchronizing", bodiesTask.Exception);
                    }
                }

                throw new EthSynchronizationException("Bodies task faulted.", bodiesTask.Exception);
            }
        });

        if (bodiesTask.IsCanceled)
        {
            return (blocksSynced);
        }

        BlockBody[] bodies = bodiesTask.Result;
        Block[] blocks = new Block[bodies.Length];
        for (int i = 0; i < bodies.Length; i++)
        {
            blocks[i] = new Block(null, bodies[i].Transactions, bodies[i].Ommers);
        }

        // Expand the batch size again after a streak without timeouts.
        _sinceLastTimeout++;
        if (_sinceLastTimeout > 2)
        {
            _syncBatchSize.Expand();
        }

        // Reunite each body with its header (bodies arrive in the order of the hashes requested).
        for (int i = 0; i < blocks.Length; i++)
        {
            blocks[i].Header = headersByHash[hashes[i]];
        }

        if (blocks.Length > 0)
        {
            BlockHeader parent = _blockTree.FindParentHeader(blocks[0].Header);
            if (parent == null)
            {
                // Unknown parent - step back one batch to look for a common ancestor.
                ancestorLookupLevel += _syncBatchSize.Current;
                currentNumber = currentNumber >= _syncBatchSize.Current ? (currentNumber - _syncBatchSize.Current) : 0L;
                continue;
            }
        }

        for (int i = 0; i < blocks.Length; i++)
        {
            if (cancellation.IsCancellationRequested)
            {
                if (_logger.IsTrace)
                {
                    _logger.Trace("Peer sync cancelled");
                }

                break;
            }

            if (_logger.IsTrace)
            {
                _logger.Trace($"Received {blocks[i]} from {bestPeer}");
            }

            // can move this to block tree now?
            if (!_blockValidator.ValidateSuggestedBlock(blocks[i]))
            {
                throw new EthSynchronizationException($"{bestPeer} sent an invalid block {blocks[i].ToString(Block.Format.Short)}.");
            }

            if (HandleAddResult(blocks[i].Header, i == 0, _blockTree.SuggestBlock(blocks[i], shouldProcess)))
            {
                blocksSynced++;
            }

            currentNumber = currentNumber + 1;
        }

        if (blocksSynced > 0)
        {
            _syncStats.Update(_blockTree.BestSuggestedHeader?.Number ?? 0, bestPeer.HeadNumber, 1);
        }
    }

    return (blocksSynced);
}
// Processes a suggested block: walks back to the branching point against the current main
// chain, collects the branch blocks, runs them through the block processor and (unless the
// chain is read-only) updates the main chain.
public Block Process(Block suggestedBlock, ProcessingOptions options, IBlockTracer blockTracer)
{
    if (!RunSimpleChecksAheadOfProcessing(suggestedBlock, options))
    {
        return (null);
    }

    UInt256 totalDifficulty = suggestedBlock.TotalDifficulty ?? 0;
    if (_logger.IsTrace)
    {
        _logger.Trace($"Total difficulty of block {suggestedBlock.ToString(Block.Format.Short)} is {totalDifficulty}");
    }

    BlockHeader branchingPoint = null;
    Block[] processedBlocks = null;
    // Process only when the new branch is heavier than the head (or processing is forced).
    if (_blockTree.Head == null || totalDifficulty > _blockTree.Head.TotalDifficulty || (options & ProcessingOptions.ForceProcessing) != 0)
    {
        // Walk back from the suggested block until the main chain is reached (branching point).
        List<Block> blocksToBeAddedToMain = new List<Block>();
        Block toBeProcessed = suggestedBlock;
        do
        {
            blocksToBeAddedToMain.Add(toBeProcessed);
            if (_logger.IsTrace)
            {
                _logger.Trace($"To be processed (of {suggestedBlock.ToString(Block.Format.Short)}) is {toBeProcessed?.ToString(Block.Format.Short)}");
            }

            if (toBeProcessed.IsGenesis)
            {
                break;
            }

            branchingPoint = _blockTree.FindParentHeader(toBeProcessed.Header, BlockTreeLookupOptions.TotalDifficultyNotNeeded);
            if (branchingPoint == null)
            {
                break; //failure here
            }

            // After fast sync the head is still genesis even though we are processing high blocks.
            bool isFastSyncTransition = _blockTree.Head == _blockTree.Genesis && toBeProcessed.Number > 1;
            if (!isFastSyncTransition)
            {
                if (_logger.IsTrace)
                {
                    _logger.Trace($"Finding parent of {toBeProcessed.ToString(Block.Format.Short)}");
                }

                toBeProcessed = _blockTree.FindParent(toBeProcessed.Header, BlockTreeLookupOptions.None);
                if (_logger.IsTrace)
                {
                    _logger.Trace($"Found parent {toBeProcessed?.ToString(Block.Format.Short)}");
                }

                if (toBeProcessed == null)
                {
                    if (_logger.IsTrace)
                    {
                        _logger.Trace($"Treating this as fast sync transition for {suggestedBlock.ToString(Block.Format.Short)}");
                    }

                    break;
                }
            }
            else
            {
                break;
            }
        } while (!_blockTree.IsMainChain(branchingPoint.Hash));

        if (branchingPoint != null && branchingPoint.Hash != _blockTree.Head?.Hash)
        {
            // A reorganisation - branching away from the current head.
            if (_logger.IsTrace)
            {
                _logger.Trace($"Head block was: {_blockTree.Head?.ToString(BlockHeader.Format.Short)}");
            }

            if (_logger.IsTrace)
            {
                _logger.Trace($"Branching from: {branchingPoint.ToString(BlockHeader.Format.Short)}");
            }
        }
        else
        {
            if (_logger.IsTrace)
            {
                _logger.Trace(branchingPoint == null ? "Setting as genesis block" : $"Adding on top of {branchingPoint.ToString(BlockHeader.Format.Short)}");
            }
        }

        // The state root to start processing from (null when starting from genesis).
        Keccak stateRoot = branchingPoint?.StateRoot;
        if (_logger.IsTrace)
        {
            _logger.Trace($"State root lookup: {stateRoot}");
        }

        List<Block> blocksToProcess = new List<Block>();
        Block[] blocks;
        if ((options & ProcessingOptions.ForceProcessing) != 0)
        {
            // Forced processing handles only the suggested block and never touches the main chain.
            blocksToBeAddedToMain.Clear();
            blocks = new Block[1];
            blocks[0] = suggestedBlock;
        }
        else
        {
            foreach (Block block in blocksToBeAddedToMain)
            {
                if (block.Hash != null && _blockTree.WasProcessed(block.Number, block.Hash))
                {
                    if (_logger.IsInfo)
                    {
                        _logger.Info($"Rerunning block after reorg: {block.ToString(Block.Format.FullHashAndNumber)}");
                    }
                }

                blocksToProcess.Add(block);
            }

            // Reverse into parent-first order for processing.
            blocks = new Block[blocksToProcess.Count];
            for (int i = 0; i < blocksToProcess.Count; i++)
            {
                blocks[blocks.Length - i - 1] = blocksToProcess[i];
            }
        }

        if (_logger.IsTrace)
        {
            _logger.Trace($"Processing {blocks.Length} blocks from state root {stateRoot}");
        }

        for (int i = 0; i < blocks.Length; i++)
        {
            /* this can happen if the block was loaded as an ancestor and did not go through the recovery queue */
            _recoveryStep.RecoverData(blocks[i]);
        }

        try
        {
            processedBlocks = _blockProcessor.Process(stateRoot, blocks, options, blockTracer);
        }
        catch (InvalidBlockException ex)
        {
            // Delete the invalid block and abandon processing of the whole branch.
            for (int i = 0; i < blocks.Length; i++)
            {
                if (blocks[i].Hash == ex.InvalidBlockHash)
                {
                    _blockTree.DeleteInvalidBlock(blocks[i]);
                    if (_logger.IsDebug)
                    {
                        _logger.Debug($"Skipped processing of {suggestedBlock.ToString(Block.Format.FullHashAndNumber)} because of {blocks[i].ToString(Block.Format.FullHashAndNumber)} is invalid");
                    }

                    return (null);
                }
            }
        }

        if ((options & ProcessingOptions.ReadOnlyChain) == 0)
        {
            _blockTree.UpdateMainChain(blocksToBeAddedToMain.ToArray());
        }
    }
    else
    {
        if (_logger.IsDebug)
        {
            _logger.Debug($"Skipped processing of {suggestedBlock.ToString(Block.Format.FullHashAndNumber)}, Head = {_blockTree.Head?.ToString(BlockHeader.Format.Short)}, total diff = {totalDifficulty}, head total diff = {_blockTree.Head?.TotalDifficulty}");
        }
    }

    Block lastProcessed = null;
    if (processedBlocks != null && processedBlocks.Length > 0)
    {
        lastProcessed = processedBlocks[^1];
/// <summary>
/// Validates the header against its parent as resolved from the block tree.
/// </summary>
public override bool Validate(BlockHeader header, bool isUncle = false)
{
    var parent = _blockTree.FindParentHeader(header, BlockTreeLookupOptions.None);
    return Validate(header, parent, isUncle);
}
// Classifies a header with respect to the Merge transition: whether it is a terminal PoW block
// and whether it is a post-merge (PoS) block. Also normalizes the header's TotalDifficulty when
// the caller does not trust it, and stamps header.IsPostMerge with the result.
public (bool IsTerminal, bool IsPostMerge) GetBlockConsensusInfo(BlockHeader header, bool dontTrustTotalDifficulty = false)
{
    if (_logger.IsTrace)
    {
        _logger.Trace(
            $"GetBlockConsensusInfo {header.ToString(BlockHeader.Format.FullHashAndNumber)} header.IsPostMerge: {header.IsPostMerge} header.TotalDifficulty {header.TotalDifficulty} header.Difficulty {header.Difficulty} TTD: {_specProvider.TerminalTotalDifficulty} MergeBlockNumber {_specProvider.MergeBlockNumber}, TransitionFinished: {TransitionFinished}");
    }

    // When the caller distrusts the header's TD, recompute it from the parent's TD plus the
    // header difficulty, or invalidate it when the parent is unknown.
    if ((header.TotalDifficulty ?? 0) != 0 && dontTrustTotalDifficulty && header.IsGenesis == false)
    {
        BlockHeader? parentHeader = _blockTree.FindParentHeader(header, BlockTreeLookupOptions.None);
        if (parentHeader != null && parentHeader.TotalDifficulty != 0)
        {
            header.TotalDifficulty = parentHeader.TotalDifficulty + header.Difficulty;
        }
        else
        {
            header.TotalDifficulty = null;
        }
    }

    bool isTerminal = false, isPostMerge;
    if (header.IsPostMerge) // block from Engine API, there is no need to check more cases
    {
        isTerminal = false;
        isPostMerge = true;
    }
    else if (_specProvider.TerminalTotalDifficulty == null) // TTD = null, so everything is preMerge
    {
        isTerminal = false;
        isPostMerge = false;
    }
    else if (header.TotalDifficulty == null || (header.TotalDifficulty == 0 && header.IsGenesis == false)) // we don't know header TD, so we consider header.Difficulty
    {
        // Without TD, a zero difficulty is treated as a PoS block.
        isPostMerge = header.Difficulty == 0;
        isTerminal = false; // we can't say if block isTerminal if we don't have TD
    }
    else if (header.TotalDifficulty < _specProvider.TerminalTotalDifficulty) // pre TTD blocks
    {
        isTerminal = false;
        isPostMerge = false;
    }
    else
    {
        bool theMergeEnabled = header.Number >= _specProvider.MergeBlockNumber;
        if (TransitionFinished && theMergeEnabled || _terminalBlockExplicitSpecified && theMergeEnabled) // if transition finished or we know terminalBlock from config we can decide by blockNumber
        {
            isPostMerge = true;
        }
        else
        {
            isTerminal = header.IsTerminalBlock(_specProvider); // we're checking if block is terminal if not it should be PostMerge block
            isPostMerge = !isTerminal;
        }
    }

    // Persist the classification on the header itself.
    header.IsPostMerge = isPostMerge;
    if (_logger.IsTrace)
    {
        _logger.Trace(
            $"GetBlockConsensusInfo Result: IsTerminal: {isTerminal}, IsPostMerge: {isPostMerge}, {header.ToString(BlockHeader.Format.FullHashAndNumber)} header.IsPostMerge: {header.IsPostMerge} header.TotalDifficulty {header.TotalDifficulty} header.Difficulty {header.Difficulty} TTD: {_specProvider.TerminalTotalDifficulty} MergeBlockNumber {_specProvider.MergeBlockNumber}, TransitionFinished: {TransitionFinished}");
    }

    return (isTerminal, isPostMerge);
}