/// <summary>
/// Suggests the given block to the block tree; an invalid block is logged and dropped.
/// </summary>
private void ProcessBlock(Block block)
{
    try
    {
        _blockTree.SuggestBlock(block);
    }
    catch (InvalidBlockException e)
    {
        // Invalid blocks are not fatal here - log and continue.
        _logger.Error($"Invalid block: {block.Hash}, ignoring", e);
    }
}
// Builds a small chain fixture for ommer validation:
// genesis -> grandparent -> parent -> block, where parent already carries
// _duplicateOmmer (a sibling of _grandparent) as an included ommer.
public OmmersValidatorTests()
{
    _blockTree = Build.A.BlockTree().OfChainLength(1).TestObject;
    _grandgrandparent = _blockTree.FindBlock(0);
    _grandparent = Build.A.Block.WithParent(_grandgrandparent).TestObject;
    // Sibling of _grandparent (same parent) - used to test duplicate-ommer detection.
    _duplicateOmmer = Build.A.Block.WithParent(_grandgrandparent).TestObject;
    _parent = Build.A.Block.WithParent(_grandparent).WithOmmers(_duplicateOmmer).TestObject;
    _block = Build.A.Block.WithParent(_parent).TestObject;

    // Register the chain in the tree so lookups during validation succeed.
    _blockTree.SuggestBlock(_grandparent);
    _blockTree.SuggestBlock(_parent);
    _blockTree.SuggestBlock(_block);
}
/// <summary>
/// Prepares a new DEV block, processes it read-only, and suggests the result to the tree.
/// </summary>
private void ProduceNewBlock()
{
    Block block = PrepareBlock();
    if (block == null)
    {
        if (_logger.IsError) _logger.Error("Failed to prepare block for mining.");
        return;
    }

    Block processedBlock = _processor.Process(block, ProcessingOptions.NoValidation | ProcessingOptions.ReadOnlyChain | ProcessingOptions.WithRollback, NullBlockTracer.Instance);

    // FIX: check the processor result BEFORE dereferencing it. The original logged
    // processedBlock.ToString(...) above the null check, which threw
    // NullReferenceException whenever the processor rejected the block. Also removed
    // the duplicate "Mined a DEV block" info log that followed the null check.
    if (processedBlock == null)
    {
        if (_logger.IsError) _logger.Error("Block prepared by block producer was rejected by processor");
        return;
    }

    if (_logger.IsInfo) _logger.Info($"Mined a DEV block {processedBlock.ToString(Block.Format.FullHashAndNumber)} State Root: {processedBlock.StateRoot}");
    _blockTree.SuggestBlock(processedBlock);
}
/// <summary>
/// Reacts to a new pending transaction by producing and suggesting a fresh block.
/// </summary>
private void OnNewPendingTx(object sender, TransactionEventArgs e)
{
    Block block = PrepareBlock();
    if (block == null)
    {
        if (_logger.IsError) _logger.Error("Failed to prepare block for mining.");
        return;
    }

    // Read-only processing with rollback: the real state change happens on suggestion.
    const ProcessingOptions devOptions = ProcessingOptions.NoValidation | ProcessingOptions.ReadOnlyChain | ProcessingOptions.WithRollback;
    Block processedBlock = _processor.Process(block, devOptions, NullBlockTracer.Instance);
    if (processedBlock == null)
    {
        if (_logger.IsError) _logger.Error("Block prepared by block producer was rejected by processor");
        return;
    }

    if (_logger.IsInfo) _logger.Info($"Suggesting newly mined block {processedBlock.ToString(Block.Format.HashAndNumber)}");
    _blockTree.SuggestBlock(processedBlock);
}
// Clique producer timer tick: when no block is scheduled and the block period has
// elapsed, signals production from the current head; otherwise suggests the
// scheduled block once its (wiggle-delayed) time arrives.
private void TimerOnElapsed(object sender, ElapsedEventArgs e)
{
    try
    {
        if (_blockTree.Head == null)
        {
            _timer.Enabled = true;
            return;
        }

        if (_scheduledBlock == null)
        {
            if (_blockTree.Head.Timestamp + _config.BlockPeriod < _timestamp.EpochSeconds)
            {
                _signalsQueue.Add(_blockTree.FindBlock(_blockTree.Head.Hash, false));
            }

            _timer.Enabled = true;
            return;
        }

        // Out-of-turn signers wait a random extra delay ("wiggle") before publishing,
        // so the in-turn signer usually wins.
        ulong extraDelayMilliseconds = 0;
        if (_scheduledBlock.Difficulty == Clique.DifficultyNoTurn)
        {
            // FIX: the original evaluated `Signers.Count / 2 + 1 * Clique.WiggleTime`
            // ('*' binds tighter than '+'), i.e. signers/2 plus one WiggleTime.
            // The clique wiggle is (signers/2 + 1) * WiggleTime.
            int wiggle = (_snapshotManager.GetOrCreateSnapshot(_scheduledBlock.Header.Number - 1, _scheduledBlock.Header.ParentHash).Signers.Count / 2 + 1) * Clique.WiggleTime;
            extraDelayMilliseconds += (ulong)_cryptoRandom.NextInt(wiggle);
        }

        if (_scheduledBlock.Timestamp * 1000 + extraDelayMilliseconds < _timestamp.EpochMilliseconds)
        {
            if (_scheduledBlock.Number > _blockTree.Head.Number)
            {
                if (_logger.IsInfo) _logger.Info($"Suggesting own block {_scheduledBlock.ToString(Block.Format.HashNumberDiffAndTx)}");
                _blockTree.SuggestBlock(_scheduledBlock);
            }
            else
            {
                // A competing block already advanced the head - drop ours.
                if (_logger.IsInfo) _logger.Info($"Dropping a losing block {_scheduledBlock.ToString(Block.Format.HashNumberDiffAndTx)}");
            }

            _scheduledBlock = null;
        }

        _timer.Enabled = true;
    }
    catch (Exception exception)
    {
        if (_logger.IsError) _logger.Error("Clique block producer failure", exception);
    }
}
/// <summary>
/// Suggests a locally produced block unless it is post-merge.
/// </summary>
private void OnBlockProduced(object? sender, BlockEventArgs e)
{
    // PostMerge blocks are suggested in Engine API
    if (e.Block.IsPostMerge)
    {
        return;
    }

    _blockTree.SuggestBlock(e.Block);
}
// Loads the genesis block from the chain spec into the block tree, unless the tree
// already has one (i.e. an existing database is being reused). Seeds state with the
// spec's allocations, then suggests genesis and waits for it to become the head.
private static void LoadGenesisBlock(
    ChainSpec chainSpec,
    Keccak expectedGenesisHash,
    IBlockTree blockTree,
    IStateProvider stateProvider,
    ISpecProvider specProvider)
{
    // if we already have a database with blocks then we do not need to load genesis from spec
    if (blockTree.Genesis != null)
    {
        return;
    }

    // Seed state with pre-allocated accounts: balance plus optional contract code.
    foreach ((Address address, (UInt256 balance, byte[] code)) in chainSpec.Allocations)
    {
        stateProvider.CreateAccount(address, balance);
        if (code != null)
        {
            Keccak codeHash = stateProvider.UpdateCode(code);
            stateProvider.UpdateCodeHash(address, codeHash, specProvider.GenesisSpec);
        }
    }

    stateProvider.Commit(specProvider.GenesisSpec);

    Block genesis = chainSpec.Genesis;
    // The spec's genesis does not carry state root / hash - compute them now.
    genesis.StateRoot = stateProvider.StateRoot;
    genesis.Hash = BlockHeader.CalculateHash(genesis.Header);

    ManualResetEventSlim genesisProcessedEvent = new ManualResetEventSlim(false);

    bool genesisLoaded = false;

    // One-shot handler: unsubscribes itself as soon as genesis becomes the new head.
    void GenesisProcessed(object sender, BlockEventArgs args)
    {
        genesisLoaded = true;
        blockTree.NewHeadBlock -= GenesisProcessed;
        genesisProcessedEvent.Set();
    }

    blockTree.NewHeadBlock += GenesisProcessed;
    blockTree.SuggestBlock(genesis);
    // Bounded wait so startup cannot hang forever if processing fails silently.
    genesisProcessedEvent.Wait(TimeSpan.FromSeconds(5));
    if (!genesisLoaded)
    {
        throw new BlockchainException("Genesis block processing failure");
    }

    // if expectedGenesisHash is null here then it means that we do not care about the exact value in advance (e.g. in test scenarios)
    if (expectedGenesisHash != null && blockTree.Genesis.Hash != expectedGenesisHash)
    {
        throw new Exception($"Unexpected genesis hash, expected {expectedGenesisHash}, but was {blockTree.Genesis.Hash}");
    }
}
/// <summary>
/// Builds and suggests <paramref name="blockAmount"/> consecutive blocks on top of the current head.
/// </summary>
private static void SuggestNumberOfBlocks(IBlockTree blockTree, int blockAmount)
{
    Block parent = blockTree.Head;
    for (int added = 0; added < blockAmount; added++)
    {
        Block child = Build.A.Block
            .WithNumber(parent !.Number + 1)
            .WithDifficulty(parent.Difficulty + 1)
            .WithParent(parent)
            .TestObject;
        blockTree.SuggestBlock(child);
        parent = child;
    }
}
// Test fixture setup: an in-memory block tree seeded with genesis, plus the
// header mapping and response builder used by the tests.
public void Setup()
{
    Block genesis = Build.A.Block.Genesis.TestObject;
    _blockTree = new BlockTree(new MemDb(), new MemDb(), new MemDb(), MainNetSpecProvider.Instance, NullTxPool.Instance, LimboLogs.Instance);
    _blockTree.SuggestBlock(genesis);

    _testHeaderMapping = new Dictionary<long, Keccak> { { 0, genesis.Hash } };
    _responseBuilder = new ResponseBuilder(_blockTree, _testHeaderMapping);
}
// Test fixture setup: an Ethash seal validator plus a two-block chain
// (parent -> child) with a fixed mix hash / nonce for deterministic seal checks.
public void Setup()
{
    // Difficulty calculation pinned to Frontier rules on mainnet chain id.
    EthashDifficultyCalculator calculator = new(new SingleReleaseSpecProvider(Frontier.Instance, ChainId.Mainnet));
    _ethash = new EthashSealValidator(LimboLogs.Instance, calculator, new CryptoRandom(), new Ethash(LimboLogs.Instance), Timestamper.Default);
    _testLogger = new TestLogger();
    MemDb blockInfoDb = new();
    _blockTree = new BlockTree(new MemDb(), new MemDb(), blockInfoDb, new ChainLevelInfoRepository(blockInfoDb), FrontierSpecProvider.Instance, Substitute.For <IBloomStorage>(), LimboLogs.Instance);
    // NOTE(review): the validator runs under Byzantium (chain id 3) while the tree uses
    // Frontier - presumably intentional for these tests; confirm against the test cases.
    _specProvider = new SingleReleaseSpecProvider(Byzantium.Instance, 3);
    _validator = new HeaderValidator(_blockTree, _ethash, _specProvider, new OneLoggerLogManager(_testLogger));
    _parentBlock = Build.A.Block.WithDifficulty(1).TestObject;
    // Child with fixed mix hash and nonce so the Ethash seal is reproducible.
    _block = Build.A.Block.WithParent(_parentBlock)
        .WithDifficulty(131072)
        .WithMixHash(new Keccak("0xd7db5fdd332d3a65d6ac9c4c530929369905734d3ef7a91e373e81d0f010b8e8"))
        .WithNonce(0).TestObject;

    _blockTree.SuggestBlock(_parentBlock);
    _blockTree.SuggestBlock(_block);
}
// Test context wiring a BeaconHeadersSyncFeed against either the supplied block
// tree or a freshly built in-memory tree seeded with a genesis block.
public Context(
    IBlockTree? blockTree = null,
    ISyncConfig? syncConfig = null,
    IBeaconPivot? beaconPivot = null,
    IDb? metadataDb = null,
    IMergeConfig? mergeConfig = null)
{
    if (blockTree == null)
    {
        IDb blockInfoDb = new MemDb();
        Block genesis = Build.A.Block.Genesis.TestObject;
        BlockTree = new BlockTree(new MemDb(), new MemDb(), blockInfoDb, new ChainLevelInfoRepository(blockInfoDb), MainnetSpecProvider.Instance, NullBloomStorage.Instance, LimboLogs.Instance);
        BlockTree.SuggestBlock(genesis);
    }
    else
    {
        BlockTree = blockTree;
    }

    ISyncPeerPool peerPool = Substitute.For<ISyncPeerPool>();
    ISyncReport report = Substitute.For<ISyncReport>();
    MeasuredProgress measuredProgress = new MeasuredProgress();
    report.BeaconHeaders.Returns(measuredProgress);
    report.HeadersInQueue.Returns(measuredProgress);

    MemDb stateDb = new();
    _syncConfig = syncConfig ?? new SyncConfig();
    _mergeConfig = mergeConfig ?? new MergeConfig();
    _metadataDb = metadataDb ?? new MemDb();

    // FIX: use the resolved BlockTree property below. The original passed the raw
    // 'blockTree' parameter (null-forgiven as 'blockTree !') to PoSSwitcher and to the
    // feed, handing them null whenever the default in-memory tree was built above.
    PoSSwitcher poSSwitcher = new(_mergeConfig, _syncConfig, _metadataDb, BlockTree, MainnetSpecProvider.Instance, LimboLogs.Instance);
    ProgressTracker progressTracker = new(BlockTree, stateDb, LimboLogs.Instance);
    SyncProgressResolver syncProgressResolver = new(
        BlockTree,
        NullReceiptStorage.Instance,
        stateDb,
        new TrieStore(stateDb, LimboLogs.Instance),
        progressTracker,
        _syncConfig,
        LimboLogs.Instance);

    TotalDifficultyBetterPeerStrategy bestPeerStrategy = new(LimboLogs.Instance);
    BeaconPivot = beaconPivot ?? new BeaconPivot(_syncConfig, _metadataDb, BlockTree, LimboLogs.Instance);
    BeaconSync = new(BeaconPivot, BlockTree, _syncConfig, new BlockCacheService(), LimboLogs.Instance);
    ISyncModeSelector selector = new MultiSyncModeSelector(syncProgressResolver, peerPool, _syncConfig, BeaconSync, bestPeerStrategy, LimboLogs.Instance);
    Feed = new BeaconHeadersSyncFeed(poSSwitcher, selector, BlockTree, peerPool, _syncConfig, report, BeaconPivot, _mergeConfig, new NoopInvalidChainTracker(), LimboLogs.Instance);
}
/// <summary>
/// Extends the chain from the current head up to (exclusive) <paramref name="newChainLength"/>,
/// promoting each suggested block to the main chain.
/// </summary>
public static void ExtendTree(IBlockTree blockTree, long newChainLength)
{
    Block current = blockTree.RetrieveHeadBlock();
    for (long number = current.Number + 1; number < newChainLength; number++)
    {
        current = Build.A.Block.WithNumber(number).WithParent(current).TestObject;
        blockTree.SuggestBlock(current);
        blockTree.UpdateMainChain(new[] { current }, true);
    }
}
// Builds a synthetic BlockBodiesMessage response for the requested hashes,
// shaped by the Response flags (consistency, known-ness, simulated timeout, ...).
public async Task <BlockBody[]> BuildBlocksResponse(Keccak[] blockHashes, Response flags)
{
    bool consistent = flags.HasFlag(Response.Consistent);
    bool validSeals = flags.HasFlag(Response.ValidSeals);        // NOTE(review): read but unused in this method
    bool noEmptySpaces = flags.HasFlag(Response.NoEmptySpace);   // NOTE(review): read but unused in this method
    bool justFirst = flags.HasFlag(Response.JustFirstHeader);
    bool allKnown = flags.HasFlag(Response.AllKnown);
    bool timeoutOnFullBatch = flags.HasFlag(Response.TimeoutOnFullBatch);

    // Simulate a peer timing out on a maximum-size batch.
    if (timeoutOnFullBatch && blockHashes.Length == SyncBatchSize.Max)
    {
        throw new TimeoutException();
    }

    BlockHeader startHeader = _blockTree.FindHeader(blockHashes[0], false);
    if (startHeader == null)
    {
        startHeader = Build.A.BlockHeader.WithHash(blockHashes[0]).TestObject;
    }

    BlockHeader[] blockHeaders = new BlockHeader[blockHashes.Length];
    BlockBody[] blockBodies = new BlockBody[blockHashes.Length];
    // First body is always empty (no transactions, no ommers).
    blockBodies[0] = new BlockBody(new Transaction[0], new BlockHeader[0]);
    blockHeaders[0] = startHeader;
    if (!justFirst)
    {
        for (int i = 1; i < blockHashes.Length; i++)
        {
            // Consistent: each header links to its predecessor; otherwise only numbers are sequential.
            blockHeaders[i] = consistent
                ? Build.A.BlockHeader.WithParent(blockHeaders[i - 1]).TestObject
                : Build.A.BlockHeader.WithNumber(blockHeaders[i - 1].Number + 1).TestObject;
            _testHeaderMapping[startHeader.Number + i] = blockHeaders[i].Hash;

            Block block = consistent
                ? Build.A.Block.WithHeader(blockHeaders[i]).TestObject
                : Build.A.Block.WithHeader(blockHeaders[i - 1]).TestObject;
            blockBodies[i] = new BlockBody(block.Transactions, block.Ommers);
            if (allKnown)
            {
                // Register the block so the tree reports it as already known.
                _blockTree.SuggestBlock(block);
            }
        }
    }

    // Round-trip through the serializer so the response matches the wire format.
    BlockBodiesMessage message = new BlockBodiesMessage(blockBodies);
    byte[] messageSerialized = _bodiesSerializer.Serialize(message);
    return(await Task.FromResult(_bodiesSerializer.Deserialize(messageSerialized).Bodies));
}
/// <summary>
/// Extends the chain from the current head up to (exclusive) <paramref name="newChainLength"/>,
/// marking each suggested block as processed and moving it to the main chain.
/// </summary>
public static void ExtendTree(IBlockTree blockTree, int newChainLength)
{
    Block current = blockTree.RetrieveHeadBlock();
    for (int number = (int)current.Number + 1; number < newChainLength; number++)
    {
        current = Build.A.Block.WithNumber(number).WithParent(current).TestObject;
        blockTree.SuggestBlock(current);
        blockTree.MarkAsProcessed(current.Hash);
        blockTree.MoveToMain(current.Hash);
    }
}
/// <summary>
/// Suggests a HIVE-delivered block to the tree; an invalid block is logged and skipped.
/// </summary>
private void ProcessBlock(Block block)
{
    try
    {
        _blockTree.SuggestBlock(block);
        if (_logger.IsInfo)
        {
            _logger.Info($"HIVE suggested {block.ToString(Block.Format.Short)}, now best suggested header {_blockTree.BestSuggestedHeader}, head {_blockTree.Head.ToString(BlockHeader.Format.Short)}");
        }
    }
    catch (InvalidBlockException e)
    {
        // Invalid blocks are non-fatal in HIVE mode - report and keep going.
        _logger.Error($"HIVE Invalid block: {block.Hash}, ignoring", e);
    }
}
// Once the processing queue drains, produces a block, seals it asynchronously
// and suggests the sealed result.
private void OnBlockProcessorQueueEmpty(object sender, EventArgs e)
{
    CancellationToken token;
    // Recreate the cancellation source under the lock so concurrent resets do not race.
    lock (_syncToken)
    {
        _cancellationTokenSource = new CancellationTokenSource();
        token = _cancellationTokenSource.Token;
    }

    if (!_sealEngine.CanSeal)
    {
        return;
    }

    Block block = PrepareBlock();
    if (block == null)
    {
        if (_logger.IsError) _logger.Error("Failed to prepare block for mining.");
        return;
    }

    Block processedBlock = _processor.Process(block, ProcessingOptions.NoValidation | ProcessingOptions.ReadOnlyChain | ProcessingOptions.WithRollback, NullBlockTracer.Instance);

    // FIX: guard against processor rejection (null result) before sealing - the sibling
    // producers in this file all have this check; without it SealBlock receives null.
    if (processedBlock == null)
    {
        if (_logger.IsError) _logger.Error("Block prepared by block producer was rejected by processor");
        return;
    }

    _sealEngine.SealBlock(processedBlock, token).ContinueWith(t =>
    {
        if (t.IsCompletedSuccessfully)
        {
            _blockTree.SuggestBlock(t.Result);
        }
        else if (t.IsFaulted)
        {
            // FIX: corrected message typo ("Mining failer") and added the IsError
            // guard used by every other log call in this handler.
            if (_logger.IsError) _logger.Error("Mining failed", t.Exception);
        }
        else if (t.IsCanceled)
        {
            if (_logger.IsDebug) _logger.Debug($"Mining block {processedBlock.ToString(Block.Format.HashAndNumber)} cancelled");
        }
    }, token);
}
// Builds a synthetic Block[] response for the requested hashes, shaped by the
// Response flags (consistency, known-ness, simulated timeout, ...).
public async Task <Block[]> BuildBlocksResponse(Keccak[] blockHashes, Response flags)
{
    bool consistent = flags.HasFlag(Response.Consistent);
    bool validSeals = flags.HasFlag(Response.ValidSeals);        // NOTE(review): read but unused in this method
    bool noEmptySpaces = flags.HasFlag(Response.NoEmptySpace);   // NOTE(review): read but unused in this method
    bool justFirst = flags.HasFlag(Response.JustFirstHeader);
    bool allKnown = flags.HasFlag(Response.AllKnown);
    bool timeoutOnFullBatch = flags.HasFlag(Response.TimeoutOnFullBatch);

    // Simulate a peer timing out on a maximum-size batch.
    if (timeoutOnFullBatch && blockHashes.Length == SyncBatchSize.Max)
    {
        throw new TimeoutException();
    }

    BlockHeader startBlock = _blockTree.FindHeader(blockHashes[0], false);
    if (startBlock == null)
    {
        startBlock = Build.A.BlockHeader.WithHash(blockHashes[0]).TestObject;
    }

    BlockHeader[] blockHeaders = new BlockHeader[blockHashes.Length];
    Block[] blocks = new Block[blockHashes.Length];
    blocks[0] = new Block(startBlock);
    blockHeaders[0] = startBlock;
    if (!justFirst)
    {
        for (int i = 1; i < blockHashes.Length; i++)
        {
            // Consistent: each header links to its predecessor; otherwise only numbers are sequential.
            blockHeaders[i] = consistent
                ? Build.A.BlockHeader.WithParent(blockHeaders[i - 1]).TestObject
                : Build.A.BlockHeader.WithNumber(blockHeaders[i - 1].Number + 1).TestObject;
            _testHeaderMapping[startBlock.Number + i] = blockHeaders[i].Hash;

            blocks[i] = consistent
                ? Build.A.Block.WithHeader(blockHeaders[i]).TestObject
                : Build.A.Block.WithHeader(blockHeaders[i - 1]).TestObject;
            if (allKnown)
            {
                // Register the block so the tree reports it as already known.
                _blockTree.SuggestBlock(blocks[i]);
            }
        }
    }

    return(await Task.FromResult(blocks));
}
// Handles one block received from a peer during sync: validates it and, when its
// parent is already known, suggests it for immediate processing.
private void SyncBlock(Block block, ISyncPeer syncPeer)
{
    if (_logger.IsTrace) { _logger.Trace($"{block}"); }

    // we do not trust total difficulty from peers
    // Parity sends invalid data here and it is equally expensive to validate and to set from null
    block.Header.TotalDifficulty = null;

    bool isKnownParent = _blockTree.IsKnownBlock(block.Number - 1, block.ParentHash);
    if (isKnownParent)
    {
        if (!_blockValidator.ValidateSuggestedBlock(block))
        {
            string message = $"Peer {syncPeer?.Node:c} sent an invalid block";
            if (_logger.IsDebug) { _logger.Debug(message); }

            // Forget the invalid block so a later valid copy of it is not deduplicated away.
            lock (_recentlySuggested)
            {
                _recentlySuggested.Delete(block.Hash);
            }

            throw new EthSyncException(message);
        }

        // true => request immediate processing of the suggested block.
        AddBlockResult result = _blockTree.SuggestBlock(block, true);
        if (_logger.IsTrace) { _logger.Trace($"{block.Hash} ({block.Number}) adding result is {result}"); }
    }

    // TODO: now it should be done by sync peer pool?
    // // do not change to if..else
    // // there are some rare cases when it did not work...
    // // do not remember why
    // if (result == AddBlockResult.UnknownParent)
    // {
    //     _synchronizer.RequestSynchronization(SyncTriggerType.NewBlock);
    // }
}
// Handles one block received from a peer during sync: validates it and, when its
// parent is already known, suggests it to the tree (default processing behavior).
private void SyncBlock(Block block, ISyncPeer syncPeer)
{
    if (_logger.IsTrace) { _logger.Trace($"{block}"); }

    // we do not trust total difficulty from peers
    // Parity sends invalid data here and it is equally expensive to validate and to set from null
    block.Header.TotalDifficulty = null;

    bool isKnownParent = _blockTree.IsKnownBlock(block.Number - 1, block.ParentHash);
    if (isKnownParent)
    {
        if (!_blockValidator.ValidateSuggestedBlock(block))
        {
            string message = $"Peer {syncPeer?.Node:c} sent an invalid block";
            if (_logger.IsDebug) { _logger.Debug(message); }

            // Forget the invalid block so a later valid copy of it is not deduplicated away.
            lock (_recentlySuggested)
            {
                _recentlySuggested.Delete(block.Hash);
            }

            throw new EthSyncException(message);
        }

        AddBlockResult result = _blockTree.SuggestBlock(block);
        if (_logger.IsTrace) { _logger.Trace($"{block.Hash} ({block.Number}) adding result is {result}"); }
    }
}
// Main block-processing pipeline: finds the branching point against the current
// main chain, processes the unprocessed suffix of the branch, then either updates
// the main chain (sync path) or hands the result to the seal engine (mining path).
private void Process(Block suggestedBlock, bool forMining)
{
    // NOTE(review): "porcessing" typo preserved - it is a runtime error message.
    if (suggestedBlock.Number != 0 && _blockTree.FindParent(suggestedBlock) == null)
    {
        throw new InvalidOperationException("Got an orphaned block for porcessing.");
    }

    if (suggestedBlock.Header.TotalDifficulty == null)
    {
        throw new InvalidOperationException("block without total difficulty calculated was suggested for processing");
    }

    if (!forMining && suggestedBlock.Hash == null)
    {
        throw new InvalidOperationException("block hash should be known at this stage if the block is not mining");
    }

    foreach (BlockHeader ommerHeader in suggestedBlock.Ommers)
    {
        if (ommerHeader.Hash == null)
        {
            throw new InvalidOperationException("ommer's hash is null when processing block");
        }
    }

    BigInteger totalDifficulty = suggestedBlock.TotalDifficulty ?? 0;
    BigInteger totalTransactions = suggestedBlock.TotalTransactions ?? 0;
    if (_logger.IsDebugEnabled)
    {
        _logger.Debug($"Total difficulty of block {suggestedBlock.ToString(Block.Format.Short)} is {totalDifficulty}");
        _logger.Debug($"Total transactions of block {suggestedBlock.ToString(Block.Format.Short)} is {totalTransactions}");
    }

    // Only reorganize when the suggested branch beats the current head on total difficulty.
    if (totalDifficulty > (_blockTree.Head?.TotalDifficulty ?? 0))
    {
        // Walk back from the suggested block to the first ancestor already on the main chain.
        List <Block> blocksToBeAddedToMain = new List <Block>();
        Block toBeProcessed = suggestedBlock;
        do
        {
            blocksToBeAddedToMain.Add(toBeProcessed);
            toBeProcessed = toBeProcessed.Number == 0 ? null : _blockTree.FindParent(toBeProcessed);
            // TODO: need to remove the hardcoded head block store at keccak zero as it would be referenced by the genesis...
            if (toBeProcessed == null)
            {
                break;
            }
        } while (!_blockTree.IsMainChain(toBeProcessed.Hash));

        // Branching point = common ancestor (null when building from genesis).
        BlockHeader branchingPoint = toBeProcessed?.Header;
        if (branchingPoint != null && branchingPoint.Hash != _blockTree.Head?.Hash)
        {
            if (_logger.IsDebugEnabled)
            {
                _logger.Debug($"Head block was: {_blockTree.Head?.ToString(BlockHeader.Format.Short)}");
                _logger.Debug($"Branching from: {branchingPoint.ToString(BlockHeader.Format.Short)}");
            }
        }
        else
        {
            if (_logger.IsDebugEnabled)
            {
                _logger.Debug(branchingPoint == null ? "Setting as genesis block" : $"Adding on top of {branchingPoint.ToString(BlockHeader.Format.Short)}");
            }
        }

        // Processing starts from the branching point's state root (null => empty state).
        Keccak stateRoot = branchingPoint?.StateRoot;
        if (_logger.IsTraceEnabled)
        {
            _logger.Trace($"State root lookup: {stateRoot}");
        }

        // Skip the already-processed prefix of the branch (sync path only).
        List <Block> unprocessedBlocksToBeAddedToMain = new List <Block>();
        foreach (Block block in blocksToBeAddedToMain)
        {
            if (!forMining && _blockTree.WasProcessed(block.Hash))
            {
                stateRoot = block.Header.StateRoot;
                if (_logger.IsTraceEnabled)
                {
                    _logger.Trace($"State root lookup: {stateRoot}");
                }
                break;
            }

            unprocessedBlocksToBeAddedToMain.Add(block);
        }

        // The walk collected blocks child-first; reverse into parent-first processing order.
        Block[] blocks = new Block[unprocessedBlocksToBeAddedToMain.Count];
        for (int i = 0; i < unprocessedBlocksToBeAddedToMain.Count; i++)
        {
            blocks[blocks.Length - i - 1] = unprocessedBlocksToBeAddedToMain[i];
        }

        if (_logger.IsDebugEnabled)
        {
            _logger.Debug($"Processing {blocks.Length} blocks from state root {stateRoot}");
        }

        //TODO: process blocks one by one here, refactor this, test
        // Recover sender addresses lazily: only when the first tx has no sender yet.
        for (int i = 0; i < blocks.Length; i++)
        {
            if (blocks[i].Transactions.Length > 0 && blocks[i].Transactions[0].SenderAddress == null)
            {
                _signer.RecoverAddresses(blocks[i]);
            }
        }

        Block[] processedBlocks = _blockProcessor.Process(stateRoot, blocks, forMining);

        // TODO: lots of unnecessary loading and decoding here, review after adding support for loading headers only
        // Collect the old main-chain suffix (head back to branching point) for demotion.
        List <BlockHeader> blocksToBeRemovedFromMain = new List <BlockHeader>();
        if (_blockTree.Head?.Hash != branchingPoint?.Hash && _blockTree.Head != null)
        {
            blocksToBeRemovedFromMain.Add(_blockTree.Head);
            BlockHeader teBeRemovedFromMain = _blockTree.FindHeader(_blockTree.Head.ParentHash);
            while (teBeRemovedFromMain != null && teBeRemovedFromMain.Hash != branchingPoint?.Hash)
            {
                blocksToBeRemovedFromMain.Add(teBeRemovedFromMain);
                teBeRemovedFromMain = _blockTree.FindHeader(teBeRemovedFromMain.ParentHash);
            }
        }

        if (!forMining)
        {
            foreach (Block processedBlock in processedBlocks)
            {
                if (_logger.IsDebugEnabled)
                {
                    _logger.Debug($"Marking {processedBlock.ToString(Block.Format.Short)} as processed");
                }

                // TODO: review storage and retrieval of receipts since we removed them from the block class
                _blockTree.MarkAsProcessed(processedBlock.Hash);
            }

            Block newHeadBlock = processedBlocks[processedBlocks.Length - 1];
            newHeadBlock.Header.TotalDifficulty = suggestedBlock.TotalDifficulty; // TODO: cleanup total difficulty
            if (_logger.IsDebugEnabled)
            {
                _logger.Debug($"Setting head block to {newHeadBlock.ToString(Block.Format.Short)}");
            }

            // Demote the old branch from the main chain...
            foreach (BlockHeader blockHeader in blocksToBeRemovedFromMain)
            {
                if (_logger.IsDebugEnabled)
                {
                    _logger.Debug($"Moving {blockHeader.ToString(BlockHeader.Format.Short)} to branch");
                }

                _blockTree.MoveToBranch(blockHeader.Hash);
                // TODO: only for miners
                //foreach (Transaction transaction in block.Transactions)
                //{
                //    _transactionStore.AddPending(transaction);
                //}
                if (_logger.IsDebugEnabled)
                {
                    _logger.Debug($"Block {blockHeader.ToString(BlockHeader.Format.Short)} moved to branch");
                }
            }

            // ...and promote the new one, clearing its transactions from the pending pool.
            foreach (Block block in blocksToBeAddedToMain)
            {
                if (_logger.IsDebugEnabled)
                {
                    _logger.Debug($"Moving {block.ToString(Block.Format.Short)} to main");
                }

                _blockTree.MoveToMain(block);
                // TODO: only for miners
                foreach (Transaction transaction in block.Transactions)
                {
                    _transactionStore.RemovePending(transaction);
                }

                if (_logger.IsDebugEnabled)
                {
                    _logger.Debug($"Block {block.ToString(Block.Format.Short)} added to main chain");
                }
            }

            // NOTE(review): this message was line-wrapped in the extracted source; rejoined with a single space.
            if (_logger.IsDebugEnabled)
            {
                _logger.Debug($"Updating total difficulty of the main chain to {totalDifficulty}");
            }

            if (_logger.IsDebugEnabled)
            {
                _logger.Debug($"Updating total transactions of the main chain to {totalTransactions}");
            }
        }
        else
        {
            // Mining path: hand the processed candidate to the seal engine asynchronously.
            Block blockToBeMined = processedBlocks[processedBlocks.Length - 1];
            _miningCancellation = new CancellationTokenSource();
            CancellationTokenSource anyCancellation = CancellationTokenSource.CreateLinkedTokenSource(_miningCancellation.Token, _loopCancellationSource.Token);
            _sealEngine.MineAsync(blockToBeMined, anyCancellation.Token).ContinueWith(t =>
            {
                anyCancellation.Dispose();
                if (_logger.IsInfoEnabled)
                {
                    _logger.Info($"Mined a block {t.Result.ToString(Block.Format.Short)} with parent {t.Result.Header.ParentHash}");
                }

                Block minedBlock = t.Result;
                if (minedBlock.Hash == null)
                {
                    throw new InvalidOperationException("Mined a block with null hash");
                }

                _blockTree.SuggestBlock(minedBlock);
            }, _miningCancellation.Token);
        }
    }
}
// Produces a block on top of parentHeader, processes it read-only, seals it
// asynchronously and suggests the sealed block to the tree.
private void ProduceNewBlock(BlockHeader parentHeader)
{
    _stateProvider.StateRoot = parentHeader.StateRoot;
    Block block = PrepareBlock(parentHeader);
    if (block == null)
    {
        if (_logger.IsError) _logger.Error("Failed to prepare block for mining.");
        return;
    }

    // Without pending transactions we only seal when ForceSealing is configured.
    if (block.Transactions.Length == 0)
    {
        if (_config.ForceSealing)
        {
            if (_logger.IsDebug) _logger.Debug($"Force sealing block {block.Number} without transactions.");
        }
        else
        {
            if (_logger.IsDebug) _logger.Debug($"Skip seal block {block.Number}, no transactions pending.");
            return;
        }
    }

    Block processedBlock = _processor.Process(block, ProcessingOptions.NoValidation | ProcessingOptions.ReadOnlyChain | ProcessingOptions.WithRollback, NullBlockTracer.Instance);

    // FIX: check for rejection BEFORE logging - the original dereferenced
    // processedBlock in the info log above the null check, which threw
    // NullReferenceException whenever the processor rejected the block.
    if (processedBlock == null)
    {
        if (_logger.IsError) _logger.Error("Block prepared by block producer was rejected by processor");
        return;
    }

    if (_logger.IsInfo) _logger.Info($"Mined a DEV block {processedBlock.ToString(Block.Format.FullHashAndNumber)} State Root: {processedBlock.StateRoot}");

    _sealer.SealBlock(processedBlock, _cancellationTokenSource.Token).ContinueWith(t =>
    {
        if (t.IsCompletedSuccessfully)
        {
            if (t.Result != null)
            {
                if (_logger.IsInfo) _logger.Info($"Sealed block {t.Result.ToString(Block.Format.HashNumberDiffAndTx)}");
                _blockTree.SuggestBlock(t.Result);
            }
            else
            {
                if (_logger.IsInfo) _logger.Info($"Failed to seal block {processedBlock.ToString(Block.Format.HashNumberDiffAndTx)} (null seal)");
            }
        }
        else if (t.IsFaulted)
        {
            if (_logger.IsError) _logger.Error("Mining failed", t.Exception);
        }
        else if (t.IsCanceled)
        {
            if (_logger.IsInfo) _logger.Info($"Sealing block {processedBlock.Number} cancelled");
        }
    }, _cancellationTokenSource.Token);
}
// Downloads blocks from the best peer until we catch up (minus the ignored tail),
// optionally downloading receipts, processing blocks and moving them to the main
// chain depending on the supplied options. Returns the number of blocks synced.
public async Task <long> DownloadBlocks(PeerInfo bestPeer, int numberOfLatestBlocksToBeIgnored, CancellationToken cancellation, BlockDownloaderOptions options = BlockDownloaderOptions.Process)
{
    IReceiptsRecovery receiptsRecovery = new ReceiptsRecovery();
    if (bestPeer == null)
    {
        string message = $"Not expecting best peer to be null inside the {nameof(BlockDownloader)}";
        if (_logger.IsError) { _logger.Error(message); }
        // NOTE(review): 'message' is passed as ArgumentNullException's paramName argument here - verify intent.
        throw new ArgumentNullException(message);
    }

    bool downloadReceipts = (options & BlockDownloaderOptions.DownloadReceipts) == BlockDownloaderOptions.DownloadReceipts;
    bool shouldProcess = (options & BlockDownloaderOptions.Process) == BlockDownloaderOptions.Process;
    bool shouldMoveToMain = (options & BlockDownloaderOptions.MoveToMain) == BlockDownloaderOptions.MoveToMain;

    int blocksSynced = 0;
    int ancestorLookupLevel = 0;

    long currentNumber = Math.Max(0, Math.Min(_blockTree.BestKnownNumber, bestPeer.HeadNumber - 1));
    // pivot number - 6 for uncle validation
    // long currentNumber = Math.Max(Math.Max(0, pivotNumber - 6), Math.Min(_blockTree.BestKnownNumber, bestPeer.HeadNumber - 1));
    while (bestPeer.TotalDifficulty > (_blockTree.BestSuggestedHeader?.TotalDifficulty ?? 0) && currentNumber <= bestPeer.HeadNumber)
    {
        if (_logger.IsDebug) { _logger.Debug($"Continue full sync with {bestPeer} (our best {_blockTree.BestKnownNumber})"); }

        long blocksLeft = bestPeer.HeadNumber - currentNumber - numberOfLatestBlocksToBeIgnored;
        int headersToRequest = (int)Math.Min(blocksLeft + 1, _syncBatchSize.Current);
        if (headersToRequest <= 1)
        {
            break;
        }

        headersToRequest = Math.Min(headersToRequest, bestPeer.MaxHeadersPerRequest());
        // NOTE(review): this trace message was line-wrapped in the extracted source; rejoined with a single space.
        if (_logger.IsTrace) { _logger.Trace($"Full sync request {currentNumber}+{headersToRequest} to peer {bestPeer} with {bestPeer.HeadNumber} blocks. Got {currentNumber} and asking for {headersToRequest} more."); }

        if (cancellation.IsCancellationRequested) { return(blocksSynced); } // check before every heavy operation

        BlockHeader[] headers = await RequestHeaders(bestPeer, cancellation, currentNumber, headersToRequest);
        BlockDownloadContext context = new BlockDownloadContext(_specProvider, bestPeer, headers, downloadReceipts, receiptsRecovery);
        if (cancellation.IsCancellationRequested) { return(blocksSynced); } // check before every heavy operation

        await RequestBodies(bestPeer, cancellation, context);

        if (downloadReceipts)
        {
            if (cancellation.IsCancellationRequested) { return(blocksSynced); } // check before every heavy operation
            await RequestReceipts(bestPeer, cancellation, context);
        }

        // Grow the batch size after a few consecutive successful rounds.
        _sinceLastTimeout++;
        if (_sinceLastTimeout > 2)
        {
            _syncBatchSize.Expand();
        }

        Block[] blocks = context.Blocks;
        Block blockZero = blocks[0];
        if (context.FullBlocksCount > 0)
        {
            bool parentIsKnown = _blockTree.IsKnownBlock(blockZero.Number - 1, blockZero.ParentHash);
            if (!parentIsKnown)
            {
                // Unknown ancestor: jump back progressively larger distances to find a common ancestor.
                ancestorLookupLevel++;
                if (ancestorLookupLevel >= _ancestorJumps.Length)
                {
                    if (_logger.IsWarn) { _logger.Warn($"Could not find common ancestor with {bestPeer}"); }
                    throw new EthSynchronizationException("Peer with inconsistent chain in sync");
                }

                int ancestorJump = _ancestorJumps[ancestorLookupLevel] - _ancestorJumps[ancestorLookupLevel - 1];
                currentNumber = currentNumber >= ancestorJump ? (currentNumber - ancestorJump) : 0L;
                continue;
            }
        }

        ancestorLookupLevel = 0;
        for (int blockIndex = 0; blockIndex < context.FullBlocksCount; blockIndex++)
        {
            if (cancellation.IsCancellationRequested)
            {
                if (_logger.IsTrace) { _logger.Trace("Peer sync cancelled"); }
                break;
            }

            Block currentBlock = blocks[blockIndex];
            if (_logger.IsTrace) { _logger.Trace($"Received {currentBlock} from {bestPeer}"); }

            // can move this to block tree now?
            if (!_blockValidator.ValidateSuggestedBlock(currentBlock))
            {
                throw new EthSynchronizationException($"{bestPeer} sent an invalid block {currentBlock.ToString(Block.Format.Short)}.");
            }

            // Suggest the block; only count it (and store receipts) when accepted.
            if (HandleAddResult(bestPeer, currentBlock.Header, blockIndex == 0, _blockTree.SuggestBlock(currentBlock, shouldProcess)))
            {
                if (downloadReceipts)
                {
                    for (int receiptIndex = 0; receiptIndex < (context.ReceiptsForBlocks[blockIndex]?.Length ?? 0); receiptIndex++)
                    {
                        _receiptStorage.Add(context.ReceiptsForBlocks[blockIndex][receiptIndex], true);
                    }
                }

                blocksSynced++;
            }

            if (shouldMoveToMain)
            {
                _blockTree.UpdateMainChain(new[] { currentBlock }, false);
            }

            currentNumber += 1;
        }

        if (blocksSynced > 0)
        {
            _syncReport.FullSyncBlocksDownloaded.Update(_blockTree.BestSuggestedHeader?.Number ?? 0);
            _syncReport.FullSyncBlocksKnown = bestPeer.HeadNumber;
        }
        else
        {
            // No progress this round - stop instead of spinning.
            break;
        }
    }

    return(blocksSynced);
}
/// <summary>
/// Suggests every locally produced block to the block tree.
/// </summary>
private void OnBlockProduced(object? sender, BlockEventArgs e)
{
    _blockTree.SuggestBlock(e.Block);
}
/// <summary>
/// Suggests the block and immediately promotes it to the main chain.
/// </summary>
public static void AddBlock(IBlockTree blockTree, Block block)
{
    blockTree.SuggestBlock(block);
    Block[] newMainChainSegment = { block };
    blockTree.UpdateMainChain(newMainChainSegment, true);
}
// Clique producer timer tick: suggests the scheduled block once its
// (wiggle-delayed) time arrives and it still beats the head on total difficulty.
private void TimerOnElapsed(object sender, ElapsedEventArgs e)
{
    try
    {
        if (_blockTree.Head == null)
        {
            _timer.Enabled = true;
            return;
        }

        // Snapshot the field once - it can be swapped or cleared concurrently.
        Block? scheduledBlock = _scheduledBlock;
        if (scheduledBlock == null)
        {
            // Nothing scheduled: if the block period elapsed, signal production from the current head.
            if (_blockTree.Head.Timestamp + _config.BlockPeriod < _timestamper.UnixTime.Seconds)
            {
                _signalsQueue.Add(_blockTree.FindBlock(_blockTree.Head.Hash, BlockTreeLookupOptions.None));
            }

            _timer.Enabled = true;
            return;
        }

        string turnDescription = scheduledBlock.IsInTurn() ? "IN TURN" : "OUT OF TURN";

        // Out-of-turn signers wait an extra random "wiggle" before publishing.
        int wiggle = _wiggle.WiggleFor(scheduledBlock.Header);
        if (scheduledBlock.Timestamp * 1000 + (UInt256)wiggle < _timestamper.UnixTime.Milliseconds)
        {
            if (scheduledBlock.TotalDifficulty > _blockTree.Head.TotalDifficulty)
            {
                // Only suggest if the schedule was not replaced while we were waiting.
                if (ReferenceEquals(scheduledBlock, _scheduledBlock))
                {
                    BlockHeader parent = _blockTree.FindParentHeader(scheduledBlock.Header, BlockTreeLookupOptions.TotalDifficultyNotNeeded);
                    Address parentSigner = _snapshotManager.GetBlockSealer(parent);

                    string parentTurnDescription = parent.IsInTurn() ? "IN TURN" : "OUT OF TURN";
                    string parentDetails = $"{parentTurnDescription} {parent.TimestampDate:HH:mm:ss} {parent.ToString(BlockHeader.Format.Short)} sealed by {KnownAddresses.GetDescription(parentSigner)}";

                    if (_logger.IsInfo)
                    {
                        _logger.Info(
                            $"Suggesting own {turnDescription} {_scheduledBlock.TimestampDate:HH:mm:ss} {scheduledBlock.ToString(Block.Format.HashNumberDiffAndTx)} based on {parentDetails} after the delay of {wiggle}");
                    }

                    _blockTree.SuggestBlock(scheduledBlock);
                }
            }
            else
            {
                // A heavier chain arrived while we waited - drop our block.
                if (_logger.IsInfo)
                {
                    _logger.Info(
                        $"Dropping a losing block {scheduledBlock.ToString(Block.Format.HashNumberDiffAndTx)}");
                }
            }

            // Clear only if it is still our schedule (avoid clobbering a newer one).
            if (ReferenceEquals(scheduledBlock, _scheduledBlock))
            {
                _scheduledBlock = null;
            }
        }
        else
        {
            if (_logger.IsTrace)
            {
                _logger.Trace($"Not yet {scheduledBlock.ToString(Block.Format.HashNumberDiffAndTx)}");
            }
        }

        _timer.Enabled = true;
    }
    catch (Exception exception)
    {
        if (_logger.IsError) { _logger.Error("Clique block producer failure", exception); }
    }
}
// Loads the genesis block from the chain spec, seeding state with the allocations
// (balances, code, constructor calls), then suggests genesis and waits for it to
// become the head before validating its hash.
private void LoadGenesisBlock(Keccak expectedGenesisHash)
{
    // if we already have a database with blocks then we do not need to load genesis from spec
    if (_blockTree.Genesis != null)
    {
        ValidateGenesisHash(expectedGenesisHash);
        return;
    }

    Block genesis = _chainSpec.Genesis;
    CreateSystemAccounts();

    foreach ((Address address, ChainSpecAllocation allocation) in _chainSpec.Allocations)
    {
        _stateProvider.CreateAccount(address, allocation.Balance);
        if (allocation.Code != null)
        {
            Keccak codeHash = _stateProvider.UpdateCode(allocation.Code);
            _stateProvider.UpdateCodeHash(address, codeHash, _specProvider.GenesisSpec);
        }

        if (allocation.Constructor != null)
        {
            // Run the allocation's constructor code as a system transaction from the account itself.
            Transaction constructorTransaction = new Transaction(true)
            {
                SenderAddress = address,
                Init = allocation.Constructor,
                GasLimit = genesis.GasLimit
            };
            _transactionProcessor.Execute(constructorTransaction, genesis.Header, NullTxTracer.Instance);
        }
    }

    // Persist the seeded state and compute the genesis state root / hash.
    _storageProvider.Commit();
    _stateProvider.Commit(_specProvider.GenesisSpec);

    _storageProvider.CommitTrees();
    _stateProvider.CommitTree();

    _dbProvider.StateDb.Commit();
    _dbProvider.CodeDb.Commit();

    genesis.StateRoot = _stateProvider.StateRoot;
    genesis.Hash = BlockHeader.CalculateHash(genesis.Header);

    ManualResetEventSlim genesisProcessedEvent = new ManualResetEventSlim(false);

    bool genesisLoaded = false;

    // One-shot handler: unsubscribes itself once genesis becomes the new head.
    void GenesisProcessed(object sender, BlockEventArgs args)
    {
        genesisLoaded = true;
        _blockTree.NewHeadBlock -= GenesisProcessed;
        genesisProcessedEvent.Set();
    }

    _blockTree.NewHeadBlock += GenesisProcessed;
    _blockTree.SuggestBlock(genesis);
    // Bounded wait so startup cannot hang forever on a processing failure.
    genesisProcessedEvent.Wait(TimeSpan.FromSeconds(5));
    if (!genesisLoaded)
    {
        throw new BlockchainException("Genesis block processing failure");
    }

    ValidateGenesisHash(expectedGenesisHash);
}
/// <summary>
/// Suggests a block to the wrapped block tree.
/// </summary>
/// <param name="block">Block to suggest.</param>
/// <param name="shouldProcess">Whether the block should also be queued for processing.</param>
/// <returns>The result reported by the underlying block tree.</returns>
public AddBlockResult SuggestBlock(Block block, bool shouldProcess = true) =>
    _blockTree.SuggestBlock(block, shouldProcess);
/// <summary>
/// Handles a block newly announced by a peer: updates the peer's head info, deduplicates
/// recently seen blocks, validates the seal, and either suggests the block to the tree
/// (when we are close to the chain tip in full sync) or requests a resync.
/// </summary>
/// <param name="block">The announced block; must carry a total difficulty.</param>
/// <param name="nodeWhoSentTheBlock">The peer node that sent the block.</param>
/// <exception cref="InvalidOperationException">Thrown when the block has no total difficulty.</exception>
/// <exception cref="EthSynchronizationException">Thrown when the block's seal is invalid.</exception>
public void AddNewBlock(Block block, Node nodeWhoSentTheBlock) {
    if (block.TotalDifficulty == null) {
        throw new InvalidOperationException("Cannot add a block with unknown total difficulty");
    }

    _pool.TryFind(nodeWhoSentTheBlock.Id, out PeerInfo peerInfo);
    if (peerInfo == null) {
        string errorMessage = $"Received a new block from an unknown peer {nodeWhoSentTheBlock:c} {nodeWhoSentTheBlock.Id} {_pool.PeerCount}";
        if (_logger.IsDebug) { _logger.Debug(errorMessage); }
        return;
    }

    // Track the best head the peer has told us about.
    if ((block.TotalDifficulty ?? 0) > peerInfo.TotalDifficulty) {
        if (_logger.IsTrace) { _logger.Trace($"ADD NEW BLOCK Updating header of {peerInfo} from {peerInfo.HeadNumber} {peerInfo.TotalDifficulty} to {block.Number} {block.TotalDifficulty}"); }
        peerInfo.HeadNumber = block.Number;
        peerInfo.HeadHash = block.Hash;
        peerInfo.TotalDifficulty = block.TotalDifficulty ?? peerInfo.TotalDifficulty;
    }

    // Ignore blocks that do not improve on our best suggested header.
    if ((block.TotalDifficulty ?? 0) < _blockTree.BestSuggestedHeader.TotalDifficulty) {
        return;
    }

    // Deduplicate — the same block is typically announced by many peers.
    lock (_recentlySuggested) {
        if (_recentlySuggested.Get(block.Hash) != null) {
            return;
        }

        _recentlySuggested.Set(block.Hash, _dummyValue);
    }

    if (block.Number > _blockTree.BestKnownNumber + 8) {
        // ignore blocks when syncing in a simple non-locking way
        _synchronizer.RequestSynchronization(SyncTriggerType.NewDistantBlock);
        return;
    }

    if (_logger.IsTrace) { _logger.Trace($"Adding new block {block.ToString(Block.Format.Short)}) from {nodeWhoSentTheBlock:c}"); }

    if (!_sealValidator.ValidateSeal(block.Header)) {
        throw new EthSynchronizationException("Peer sent a block with an invalid seal");
    }

    if (block.Number <= _blockTree.BestKnownNumber + 1) {
        // FIX: the original nested a second identical _logger.IsInfo check inside this one;
        // a single check suffices. Also use TryGetValue instead of ContainsKey + indexer to
        // avoid the double dictionary lookup.
        if (_logger.IsInfo) {
            string authorString = block.Author == null ? string.Empty : "sealed by " + (KnownAddresses.GoerliValidators.TryGetValue(block.Author, out var validatorName) ? validatorName : block.Author?.ToString());
            _logger.Info($"Discovered a new block {string.Empty.PadLeft(9 - block.Number.ToString().Length, ' ')}{block.ToString(Block.Format.HashNumberAndTx)} {authorString}, sent by {nodeWhoSentTheBlock:s}");
        }

        if (_logger.IsTrace) { _logger.Trace($"{block}"); }

        if (_synchronizer.SyncMode == SyncMode.Full) {
            AddBlockResult result = _blockTree.SuggestBlock(block);
            if (_logger.IsTrace) { _logger.Trace($"{block.Hash} ({block.Number}) adding result is {result}"); }
            if (result == AddBlockResult.UnknownParent) {
                // The tree does not know the parent — the peer likely reorganized.
                _synchronizer.RequestSynchronization(SyncTriggerType.Reorganization);
            }
        }
    } else {
        if (_logger.IsTrace) { _logger.Trace($"Received a block {block.Hash} ({block.Number}) from {nodeWhoSentTheBlock} - need to resync"); }
        _synchronizer.RequestSynchronization(SyncTriggerType.NewNearBlock);
    }
}
/// <summary>
/// Downloads blocks from the given peer in batches (headers, then bodies), validates each
/// block and suggests it to the block tree until we catch up with the peer's head.
/// </summary>
/// <param name="bestPeer">Peer to download from; must not be null.</param>
/// <param name="newBlocksToSkip">Number of most recent blocks to leave out of the request window.</param>
/// <param name="cancellation">Token used to cancel the download.</param>
/// <param name="shouldProcess">Whether suggested blocks should also be queued for processing.</param>
/// <returns>The number of blocks successfully synced.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="bestPeer"/> is null.</exception>
/// <exception cref="EthSynchronizationException">Thrown on inconsistent chains, faulted body requests, empty bodies or invalid blocks.</exception>
public async Task<long> DownloadBlocks(PeerInfo bestPeer, int newBlocksToSkip, CancellationToken cancellation, bool shouldProcess = true) {
    if (bestPeer == null) {
        string message = $"Not expecting best peer to be null inside the {nameof(BlockDownloader)}";
        _logger.Error(message);
        // FIX: the single-argument ArgumentNullException ctor treats its argument as the
        // *parameter name*, not the message; pass both explicitly.
        throw new ArgumentNullException(nameof(bestPeer), message);
    }

    int blocksSynced = 0;
    int ancestorLookupLevel = 0;

    long currentNumber = Math.Max(0, Math.Min(_blockTree.BestKnownNumber, bestPeer.HeadNumber - 1));
    while (bestPeer.TotalDifficulty > (_blockTree.BestSuggestedHeader?.TotalDifficulty ?? 0) && currentNumber <= bestPeer.HeadNumber) {
        if (_logger.IsDebug) { _logger.Debug($"Continue full sync with {bestPeer} (our best {_blockTree.BestKnownNumber})"); }

        // Each failed parent lookup walks the request window backwards; give up once we have
        // walked back further than the longest reorganization we are willing to handle.
        if (ancestorLookupLevel > MaxReorganizationLength) {
            if (_logger.IsWarn) { _logger.Warn($"Could not find common ancestor with {bestPeer}"); }
            throw new EthSynchronizationException("Peer with inconsistent chain in sync");
        }

        long blocksLeft = bestPeer.HeadNumber - currentNumber - newBlocksToSkip;
        int blocksToRequest = (int)BigInteger.Min(blocksLeft + 1, _syncBatchSize.Current);
        if (blocksToRequest <= 1) {
            break;
        }

        if (_logger.IsTrace) { _logger.Trace($"Full sync request {currentNumber}+{blocksToRequest} to peer {bestPeer} with {bestPeer.HeadNumber} blocks. Got {currentNumber} and asking for {blocksToRequest} more."); }

        var headers = await RequestHeaders(bestPeer, cancellation, currentNumber, blocksToRequest);

        // headers[0] is the block we already know; request bodies for the rest, stopping at
        // the first gap in the response.
        List<Keccak> hashes = new List<Keccak>();
        Dictionary<Keccak, BlockHeader> headersByHash = new Dictionary<Keccak, BlockHeader>();
        for (int i = 1; i < headers.Length; i++) {
            if (headers[i] == null) {
                break;
            }

            hashes.Add(headers[i].Hash);
            headersByHash[headers[i].Hash] = headers[i];
        }

        Task<BlockBody[]> bodiesTask = bestPeer.SyncPeer.GetBlocks(hashes.ToArray(), cancellation);
        await bodiesTask.ContinueWith(t => {
            if (t.IsFaulted) {
                _sinceLastTimeout = 0;
                if (t.Exception?.InnerException is TimeoutException
                    || (t.Exception?.InnerExceptions.Any(x => x is TimeoutException) ?? false)
                    || (t.Exception?.InnerExceptions.Any(x => x.InnerException is TimeoutException) ?? false)) {
                    // FIX: this Error call was guarded by _logger.IsTrace (level mismatch);
                    // guard it with IsError like the non-timeout branch below.
                    if (_logger.IsError) { _logger.Error("Failed to retrieve bodies when synchronizing (Timeout)", bodiesTask.Exception); }
                    // Timeouts suggest the batch is too large for this peer — shrink it.
                    _syncBatchSize.Shrink();
                } else {
                    if (_logger.IsError) { _logger.Error("Failed to retrieve bodies when synchronizing", bodiesTask.Exception); }
                }

                throw new EthSynchronizationException("Bodies task faulted.", bodiesTask.Exception);
            }
        });

        if (bodiesTask.IsCanceled) {
            return blocksSynced;
        }

        BlockBody[] bodies = bodiesTask.Result;
        Block[] blocks = new Block[bodies.Length];
        for (int i = 0; i < bodies.Length; i++) {
            BlockBody body = bodies[i];
            if (body == null) {
                // TODO: this is how it used to be... I do not want to touch it without extensive testing
                // FIX: blocks[i] was still null at this point, so the original interpolation
                // threw NullReferenceException instead of the intended message; report the
                // header we requested the body for instead.
                throw new EthSynchronizationException($"{bestPeer} sent an empty body for {headersByHash[hashes[i]].ToString(BlockHeader.Format.Short)}.");
            }

            blocks[i] = new Block(null, body);
        }

        _sinceLastTimeout++;
        if (_sinceLastTimeout > 2) {
            // A few clean rounds in a row — try a bigger batch again.
            _syncBatchSize.Expand();
        }

        for (int i = 0; i < blocks.Length; i++) {
            blocks[i].Header = headersByHash[hashes[i]];
        }

        if (blocks.Length > 0) {
            bool parentIsKnown = _blockTree.IsKnownBlock(blocks[0].Number - 1, blocks[0].ParentHash);
            if (!parentIsKnown) {
                // Unknown parent — walk the request window backwards to find a common ancestor.
                ancestorLookupLevel += _syncBatchSize.Current;
                currentNumber = currentNumber >= _syncBatchSize.Current ? (currentNumber - _syncBatchSize.Current) : 0L;
                continue;
            }
        }

        for (int i = 0; i < blocks.Length; i++) {
            if (cancellation.IsCancellationRequested) {
                if (_logger.IsTrace) { _logger.Trace("Peer sync cancelled"); }
                break;
            }

            if (_logger.IsTrace) { _logger.Trace($"Received {blocks[i]} from {bestPeer}"); }

            // can move this to block tree now?
            if (!_blockValidator.ValidateSuggestedBlock(blocks[i])) {
                throw new EthSynchronizationException($"{bestPeer} sent an invalid block {blocks[i].ToString(Block.Format.Short)}.");
            }

            if (HandleAddResult(blocks[i].Header, i == 0, _blockTree.SuggestBlock(blocks[i], shouldProcess))) {
                blocksSynced++;
            }

            currentNumber = currentNumber + 1;
        }

        if (blocksSynced > 0) {
            _syncStats.Update(_blockTree.BestSuggestedHeader?.Number ?? 0, bestPeer.HeadNumber, 1);
        }
    }

    return blocksSynced;
}