/// <summary>
/// Cleans the batch in a way that all headers from the latest one are consecutive.
/// Those that violate consecutiveness are removed.
/// </summary>
/// <param name="batch">Uncleaned batch that might contain non-consecutive blocks. Cannot be empty.</param>
/// <returns>List of consecutive blocks.</returns>
private List<BlockPair> GetBatchWithoutReorgedBlocks(List<BlockPair> batch)
{
    this.logger.LogTrace("({0}.{1}:{2})", nameof(batch), nameof(batch.Count), batch.Count);

    // Walk backwards from the newest entry, accepting only blocks that chain
    // directly to the previously accepted one.
    BlockPair tipOfBatch = batch[batch.Count - 1];

    // Cleaned version of the batch that contains no reorged-away blocks.
    var connectedBlocks = new List<BlockPair>(batch.Count) { tipOfBatch };

    for (int index = batch.Count - 2; index >= 0; index--)
    {
        BlockPair candidate = batch[index];

        if (candidate.ChainedHeader.HashBlock == tipOfBatch.ChainedHeader.Previous.HashBlock)
        {
            connectedBlocks.Add(candidate);
            tipOfBatch = candidate;
        }
        else
        {
            this.logger.LogDebug("Block '{0}' removed from the batch because it was reorged.", candidate.ChainedHeader);
        }
    }

    // Blocks were collected newest-first; restore ascending order.
    connectedBlocks.Reverse();

    this.logger.LogTrace("(-):*.{0}={1}", nameof(connectedBlocks.Count), connectedBlocks.Count);
    return connectedBlocks;
}
/// <summary>
/// Handles a newly signaled block: queues it for disk storage and, outside of
/// initial block download, schedules its header for announcement to peers.
/// </summary>
/// <param name="block">The newly signaled block.</param>
protected override void OnNextCore(Block block)
{
    this.logger.LogTrace("()");

    // Pruned nodes do not store blocks; nothing to do.
    if (this.storeSettings.Prune)
    {
        this.logger.LogTrace("(-)[PRUNE]");
        return;
    }

    ChainedBlock chainedBlock = this.chain.GetBlock(block.GetHash());

    // The block is not found on the chain, which indicates it was reorged away.
    if (chainedBlock == null)
    {
        this.logger.LogTrace("(-)[REORG]");
        return;
    }

    this.logger.LogTrace("Block hash is '{0}'.", chainedBlock.HashBlock);

    var blockPair = new BlockPair(block, chainedBlock);

    // Ensure the block is written to disk before relaying.
    this.blockStoreLoop.AddToPending(blockPair);

    // During initial block download we do not announce blocks to peers.
    if (this.blockStoreLoop.InitialBlockDownloadState.IsInitialBlockDownload())
    {
        this.logger.LogTrace("(-)[IBD]");
        return;
    }

    this.logger.LogTrace("Block header '{0}' added to the announce queue.", chainedBlock);
    this.blocksToAnnounce.Enqueue(chainedBlock);

    this.logger.LogTrace("(-)");
}
/// <summary>
/// Adds a block to the saving queue.
/// </summary>
/// <param name="blockPair">The block and its chained header pair to be added to pending storage.</param>
public void AddToPending(BlockPair blockPair)
{
    this.logger.LogTrace("({0}:'{1}')", nameof(blockPair), blockPair.ChainedHeader);

    // Hand the pair over to the asynchronous consumer that persists batches.
    this.blocksQueue.Enqueue(blockPair);

    this.logger.LogTrace("(-)");
}
/// <summary>
/// Adds a block to Pending Storage.
/// <para>
/// The <see cref="BlockStoreSignaled"/> calls this method when a new block is available. Only add the block to pending storage if the store's tip is behind the given block.
/// </para>
/// </summary>
/// <param name="blockPair">The block and its chained header pair to be added to pending storage.</param>
/// <remarks>TODO: Possibly check the size of pending in memory</remarks>
public void AddToPending(BlockPair blockPair)
{
    this.logger.LogTrace("({0}:'{1}')", nameof(blockPair), blockPair.ChainedBlock);

    // Blocks at or below the store's tip are already persisted; skip them.
    bool aheadOfStoreTip = blockPair.ChainedBlock.Height > this.StoreTip.Height;
    if (aheadOfStoreTip)
        this.PendingStorage.TryAdd(blockPair.ChainedBlock.HashBlock, blockPair);

    this.logger.LogTrace("(-)");
}
/// <summary>
/// Adds a block to Pending Storage.
/// <para>
/// The <see cref="BlockStoreSignaled"/> calls this method when a new block is available. Only add the block to pending storage if the store's tip is behind the given block.
/// </para>
/// </summary>
/// <param name="blockPair">The block and its chained header pair to be added to pending storage.</param>
/// <remarks>TODO: Possibly check the size of pending in memory</remarks>
public void AddToPending(BlockPair blockPair)
{
    this.logger.LogTrace("({0}:'{1}')", nameof(blockPair), blockPair.ChainedHeader);

    // Only blocks ahead of the store's tip need to be queued for persistence.
    if (blockPair.ChainedHeader.Height > this.StoreTip.Height)
    {
        this.PendingStorage.TryAdd(blockPair.ChainedHeader.HashBlock, blockPair);

        // Record the most recent header handed to pending storage.
        this.CachedConsensusTip = blockPair.ChainedHeader;
    }

    this.logger.LogTrace("(-)");
}
/// <summary>
/// Dequeues the blocks continuously and saves them to the database when max batch size is reached or timer ran out.
/// </summary>
/// <remarks>Batch is always saved on shutdown.</remarks>
private async Task DequeueBlocksContinuouslyAsync()
{
    this.logger.LogTrace("()");

    // Blocks accumulated since the last save; flushed as a single batch.
    var batch = new List<BlockPair>();

    // Pending dequeue of the next block; null when no dequeue is in flight.
    Task<BlockPair> dequeueTask = null;

    // Timer that forces a save after BatchMaxSaveIntervalSeconds; null when no batch is pending.
    Task timerTask = null;

    while (!this.nodeLifetime.ApplicationStopping.IsCancellationRequested)
    {
        // Start new dequeue task if not started already.
        dequeueTask = dequeueTask ?? this.blocksQueue.DequeueAsync();

        // Wait for one of the tasks: dequeue or timer (if available) to finish.
        Task task = (timerTask == null) ? dequeueTask : await Task.WhenAny(dequeueTask, timerTask).ConfigureAwait(false);

        bool saveBatch = false;

        try
        {
            // Awaiting the completed task observes its exception, if any;
            // cancellation of either the dequeue or the timer surfaces here.
            await task.ConfigureAwait(false);
        }
        catch (OperationCanceledException)
        {
            // Happens when node is shutting down or Dispose() is called.
            // We want to save whatever is in the batch before exiting the loop.
            saveBatch = true;
            this.logger.LogDebug("Node is shutting down. Save batch.");
        }

        // Save batch if timer ran out or we've dequeued a new block and reached the consensus tip
        // or the max batch size is reached or the node is shutting down.
        // Note: the dequeue task's status is inspected even if the awaited task was
        // the timer, because both may have completed before WhenAny returned.
        if (dequeueTask.Status == TaskStatus.RanToCompletion)
        {
            BlockPair item = dequeueTask.Result;

            // Set the dequeue task to null so it can be assigned on the next iteration.
            dequeueTask = null;

            batch.Add(item);
            this.currentBatchSizeBytes += item.Block.GetSerializedSize();

            // Flush immediately when the batch caught up with the chain tip
            // or when it grew past the size threshold.
            saveBatch = saveBatch || (item.ChainedHeader == this.chain.Tip) || (this.currentBatchSizeBytes >= BatchThresholdSizeBytes);
        }
        else
        {
            // Will be executed in case timer ran out or node is being shut down.
            saveBatch = true;
        }

        if (saveBatch)
        {
            if (batch.Count != 0)
            {
                await this.SaveBatchAsync(batch).ConfigureAwait(false);

                batch.Clear();
                this.currentBatchSizeBytes = 0;
            }

            // Reset the timer; it is restarted once a new unsaved block arrives.
            timerTask = null;
        }
        else
        {
            // Start timer if it is not started already.
            // Tied to ApplicationStopping so a shutdown cancels it, which lands in
            // the OperationCanceledException handler above and saves the batch.
            timerTask = timerTask ?? Task.Delay(BatchMaxSaveIntervalSeconds * 1000, this.nodeLifetime.ApplicationStopping);
        }
    }

    this.logger.LogTrace("(-)");
}