/// <summary>
/// Appends a new block header (random nonce, previous hash = current tip) to the chain
/// and returns the resulting <see cref="ChainedBlock"/>.
/// </summary>
private ChainedBlock AddBlock(ConcurrentChain chain)
{
    // Use the consensus factory so the network-specific header type is created.
    BlockHeader newHeader = Network.Main.Consensus.ConsensusFactory.CreateBlockHeader();
    newHeader.Nonce = RandomUtils.GetUInt32();
    newHeader.HashPrevBlock = chain.Tip.HashBlock;
    chain.SetTip(newHeader);
    return chain.GetBlock(newHeader.GetHash());
}
/// <summary>
/// Extends the chain with a freshly created header (random nonce, linked to the current tip)
/// and returns the chained block that now represents it.
/// </summary>
private ChainedBlock AddBlock(ConcurrentChain chain)
{
    var freshHeader = new BlockHeader
    {
        Nonce = RandomUtils.GetUInt32(),
        HashPrevBlock = chain.Tip.HashBlock
    };
    chain.SetTip(freshHeader);
    return chain.GetBlock(freshHeader.GetHash());
}
/// <summary>
/// Streams blocks from the node block store, starting after the last processed block and
/// stopping at <c>ToHeight</c>. The headers to fetch are taken from the chain after the fork
/// of <c>LastProcessed</c>'s locator; when the first header is at height 1 the fork block is
/// prepended (presumably genesis — so the genesis block is emitted as well; TODO confirm).
/// A missing block ends the enumeration early so the store can catch up with the chain, unless
/// the store tip's time is at or past the missing header's time, in which case the store is
/// considered corrupt and an <see cref="InvalidOperationException"/> is thrown.
/// NOTE(review): blocks are matched to headers via the incrementing <c>height</c> counter, which
/// assumes the fetched blocks come back in the same contiguous height order as the headers
/// passed to <c>GetBlocks</c> — verify against the store implementation.
/// </summary>
public IEnumerable <IBlockInfo> GetBlocks() { var fork = _chain.FindFork(LastProcessed.GetLocator()); var headers = _chain .EnumerateAfter(fork).Where(h => h.Height <= ToHeight) .ToList(); var first = headers.FirstOrDefault(); if (first == null) { yield break; } var height = first.Height; if (first.Height == 1) { var headersWithGenesis = new List <ChainedBlock> { fork }; headers = headersWithGenesis.Concat(headers).ToList(); height = 0; } foreach (var block in _nodeBlocks.GetBlocks(headers.Select(_ => _.HashBlock), CancellationToken)) { var header = _chain.GetBlock(height); if (block == null) { var storeTip = _nodeBlocks.GetStoreTip(); if (storeTip != null) { // Store is caught up with Chain but the block is missing from the store. if (header.Header.BlockTime <= storeTip.Header.BlockTime) { throw new InvalidOperationException($"Chained block not found in store (height = { height }). Re-create the block store."); } } // Allow Store to catch up with Chain. break; } LastProcessed = header; yield return(new BlockInfoModel() { Block = block, Hash = header.HashBlock, Height = header.Height }); height++; } }
/// <summary>
/// Verifies that <c>SetTip</c> can rewind a chain to an earlier fork point: first a single block
/// back, then three blocks back. After each rewind the chain height, the returned fork block, and
/// the unreachability (via <c>GetBlock</c>) of the discarded blocks are asserted; finally the
/// longer tip is restored and the chain is checked to be back at height 6.
/// </summary>
public void CanForkBackward() { ConcurrentChain chain = new ConcurrentChain(Network.PurpleMain); AppendBlock(chain); AppendBlock(chain); var fork = AppendBlock(chain); //Test single block back fork var last = AppendBlock(chain); Assert.Equal(4, chain.Height); Assert.Equal(4, last.Height); Assert.Equal(last.HashBlock, chain.Tip.HashBlock); Assert.Equal(fork.HashBlock, chain.SetTip(fork).HashBlock); Assert.Equal(3, chain.Height); Assert.Equal(3, fork.Height); Assert.Equal(fork.HashBlock, chain.Tip.HashBlock); Assert.Null(chain.GetBlock(last.HashBlock)); Assert.NotNull(chain.GetBlock(fork.HashBlock)); //Test 3 blocks back fork var b1 = AppendBlock(chain); var b2 = AppendBlock(chain); last = AppendBlock(chain); Assert.Equal(6, chain.Height); Assert.Equal(6, last.Height); Assert.Equal(last.HashBlock, chain.Tip.HashBlock); Assert.Equal(fork.HashBlock, chain.SetTip(fork).HashBlock); Assert.Equal(3, chain.Height); Assert.Equal(3, fork.Height); Assert.Equal(fork.HashBlock, chain.Tip.HashBlock); Assert.Null(chain.GetBlock(last.HashBlock)); Assert.Null(chain.GetBlock(b1.HashBlock)); Assert.Null(chain.GetBlock(b2.HashBlock)); chain.SetTip(last); Assert.Equal(6, chain.Height); Assert.Equal(6, last.Height); Assert.Equal(last.HashBlock, chain.Tip.HashBlock); }
/// <summary>
/// Verifies that <c>SetTip</c> rewinds the chain to an earlier fork point (one block back, then
/// three blocks back): the chain height drops to the fork height, the discarded blocks become
/// unreachable via <c>GetBlock</c>, and restoring the old tip brings the chain back to height 6.
/// ChainedHeader variant of the same scenario tested elsewhere with ChainedBlock.
/// </summary>
public void CanForkBackward() { var chain = new ConcurrentChain(this.network); this.AppendBlock(chain); this.AppendBlock(chain); ChainedHeader fork = this.AppendBlock(chain); //Test single block back fork ChainedHeader last = this.AppendBlock(chain); Assert.Equal(4, chain.Height); Assert.Equal(4, last.Height); Assert.Equal(last.HashBlock, chain.Tip.HashBlock); Assert.Equal(fork.HashBlock, chain.SetTip(fork).HashBlock); Assert.Equal(3, chain.Height); Assert.Equal(3, fork.Height); Assert.Equal(fork.HashBlock, chain.Tip.HashBlock); Assert.Null(chain.GetBlock(last.HashBlock)); Assert.NotNull(chain.GetBlock(fork.HashBlock)); //Test 3 blocks back fork ChainedHeader b1 = this.AppendBlock(chain); ChainedHeader b2 = this.AppendBlock(chain); last = this.AppendBlock(chain); Assert.Equal(6, chain.Height); Assert.Equal(6, last.Height); Assert.Equal(last.HashBlock, chain.Tip.HashBlock); Assert.Equal(fork.HashBlock, chain.SetTip(fork).HashBlock); Assert.Equal(3, chain.Height); Assert.Equal(3, fork.Height); Assert.Equal(fork.HashBlock, chain.Tip.HashBlock); Assert.Null(chain.GetBlock(last.HashBlock)); Assert.Null(chain.GetBlock(b1.HashBlock)); Assert.Null(chain.GetBlock(b2.HashBlock)); chain.SetTip(last); Assert.Equal(6, chain.Height); Assert.Equal(6, last.Height); Assert.Equal(last.HashBlock, chain.Tip.HashBlock); }
/// <summary>
/// Same backward-fork scenario as <c>CanForkBackward</c>, but on a partial chain pre-built to
/// height 10 by <c>CreateChain(10)</c>, so all height assertions are offset by 10
/// (fork at 13, tips at 14 and 16).
/// </summary>
public void CanForkBackwardPartialChain() { ConcurrentChain chain = CreateChain(10); AppendBlock(chain); AppendBlock(chain); var fork = AppendBlock(chain); //Test single block back fork var last = AppendBlock(chain); Assert.Equal(14, chain.Height); Assert.Equal(14, last.Height); Assert.Equal(last.HashBlock, chain.Tip.HashBlock); Assert.Equal(fork.HashBlock, chain.SetTip(fork).HashBlock); Assert.Equal(13, chain.Height); Assert.Equal(13, fork.Height); Assert.Equal(fork.HashBlock, chain.Tip.HashBlock); Assert.Null(chain.GetBlock(last.HashBlock)); Assert.NotNull(chain.GetBlock(fork.HashBlock)); //Test 3 blocks back fork var b1 = AppendBlock(chain); var b2 = AppendBlock(chain); last = AppendBlock(chain); Assert.Equal(16, chain.Height); Assert.Equal(16, last.Height); Assert.Equal(last.HashBlock, chain.Tip.HashBlock); Assert.Equal(fork.HashBlock, chain.SetTip(fork).HashBlock); Assert.Equal(13, chain.Height); Assert.Equal(13, fork.Height); Assert.Equal(fork.HashBlock, chain.Tip.HashBlock); Assert.Null(chain.GetBlock(last.HashBlock)); Assert.Null(chain.GetBlock(b1.HashBlock)); Assert.Null(chain.GetBlock(b2.HashBlock)); chain.SetTip(last); Assert.Equal(16, chain.Height); Assert.Equal(16, last.Height); Assert.Equal(last.HashBlock, chain.Tip.HashBlock); }
/// <summary>
/// Initializes the block store. Resolves the repository's persisted tip against the chain; when
/// the persisted tip is no longer on the chain (a reorg happened while the node was down), walks
/// the stored blocks backwards via their <c>HashPrevBlock</c> links until a block that is still on
/// the chain (or genesis) is found, deletes the orphaned blocks from the repository, and adopts
/// that common ancestor as the new stored tip. Then reconciles the configured <c>-txindex</c>
/// setting with the repository — changing it is only allowed when the stored tip is genesis,
/// otherwise a <see cref="BlockStoreException"/> instructs the user to reindex. Finally publishes
/// the stored tip to chain state and starts the store loop.
/// </summary>
/// <param name="tokenSource">Cancellation source whose token stops the store loop.</param>
/// <exception cref="NotImplementedException">Thrown when ReIndex is requested (unsupported).</exception>
public async Task Initialize(CancellationTokenSource tokenSource) { if (this.nodeArgs.Store.ReIndex) { throw new NotImplementedException(); } StoredBlock = chain.GetBlock(this.BlockRepository.BlockHash); if (StoredBlock == null) { // a reorg happened and the ChainedBlock is lost // to solve this each block needs to be pulled from storage and deleted // all the way till a common fork is found with Chain var blockstoremove = new List <uint256>(); var remove = await this.BlockRepository.GetAsync(this.BlockRepository.BlockHash); var removeHash = remove.GetHash(); // reorg - we need to delete blocks, start walking back the chain while (this.chain.GetBlock(removeHash) == null) { blockstoremove.Add(removeHash); if (remove.Header.HashPrevBlock == chain.Genesis.HashBlock) { removeHash = chain.Genesis.HashBlock; break; } remove = await this.BlockRepository.GetAsync(remove.Header.HashPrevBlock); Guard.NotNull(remove, nameof(remove)); removeHash = remove.GetHash(); } var newTip = this.chain.GetBlock(removeHash); await this.BlockRepository.DeleteAsync(newTip.HashBlock, blockstoremove); this.StoredBlock = newTip; } if (this.nodeArgs.Store.TxIndex != this.BlockRepository.TxIndex) { if (this.StoredBlock != this.chain.Genesis) { throw new BlockStoreException("You need to rebuild the database using -reindex-chainstate to change -txindex"); } if (this.nodeArgs.Store.TxIndex) { await this.BlockRepository.SetTxIndex(this.nodeArgs.Store.TxIndex); } } this.ChainState.HighestPersistedBlock = this.StoredBlock; this.Loop(tokenSource.Token); }
/// <summary>
/// Removes stale tracking state and returns the removed entries:
/// (1) for operations confirmed deeper than <paramref name="blockExpiration"/> blocks, stops
/// tracking the outpoints of their spent coins; (2) removes operations whose block is no longer
/// on <paramref name="chain"/> (forked) once the fork is older than
/// <paramref name="blockExpiration"/> blocks; (3) removes unconfirmed operations older than
/// <paramref name="timeExpiration"/> (default: 7 days).
/// </summary>
/// <param name="chain">Chain used to resolve confirmation height and detect forked blocks.</param>
/// <param name="blockExpiration">Confirmation/fork depth beyond which entries are pruned.</param>
/// <param name="timeExpiration">Age beyond which unconfirmed operations are pruned; defaults to 7 days.</param>
/// <returns>The removed TrackedOutpoint and Operation instances.</returns>
internal List <object> Prune(ConcurrentChain chain, int blockExpiration = 2000, TimeSpan?timeExpiration = null) { List <object> removed = new List <object>(); timeExpiration = timeExpiration ?? TimeSpan.FromDays(7.0); foreach (var op in _Operations) { if (op.Value.BlockId != null) { var chained = chain.GetBlock(op.Value.BlockId); var isForked = chained == null; if (!isForked) { bool isOldConfirmed = chain.Height - chained.Height + 1 > blockExpiration; if (isOldConfirmed) { foreach (var spent in op.Value.SpentCoins) //Stop tracking the outpoints { TrackedOutpoint unused; if (_TrackedOutpoints.TryRemove(TrackedOutpoint.GetId(spent.Item1.Outpoint), out unused)) { removed.Add(unused); } } } } else { var isOldFork = chain.Height - op.Value.Height + 1 > blockExpiration; if (isOldFork) //clear any operation belonging to an old fork { Operation unused; if (_Operations.TryRemove(op.Key, out unused)) { removed.Add(unused); } } } } else { var isOldUnconf = (DateTimeOffset.UtcNow - op.Value.AddedDate) > timeExpiration; if (isOldUnconf) //clear any old unconfirmed { Operation unused; if (_Operations.TryRemove(op.Key, out unused)) { removed.Add(unused); } } } } return(removed); }
/// <summary>
/// Initializes the indexers: loads the saved checkpoint for every <c>IndexType</c>, locates each
/// checkpoint's fork point on the chain as that indexer's tip, and exposes the collection through
/// <c>Indexers</c>. The overall <c>Tip</c> is the configured start block when checkpoints are
/// ignored, otherwise the chain block at the lowest indexer tip height.
/// NOTE(review): <paramref name="cancellationToken"/> is accepted but not observed here — the
/// per-type checkpoint loads are awaited without cancellation.
/// </summary>
public async Task Initialize(CancellationToken cancellationToken) { foreach (var type in Enum.GetValues(typeof(IndexType)).OfType <IndexType>()) { var tip = await _checkpointStore.GetCheckpointAsync(type).ConfigureAwait(false); _indexers.Add(type, new IndexerTarget { Type = type, Checkpoint = tip, Tip = _chain.FindFork(tip.BlockLocator) }); } Indexers = new ReadOnlyCollection <IIndexerTarget>(_indexers.Values.ToList()); var minHeight = _indexers.Values.Min(i => i.Tip.Height); Tip = _settings.IgnoreCheckpoints ? _chain.GetBlock(_settings.From) : _chain.GetBlock(minHeight); }
/// <summary>
/// Computes the validation flags to enforce for <paramref name="nextBlock"/>:
/// BIP30 (skipped once the known chain contains the BIP34 activation block), BIP16/P2SH after its
/// switch time, BIP66 (DERSIG), BIP65 (CHECKLOCKTIMEVERIFY) and BIP34 via buried deployment
/// heights, plus CSV (BIP68/112/113) and Segwit script/locktime flags from the BIP9 versionbits
/// states supplied in <paramref name="prevBlockStates"/>.
/// </summary>
/// <param name="nextBlock">The block about to be validated.</param>
/// <param name="prevBlockStates">BIP9 threshold states as of the previous block, indexed by deployment.</param>
/// <param name="chainparams">Consensus parameters providing buried deployment heights and the BIP34 hash.</param>
/// <param name="chain">Chain used to look up the block at the BIP34 activation height.</param>
public DeploymentFlags(ChainedHeader nextBlock, ThresholdState[] prevBlockStates, IConsensus chainparams, ConcurrentChain chain) { this.EnforceBIP30 = EnforceBIP30ForBlock(nextBlock); // Once BIP34 activated it was not possible to create new duplicate coinbases and thus other than starting // with the 2 existing duplicate coinbase pairs, not possible to create overwriting txs. But by the // time BIP34 activated, in each of the existing pairs the duplicate coinbase had overwritten the first // before the first had been spent. Since those coinbases are sufficiently buried its no longer possible to create further // duplicate transactions descending from the known pairs either. // If we're on the known chain at height greater than where BIP34 activated, we can save the db accesses needed for the BIP30 check. ChainedHeader bip34HeightChainedHeader = chain.GetBlock(chainparams.BuriedDeployments[BuriedDeployments.BIP34]); // Only continue to enforce if we're below BIP34 activation height or the block hash at that height doesn't correspond. this.EnforceBIP30 = this.EnforceBIP30 && ((bip34HeightChainedHeader == null) || !(bip34HeightChainedHeader.HashBlock == chainparams.BIP34Hash)); // BIP16 didn't become active until Apr 1 2012. DateTimeOffset nBIP16SwitchTime = Utils.UnixTimeToDateTime(1333238400); bool fStrictPayToScriptHash = (nextBlock.Header.BlockTime >= nBIP16SwitchTime); this.ScriptFlags = fStrictPayToScriptHash ? ScriptVerify.P2SH : ScriptVerify.None; // Start enforcing the DERSIG (BIP66) rule. if (nextBlock.Height >= chainparams.BuriedDeployments[BuriedDeployments.BIP66]) { this.ScriptFlags |= ScriptVerify.DerSig; } // Start enforcing CHECKLOCKTIMEVERIFY, (BIP65) for block.nVersion=4 // blocks, when 75% of the network has upgraded. 
if (nextBlock.Height >= chainparams.BuriedDeployments[BuriedDeployments.BIP65]) { this.ScriptFlags |= ScriptVerify.CheckLockTimeVerify; } // Start enforcing BIP68 (sequence locks), BIP112 (CHECKSEQUENCEVERIFY) and BIP113 (Median Time Past) using versionbits logic. if (prevBlockStates[(int)BIP9Deployments.CSV] == ThresholdState.Active) { this.ScriptFlags |= ScriptVerify.CheckSequenceVerify; this.LockTimeFlags |= Transaction.LockTimeFlags.VerifySequence; this.LockTimeFlags |= Transaction.LockTimeFlags.MedianTimePast; } // Start enforcing WITNESS rules using versionbits logic. if (prevBlockStates[(int)BIP9Deployments.Segwit] == ThresholdState.Active) { this.ScriptFlags |= ScriptVerify.Witness; } // Enforce block.nVersion=2 rule that the coinbase starts with serialized block height if (nextBlock.Height >= chainparams.BuriedDeployments[BuriedDeployments.BIP34]) { this.EnforceBIP34 = true; } }
/// <summary>
/// Builds a 10-block proven-header chain, attaches a <c>ProvenHeadersConsensusManagerBehavior</c>
/// to a mocked peer, and delivers a <c>GetProvenHeadersPayload</c> whose locator contains blocks
/// 1..4 (reversed, so the highest known block is first). Verifies the behavior answers with a
/// <c>ProvenHeadersPayload</c> carrying the proven headers from height 5 up to the tip, i.e. the
/// consecutive headers after the peer's best known block.
/// </summary>
public void ConstructProvenHeaderPayload_Consecutive_Headers() { var provenHeaderChain = BuildProvenHeaderChain(10); var chain = new ConcurrentChain(this.Network, provenHeaderChain); var consensusManager = new Mock <IConsensusManager>(); consensusManager.Setup(c => c.Tip).Returns(provenHeaderChain); var behavior = new ProvenHeadersConsensusManagerBehavior(chain, this.initialBlockDownloadState, consensusManager.Object, this.peerBanning, this.extendedLoggerFactory, this.Network, this.chainState, this.checkpoints, this.provenBlockHeaderStore, this.connectionManagerSettings); var hashes = new List <uint256>(); for (int i = 1; i < 5; i++) { var chainedHeaderToAdd = chain.GetBlock(i); hashes.Add(chainedHeaderToAdd.HashBlock); } hashes.Reverse(); var blockLocator = new BlockLocator { Blocks = hashes }; var peerMock = CreatePeerMock(); behavior.Attach(peerMock.Object); var incomingMessage = new IncomingMessage { Message = new Message(new PayloadProvider().DiscoverPayloads()) { Magic = this.Network.Magic, Payload = new GetProvenHeadersPayload(blockLocator), } }; var provenBlockHeadersToVerifyAgainst = new List <ProvenBlockHeader>(); for (int i = 5; i <= provenHeaderChain.Height; i++) { provenBlockHeadersToVerifyAgainst.Add((ProvenBlockHeader)provenHeaderChain.GetAncestor(i).Header); } //Trigger the event handler peerMock.Object.MessageReceived.ExecuteCallbacksAsync(peerMock.Object, incomingMessage).GetAwaiter().GetResult(); // Check that the headers we sent is the correct headers. var payload = new ProvenHeadersPayload(provenBlockHeadersToVerifyAgainst.ToArray()); peerMock.Verify(p => p.SendMessageAsync(It.Is <ProvenHeadersPayload>(pl => VerifyHeaders(pl.Headers, provenBlockHeadersToVerifyAgainst)), default(CancellationToken))); }
/// <summary>
/// Loads the Stratis chain from the on-disk block store and verifies that the block at the
/// known 100k hash resolves to height 100000 with a matching header hash.
/// </summary>
public void EnumerateAndCheckTipBlock()
{
    var store = new BlockStore(TestDataLocations.BlockFolderLocation, Network.StratisMain);

    // Synchronizing the chain loads all blocks; block 100k is the expected tip of the fixture.
    var expectedHash = uint256.Parse("af380a53467b70bc5d1ee61441586398a0a5907bb4fad7855442575483effa54");
    ConcurrentChain chain = store.GetStratisChain();
    ChainedBlock located = chain.GetBlock(expectedHash);

    Assert.Equal(expectedHash, located.Header.GetHash());
    Assert.Equal(100000, located.Height);
}
/// <summary>
/// Creates a <see cref="BlockNotifier"/> backed by a mock RPC client whose answers
/// (best block hash, block, block header) are all served from the supplied chain.
/// </summary>
private BlockNotifier CreateNotifier(ConcurrentChain chain)
{
    var mockRpc = new MockRpcClient();
    mockRpc.OnGetBestBlockHashAsync = () => Task.FromResult(chain.Tip.HashBlock);
    mockRpc.OnGetBlockAsync = blockHash => Task.FromResult(Block.CreateBlock(chain.GetBlock(blockHash).Header, mockRpc.Network));
    mockRpc.OnGetBlockHeaderAsync = blockHash => Task.FromResult(chain.GetBlock(blockHash).Header);

    return new BlockNotifier(TimeSpan.FromMilliseconds(100), mockRpc);
}
/// <summary>
/// Exercises <c>ConcurrentChain</c> tip switching on a secondary chain (<c>cchain</c>): adopting
/// another chain's tip from empty, rewinding via <c>SetTip</c> (which returns the fork block),
/// re-adopting a longer tip, and finally reorging to a side branch (b5b/b6b) that replaces blocks
/// at the same heights — asserting reachable/unreachable blocks by hash and height throughout.
/// </summary>
public void CanBuildConcurrentChain() { var cchain = new ConcurrentChain(); var chain = new ConcurrentChain(this.network); Assert.Null(cchain.SetTip(chain.Tip)); ChainedHeader b0 = cchain.Tip; Assert.Equal(cchain.Tip, chain.Tip); ChainedHeader b1 = this.AddBlock(chain); ChainedHeader b2 = this.AddBlock(chain); this.AddBlock(chain); this.AddBlock(chain); ChainedHeader b5 = this.AddBlock(chain); Assert.Equal(cchain.SetTip(chain.Tip), b0); Assert.Equal(cchain.Tip, chain.Tip); Assert.Equal(cchain.GetBlock(5), chain.Tip); Assert.Equal(cchain.GetBlock(b5.HashBlock), chain.Tip); Assert.Equal(cchain.SetTip(b1), b1); Assert.Null(cchain.GetBlock(b5.HashBlock)); Assert.Null(cchain.GetBlock(b2.HashBlock)); Assert.Equal(cchain.SetTip(b5), b1); Assert.Equal(cchain.GetBlock(b5.HashBlock), chain.Tip); chain.SetTip(b2); this.AddBlock(chain); this.AddBlock(chain); ChainedHeader b5b = this.AddBlock(chain); ChainedHeader b6b = this.AddBlock(chain); Assert.Equal(cchain.SetTip(b6b), b2); Assert.Null(cchain.GetBlock(b5.HashBlock)); Assert.Equal(cchain.GetBlock(b2.HashBlock), b2); Assert.Equal(cchain.GetBlock(6), b6b); Assert.Equal(cchain.GetBlock(5), b5b); }
/// <summary>
/// Exercises <c>ConcurrentChain</c> tip switching on a secondary chain (<c>cchain</c>): adopting
/// another chain's tip from empty, rewinding via <c>SetTip</c> (which returns the fork block),
/// re-adopting a longer tip, and finally reorging to a side branch (b5b/b6b) that replaces blocks
/// at the same heights. ChainedBlock/Network.Main variant of the same scenario.
/// </summary>
public void CanBuildConcurrentChain() { ConcurrentChain cchain = new ConcurrentChain(); ConcurrentChain chain = new ConcurrentChain(Network.Main); Assert.Null(cchain.SetTip(chain.Tip)); var b0 = cchain.Tip; Assert.Equal(cchain.Tip, chain.Tip); var b1 = AddBlock(chain); var b2 = AddBlock(chain); AddBlock(chain); AddBlock(chain); var b5 = AddBlock(chain); Assert.Equal(cchain.SetTip(chain.Tip), b0); Assert.Equal(cchain.Tip, chain.Tip); Assert.Equal(cchain.GetBlock(5), chain.Tip); Assert.Equal(cchain.GetBlock(b5.HashBlock), chain.Tip); Assert.Equal(cchain.SetTip(b1), b1); Assert.Null(cchain.GetBlock(b5.HashBlock)); Assert.Null(cchain.GetBlock(b2.HashBlock)); Assert.Equal(cchain.SetTip(b5), b1); Assert.Equal(cchain.GetBlock(b5.HashBlock), chain.Tip); chain.SetTip(b2); AddBlock(chain); AddBlock(chain); var b5b = AddBlock(chain); var b6b = AddBlock(chain); Assert.Equal(cchain.SetTip(b6b), b2); Assert.Null(cchain.GetBlock(b5.HashBlock)); Assert.Equal(cchain.GetBlock(b2.HashBlock), b2); Assert.Equal(cchain.GetBlock(6), b6b); Assert.Equal(cchain.GetBlock(5), b5b); }
/// <summary>
/// Verifies that <c>PowBlockDefinition.Build</c> validates the produced block template through the
/// consensus rules engine: a 5-block chain with an overridden <c>MaxBlockWeight</c> (1500) and a
/// mempool transaction is set up, the rule-context creation is intercepted to capture the
/// <c>ValidationContext</c>, and after building the template the captured context is asserted to
/// be flagged as a mined block, to carry the template's block hash, the chain tip as consensus
/// tip, and the overridden consensus option.
/// </summary>
public void CreateNewBlock_WithScript_ValidatesTemplateUsingRuleContext() { var newOptions = new PowConsensusOptions() { MaxBlockWeight = 1500 }; this.ExecuteWithConsensusOptions(newOptions, () => { ConcurrentChain chain = GenerateChainWithHeight(5, this.network, this.key); this.SetupRulesEngine(chain); this.dateTimeProvider.Setup(d => d.GetAdjustedTimeAsUnixTimestamp()) .Returns(new DateTime(2017, 1, 7, 0, 0, 1, DateTimeKind.Utc).ToUnixTimestamp()); this.consensusLoop.Setup(c => c.Tip) .Returns(chain.GetBlock(5)); Transaction transaction = CreateTransaction(this.network, this.key, 5, new Money(400 * 1000 * 1000), new Key(), new uint256(124124)); var txFee = new Money(1000); SetupTxMempool(chain, this.network.Consensus.Options as PowConsensusOptions, txFee, transaction); ValidationContext validationContext = null; var powRuleContext = new PowRuleContext(new ValidationContext(), this.network.Consensus, chain.Tip, this.dateTimeProvider.Object.GetTimeOffset()); this.consensusRules .Setup(s => s.CreateRuleContext(It.IsAny <ValidationContext>(), It.IsAny <ChainedHeader>())).Callback <ValidationContext, ChainedHeader>((r, s) => validationContext = r) .Returns(powRuleContext); var blockDefinition = new PowBlockDefinition(this.consensusLoop.Object, this.dateTimeProvider.Object, this.LoggerFactory.Object, this.txMempool.Object, new MempoolSchedulerLock(), this.network, this.consensusRules.Object); BlockTemplate blockTemplate = blockDefinition.Build(chain.Tip, this.key.ScriptPubKey); Assert.NotNull(this.callbackRuleContext); Assert.True(this.callbackRuleContext.MinedBlock); Assert.Equal(blockTemplate.Block.GetHash(), validationContext.Block.GetHash()); Assert.Equal(chain.GetBlock(5).HashBlock, powRuleContext.ConsensusTip.HashBlock); Assert.Equal(1500, this.callbackRuleContext.Consensus.Option <PowConsensusOptions>().MaxBlockWeight); this.consensusLoop.Verify(); }); }
/// <summary>
/// Replays the proof-of-stake difficulty history from targethistory.csv: re-indexes the block
/// store (asserting the expected 103952 entries), feeds a <c>BlockStake</c> into the stake chain
/// for every height between consecutive CSV records, and asserts that both the stored header's
/// <c>Bits</c> and <c>GetWorkRequired</c> match the expected target at each recorded height.
/// </summary>
public void CanCalculateDifficulty() { var histories = File.ReadAllLines(TestDataLocations.DataFolder(@"targethistory.csv")); var store = new BlockStore(TestDataLocations.BlockFolderLocation, Network.Main); // todo: load the chain with a header only file ConcurrentChain chain = store.GetChain(); var stakeChain = new MemoryStakeChain(Network.Main); var indexStore = new IndexedBlockStore(new InMemoryNoSqlRepository(), store); var reindexed = indexStore.ReIndex(); Assert.Equal(reindexed, 103952); var lastIndex = 0; foreach (var history in histories) { var height = int.Parse(history.Split(',')[0]); var expectedTarget = new Target(new BigInteger(history.Split(',')[1].Trim(), 10)); var chainedBlock = chain.GetBlock(height); for (int i = height; i > lastIndex; i--) { var g = chain.GetBlock(i); var block = indexStore.Get(g.HashBlock); stakeChain.Set(g.HashBlock, new BlockStake(block)); } lastIndex = height; Assert.Equal(expectedTarget, chainedBlock.Header.Bits); var target = stakeChain.GetWorkRequired(chainedBlock, stakeChain.Get(chainedBlock.HashBlock), Network.Main.Consensus); //var target = chain.GetWorkRequired(Network.Main, height); Assert.Equal(expectedTarget, target); } }
/// <summary>
/// Appends a randomly-nonced header on top of the chain tip and returns the new chained block,
/// optionally pausing one second afterwards so dependents can observe the change.
/// </summary>
private async Task <ChainedBlock> AddBlockAsync(ConcurrentChain chain, bool wait = true)
{
    BlockHeader minedHeader = Network.RegTest.Consensus.ConsensusFactory.CreateBlockHeader();
    minedHeader.Nonce = RandomUtils.GetUInt32();
    minedHeader.HashPrevBlock = chain.Tip.HashBlock;
    chain.SetTip(minedHeader);

    ChainedBlock appended = chain.GetBlock(minedHeader.GetHash());
    if (wait)
    {
        await Task.Delay(TimeSpan.FromSeconds(1));
    }

    return appended;
}
/// <summary>
/// Presents a short (3-block) alternative chain to the block store queue followed by a longer
/// (6-block) chain from genesis, with flushing disabled until disposal. Verifies no saves happen
/// while the queue is draining, and that disposing the queue triggers exactly one repository save
/// that persists only the second (real) chain's blocks, leaving the store tip at its tip.
/// </summary>
public async Task ReorgedBlocksAreNotSavedAsync() { this.repositoryTipHashAndHeight = new HashHeightPair(this.chain.Genesis.HashBlock, 0); var blockStoreFlushConditionMock = new Mock <IBlockStoreQueueFlushCondition>(); blockStoreFlushConditionMock.Setup(s => s.ShouldFlush).Returns(false); this.blockStoreQueue = new BlockStoreQueue(this.chain, this.chainState, blockStoreFlushConditionMock.Object, new StoreSettings(NodeSettings.Default(this.network)), this.blockRepositoryMock.Object, new LoggerFactory(), new Mock <INodeStats>().Object); await this.blockStoreQueue.InitializeAsync().ConfigureAwait(false); int reorgedChainLenght = 3; int realChainLenght = 6; // First present a short chain. ConcurrentChain alternativeChain = CreateChain(reorgedChainLenght); for (int i = 1; i < alternativeChain.Height; i++) { Block block = this.network.Consensus.ConsensusFactory.CreateBlock(); block.GetSerializedSize(); this.blockStoreQueue.AddToPending(new ChainedHeaderBlock(block, alternativeChain.GetBlock(i))); } // Present second chain which has more work and reorgs blocks from genesis. for (int i = 1; i < realChainLenght; i++) { Block block = this.network.Consensus.ConsensusFactory.CreateBlock(); block.GetSerializedSize(); this.blockStoreQueue.AddToPending(new ChainedHeaderBlock(block, this.chain.GetBlock(i))); } await this.WaitUntilQueueIsEmptyAsync().ConfigureAwait(false); Assert.Equal(this.chainState.BlockStoreTip, this.chain.Genesis); Assert.Equal(0, this.repositorySavesCount); // Dispose block store to trigger save. this.nodeLifetime.StopApplication(); this.blockStoreQueue.Dispose(); // Make sure that blocks only from 2nd chain were saved. Assert.Equal(this.chain.GetBlock(realChainLenght - 1), this.chainState.BlockStoreTip); Assert.Equal(1, this.repositorySavesCount); Assert.Equal(realChainLenght - 1, this.repositoryTotalBlocksSaved); }
/// <summary>
/// Returns all coins paying to the miner script that are currently spendable; coinbase outputs
/// are only included once they have at least 100 confirmations (coinbase maturity).
/// </summary>
public ICoin[] GetSpendableCoins()
{
    var spendableTxs = _Blocks
        .Select(entry => entry.Value)
        .SelectMany(blk => blk.Transactions.Select(tx => new { Tx = tx, Block = blk }))
        .Where(pair => !pair.Tx.IsCoinBase || (_Chain.Height + 1) - _Chain.GetBlock(pair.Block.GetHash()).Height >= 100)
        .Select(pair => pair.Tx);

    return spendableTxs
        .SelectMany(tx => tx.Outputs.AsIndexedOutputs())
        .Where(output => output.TxOut.ScriptPubKey == this.MinerScriptPubKey)
        .Select(output => new Coin(output))
        .ToArray();
}
/// <summary>
/// Checks the main-chain difficulty history against targethistory.csv: at every recorded height,
/// the stored header's Bits and GetWorkRequired must both equal the expected target.
/// </summary>
public void CanCalculateDifficulty()
{
    var main = new ConcurrentChain(LoadMainChain(), Network.Main);
    string[] records = File.ReadAllText("data/targethistory.csv")
        .Split(new string[] { "\r\n" }, StringSplitOptions.RemoveEmptyEntries);

    foreach (string record in records)
    {
        string[] fields = record.Split(',');
        int height = int.Parse(fields[0]);
        var expectedTarget = new Target(new BouncyCastle.Math.BigInteger(fields[1], 10));

        BlockHeader header = main.GetBlock(height).Header;
        Assert.Equal(expectedTarget, header.Bits);

        var computed = main.GetWorkRequired(Network.Main, height);
        Assert.Equal(expectedTarget, computed);
    }
}
/// <summary>
/// Checks the difficulty history against targethistory.csv: at every recorded height the stored
/// header's Bits and GetWorkRequired must both equal the expected target.
/// </summary>
public void CanCalculateDifficulty()
{
    var main = new ConcurrentChain(this.network, this.LoadMainChain());

    // Line separators may have been altered by copy operations, so split in an environment-independent way.
    string[] records = File.ReadAllText(TestDataLocations.GetFileFromDataFolder("targethistory.csv")).Split(new string[] { "\r\n", "\r", "\n" }, StringSplitOptions.RemoveEmptyEntries);

    foreach (string record in records)
    {
        string[] fields = record.Split(',');
        int height = int.Parse(fields[0]);
        var expectedTarget = new Target(new BouncyCastle.Math.BigInteger(fields[1], 10));

        BlockHeader header = main.GetBlock(height).Header;
        Assert.Equal(expectedTarget, header.Bits);

        Target computed = main.GetWorkRequired(this.network, height);
        Assert.Equal(expectedTarget, computed);
    }
}
/// <summary>
/// Ensures the controller's GetBlockCount reflects the consensus tip height exposed by
/// IChainState (here mocked to block 2 of a 3-block chain).
/// </summary>
public void GetBlockCount_ReturnsHeightFromChainState()
{
    var loggerFactory = new Mock <ILoggerFactory>();
    var storeCache = new Mock <IBlockStoreCache>();
    var chainState = new Mock <IChainState>();
    ConcurrentChain chain = WalletTestsHelpers.GenerateChainWithHeight(3, Network.StratisTest);

    loggerFactory.Setup(l => l.CreateLogger(It.IsAny <string>())).Returns(Mock.Of <ILogger>);
    chainState.Setup(c => c.ConsensusTip).Returns(chain.GetBlock(2));

    var controller = new BlockStoreController(loggerFactory.Object, storeCache.Object, chainState.Object);
    var json = (JsonResult)controller.GetBlockCount();
    int reportedHeight = int.Parse(json.Value.ToString());

    Assert.Equal(2, reportedHeight);
}
/// <summary>
/// Presents a short (3-block) alternative chain to the block store queue followed by a longer
/// (6-block) chain from genesis. Verifies no saves happen while the queue drains, and that
/// disposing the queue triggers exactly one repository save persisting only the second (real)
/// chain's blocks, leaving the store tip at its tip.
/// </summary>
public async Task ReorgedBlocksAreNotSavedAsync() { this.repositoryBlockHash = this.chain.Genesis.HashBlock; await this.blockStoreQueue.InitializeAsync().ConfigureAwait(false); int reorgedChainLenght = 3; int realChainLenght = 6; // First present a short chain. ConcurrentChain alternativeChain = this.CreateChain(reorgedChainLenght); for (int i = 1; i < alternativeChain.Height; i++) { Block block = new Block(); block.GetSerializedSize(); this.blockStoreQueue.AddToPending(new BlockPair(block, alternativeChain.GetBlock(i))); } // Present second chain which has more work and reorgs blocks from genesis. for (int i = 1; i < realChainLenght; i++) { Block block = new Block(); block.GetSerializedSize(); this.blockStoreQueue.AddToPending(new BlockPair(block, this.chain.GetBlock(i))); } await this.WaitUntilQueueIsEmptyAsync().ConfigureAwait(false); Assert.Equal(this.chainState.BlockStoreTip, this.chain.Genesis); Assert.Equal(0, this.repositorySavesCount); // Dispose block store to trigger save. this.nodeLifetime.StopApplication(); this.blockStoreQueue.Dispose(); // Make sure that blocks only from 2nd chain were saved. Assert.Equal(this.chain.GetBlock(realChainLenght - 1), this.chainState.BlockStoreTip); Assert.Equal(1, this.repositorySavesCount); Assert.Equal(realChainLenght - 1, this.repositoryTotalBlocksSaved); }
/// <summary>
/// Checks the difficulty history from targethistory.csv against a chain loaded from the block
/// store: at every recorded height the stored header's Bits and GetWorkRequired must both equal
/// the expected target.
/// </summary>
public void CanCalculateDifficulty()
{
    string[] records = File.ReadAllLines(TestDataLocations.DataFolder(@"targethistory.csv"));
    var store = new BlockStore(TestDataLocations.BlockFolderLocation, Network.Main);

    // todo: load the chain with a header only file
    ConcurrentChain chain = store.GetChain();

    foreach (string record in records)
    {
        string[] fields = record.Split(',');
        int height = int.Parse(fields[0]);
        var expectedTarget = new Target(new BigInteger(fields[1].Trim(), 10));

        BlockHeader header = chain.GetBlock(height).Header;
        Assert.Equal(expectedTarget, header.Bits);

        var computed = chain.GetWorkRequired(Network.Main, height);
        Assert.Equal(expectedTarget, computed);
    }
}
/// <summary>
/// Indexes the current chain, then for every not-yet-synced block indexes its ordered balances,
/// each of its transactions, and (when wallet rules exist) its wallet ordered balances;
/// finally clears the pending-block list.
/// </summary>
public void SyncIndexer()
{
    _Tester.Indexer.IndexChain(_Chain);
    var walletRules = _Tester.Client.GetAllWalletRules();

    foreach (var pendingBlock in _UnsyncBlocks)
    {
        int height = _Chain.GetBlock(pendingBlock.GetHash()).Height;
        _Tester.Indexer.IndexOrderedBalance(height, pendingBlock);

        foreach (var tx in pendingBlock.Transactions)
        {
            _Tester.Indexer.Index(new[] { new TransactionEntry.Entity(tx.GetHash(), tx, pendingBlock.GetHash()) });
        }

        if (walletRules.Count() != 0)
        {
            _Tester.Indexer.IndexWalletOrderedBalance(height, pendingBlock, walletRules);
        }
    }

    _UnsyncBlocks.Clear();
}
/// <summary>
/// Builds a <see cref="TransactionResult"/> from saved transaction records: picks the oldest
/// record with a real timestamp (falling back to the first record), resolves the first block hash
/// still present on the chain to compute confirmations, and optionally includes the raw transaction.
/// </summary>
private TransactionResult ToTransactionResult(bool includeTransaction, ConcurrentChain chain, Repository.SavedTransaction[] result)
{
    // Records stamped with the Unix epoch are treated as having no usable timestamp.
    var unixEpoch = NBitcoin.Utils.UnixTimeToDateTime(0);
    var oldest = result
        .Where(r => r.Timestamp != unixEpoch)
        .OrderBy(r => r.Timestamp)
        .FirstOrDefault() ?? result.First();

    // First saved block hash that still resolves on the chain (survived any reorg).
    var confirmedBlock = result
        .Where(r => r.BlockHash != null)
        .Select(r => chain.GetBlock(r.BlockHash))
        .Where(b => b != null)
        .FirstOrDefault();

    var confirmations = confirmedBlock == null ? 0 : chain.Tip.Height - confirmedBlock.Height + 1;

    return new TransactionResult()
    {
        Confirmations = confirmations,
        BlockId = confirmedBlock?.HashBlock,
        Transaction = includeTransaction ? oldest.Transaction : null,
        Height = confirmedBlock?.Height,
        Timestamp = oldest.Timestamp
    };
}
/// <summary>
/// Checks the difficulty history from targethistory.csv against a chain loaded from the local
/// block download folder: at every recorded height the stored header's Bits and GetWorkRequired
/// must both equal the expected target.
/// </summary>
public void CanCalculateDifficulty()
{
    string[] records = File.ReadAllLines("data/targethistory.csv");
    var store = new BlockStore(@"download\blocks", Network.Main);

    // todo: load the chain with a header-only file
    ConcurrentChain chain = store.GetChain();

    foreach (string record in records)
    {
        string[] fields = record.Split(',');
        int height = int.Parse(fields[0]);
        var expectedTarget = new Target(BigInteger.Parse(fields[1]));

        BlockHeader header = chain.GetBlock(height).Header;
        Assert.Equal(expectedTarget, header.Bits);

        var computed = chain.GetWorkRequired(Network.Main, height);
        Assert.Equal(expectedTarget, computed);
    }
}
/// <summary>
/// Round-trips a ConcurrentChain through ToBytes/Load and verifies the reloaded chain has the
/// same tip and still exposes the genesis block, including for a freshly serialized TestNet chain.
/// </summary>
public void CanLoadAndSaveConcurrentChain()
{
    var cchain = new ConcurrentChain();
    var chain = new ConcurrentChain(Network.Main);
    AddBlock(chain);
    AddBlock(chain);
    AddBlock(chain);

    cchain.SetTip(chain);
    byte[] serialized = cchain.ToBytes();

    cchain = new ConcurrentChain();
    cchain.Load(serialized);

    Assert.Equal(cchain.Tip, chain.Tip);
    Assert.NotNull(cchain.GetBlock(0));

    cchain = new ConcurrentChain(Network.TestNet);
    cchain.Load(cchain.ToBytes());
    Assert.NotNull(cchain.GetBlock(0));
}
/// <summary>
/// Reorg scenario for <c>WalletSyncManager.ProcessBlock</c>: a forked chain pair is generated
/// (fork at height 2), the wallet tip is set to block 4 of the old (left) chain — two blocks past
/// the fork — and block 5 of the new (right) chain is processed. Verifies the wallet manager is
/// told to remove blocks back to the fork point (height 2) and then replays the missing right-chain
/// blocks at heights 3, 4 and 5 until the wallet is caught up (height 5 is processed twice: once
/// during catch-up and once for the incoming block).
/// </summary>
public void ProcessBlock_NewBlock_BlockNotOnBestChain_ReOrgWalletManagerUsingBlockStoreCache() { (ConcurrentChain LeftChain, ConcurrentChain RightChain, List <Block> LeftForkBlocks, List <Block> RightForkBlocks)result = WalletTestsHelpers.GenerateForkedChainAndBlocksWithHeight(5, KnownNetworks.StratisMain, 2); // left side chain containing the 'old' fork. ConcurrentChain leftChain = result.LeftChain; // right side chain containing the 'new' fork. Work on this. this.chain = result.RightChain; var walletSyncManager = new WalletSyncManagerOverride(this.LoggerFactory.Object, this.walletManager.Object, this.chain, KnownNetworks.StratisMain, this.blockStore.Object, this.storeSettings, this.signals); // setup blockstore to return blocks on the chain. this.blockStore.Setup(b => b.GetBlockAsync(It.IsAny <uint256>())) .ReturnsAsync((uint256 hashblock) => { return(result.LeftForkBlocks.Union(result.RightForkBlocks).Single(b => b.GetHash() == hashblock)); }); // set 4th block of the old chain as tip. 2 ahead of the fork thus not being on the right chain. walletSyncManager.SetWalletTip(leftChain.GetBlock(result.LeftForkBlocks[3].Header.GetHash())); //process 5th block from the right side of the fork in the list does not have same prevhash as which is loaded. Block blockToProcess = result.RightForkBlocks[4]; blockToProcess.SetPrivatePropertyValue("BlockSize", 1L); walletSyncManager.ProcessBlock(blockToProcess); this.AssertTipBlockHash(walletSyncManager, 5); // walletmanager removes all blocks up to the fork. this.walletManager.Verify(w => w.RemoveBlocks(ExpectChainedBlock(this.chain.GetBlock(2)))); //verify manager processes each missing block until caught up. 
// height 3 this.walletManager.Verify(w => w.ProcessBlock(ExpectBlock(result.RightForkBlocks[2]), ExpectChainedBlock(this.chain.GetBlock(3)))); // height 4 this.walletManager.Verify(w => w.ProcessBlock(ExpectBlock(result.RightForkBlocks[3]), ExpectChainedBlock(this.chain.GetBlock(4)))); // height 5 this.walletManager.Verify(w => w.ProcessBlock(ExpectBlock(result.RightForkBlocks[4]), ExpectChainedBlock(this.chain.GetBlock(5))), Times.Exactly(2)); }
/// <summary>
/// Round-trips a ConcurrentChain through ToBytes/Load and verifies the reloaded chain has the
/// same tip and still exposes the genesis block, including for a freshly serialized test-network chain.
/// </summary>
public void CanLoadAndSaveConcurrentChain()
{
    var cchain = new ConcurrentChain(this.network);
    var chain = new ConcurrentChain(this.network);
    this.AddBlock(chain);
    this.AddBlock(chain);
    this.AddBlock(chain);

    cchain.SetTip(chain);
    byte[] serialized = cchain.ToBytes();

    cchain = new ConcurrentChain(this.network);
    cchain.Load(serialized);

    Assert.Equal(cchain.Tip, chain.Tip);
    Assert.NotNull(cchain.GetBlock(0));

    cchain = new ConcurrentChain(this.networkTest);
    cchain.Load(cchain.ToBytes());
    Assert.NotNull(cchain.GetBlock(0));
}
/// <summary>
/// Appends a new header (random nonce, previous hash = current tip) to the chain and returns
/// the chained block that now represents it.
/// </summary>
private ChainedBlock AddBlock(ConcurrentChain chain)
{
    var minedHeader = new BlockHeader();
    minedHeader.Nonce = RandomUtils.GetUInt32();
    minedHeader.HashPrevBlock = chain.Tip.HashBlock;
    chain.SetTip(minedHeader);
    return chain.GetBlock(minedHeader.GetHash());
}
/// <summary>
/// Checks the main-chain difficulty history against targethistory.csv: at every recorded height,
/// the stored header's Bits and GetWorkRequired must both equal the expected target.
/// </summary>
public void CanCalculateDifficulty()
{
    var main = new ConcurrentChain(LoadMainChain());
    string[] records = File.ReadAllText("data/targethistory.csv")
        .Split(new string[] { "\r\n" }, StringSplitOptions.RemoveEmptyEntries);

    foreach (string record in records)
    {
        string[] fields = record.Split(',');
        int height = int.Parse(fields[0]);
        var expectedTarget = new Target(BigInteger.Parse(fields[1]));

        BlockHeader header = main.GetBlock(height).Header;
        Assert.Equal(expectedTarget, header.Bits);

        var computed = main.GetWorkRequired(Network.Main, height);
        Assert.Equal(expectedTarget, computed);
    }
}
public void CanForkBackward()
{
    ConcurrentChain chain = new ConcurrentChain(Network.Main);
    AppendBlock(chain);
    AppendBlock(chain);
    ChainedBlock forkPoint = AppendBlock(chain);

    // --- Rewind by a single block. ---
    ChainedBlock tip = AppendBlock(chain);
    Assert.Equal(4, chain.Height);
    Assert.Equal(4, tip.Height);
    Assert.Equal(tip.HashBlock, chain.Tip.HashBlock);
    // SetTip to an ancestor returns that ancestor and truncates the chain to it.
    Assert.Equal(forkPoint.HashBlock, chain.SetTip(forkPoint).HashBlock);
    Assert.Equal(3, chain.Height);
    Assert.Equal(3, forkPoint.Height);
    Assert.Equal(forkPoint.HashBlock, chain.Tip.HashBlock);
    // The rewound block is gone; the fork point is still reachable.
    Assert.Null(chain.GetBlock(tip.HashBlock));
    Assert.NotNull(chain.GetBlock(forkPoint.HashBlock));

    // --- Rewind by three blocks. ---
    ChainedBlock extra1 = AppendBlock(chain);
    ChainedBlock extra2 = AppendBlock(chain);
    tip = AppendBlock(chain);
    Assert.Equal(6, chain.Height);
    Assert.Equal(6, tip.Height);
    Assert.Equal(tip.HashBlock, chain.Tip.HashBlock);
    Assert.Equal(forkPoint.HashBlock, chain.SetTip(forkPoint).HashBlock);
    Assert.Equal(3, chain.Height);
    Assert.Equal(3, forkPoint.Height);
    Assert.Equal(forkPoint.HashBlock, chain.Tip.HashBlock);
    // All three rewound blocks are no longer part of the chain.
    Assert.Null(chain.GetBlock(tip.HashBlock));
    Assert.Null(chain.GetBlock(extra1.HashBlock));
    Assert.Null(chain.GetBlock(extra2.HashBlock));

    // --- Fast-forward back to the previous tip restores the full height. ---
    chain.SetTip(tip);
    Assert.Equal(6, chain.Height);
    Assert.Equal(6, tip.Height);
    Assert.Equal(tip.HashBlock, chain.Tip.HashBlock);
}
public void CanForkSide()
{
    ConcurrentChain side = new ConcurrentChain(Network.Main);
    ConcurrentChain main = new ConcurrentChain(Network.Main);

    // Grow both chains together up to a common ancestor.
    AppendBlock(side, main);
    AppendBlock(side, main);
    ChainedBlock common = AppendBlock(side, main);

    // Diverge: one block on the side chain, three blocks on main.
    ChainedBlock sideb = AppendBlock(side);
    ChainedBlock mainb1 = AppendBlock(main);
    ChainedBlock mainb2 = AppendBlock(main);
    ChainedBlock mainb3 = AppendBlock(main);

    // Re-orging side onto main's tip returns the common ancestor (the fork point)...
    Assert.Equal(common.HashBlock, side.SetTip(main.Tip).HashBlock);
    // ...and makes the main branch visible from side while hiding side's own branch.
    Assert.NotNull(side.GetBlock(mainb1.HashBlock));
    Assert.NotNull(side.GetBlock(mainb2.HashBlock));
    Assert.NotNull(side.GetBlock(mainb3.HashBlock));
    Assert.NotNull(side.GetBlock(common.HashBlock));
    Assert.Null(side.GetBlock(sideb.HashBlock));

    // Re-orging back onto the side branch hides the main-branch blocks again.
    Assert.Equal(common.HashBlock, side.SetTip(sideb).HashBlock);
    Assert.Null(side.GetBlock(mainb1.HashBlock));
    Assert.Null(side.GetBlock(mainb2.HashBlock));
    Assert.Null(side.GetBlock(mainb3.HashBlock));
    Assert.NotNull(side.GetBlock(sideb.HashBlock));
}
public void CanForkSidePartialChain()
{
    // Same scenario as CanForkSide, but on chains seeded from a fake genesis header
    // rather than a network definition.
    Block genesis = TestUtils.CreateFakeBlock();
    ConcurrentChain side = new ConcurrentChain(genesis.Header);
    ConcurrentChain main = new ConcurrentChain(genesis.Header);

    // Shared history up to the common ancestor.
    AppendBlock(side, main);
    AppendBlock(side, main);
    ChainedBlock common = AppendBlock(side, main);

    // Diverge: one block on side, three on main.
    ChainedBlock sideb = AppendBlock(side);
    ChainedBlock mainb1 = AppendBlock(main);
    ChainedBlock mainb2 = AppendBlock(main);
    ChainedBlock mainb3 = AppendBlock(main);

    // Re-org side onto main's tip: fork point comes back, main branch becomes visible.
    Assert.Equal(common.HashBlock, side.SetTip(main.Tip).HashBlock);
    Assert.NotNull(side.GetBlock(mainb1.HashBlock));
    Assert.NotNull(side.GetBlock(mainb2.HashBlock));
    Assert.NotNull(side.GetBlock(mainb3.HashBlock));
    Assert.NotNull(side.GetBlock(common.HashBlock));
    Assert.Null(side.GetBlock(sideb.HashBlock));

    // Re-org back onto the side branch: main-branch blocks disappear again.
    Assert.Equal(common.HashBlock, side.SetTip(sideb).HashBlock);
    Assert.Null(side.GetBlock(mainb1.HashBlock));
    Assert.Null(side.GetBlock(mainb2.HashBlock));
    Assert.Null(side.GetBlock(mainb3.HashBlock));
    Assert.NotNull(side.GetBlock(sideb.HashBlock));
}
/// <summary>
/// Exercises ConcurrentChain.SetTip/GetBlock across forward growth, rewinds, and a
/// competing fork, checking which blocks remain reachable after each re-org.
/// </summary>
public void CanBuildConcurrentChain()
{
    ConcurrentChain cchain = new ConcurrentChain();
    ConcurrentChain chain = new ConcurrentChain(Network.Main);

    // A chain with no genesis cannot adopt a tip yet.
    Assert.Null(cchain.SetTip(chain.Tip));
    var b0 = cchain.Tip;
    Assert.Equal(cchain.Tip, chain.Tip);

    var b1 = AddBlock(chain);
    var b2 = AddBlock(chain);
    AddBlock(chain);
    AddBlock(chain);
    var b5 = AddBlock(chain);
    // SetTip returns the fork point with the previous tip (the old genesis-only tip).
    Assert.Equal(cchain.SetTip(chain.Tip), b0);
    Assert.Equal(cchain.Tip, chain.Tip);

    Assert.Equal(cchain.GetBlock(5), chain.Tip);
    Assert.Equal(cchain.GetBlock(b5.HashBlock), chain.Tip);

    // Rewind to b1: later blocks become unreachable.
    // (Was `Assert.Equal(x, null)`: use Assert.Null per xUnit usage rule xUnit2003.)
    Assert.Equal(cchain.SetTip(b1), b1);
    Assert.Null(cchain.GetBlock(b5.HashBlock));
    Assert.Null(cchain.GetBlock(b2.HashBlock));

    // Fast-forward back to b5: fork point with the rewound tip is b1.
    Assert.Equal(cchain.SetTip(b5), b1);
    Assert.Equal(cchain.GetBlock(b5.HashBlock), chain.Tip);

    // Build a competing fork off b2 on the source chain.
    chain.SetTip(b2);
    AddBlock(chain);
    AddBlock(chain);
    var b5b = AddBlock(chain);
    var b6b = AddBlock(chain);

    // Adopting the competing tip forks at b2 and replaces the old branch.
    Assert.Equal(cchain.SetTip(b6b), b2);
    Assert.Null(cchain.GetBlock(b5.HashBlock));
    Assert.Equal(cchain.GetBlock(b2.HashBlock), b2);
    Assert.Equal(cchain.GetBlock(6), b6b);
    Assert.Equal(cchain.GetBlock(5), b5b);
}
/// <summary>
/// Removes stale entries from the tracker: tracked outpoints whose spending operation is
/// confirmed deeper than <paramref name="blockExpiration"/> blocks, operations stranded on a
/// fork older than <paramref name="blockExpiration"/> blocks, and unconfirmed operations
/// older than <paramref name="timeExpiration"/>.
/// </summary>
/// <param name="chain">Current best chain, used to compute confirmation depth and detect forked blocks.</param>
/// <param name="blockExpiration">Depth in blocks after which confirmed/forked entries expire.</param>
/// <param name="timeExpiration">Age after which unconfirmed operations expire; defaults to 7 days.</param>
/// <returns>The removed entries (TrackedOutpoint and Operation instances).</returns>
internal List<object> Prune(ConcurrentChain chain, int blockExpiration = 2000, TimeSpan? timeExpiration = null)
{
    List<object> removed = new List<object>();
    timeExpiration = timeExpiration ?? TimeSpan.FromDays(7.0);
    // NOTE: _Operations is enumerated while entries are removed via TryRemove — assumes a
    // concurrent dictionary whose enumerator tolerates concurrent mutation (TODO confirm).
    foreach(var op in _Operations)
    {
        if(op.Value.BlockId != null)
        {
            // Operation claims to be confirmed in a block; check it is still on the best chain.
            var chained = chain.GetBlock(op.Value.BlockId);
            var isForked = chained == null;
            if(!isForked)
            {
                // Depth = (tip height - block height + 1); deeper than expiration means
                // the spend is final, so the outpoints no longer need tracking.
                bool isOldConfirmed = chain.Height - chained.Height + 1 > blockExpiration;
                if(isOldConfirmed)
                {
                    foreach(var spent in op.Value.SpentCoins) //Stop tracking the outpoints
                    {
                        TrackedOutpoint unused;
                        if(_TrackedOutpoints.TryRemove(TrackedOutpoint.GetId(spent.Item1.Outpoint), out unused))
                            removed.Add(unused);
                    }
                }
            }
            else
            {
                // Block fell off the best chain; use the operation's own recorded height
                // to decide whether the fork is old enough to abandon.
                var isOldFork = chain.Height - op.Value.Height + 1 > blockExpiration;
                if(isOldFork) //clear any operation belonging to an old fork
                {
                    Operation unused;
                    if(_Operations.TryRemove(op.Key, out unused))
                        removed.Add(unused);
                }
            }
        }
        else
        {
            // Never confirmed: expire by wall-clock age instead of block depth.
            var isOldUnconf = (DateTimeOffset.UtcNow - op.Value.AddedDate) > timeExpiration;
            if(isOldUnconf) //clear any old unconfirmed
            {
                Operation unused;
                if(_Operations.TryRemove(op.Key, out unused))
                    removed.Add(unused);
            }
        }
    }
    return removed;
}