public override void ForkBlockIndexes(
    Guid sourceChainId,
    Guid destinationChainId,
    BlockHash branchpoint)
{
    LiteCollection<HashDoc> srcColl = IndexCollection(sourceChainId);
    if (!srcColl.Exists(_ => true))
    {
        throw new ChainIdNotFoundException(
            sourceChainId,
            $"No such chain ID: {sourceChainId}."
        );
    }

    LiteCollection<HashDoc> destColl = IndexCollection(destinationChainId);
    BlockHash? genesisHash = IterateIndexes(sourceChainId, 0, 1)
        .Cast<BlockHash?>()
        .FirstOrDefault();

    if (genesisHash is null || branchpoint.Equals(genesisHash))
    {
        return;
    }

    destColl.Delete(Query.All());
    destColl.InsertBulk(srcColl.FindAll().TakeWhile(i => !i.Hash.Equals(branchpoint)));

    AppendIndex(destinationChainId, branchpoint);
}
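For orientation, here is a minimal usage sketch of the fork operation above; the helper name ForkChainAt and the way the destination chain ID is chosen are assumptions for illustration, not part of the snippet.

using System;
using Libplanet;
using Libplanet.Store;

// Minimal usage sketch (illustrative only): copy the source chain's block index onto a
// freshly created chain ID, stopping at the chosen branch point.
static Guid ForkChainAt(IStore store, Guid sourceChainId, BlockHash branchpoint)
{
    Guid destinationChainId = Guid.NewGuid();   // the forked chain gets its own ID
    store.ForkBlockIndexes(sourceChainId, destinationChainId, branchpoint);

    // The destination index now runs from the genesis block up to and including the
    // branch point; later blocks are appended with AppendIndex as they arrive.
    return destinationChainId;
}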
public bool Equals(Eth1Data? other)
{
    return !(other is null)
        && BlockHash.Equals(other.BlockHash)
        && DepositCount == other.DepositCount
        && DepositRoot.Equals(other.DepositRoot);
}
public void ExistsBlockState(bool secure)
{
    var stateStore = MakeTrieStateStoreFixture(secure);
    BlockHash randomBlockHash;
    do
    {
        randomBlockHash = new BlockHash(TestUtils.GetRandomBytes(32));
    }
    while (randomBlockHash.Equals(_fx.GenesisBlock.Hash));

    Assert.False(stateStore.ContainsBlockStates(randomBlockHash));
    Assert.True(stateStore.ContainsBlockStates(_fx.GenesisBlock.Hash));
}
/// <inheritdoc cref="BaseStore.ForkBlockIndexes(Guid, Guid, BlockHash)"/>
public override void ForkBlockIndexes(
    Guid sourceChainId,
    Guid destinationChainId,
    BlockHash branchpoint
)
{
    BlockHash? genesisHash = IterateIndexes(sourceChainId, 0, 1).FirstOrDefault();

    if (genesisHash is null || branchpoint.Equals(genesisHash))
    {
        return;
    }

    ColumnFamilyHandle cf = GetColumnFamily(_chainDb, destinationChainId);
    var writeBatch = new WriteBatch();

    long index = 0;
    try
    {
        foreach (Iterator it in IterateDb(_chainDb, IndexKeyPrefix, sourceChainId))
        {
            byte[] hashBytes = it.Value();
            writeBatch.Put(it.Key(), hashBytes, cf);
            index += 1;

            if (writeBatch.Count() >= ForkWriteBatchSize)
            {
                _chainDb.Write(writeBatch);
                writeBatch.Dispose();
                writeBatch = new WriteBatch();
            }

            if (branchpoint.ByteArray.SequenceEqual(hashBytes))
            {
                break;
            }
        }
    }
    finally
    {
        _chainDb.Write(writeBatch);
        writeBatch.Dispose();
    }

    _chainDb.Put(
        IndexCountKey,
        RocksDBStoreBitConverter.GetBytes(index),
        cf
    );
}
/// <inheritdoc cref="BaseStore.ForkBlockIndexes(Guid, Guid, BlockHash)"/>
public override void ForkBlockIndexes(
    Guid sourceChainId,
    Guid destinationChainId,
    BlockHash branchpoint
)
{
    BlockHash? genesisHash = IterateIndexes(sourceChainId, 0, 1).FirstOrDefault();

    if (genesisHash is null || branchpoint.Equals(genesisHash))
    {
        return;
    }

    ColumnFamilyHandle srcCf = GetColumnFamily(_chainDb, sourceChainId);
    ColumnFamilyHandle destCf = GetColumnFamily(_chainDb, destinationChainId);

    foreach (Iterator k in IterateDb(_chainDb, IndexKeyPrefix, destinationChainId))
    {
        _chainDb.Remove(k.Key(), destCf);
    }

    long bpIndex = GetBlockIndex(branchpoint).Value;

    if (GetPreviousChainInfo(srcCf) is { } chainInfo &&
        chainInfo.Item2 == bpIndex)
    {
        ForkBlockIndexes(chainInfo.Item1, destinationChainId, branchpoint);
        return;
    }

    _chainDb.Put(PreviousChainIdKey, sourceChainId.ToByteArray(), destCf);
    _chainDb.Put(
        PreviousChainIndexKey,
        RocksDBStoreBitConverter.GetBytes(bpIndex),
        destCf
    );
    _chainDb.Put(
        IndexCountKey,
        RocksDBStoreBitConverter.GetBytes(bpIndex + 1),
        destCf
    );
    AddFork(srcCf, destinationChainId);
}
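The column-family variant above does not copy index entries at all: the destination chain only records which chain it branched off (PreviousChainIdKey) and at which index (PreviousChainIndexKey). A lookup on the forked chain must therefore fall back to its ancestors. The following is an illustrative sketch of that resolution; the two delegates stand in for the store's internal lookups and are assumptions, not the library's actual API.

using System;
using Libplanet;

// Illustrative sketch only: resolve the block hash at `index` on a chain that stores a
// (parent chain ID, branch index) pointer instead of copied index entries.
static BlockHash? ResolveIndex(
    Guid chainId,
    long index,
    Func<Guid, long, BlockHash?> getOwnIndex,                     // entry written on the chain itself
    Func<Guid, (Guid ParentId, long ParentIndex)?> getPreviousChain)
{
    while (true)
    {
        if (getOwnIndex(chainId, index) is { } hash)
        {
            return hash;                          // found directly on this chain
        }

        if (getPreviousChain(chainId) is { } p && index <= p.ParentIndex)
        {
            chainId = p.ParentId;                 // walk up to the parent chain
            continue;
        }

        return null;                              // not present on any ancestor chain
    }
}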
/// <inheritdoc cref="BaseStore.ForkBlockIndexes(Guid, Guid, BlockHash)"/>
public override void ForkBlockIndexes(
    Guid sourceChainId,
    Guid destinationChainId,
    BlockHash branchpoint)
{
    LiteCollection<HashDoc> srcColl = IndexCollection(sourceChainId);
    LiteCollection<HashDoc> destColl = IndexCollection(destinationChainId);

    BlockHash? genesisHash = IterateIndexes(sourceChainId, 0, 1)
        .Cast<BlockHash?>()
        .FirstOrDefault();

    if (genesisHash is null || branchpoint.Equals(genesisHash))
    {
        return;
    }

    destColl.InsertBulk(srcColl.FindAll()
        .TakeWhile(i => !i.Hash.Equals(branchpoint)).Skip(1));

    AppendIndex(destinationChainId, branchpoint);
}
/// <summary>
/// Recalculates and complements all <i>missing</i> block states up to and including the
/// given <paramref name="blockHash"/>, starting from the genesis block.
/// </summary>
/// <param name="blockHash">The inclusive limit of the target hash at which to terminate
/// complementation.</param>
/// <remarks>
/// <para>
/// If a complementation of the entire blockchain is needed, call with the tip hash of the
/// <see cref="BlockChain{T}"/>.
/// </para>
/// <para>
/// Unlike <see cref="RecalculateBlockStates"/>, this method skips recalculation if states
/// are found for intermediate blocks.  This may not be fully secure if the states for
/// blocks in the <see cref="IStateStore"/> are somehow corrupted.
/// </para>
/// </remarks>
internal void ComplementAllBlockStates(BlockHash blockHash)
{
    _logger.Verbose("Complementing all block states up to {BlockHash}...", blockHash);

    // Prevents a recursive attempt to recalculate & complement incomplete block states by
    // mistake; if the code below works as intended, these state completers must never
    // be invoked.
    StateCompleterSet<T> stateCompleters = StateCompleterSet<T>.Reject;

    // Calculates and fills the incomplete states on the fly.
    foreach (BlockHash hash in BlockHashes)
    {
        Block<T> block = this[hash];
        if (StateStore.ContainsStateRoot(block.StateRootHash))
        {
            continue;
        }

        IReadOnlyList<ActionEvaluation> evaluations = ActionEvaluator.Evaluate(
            block,
            stateCompleters);

        _rwlock.EnterWriteLock();
        try
        {
            SetStates(block, evaluations);
        }
        finally
        {
            _rwlock.ExitWriteLock();
        }

        if (blockHash.Equals(hash))
        {
            break;
        }
    }
}
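Per the remarks above, complementing every missing state on a chain amounts to passing the current tip's hash. Below is a minimal sketch under that reading; the helper is hypothetical, and because ComplementAllBlockStates is internal the call has to come from the same assembly (or one granted InternalsVisibleTo).

using Libplanet.Action;
using Libplanet.Blockchain;

// Sketch only: complement every missing block state by handing the method the tip hash,
// as the remarks suggest.  Must live in (or be visible to) the defining assembly because
// the method is internal.
static void ComplementWholeChain<T>(BlockChain<T> chain)
    where T : IAction, new()
{
    chain.ComplementAllBlockStates(chain.Tip.Hash);
}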