/// <summary>
/// Creates a brand-new on-disk store: writes the format version byte, the chain-head
/// hash (genesis, since a fresh store's head is by definition genesis) and the genesis
/// header itself.
/// </summary>
/// <exception cref="BlockStoreException">If the store file cannot be (re)created or written.</exception>
private void CreateNewStore(NetworkParameters networkParams, FileInfo file)
{
    // Create a new block store if the file wasn't found or anything went wrong whilst reading.
    _blockMap.Clear();
    try
    {
        if (_stream != null)
        {
            _stream.Dispose();
        }
        // FileMode.Create creates-or-truncates; FileInfo.OpenWrite() does NOT truncate,
        // which would leave stale bytes at the tail of a previously longer (corrupt) store.
        _stream = file.Open(FileMode.Create, FileAccess.Write); // Do not append, create fresh.
        _stream.Write(1); // Version.
    }
    catch (IOException e1)
    {
        // We could not load a block store nor could we create a new one!
        throw new BlockStoreException(e1);
    }
    try
    {
        // Set up the genesis block. When we start out fresh, it is by definition the top of the chain.
        var genesis = networkParams.GenesisBlock.CloneAsHeader();
        var storedGenesis = new StoredBlock(genesis, genesis.GetWork(), 0);
        _chainHead = storedGenesis.Header.Hash;
        _stream.Write(_chainHead.Bytes);
        Put(storedGenesis);
    }
    catch (IOException e)
    {
        throw new BlockStoreException(e);
    }
}
public void CanReIndex()
{
    // Copy the first 100 blocks from the reference data into a fresh store.
    var sourceStore = new BlockStore(TestDataLocations.DataFolder(@"blocks"), Network.StratisMain);
    BlockStore targetStore = CreateBlockStore("CanReIndexFolder");
    targetStore.AppendAll(sourceStore.Enumerate(false).Take(100).Select(b => b.Item));

    // First index build must pick up all 100 blocks.
    var indexedStore = new IndexedBlockStore(new InMemoryNoSqlRepository(), targetStore);
    var reIndexed = indexedStore.ReIndex();
    Assert.Equal(100, reIndexed);

    // Every stored block must be retrievable from the index by hash.
    int verified = 0;
    foreach (StoredBlock stored in targetStore.Enumerate(true))
    {
        Block fetched = indexedStore.Get(stored.Item.GetHash());
        Assert.Equal(fetched.GetHash(), stored.Item.GetHash());
        verified++;
    }
    Assert.Equal(100, verified);

    // Appending one more block: re-index finds exactly it, then nothing new.
    StoredBlock extra = sourceStore.Enumerate(false).Skip(100).FirstOrDefault();
    targetStore.Append(extra.Item);
    reIndexed = indexedStore.ReIndex();
    Assert.Equal(1, reIndexed);
    reIndexed = indexedStore.ReIndex();
    Assert.Equal(0, reIndexed);
}
/// <summary>Stores a block keyed by its header hash, replacing any existing entry.</summary>
/// <exception cref="BlockStoreException"/>
public void Put(StoredBlock block)
{
    lock (this)
    {
        _blockMap[block.Header.Hash] = block;
    }
}
/// <summary>Persists a block row via the repository, inside the ambient transaction if any.</summary>
public void AddBlock(StoredBlock block)
{
    // Make sure the insert participates in any current transaction before touching the repository.
    JoinCurrentTransaction();
    using (var repository = new BlockchainRepository(conn))
    {
        repository.InsertBlock(block);
    }
}
/// <summary>
/// Base-class constructor: seeds the store with the network's genesis block and
/// makes it the initial chain head.
/// </summary>
protected ABlockStore(NetworkParameters parameters)
{
    // Insert the genesis block.
    var genesisHeader = parameters.GenesisBlock.CloneAsHeader();
    StoredBlock storedGenesis = new StoredBlock(genesisHeader, genesisHeader.GetWork(), 0);
    // NOTE(review): Put and the ChainHead setter are invoked from the constructor; if either
    // is virtual/abstract, derived classes may run before their own fields are initialized — confirm.
    Put(storedGenesis);
    ChainHead = storedGenesis;
}
public void CanValidateBlocks()
{
    // Every block in the test folder must pass a full block check at its own timestamp.
    foreach (var stored in StoredBlock.EnumerateFolder(@"data\blocks"))
    {
        ValidationState state = Network.Main.CreateValidationState();
        state.Now = stored.Item.Header.BlockTime;
        Assert.True(state.CheckBlock(stored.Item));
    }
}
/// <summary>
/// Creates an in-memory block store pre-populated with the network's genesis block,
/// which also becomes the initial chain head.
/// </summary>
public MemoryBlockStore(NetworkParameters @params)
{
    _blockMap = new Dictionary<Sha256Hash, StoredBlock>();
    // Seed the store with the genesis block and make it the starting chain head.
    var header = @params.GenesisBlock.CloneAsHeader();
    var genesis = new StoredBlock(header, header.GetWork(), 0);
    Put(genesis);
    SetChainHead(genesis);
}
/// <summary>
/// Builds a SpentOutput by pairing an unspent output's data with the height of the
/// block that spends it.
/// </summary>
public static SpentOutput Create(StoredBlock block, UnspentOutput unspentOutput) =>
    new SpentOutput(
        unspentOutput.SourceBlockHeight,
        block.Height,
        unspentOutput.TransactionHash,
        unspentOutput.OutputNumber,
        unspentOutput.Sum,
        unspentOutput.PublicScript);
/// <summary>
/// Creates an in-memory block store pre-populated with the network's genesis block,
/// which is also installed as the initial chain head.
/// </summary>
public MemoryBlockStore(NetworkParameters @params)
{
    _blockMap = new Dictionary<Sha256Hash, StoredBlock>();
    // Insert the genesis block.
    var genesisHeader = @params.GenesisBlock.CloneAsHeader();
    var storedGenesis = new StoredBlock(genesisHeader, genesisHeader.GetWork(), 0);
    Put(storedGenesis);
    SetChainHead(storedGenesis);
}
public void CanEnumerateBlockCountRange()
{
    var store = new BlockStore(TestDataLocations.DataFolder(@"blocks"), Network.StratisMain);
    // Range enumeration (skip 4, take 2) must align with plain enumeration.
    StoredBlock expected = store.Enumerate(false).Skip(4).First();
    StoredBlock[] actual = store.Enumerate(false, 4, 2).ToArray();
    Assert.Equal(2, actual.Length);
    Assert.Equal(expected.Item.Header.GetHash(), actual[0].Item.Header.GetHash());
    Assert.True(actual[0].Item.CheckMerkleRoot());
}
/// <summary>
/// Walks a folder of blk*.dat files end to end and verifies ordering, per-block
/// integrity, and position-based range enumeration across file boundaries.
/// </summary>
public void CanReadStoredBlockFolder()
{
    var blk0 = StoredBlock.EnumerateFile(@"data\blocks\blk00000.dat", (uint)0).ToList();
    var blk1 = StoredBlock.EnumerateFile(@"data\blocks\blk00001.dat", (uint)1).ToList();
    int count = 0;
    foreach (var stored in StoredBlock.EnumerateFolder(@"data\blocks"))
    {
        // Folder enumeration must start at the first block of blk00000...
        if (count == 0)
        {
            Assert.Equal(blk0[0].Item.GetHash(), stored.Item.GetHash());
        }
        // ...and roll over into blk00001 after the first file's 300 blocks.
        if (count == 300)
        {
            Assert.Equal(blk1[0].Item.GetHash(), stored.Item.GetHash());
        }
        Assert.True(stored.Item.Header.CheckProofOfWork());
        Assert.True(stored.Item.CheckMerkleRoot());
        count++;
    }
    Assert.Equal(600, count);
    // Open-ended range from the second file's penultimate block: the last two blocks.
    count = 0;
    foreach (var stored in StoredBlock.EnumerateFolder(@"data\blocks", new DiskBlockPosRange(blk1[298].BlockPosition)))
    {
        count++;
    }
    Assert.Equal(2, count);
    // Open-ended range from the first file's penultimate block spans into the second file (2 + 300).
    count = 0;
    foreach (var stored in StoredBlock.EnumerateFolder(@"data\blocks", new DiskBlockPosRange(blk0[298].BlockPosition)))
    {
        count++;
    }
    Assert.Equal(302, count);
    // Bounded range crossing the file boundary; the counts imply the end position is exclusive.
    count = 0;
    foreach (var stored in StoredBlock.EnumerateFolder(@"data\blocks", new DiskBlockPosRange(blk0[298].BlockPosition, blk1[2].BlockPosition)))
    {
        count++;
    }
    Assert.Equal(4, count);
    // Bounded range entirely within a single file.
    count = 0;
    foreach (var stored in StoredBlock.EnumerateFolder(@"data\blocks", new DiskBlockPosRange(blk0[30].BlockPosition, blk0[34].BlockPosition)))
    {
        count++;
    }
    Assert.Equal(4, count);
}
public void CanStoreInBlockRepository()
{
    var repository = CreateBlockRepository();
    var first = StoredBlock.EnumerateFile(TestDataLocations.DataBlockFolder("blk0001.dat"), network: Network.StratisMain).First();

    // Writing only the header yields a header-only block on retrieval...
    repository.WriteBlockHeader(first.Item.Header);
    var fetched = repository.GetBlock(first.Item.GetHash());
    Assert.True(fetched.HeaderOnly);

    // ...while writing the full block upgrades the stored entry.
    repository.WriteBlock(first.Item);
    fetched = repository.GetBlock(first.Item.GetHash());
    Assert.False(fetched.HeaderOnly);
}
/// <summary>
/// Walks a folder of Stratis blk*.dat files end to end and verifies ordering,
/// per-block integrity, and position-based range enumeration across file boundaries.
/// </summary>
public void CanReadStoredBlockFolder()
{
    var blk0 = StoredBlock.EnumerateFile(TestDataLocations.DataBlockFolder("blk0001.dat"), (uint)1, network: Network.StratisMain).ToList();
    var blk1 = StoredBlock.EnumerateFile(TestDataLocations.DataBlockFolder("blk0002.dat"), (uint)2, network: Network.StratisMain).ToList();
    int count = 0;
    foreach (var stored in StoredBlock.EnumerateFolder(TestDataLocations.DataFolder(@"blocks"), network: Network.StratisMain))
    {
        // Folder enumeration must start at the first block of blk0001...
        if (count == 0)
        {
            Assert.Equal(blk0[0].Item.GetHash(), stored.Item.GetHash());
        }
        // ...and roll over into blk0002 after the first file's 2000 blocks.
        if (count == 2000)
        {
            Assert.Equal(blk1[0].Item.GetHash(), stored.Item.GetHash());
        }
        Assert.True(stored.Item.Check());
        count++;
    }
    Assert.Equal(4000, count);
    // Open-ended range from the second file's penultimate block: the last two blocks.
    count = 0;
    foreach (var stored in StoredBlock.EnumerateFolder(TestDataLocations.DataFolder(@"blocks"), new DiskBlockPosRange(blk1[1998].BlockPosition), network: Network.StratisMain))
    {
        count++;
    }
    Assert.Equal(2, count);
    // Open-ended range from the first file's penultimate block spans into the second file (2 + 2000).
    count = 0;
    foreach (var stored in StoredBlock.EnumerateFolder(TestDataLocations.DataFolder(@"blocks"), new DiskBlockPosRange(blk0[1998].BlockPosition), network: Network.StratisMain))
    {
        count++;
    }
    Assert.Equal(2002, count);
    // Bounded range crossing the file boundary; the counts imply the end position is exclusive.
    count = 0;
    foreach (var stored in StoredBlock.EnumerateFolder(TestDataLocations.DataFolder(@"blocks"), new DiskBlockPosRange(blk0[1998].BlockPosition, blk1[2].BlockPosition), network: Network.StratisMain))
    {
        count++;
    }
    Assert.Equal(4, count);
    // Bounded range entirely within a single file.
    count = 0;
    foreach (var stored in StoredBlock.EnumerateFolder(TestDataLocations.DataFolder(@"blocks"), new DiskBlockPosRange(blk0[30].BlockPosition, blk0[34].BlockPosition), network: Network.StratisMain))
    {
        count++;
    }
    Assert.Equal(4, count);
}
public void CanStoreInBlockRepository()
{
    var repository = CreateBlockRepository();
    var first = StoredBlock.EnumerateFile(@"data\blocks\blk00000.dat").First();

    // A header-only write must come back flagged as header-only...
    repository.WriteBlockHeader(first.Item.Header);
    var fetched = repository.GetBlock(first.Item.GetHash());
    Assert.True(fetched.HeaderOnly);

    // ...and a full-block write must clear that flag.
    repository.WriteBlock(first.Item);
    fetched = repository.GetBlock(first.Item.GetHash());
    Assert.False(fetched.HeaderOnly);
}
/// <summary>
/// Looks up a stored block by hash, consulting the in-memory block cache and the
/// negative ("not found") cache before falling back to a disk read.
/// </summary>
/// <returns>The stored block, or null if the hash is not present in the store.</returns>
/// <exception cref="BlockStoreException">Wraps any I/O or parse failure from disk.</exception>
public StoredBlock Get(Sha256Hash hash)
{
    lock (this)
    {
        // Check the memory cache first.
        StoredBlock fromMem;
        if (_blockCache.TryGetValue(hash, out fromMem))
        {
            return (fromMem);
        }
        // A hit on the sentinel marker means we already know this hash is absent on disk.
        if (_notFoundCache.TryGetValue(hash, out fromMem) && (fromMem == _notFoundMarker))
        {
            return (null);
        }
        try
        {
            var fromDisk = GetRecord(hash);
            StoredBlock block = null;
            if (fromDisk == null)
            {
                // Remember the miss; bound the negative cache by dropping entries from
                // the front (presumably oldest-first — depends on the cache type; confirm).
                _notFoundCache[hash] = _notFoundMarker;
                while (_notFoundCache.Count > 2050)
                {
                    _notFoundCache.RemoveAt(0);
                }
            }
            else
            {
                // Materialize the disk record and apply the same bounded-size policy
                // to the positive cache.
                block = fromDisk.ToStoredBlock(_params);
                _blockCache[hash] = block;
                while (_blockCache.Count > 2050)
                {
                    _blockCache.RemoveAt(0);
                }
            }
            return (block);
        }
        catch (IOException e)
        {
            throw new BlockStoreException(e);
        }
        catch (ProtocolException e)
        {
            throw new BlockStoreException(e);
        }
    }
}
public void CanIndexBlock()
{
    var store = CreateIndexedStore();
    // Index the first 50 blocks of the file.
    foreach (var stored in StoredBlock.EnumerateFile(@"data\blocks\blk0001.dat").Take(50))
    {
        store.Put(stored.Item);
    }
    // A known hash resolves; a near-miss hash (last nibble changed) does not.
    Assert.NotNull(store.Get(uint256.Parse("0x0000066e91e46e5a264d42c89e1204963b2ee6be230b443e9159020539d972af")));
    Assert.Null(store.Get(uint256.Parse("0x0000066e91e46e5a264d42c89e1204963b2ee6be230b443e9159020539d972ae")));
}
public void CanIndexBlock()
{
    var store = CreateIndexedStore();
    // Index the first 50 blocks of the file.
    foreach (var stored in StoredBlock.EnumerateFile(@"data\blocks\blk00000.dat").Take(50))
    {
        store.Put(stored.Item);
    }
    // A known hash resolves; a near-miss hash (last nibble changed) does not.
    Assert.NotNull(store.Get(uint256.Parse("000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f")));
    Assert.Null(store.Get(uint256.Parse("000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26e")));
}
public void CanIndexBlock()
{
    var store = CreateIndexedStore();
    // Index the first 50 blocks of the Stratis file.
    foreach (var stored in StoredBlock.EnumerateFile(TestDataLocations.DataBlockFolder("blk0001.dat"), network: Network.StratisMain).Take(50))
    {
        store.Put(stored.Item);
    }
    // A known hash resolves; a near-miss hash (last nibble changed) does not.
    Assert.NotNull(store.Get(uint256.Parse("0x0000066e91e46e5a264d42c89e1204963b2ee6be230b443e9159020539d972af")));
    Assert.Null(store.Get(uint256.Parse("0x0000066e91e46e5a264d42c89e1204963b2ee6be230b443e9159020539d972ae")));
}
public void CanStoreBlocksInMultipleFiles()
{
    var store = CreateBlockStore();
    // An extreme size cap forces every appended block into its own file.
    store.MaxFileSize = 10;
    var blocks = StoredBlock.EnumerateFile(@"data\blocks\blk00000.dat").Take(10).ToList();
    foreach (var stored in blocks)
    {
        store.Append(stored.Item);
    }
    var persisted = store.Enumerate(true).ToList();
    Assert.Equal(blocks.Count, persisted.Count);
    Assert.Equal(11, store.Folder.GetFiles().Length); // 10 files + lock file.
}
/// <summary>
/// Records a new chain head and persists its hash into the store file's fixed
/// header slot (the 32 bytes immediately after the single version byte).
/// </summary>
/// <exception cref="BlockStoreException">If the hash cannot be written to disk.</exception>
public void SetChainHead(StoredBlock chainHead)
{
    lock (this)
    {
        try
        {
            _chainHead = chainHead.Header.Hash;
            // Write out new hash to the first 32 bytes of the file past one (first byte is version number).
            _stream.Seek(1, SeekOrigin.Begin);
            var bytes = _chainHead.Bytes;
            _stream.Write(bytes, 0, bytes.Length);
            // NOTE(review): the stream position is left just past the hash rather than
            // restored to end-of-file — verify subsequent appends are not affected.
        }
        catch (IOException e)
        {
            throw new BlockStoreException(e);
        }
    }
}
/// <summary>
/// Discards any existing store file and creates a fresh one containing the format
/// version, the chain-head hash and the genesis block.
/// </summary>
/// <exception cref="BlockStoreException">If the old file cannot be deleted or the new one written.</exception>
private void CreateNewStore(NetworkParameters @params, FileInfo file)
{
    // Create a new block store if the file wasn't found or anything went wrong whilst reading.
    _blockCache.Clear();
    try
    {
        if (_channel != null)
        {
            _channel.Dispose();
            _channel = null;
        }
        // Delete rather than truncate so no stale bytes from a corrupt store survive.
        if (file.Exists)
        {
            try
            {
                file.Delete();
            }
            catch (IOException)
            {
                throw new BlockStoreException("Could not delete old store in order to recreate it");
            }
        }
        _channel = file.Create(); // Create fresh.
        _channel.Write(_fileFormatVersion);
    }
    catch (IOException e1)
    {
        // We could not load a block store nor could we create a new one!
        throw new BlockStoreException(e1);
    }
    try
    {
        // Set up the genesis block. When we start out fresh, it is by definition the top of the chain.
        var genesis = @params.GenesisBlock.CloneAsHeader();
        var storedGenesis = new StoredBlock(genesis, genesis.GetWork(), 0);
        _chainHead = storedGenesis.Header.Hash;
        _channel.Write(_chainHead.Bytes);
        Put(storedGenesis);
    }
    catch (IOException e)
    {
        throw new BlockStoreException(e);
    }
}
public void CanReadStoredBlockFile()
{
    // Full pass: every block in the file checks out and the total is 2000.
    int count = 0;
    foreach (var stored in StoredBlock.EnumerateFile(@"data\blocks\blk0001.dat"))
    {
        Assert.True(stored.Item.Check());
        count++;
    }
    Assert.Equal(2000, count);

    // Restarting from the penultimate block's position yields exactly the final two.
    var twoLast = StoredBlock.EnumerateFile(@"data\blocks\blk0001.dat").Skip(1998).ToList();
    count = StoredBlock.EnumerateFile(@"data\blocks\blk0001.dat", range: new DiskBlockPosRange(twoLast[0].BlockPosition)).Count();
    Assert.Equal(2, count);
}
/// <summary>
/// Reads a full Stratis block file, checks each block, and verifies position-based
/// range enumeration over the final two blocks.
/// </summary>
public void CanReadStoredBlockFile()
{
    int count = 0;
    foreach (var stored in StoredBlock.EnumerateFile(TestDataLocations.DataBlockFolder("blk0001.dat"), network: Network.StratisMain))
    {
        Assert.True(stored.Item.Check());
        count++;
    }
    Assert.Equal(2000, count);
    count = 0;
    // Restarting enumeration at the penultimate block's position yields exactly two blocks.
    var twoLast = StoredBlock.EnumerateFile(TestDataLocations.DataBlockFolder("blk0001.dat"), network: Network.StratisMain).Skip(1998).ToList();
    foreach (var stored in StoredBlock.EnumerateFile(TestDataLocations.DataBlockFolder("blk0001.dat"), network: Network.StratisMain, range: new DiskBlockPosRange(twoLast[0].BlockPosition)))
    {
        count++;
    }
    Assert.Equal(2, count);
}
public void CanStoreBlocksInMultipleFiles()
{
    BlockStore store = CreateBlockStore();
    // An extreme size cap forces each appended block into its own file.
    store.MaxFileSize = 10;
    var blocks = StoredBlock.EnumerateFile(TestDataLocations.DataBlockFolder("blk0001.dat"), network: Network.StratisMain).Take(10).ToList();
    foreach (StoredBlock stored in blocks)
    {
        store.Append(stored.Item);
    }
    var persisted = store.Enumerate(true).ToList();
    Assert.Equal(blocks.Count, persisted.Count);
    Assert.Equal(11, store.Folder.GetFiles().Length); // 10 files + lock file.
}
public void CanStoreBlocks()
{
    var store = CreateBlockStore();
    var blocks = StoredBlock.EnumerateFile(TestDataLocations.DataBlockFolder("blk0001.dat")).Take(50).ToList();
    foreach (var stored in blocks)
    {
        store.Append(stored.Item);
    }
    Assert.Equal(blocks.Count, store.Enumerate(true).ToList().Count);
    // Header-only enumeration must surface every appended block, stripped of transactions.
    foreach (var stored in blocks)
    {
        var match = store.Enumerate(true).First(b => b.Item.GetHash() == stored.Item.GetHash());
        Assert.True(match.Item.HeaderOnly);
    }
}
public void CanStoreBlocks()
{
    var store = CreateBlockStore();
    var blocks = StoredBlock.EnumerateFile(@"data\blocks\blk00000.dat").Take(50).ToList();
    foreach (var stored in blocks)
    {
        store.Append(stored.Item);
    }
    Assert.Equal(blocks.Count, store.Enumerate(true).ToList().Count);
    // Header-only enumeration must surface every appended block (matched by position),
    // stripped of transactions.
    foreach (var stored in blocks)
    {
        var match = store.Enumerate(true).First(b => b.BlockPosition == stored.BlockPosition);
        Assert.True(match.Item.HeaderOnly);
    }
}
public void CanReadStoredBlockFile()
{
    // Full pass: PoW and merkle root hold for all 300 blocks.
    int count = 0;
    foreach (var stored in StoredBlock.EnumerateFile(@"data\blocks\blk00000.dat"))
    {
        Assert.True(stored.Item.Header.CheckProofOfWork(Network.Main.Consensus));
        Assert.True(stored.Item.CheckMerkleRoot());
        count++;
    }
    Assert.Equal(300, count);

    // Restarting from the penultimate block's position yields exactly the final two.
    var twoLast = StoredBlock.EnumerateFile(@"data\blocks\blk00000.dat").Skip(298).ToList();
    count = StoredBlock.EnumerateFile(@"data\blocks\blk00000.dat", range: new DiskBlockPosRange(twoLast[0].BlockPosition)).Count();
    Assert.Equal(2, count);
}
/// <summary>
/// Appends a block's header to the store file and records the block in the
/// in-memory map.
/// </summary>
/// <exception cref="BlockStoreException">If the header cannot be written to disk.</exception>
public void Put(StoredBlock block)
{
    lock (this)
    {
        try
        {
            var hash = block.Header.Hash;
            // Duplicate inserts indicate a caller bug; only checked in debug builds.
            Debug.Assert(!_blockMap.ContainsKey(hash), "Attempt to insert duplicate");
            // Append to the end of the file. The other fields in StoredBlock will be recalculated when it's reloaded.
            var bytes = block.Header.BitcoinSerialize();
            _stream.Write(bytes);
            // Flush immediately so the on-disk store stays consistent with the in-memory map.
            _stream.Flush();
            _blockMap[hash] = block;
        }
        catch (IOException e)
        {
            throw new BlockStoreException(e);
        }
    }
}
/// <summary>
/// Serializes one record — height, fixed-width chain work (left-padded with zeros),
/// then the raw block header — and appends it to the end of the stream.
/// </summary>
/// <exception cref="IOException"/>
public static void Write(Stream channel, StoredBlock block)
{
    using (var buf = ByteBuffer.Allocate(Size))
    {
        buf.PutInt((int)block.Height);
        var chainWorkBytes = block.ChainWork.ToByteArray();
        Debug.Assert(chainWorkBytes.Length <= _chainWorkBytes, "Ran out of space to store chain work!");
        if (chainWorkBytes.Length < _chainWorkBytes)
        {
            // Pad to the right size.
            buf.Put(_emptyBytes, 0, _chainWorkBytes - chainWorkBytes.Length);
        }
        buf.Put(chainWorkBytes);
        buf.Put(block.Header.BitcoinSerialize());
        buf.Position = 0;
        channel.Position = channel.Length;
        channel.Write(buf.ToArray());
        // Leaves the stream positioned at the start of the record just written —
        // presumably for a caller that re-reads it; TODO(review) confirm against callers.
        channel.Position = channel.Length - Size;
    }
}
/// <summary>
/// Records a new chain head and rewrites its hash into the file's fixed header slot
/// (the 32 bytes just after the version byte), restoring the stream position afterwards.
/// </summary>
/// <exception cref="BlockStoreException">If the hash cannot be written.</exception>
public void SetChainHead(StoredBlock chainHead)
{
    lock (this)
    {
        try
        {
            _chainHead = chainHead.Header.Hash;
            // Write out new hash to the first 32 bytes of the file past one (first byte is version number).
            var originalPos = _channel.Position;
            _channel.Position = 1;
            var bytes = _chainHead.Bytes;
            _channel.Write(bytes, 0, bytes.Length);
            // Put the position back so pending append logic is unaffected.
            _channel.Position = originalPos;
        }
        catch (IOException e)
        {
            throw new BlockStoreException(e);
        }
    }
}
/// <summary>
/// Appends a block record to the store file and caches it in memory, evicting
/// entries beyond the cache's size bound.
/// </summary>
/// <exception cref="BlockStoreException">If the record cannot be written to disk.</exception>
public void Put(StoredBlock block)
{
    lock (this)
    {
        try
        {
            var hash = block.Header.Hash;
            // Append to the end of the file.
            Record.Write(_channel, block);
            _blockCache[hash] = block;
            // Bound the cache; entries are dropped from the front
            // (presumably oldest-first — depends on the cache type; confirm).
            while (_blockCache.Count > 2050)
            {
                _blockCache.RemoveAt(0);
            }
        }
        catch (IOException e)
        {
            throw new BlockStoreException(e);
        }
    }
}
/// <summary>
/// Loads an existing store file: validates the version byte, reads the chain-head
/// hash, then replays every stored header in order to rebuild the in-memory block map
/// (each non-genesis header must connect to an already-loaded predecessor).
/// </summary>
/// <exception cref="IOException"/>
/// <exception cref="BlockStoreException"/>
private void Load(FileInfo file)
{
    _log.InfoFormat("Reading block store from {0}", file);
    using (var input = file.OpenRead())
    {
        // Read a version byte.
        var version = input.Read();
        if (version == -1)
        {
            // No such file or the file was empty.
            throw new FileNotFoundException(file.Name + " does not exist or is empty");
        }
        if (version != 1)
        {
            throw new BlockStoreException("Bad version number: " + version);
        }
        // Chain head pointer is the first thing in the file.
        var chainHeadHash = new byte[32];
        if (input.Read(chainHeadHash) < chainHeadHash.Length)
            throw new BlockStoreException("Truncated block store: cannot read chain head hash");
        _chainHead = new Sha256Hash(chainHeadHash);
        _log.InfoFormat("Read chain head from disk: {0}", _chainHead);
        var now = Environment.TickCount;
        // Rest of file is raw block headers.
        var headerBytes = new byte[Block.HeaderSize];
        try
        {
            while (true)
            {
                // Read a block from disk. A short read (< 80 bytes) marks the end.
                if (input.Read(headerBytes) < 80)
                {
                    // End of file.
                    break;
                }
                // Parse it.
                var b = new Block(_params, headerBytes);
                // Look up the previous block it connects to.
                var prev = Get(b.PrevBlockHash);
                StoredBlock s;
                if (prev == null)
                {
                    // First block in the stored chain has to be treated specially.
                    if (b.Equals(_params.GenesisBlock))
                    {
                        s = new StoredBlock(_params.GenesisBlock.CloneAsHeader(), _params.GenesisBlock.GetWork(), 0);
                    }
                    else
                    {
                        throw new BlockStoreException("Could not connect " + b.Hash + " to " + b.PrevBlockHash);
                    }
                }
                else
                {
                    // Don't try to verify the genesis block to avoid upsetting the unit tests.
                    b.VerifyHeader();
                    // Calculate its height and total chain work.
                    s = prev.Build(b);
                }
                // Save in memory.
                _blockMap[b.Hash] = s;
            }
        }
        catch (ProtocolException e)
        {
            // Corrupted file.
            throw new BlockStoreException(e);
        }
        catch (VerificationException e)
        {
            // Should not be able to happen unless the file contains bad blocks.
            throw new BlockStoreException(e);
        }
        var elapsed = Environment.TickCount - now;
        _log.InfoFormat("Block chain read complete in {0}ms", elapsed);
    }
}
/// <summary>
/// Serializes one record — height, fixed-width chain work (left-padded with zeros),
/// then the raw block header — and appends it to the end of the stream.
/// </summary>
/// <exception cref="IOException"/>
public static void Write(Stream channel, StoredBlock block)
{
    using (var buf = ByteBuffer.Allocate(Size))
    {
        buf.PutInt((int) block.Height);
        var chainWorkBytes = block.ChainWork.ToByteArray();
        Debug.Assert(chainWorkBytes.Length <= _chainWorkBytes, "Ran out of space to store chain work!");
        if (chainWorkBytes.Length < _chainWorkBytes)
        {
            // Pad to the right size.
            buf.Put(_emptyBytes, 0, _chainWorkBytes - chainWorkBytes.Length);
        }
        buf.Put(chainWorkBytes);
        buf.Put(block.Header.BitcoinSerialize());
        buf.Position = 0;
        channel.Position = channel.Length;
        channel.Write(buf.ToArray());
        // Leaves the stream positioned at the start of the record just written —
        // presumably for a caller that re-reads it; TODO(review) confirm against callers.
        channel.Position = channel.Length - Size;
    }
}
// The last block is off by 1 byte + lots of padding zeros at the end.
public void CanEnumerateIncompleteBlk()
{
    // Enumeration must tolerate the trailing garbage and still surface all 301 blocks.
    int count = StoredBlock.EnumerateFile(@"data\blocks\incompleteblk.dat").Count();
    Assert.Equal(301, count);
}
/// <summary>Replaces the current chain head with the supplied block.</summary>
/// <exception cref="BlockStoreException"/>
public void SetChainHead(StoredBlock chainHead) => _chainHead = chainHead;
/// <summary>
/// Inserts a block row (serialized header plus chain metadata) into the Blocks table.
/// All values are bound through parameters.
/// </summary>
public void InsertBlock(StoredBlock block)
{
    var command = commandCache.CreateCommand(
        "insert into Blocks(Header, Hash, Height, TotalWork, HasContent, IsInBestHeaderChain, IsInBestBlockChain)" +
        "values (@Header, @Hash, @Height, @TotalWork, @HasContent, @IsInBestHeaderChain, @IsInBestBlockChain)");
    // The header column holds the header in its serialized byte form.
    command.Parameters.Add("@Header", DbType.Binary).Value = BitcoinStreamWriter.GetBytes(block.Header.Write);
    command.Parameters.Add("@Hash", DbType.Binary).Value = block.Hash;
    command.Parameters.Add("@Height", DbType.Int32).Value = block.Height;
    command.Parameters.Add("@TotalWork", DbType.Double).Value = block.TotalWork;
    command.Parameters.Add("@HasContent", DbType.Boolean).Value = block.HasContent;
    command.Parameters.Add("@IsInBestHeaderChain", DbType.Boolean).Value = block.IsInBestHeaderChain;
    command.Parameters.Add("@IsInBestBlockChain", DbType.Boolean).Value = block.IsInBestBlockChain;
    command.ExecuteNonQuery();
    //todo: block.Id = connection.LastInsertRowId;
}
/// <summary>
/// Updates a block row's chain metadata, matched by hash. All values are bound
/// through parameters.
/// </summary>
public void UpdateBlock(StoredBlock block)
{
    //todo: Header is not updated. Describe it in XMLDOC?
    var cmd = CreateCommand(
        "update Blocks" +
        " set" +
        " Height=@Height, " +
        " TotalWork=@TotalWork, " +
        " HasContent=@HasContent, " +
        " IsInBestHeaderChain=@IsInBestHeaderChain, " +
        " IsInBestBlockChain=@IsInBestBlockChain" +
        " where Hash=@Hash");
    cmd.Parameters.Add("@Height", DbType.Int32).Value = block.Height;
    cmd.Parameters.Add("@TotalWork", DbType.Double).Value = block.TotalWork;
    cmd.Parameters.Add("@HasContent", DbType.Boolean).Value = block.HasContent;
    cmd.Parameters.Add("@IsInBestHeaderChain", DbType.Boolean).Value = block.IsInBestHeaderChain;
    cmd.Parameters.Add("@IsInBestBlockChain", DbType.Boolean).Value = block.IsInBestBlockChain;
    cmd.Parameters.Add("@Hash", DbType.Binary).Value = block.Hash;
    cmd.ExecuteNonQuery();
}
// The last block is off by 1 byte + lots of padding zeros at the end.
public void CanEnumerateIncompleteBlk()
{
    // Enumeration must tolerate the trailing garbage and still surface all 300 blocks.
    int count = StoredBlock.EnumerateFile(TestDataLocations.DataBlockFolder("incompleteblk.dat"), network: Network.StratisMain).Count();
    Assert.Equal(300, count);
}
/// <summary>
/// Creates a brand-new on-disk store: writes the format version byte, the chain-head
/// hash (genesis, since a fresh store's head is by definition genesis) and the genesis
/// header itself.
/// </summary>
/// <exception cref="BlockStoreException">If the store file cannot be (re)created or written.</exception>
private void CreateNewStore(NetworkParameters @params, FileInfo file)
{
    // Create a new block store if the file wasn't found or anything went wrong whilst reading.
    _blockMap.Clear();
    try
    {
        if (_stream != null)
        {
            _stream.Dispose();
        }
        // FileMode.Create creates-or-truncates; FileInfo.OpenWrite() does NOT truncate,
        // which would leave stale bytes at the tail of a previously longer (corrupt) store.
        _stream = file.Open(FileMode.Create, FileAccess.Write); // Do not append, create fresh.
        _stream.Write(1); // Version.
    }
    catch (IOException e1)
    {
        // We could not load a block store nor could we create a new one!
        throw new BlockStoreException(e1);
    }
    try
    {
        // Set up the genesis block. When we start out fresh, it is by definition the top of the chain.
        var genesis = @params.GenesisBlock.CloneAsHeader();
        var storedGenesis = new StoredBlock(genesis, genesis.GetWork(), 0);
        _chainHead = storedGenesis.Header.Hash;
        _stream.Write(_chainHead.Bytes);
        Put(storedGenesis);
    }
    catch (IOException e)
    {
        throw new BlockStoreException(e);
    }
}