/// <summary>
/// Verifying a null block must be rejected with <see cref="ArgumentNullException"/>.
/// </summary>
public async Task EthHash_VerifyAsync_Should_Throw_On_Null_Argument()
{
    using var ethash = new Ethash(3);

    await Assert.ThrowsAsync<ArgumentNullException>(() => ethash.VerifyAsync(null));
}
/// <summary>
/// Generates the Ethash cache for block 5,650,000 and checks the flattened
/// bytes against a known-good SHA1 fingerprint.
/// </summary>
public void TestCacheGeneration()
{
    BigInteger targetBlock = 5650000;

    // Build the cache, then hash the flattened bytes to compare fingerprints.
    Memory<byte> generatedCache = Ethash.MakeCache(targetBlock);

    AssertSHA1("CBFD542DF1457676C766997504074B7FB126C05C", generatedCache.Span);
}
/// <summary>
/// Generates the Ethash cache for a very early block (height 2) and checks the
/// flattened bytes against a known-good SHA1 fingerprint.
/// </summary>
public void TestCacheGenerationSmall()
{
    BigInteger targetBlock = 2;

    // Build the cache, then hash the flattened bytes to compare fingerprints.
    Memory<byte> generatedCache = Ethash.MakeCache(targetBlock);

    AssertSHA1("780ff0a0259531c918d50c78bb52c291587c4ccd", generatedCache.Span);
}
// TODO: Re-check this test periodically; it is excluded from CI because it runs
// for a long time (full data set generation).
[Fact(Skip = "Too intensive for build server")]
public void TestFullDataSet()
{
    BigInteger targetBlock = 0;

    // Build the epoch cache, expand it into the full data set, and fingerprint it.
    Memory<byte> epochCache = Ethash.MakeCache(targetBlock);
    byte[] fullDataSet = Ethash.CalculateDataset(epochCache, Ethash.GetDataSetSize(targetBlock));

    AssertSHA1("78cadf0b9653a3eaa8ba98a64d5fcb6b9450df49", fullDataSet);
}
/// <summary>
/// Every block in this set is invalid in exactly one way (or entirely nonsense);
/// verification must fail for each of them.
/// </summary>
public async Task Ethhash_Verify_Invalid_Blocks()
{
    var digest = new Sha3_256();
    var invalidBlocks = new[]
    {
        // totally nonsense block
        new Block
        {
            Height = 61440000,
            HashNoNonce = digest.Digest(Encoding.UTF8.GetBytes("foo")),
            Difficulty = new BigInteger(0),
            Nonce = 0xcafebabec00000fe,
            MixDigest = digest.Digest(Encoding.UTF8.GetBytes("bar")),
        },
        // from proof of concept nine testnet, epoch 0 - altered Nonce
        new Block
        {
            Height = 22,
            HashNoNonce = "372eca2454ead349c3df0ab5d00b0b706b23e49d469387db91811cee0358fc6d".HexToByteArray(),
            Difficulty = new BigInteger(132416),
            Nonce = 0x495732e0ed7a801d,
            MixDigest = "2f74cdeb198af0b9abe65d22d372e22fb2d474371774a9583c1cc427a07939f5".HexToByteArray(),
        },
        // from proof of concept nine testnet, epoch 0 - altered HashNoNonce
        new Block
        {
            Height = 22,
            HashNoNonce = "472eca2454ead349c3df0ab5d00b0b706b23e49d469387db91811cee0358fc6d".HexToByteArray(),
            Difficulty = new BigInteger(132416),
            Nonce = 0x495732e0ed7a801c,
            MixDigest = "2f74cdeb198af0b9abe65d22d372e22fb2d474371774a9583c1cc427a07939f5".HexToByteArray(),
        },
        // from proof of concept nine testnet, epoch 0 - altered MixDigest
        new Block
        {
            Height = 22,
            HashNoNonce = "372eca2454ead349c3df0ab5d00b0b706b23e49d469387db91811cee0358fc6d".HexToByteArray(),
            Difficulty = new BigInteger(132416),
            Nonce = 0x495732e0ed7a801c,
            MixDigest = "3f74cdeb198af0b9abe65d22d372e22fb2d474371774a9583c1cc427a07939f5".HexToByteArray(),
        },
    };

    using (var ethash = new Ethash(3))
    {
        foreach (var invalidBlock in invalidBlocks)
        {
            Assert.False(await ethash.VerifyAsync(invalidBlock));
        }
    }
}
/// <summary>
/// Runs a single Ethash reference-test vector: verifies header RLP decoding, the
/// seed hash, cache and data-set sizing, the intermediate Keccak computations, and
/// the full Hashimoto mix-hash/result against the values expected by the test.
/// </summary>
/// <param name="test">The reference vector (encoded header, expected hashes and sizes).</param>
public void Test(EthashTest test)
{
    // The RLP-encoded header must round-trip to the nonce and mix hash declared by the vector.
    BlockHeader blockHeader = Rlp.Decode<BlockHeader>(new Rlp(test.Header));
    Assert.AreEqual(test.Nonce, blockHeader.Nonce, "header nonce vs test nonce");
    Assert.AreEqual(test.MixHash.Bytes, blockHeader.MixHash.Bytes, "header mix hash vs test mix hash");

    // Hash of the header encoded for sealing (i.e. without nonce/mix hash).
    Keccak headerHash = Keccak.Compute(Rlp.Encode(blockHeader, RlpBehaviors.ForSealing).Bytes);
    Assert.AreEqual(test.HeaderHash, headerHash, "header hash");

    // seed is correct
    Ethash ethash = new Ethash(NullLogManager.Instance);
    uint epoch = Ethash.GetEpoch(blockHeader.Number);
    Assert.AreEqual(test.Seed, Ethash.GetSeedHash(epoch), "seed");

    uint cacheSize = Ethash.GetCacheSize(Ethash.GetEpoch(blockHeader.Number));
    Assert.AreEqual((ulong)test.CacheSize, cacheSize, "cache size requested");

    // The constructed cache must report the same size it was asked for.
    IEthashDataSet cache = new EthashCache(cacheSize, test.Seed.Bytes);
    Assert.AreEqual((ulong)test.CacheSize, (ulong)cache.Size, "cache size returned");

    // below we confirm that headerAndNonceHashed is calculated correctly
    // & that the method for calculating the result from mix hash is correct
    byte[] nonceBytes = new byte[8];
    BinaryPrimitives.WriteUInt64LittleEndian(nonceBytes, test.Nonce);
    byte[] headerAndNonceHashed = Keccak512.Compute(Bytes.Concat(headerHash.Bytes, nonceBytes)).Bytes;
    byte[] resultHalfTest = Keccak.Compute(Bytes.Concat(headerAndNonceHashed, test.MixHash.Bytes)).Bytes;
    Assert.AreEqual(resultHalfTest, test.Result.Bytes, "half test");

    // here we confirm that the whole mix hash calculation is fine
    (byte[] mixHash, byte[] result, bool success) = ethash.Hashimoto((ulong)test.FullSize, cache, headerHash, blockHeader.MixHash, test.Nonce);
    Assert.AreEqual(test.MixHash.Bytes, mixHash, "mix hash");
    Assert.AreEqual(test.Result.Bytes, result, "result");

    // NOTE: the test's result value suggests that the result of the PoW operation is not below difficulty / block is invalid...
    // Assert.True(ethash.Validate(blockHeader), "validation"); // seems it is just testing the nonce and mix hash but not difficulty

    ulong dataSetSize = Ethash.GetDataSize(epoch);
    Assert.AreEqual((ulong)test.FullSize, dataSetSize, "data size requested");
}
/// <summary>
/// Mines a block whose nonce/mix hash are already known and checks that the
/// sealed header passes Ethash validation.
/// </summary>
public async Task Find_nonce()
{
    // The parent header only exists so the child can reference a valid parent hash.
    BlockHeader parentHeader = new(Keccak.Zero, Keccak.OfAnEmptySequenceRlp, Address.Zero, 131072, 0, 21000, 0, new byte[] { });
    parentHeader.Hash = parentHeader.CalculateHash();

    // Pre-seed the child header with the expected nonce and mix hash.
    BlockHeader blockHeader = new(parentHeader.Hash, Keccak.OfAnEmptySequenceRlp, Address.Zero, 131136, 1, 21000, 1, new byte[] { });
    blockHeader.Nonce = 7217048144105167954;
    blockHeader.MixHash = new Keccak("0x37d9fb46a55e9dbbffc428f3a1be6f191b3f8eaf52f2b6f53c4b9bae62937105");
    blockHeader.Hash = blockHeader.CalculateHash();

    Block block = new(blockHeader);
    IEthash ethash = new Ethash(LimboLogs.Instance);
    EthashSealer sealer = new(ethash, NullSigner.Instance, LimboLogs.Instance);
    await sealer.MineAsync(CancellationToken.None, block, 7217048144105167954);

    Assert.True(ethash.Validate(block.Header));
    Console.WriteLine(block.Header.Nonce);
    Console.WriteLine(block.Header.MixHash);
}
/// <summary>
/// Computes the first 16 data set items individually and fingerprints them.
/// Based on a cut-down version of the full set generator — if that changes,
/// update this too. See <see cref="TestFullDataSet"/> for background on how
/// full data sets are generated.
/// </summary>
public void TestPartialDataSet()
{
    BigInteger targetBlock = 0;
    Memory<byte> epochCache = Ethash.MakeCache(targetBlock);

    // Allocate room for 16 hashes of 64 bytes each, then populate every item.
    uint hashCount = 0x10;
    int hashSize = 0x40;
    Memory<byte> dataSet = new byte[hashCount * hashSize];
    for (int itemIndex = 0; itemIndex < dataSet.Length / hashSize; itemIndex++)
    {
        Ethash.CalculateDatasetItem(epochCache, (uint)itemIndex, dataSet.Slice(itemIndex * hashSize, hashSize).Span);
    }

    AssertSHA1("72E61249707E414862063C5B8763AC9C337ACB09", dataSet.Span);
}
/// <summary>
/// All blocks in this set are genuine proof-of-concept-nine testnet blocks
/// (one per epoch 0/1/2); verification must succeed for each of them.
/// </summary>
public async Task Ethhash_Verify_Valid_Blocks()
{
    var validBlocks = new[]
    {
        // from proof of concept nine testnet, epoch 0
        new Block
        {
            Height = 22,
            HashNoNonce = "372eca2454ead349c3df0ab5d00b0b706b23e49d469387db91811cee0358fc6d".HexToByteArray(),
            Difficulty = new BigInteger(132416),
            Nonce = 0x495732e0ed7a801c,
            MixDigest = "2f74cdeb198af0b9abe65d22d372e22fb2d474371774a9583c1cc427a07939f5".HexToByteArray(),
        },
        // from proof of concept nine testnet, epoch 1
        new Block
        {
            Height = 30001,
            HashNoNonce = "7e44356ee3441623bc72a683fd3708fdf75e971bbe294f33e539eedad4b92b34".HexToByteArray(),
            Difficulty = new BigInteger(1532671),
            Nonce = 0x318df1c8adef7e5e,
            MixDigest = "144b180aad09ae3c81fb07be92c8e6351b5646dda80e6844ae1b697e55ddde84".HexToByteArray(),
        },
        // from proof of concept nine testnet, epoch 2
        new Block
        {
            Height = 60000,
            HashNoNonce = "5fc898f16035bf5ac9c6d9077ae1e3d5fc1ecc3c9fd5bee8bb00e810fdacbaa0".HexToByteArray(),
            Difficulty = new BigInteger(2467358),
            Nonce = 0x50377003e5d830ca,
            MixDigest = "ab546a5b73c452ae86dadd36f0ed83a6745226717d3798832d1b20b489e82063".HexToByteArray(),
        },
    };

    using (var ethash = new Ethash(3))
    {
        foreach (var validBlock in validBlocks)
        {
            Assert.True(await ethash.VerifyAsync(validBlock));
        }
    }
}