Example #1
0
        public FullDataSet(ulong setSize, IEthashDataSet cache)
        {
            // one uint[16] item per Ethash.HashBytes (64 bytes) of the requested set size
            Data = new uint[(uint)(setSize / Ethash.HashBytes)][];
            for (uint i = 0; i < Data.Length; i++)
            {
                Data[i] = cache.CalcDataSetItem(i);
            }
        }
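
The constructor above pulls every item from an IEthashDataSet cache. A minimal sketch of the interface implied by the calls in these examples follows; this is an assumption, and the real declaration in the source may carry more members:

public interface IEthashDataSet
{
    // Returns the i-th 64-byte data set item as sixteen 32-bit words.
    uint[] CalcDataSetItem(uint i);
}
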
Example #2
0
        public FullDataSet(ulong setSize, IEthashDataSet cache)
        {
            //Console.WriteLine($"building data set of length {setSize}"); // TODO: temp, remove
            Data = new uint[(uint)(setSize / Ethash.HashBytes)][];
            for (uint i = 0; i < Data.Length; i++)
            {
                if (i % 100000 == 0)
                {
                    // progress-report hook, currently disabled
                    //Console.WriteLine($"building data set of length {setSize}, built {i}"); // TODO: temp, remove
                }

                Data[i] = cache.CalcDataSetItem(i);
            }
        }
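
Example #2 differs from #1 only in the (disabled) progress report every 100,000 items; the fill loop itself is sequential, which dominates start-up time for gigabyte-scale sets. A sketch of a parallel fill is shown below; BuildParallel is a hypothetical helper, not from the source, and is correct only if CalcDataSetItem can be called concurrently:

using System.Threading.Tasks;

// Hypothetical helper, not from the source: fills the data set in parallel.
// Valid only if cache.CalcDataSetItem is thread-safe.
private static uint[][] BuildParallel(ulong setSize, IEthashDataSet cache)
{
    var data = new uint[(uint)(setSize / Ethash.HashBytes)][];
    Parallel.For(0, data.Length, i => data[i] = cache.CalcDataSetItem((uint)i));
    return data;
}
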
Example #3
0
        public (byte[], byte[]) Hashimoto(ulong fullSize, IEthashDataSet dataSet, Keccak headerHash, Keccak expectedMixHash, ulong nonce)
        {
            uint       hashesInFull = (uint)(fullSize / HashBytes); // TODO: uint is enough for roughly 200 years of data set growth at the current rate; revisit if block times shrink (e.g. on private chains)
            const uint wordsInMix   = MixBytes / WordBytes;
            const uint hashesInMix  = MixBytes / HashBytes;

            byte[] nonceBytes = new byte[8];
            BinaryPrimitives.WriteUInt64LittleEndian(nonceBytes, nonce);

            byte[] headerAndNonceHashed = Keccak512.Compute(Bytes.Concat(headerHash.Bytes, nonceBytes)).Bytes; // 64-byte seed: Keccak512(header hash ++ nonce)
            uint[] mixInts = new uint[wordsInMix];

            // replicate the 64-byte seed across the 128-byte mix
            for (int i = 0; i < hashesInMix; i++)
            {
                Buffer.BlockCopy(headerAndNonceHashed, 0, mixInts, i * headerAndNonceHashed.Length, headerAndNonceHashed.Length);
            }

            uint firstOfHeaderAndNonce = GetUInt(headerAndNonceHashed, 0); // first 32-bit word of the seed, used to randomize data set accesses

            for (uint i = 0; i < Accesses; i++)
            {
                uint   p       = Fnv(i ^ firstOfHeaderAndNonce, mixInts[i % wordsInMix]) % (hashesInFull / hashesInMix) * hashesInMix; // since we take 'hashesInMix' consecutive blocks we want only starting indices of such blocks
                uint[] newData = new uint[wordsInMix];
                for (uint j = 0; j < hashesInMix; j++)
                {
                    uint[] item = dataSet.CalcDataSetItem(p + j);
                    Buffer.BlockCopy(item, 0, newData, (int)(j * item.Length * 4), item.Length * 4);
                }

                Fnv(mixInts, newData); // elementwise, in place: mixInts[k] = Fnv(mixInts[k], newData[k])
            }

            // compress the 128-byte mix 4:1 into the 32-byte cmix
            uint[] cmixInts = new uint[wordsInMix / 4];
            for (uint i = 0; i < mixInts.Length; i += 4)
            {
                cmixInts[i / 4] = Fnv(Fnv(Fnv(mixInts[i], mixInts[i + 1]), mixInts[i + 2]), mixInts[i + 3]);
            }

            byte[] cmix = new byte[cmixInts.Length * 4];
            Buffer.BlockCopy(cmixInts, 0, cmix, 0, cmix.Length);

            if (expectedMixHash != null && !Bytes.AreEqual(cmix, expectedMixHash.Bytes))
            {
                // TODO: report the failure through a proper validation result instead of throwing
                throw new InvalidOperationException("mix hash mismatch");
            }

            return (cmix, Keccak.Compute(Bytes.Concat(headerAndNonceHashed, cmix)).Bytes); // result: Keccak256(seed ++ cmix)
        }
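
The Fnv helpers Hashimoto relies on are not shown in the examples. Per the Ethash specification they are an FNV-1-style multiply-then-XOR mix; the sketch below is consistent with the two call shapes above (a scalar form that returns a value, and an array form that mutates its first argument in place), but the exact declarations in the source are an assumption:

private const uint FnvPrime = 0x01000193;

// FNV-1-style mix from the Ethash spec: ((v1 * 0x01000193) ^ v2) mod 2^32.
private static uint Fnv(uint v1, uint v2)
{
    return unchecked(v1 * FnvPrime) ^ v2;
}

// Elementwise, in-place variant used to fold each fetched data block into the mix.
private static void Fnv(uint[] b1, uint[] b2)
{
    for (int i = 0; i < b1.Length; i++)
    {
        b1[i] = Fnv(b1[i], b2[i]);
    }
}

C#'s default unchecked 32-bit multiplication gives the mod 2^32 wrap directly, so no extra masking is needed.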