/// <summary>
/// Initializes a new stream backed by a freshly created Keccak hash state.
/// </summary>
public KeccakRlpStream()
{
    _keccakHash = KeccakHash.Create();
}
/// <summary>
/// Hashes the UTF-8 encoding of <paramref name="str"/> with Keccak, writing the digest
/// into <paramref name="output"/>.
/// </summary>
/// <param name="str">The string to encode and hash.</param>
/// <param name="output">Destination buffer that receives the hash bytes.</param>
static void KeccakHashString(string str, Span<byte> output)
{
    byte[] encoded = StringUtil.UTF8.GetBytes(str);
    KeccakHash.ComputeHash(encoded, output);
}
/// <summary>
/// Given a key, obtains the corresponding value from our trie.
/// Keys are stored under their Keccak hash, so the lookup hashes first.
/// </summary>
/// <param name="key">The key to grab the corresponding value for.</param>
/// <returns>Returns the value which corresponds to the provided key.</returns>
public override byte[] Get(byte[] key)
{
    // Hash the caller's key, then delegate to the base trie lookup.
    byte[] hashedKey = KeccakHash.ComputeHashBytes(key);
    return base.Get(hashedKey);
}
/// <summary>
/// Parses a dynamically sized byte array from contract storage, following Solidity's
/// storage layout for dynamic byte arrays: the slot either embeds the data directly
/// (short values) or holds the length with the data spilled into slots keyed by the
/// Keccak hash of the slot key.
/// </summary>
/// <param name="storageManager">The storage manager used to read raw storage slots.</param>
/// <param name="storageLocation">The slot key / data offset identifying where the value is stored.</param>
/// <returns>Returns the decoded bytes as a Memory&lt;byte&gt;.</returns>
public override object ParseFromStorage(StorageManager storageManager, StorageLocation storageLocation)
{
    // Obtain our storage value for our given storage location.
    Memory<byte> storageData = storageManager.ReadStorageSlot(storageLocation.SlotKey, storageLocation.DataOffset, SizeBytes);
    BigInteger storageValue = BigIntegerConverter.GetBigInteger(storageData.Span, false, SizeBytes);

    // The lowest bit of our value signifies if it was stored in multiple slots, or if it fit in a single slot.
    bool requiresMultipleSlots = (storageValue & 1) != 0;
    if (requiresMultipleSlots)
    {
        // The length is shifted one bit to the left as a result of our flag encoded at bit 0.
        // So we shift to obtain length.
        int length = (int)(storageValue >> 1);

        // Define our result
        Memory<byte> result = new byte[length];

        // Calculate the slot key for our array data (dynamic array's data slot keys are
        // the array's slot key hashed, with subsequent slot keys being + 1 to the previous)
        Memory<byte> arrayDataSlotKey = KeccakHash.ComputeHashBytes(storageLocation.SlotKey.Span);

        // Define our slot location to iterate over.
        StorageLocation slotLocation = new StorageLocation(arrayDataSlotKey, 0);

        // Loop for every byte we wish to copy.
        for (int i = 0; i < length;)
        {
            // Obtain the slot
            Memory<byte> arrayDataSlotValue = storageManager.ReadStorageSlot(slotLocation.SlotKey);

            // Determine how many of the remaining bytes fit in this slot.
            int remainder = length - i;
            int remainderInSlot = Math.Min(remainder, UInt256.SIZE);

            // Copy our data into our result.
            arrayDataSlotValue.Slice(0, remainderInSlot).CopyTo(result.Slice(i));

            // Increment our slot key
            slotLocation.SlotKeyInteger++;

            // Increment our byte index.
            i += remainderInSlot;
        }

        // Return our result
        return (result);
    }
    else
    {
        // We did not require multiple storage slots, so it is embedded in this slot.
        // But the count for data size remains at the end of this storage slot, so we
        // first obtain the data size from that byte (also shifted left one bit by the flag).
        int length = ((int)(storageValue & 0xFF)) >> 1;

        // Slice off the desired data from our storage slot data and return it.
        return (storageData.Slice(0, length));
    }
}
/// <summary>
/// Compiles the solidity sources with solc, computes a deterministic code base hash,
/// deduplicates generated contract names, removes stale generated files, and emits the
/// generated contract/event source files and solc output data files. Progress and
/// timings are reported through <c>_logger</c>.
/// </summary>
void GenerateSources()
{
    Stopwatch sw = new Stopwatch();
    sw.Start();

    // Either compile a single explicitly-configured .sol file, or all contract files under the source directory.
    string[] solFiles;
    if (!string.IsNullOrEmpty(_solSourceSingleFile))
    {
        solFiles = new[] { _solSourceSingleFile };
    }
    else
    {
        solFiles = GetSolContractFiles(_solSourceDirectory);
    }

    // Request every solc output consumed downstream (abi, bytecode + opcodes + source maps
    // for both creation and deployed code, docs, metadata, ast).
    var outputFlags = new[]
    {
        OutputType.Abi,
        OutputType.EvmBytecodeObject,
        OutputType.EvmBytecodeOpcodes,
        OutputType.EvmBytecodeSourceMap,
        OutputType.EvmDeployedBytecodeObject,
        OutputType.EvmDeployedBytecodeOpcodes,
        OutputType.EvmDeployedBytecodeSourceMap,
        OutputType.DevDoc,
        OutputType.UserDoc,
        OutputType.Metadata,
        OutputType.Ast
    };

    // Enable the solc optimizer only when a positive run count is configured.
    // NOTE(review): the field name `_solcOptimzer` is misspelled ("Optimzer") — defined elsewhere, left as-is.
    var solcOptimizerSettings = new Optimizer();
    if (_solcOptimzer > 0)
    {
        solcOptimizerSettings.Enabled = true;
        solcOptimizerSettings.Runs = _solcOptimzer;
    }

    _logger("Compiling solidity files in " + _solSourceDirectory);
    var soliditySourceContent = new Dictionary<string, string>();
    var solcOutput = _solcLib.Compile(solFiles, outputFlags, solcOptimizerSettings, soliditySourceFileContent: soliditySourceContent);
    sw.Stop();
    _logger($"Compiling solidity completed in {Math.Round(sw.Elapsed.TotalSeconds, 2)} seconds");

    #region Generated hashes for solidity sources
    sw.Restart();

    // Calculate a deterministic hash of the solidity source code base, including file paths and the Meadow assembly version.
    // Files are ordered by path so the hash is stable across enumeration order.
    var codeBaseHash = KeccakHash.FromString(string.Join('|', soliditySourceContent
        .OrderBy(k => k.Key)
        .SelectMany(k => new[] { k.Key, k.Value })
        .Concat(new[] { _assemblyVersion })));

    _genResults.SolcCodeBaseHash = HexUtil.GetHexFromBytes(codeBaseHash);

    var flattenedContracts = solcOutput.ContractsFlattened.OrderBy(c => c.SolFile).ToArray();
    ContractInfo[] contractInfos = new ContractInfo[solcOutput.ContractsFlattened.Length];
    for (var i = 0; i < contractInfos.Length; i++)
    {
        var c = flattenedContracts[i];

        // Check if any previous contracts have the same name as this one.
        int dupNames = 0;
        for (var f = 0; f < i; f++)
        {
            if (flattenedContracts[f].ContractName == c.ContractName)
            {
                dupNames++;
            }
        }

        string generatedContractName = c.ContractName;

        // If there are duplicate contract names, append one underscore suffix per earlier duplicate
        // so each generated class name is unique.
        if (dupNames > 0)
        {
            generatedContractName += new string(Enumerable.Repeat('_', dupNames).ToArray());
        }

        contractInfos[i] = new ContractInfo(
            Util.GetRelativeFilePath(_solSourceDirectory, c.SolFile),
            generatedContractName,
            c.Contract,
            GetSourceHashesXor(c.Contract),
            c.Contract.Evm.Bytecode.Object);
    }

    _logger($"Generated sol source file hashes in {Math.Round(sw.Elapsed.TotalSeconds, 2)} seconds");
    sw.Stop();
    #endregion

    _logger("Writing generated output to directory: " + _generatedContractsDirectory);

    #region Output directory cleanup
    if (!Directory.Exists(_generatedContractsDirectory))
    {
        _logger("Creating directory: " + _generatedContractsDirectory);
        Directory.CreateDirectory(_generatedContractsDirectory);
    }
    else
    {
        // Files we expect to (re)generate this run; anything else matching the generated
        // file patterns below is considered stale and deleted.
        var expectedFiles = contractInfos
            .Select(c => c.ContractName)
            .Concat(new[] { EventHelperFile, SolcOutputDataHelperFile })
            .Select(c => NormalizePath($"{_generatedContractsDirectory}/{c}{G_CS_FILE_EXT}"))
            .ToArray();

        // NOTE(review): the search pattern is "*{G_CS_FILE_EXT}", yet the filter also accepts
        // ".sol.resx" endings — resx files can only match if G_CS_FILE_EXT's pattern covers them; verify.
        var existingFiles = Directory
            .GetFiles(_generatedContractsDirectory, $"*{G_CS_FILE_EXT}", SearchOption.TopDirectoryOnly)
            .Where(f => f.EndsWith(".sol.cs", StringComparison.Ordinal) || f.EndsWith(".sol.resx", StringComparison.Ordinal))
            .Select(f => NormalizePath(f))
            .ToArray();

        // Delete existing files with no corresponding file that can be generated
        foreach (var existingFile in existingFiles)
        {
            bool found = false;
            foreach (var expected in expectedFiles)
            {
                if (expected.Equals(existingFile, StringComparison.InvariantCultureIgnoreCase))
                {
                    found = true;
                    break;
                }
            }

            if (!found)
            {
                _logger("Deleting outdated file: " + existingFile);
                File.Delete(existingFile);
            }
        }
    }
    #endregion

    #region AST output generation
    sw.Restart();
    GenerateSolcOutputDataFiles(solcOutput, soliditySourceContent, codeBaseHash);
    sw.Stop();
    _logger($"Resx file for solc output generation took: {Math.Round(sw.Elapsed.TotalSeconds, 2)} seconds");
    #endregion

    #region
    sw.Restart();
    var generatedEvents = new List<GeneratedEventMetadata>();
    GeneratedContractSourceFiles(contractInfos, generatedEvents);
    GenerateEventHelper(generatedEvents);
    sw.Stop();
    _logger($"Contract and event source code generation took: {Math.Round(sw.Elapsed.TotalSeconds, 2)} seconds");
    #endregion
}
/// <summary>
/// Computes the Keccak hash of this object's RLP-encoded serialization.
/// </summary>
/// <returns>Returns the hash bytes.</returns>
public byte[] GetHash()
{
    byte[] encoded = RLP.Encode(Serialize());
    return KeccakHash.ComputeHashBytes(encoded);
}
/// <summary>
/// Derives the RLPx session secrets (token, AES secret, MAC secret) from the completed
/// handshake, creates the ingress/egress AES-CTR providers, and seeds the ingress/egress
/// MAC states from keccak((MacSecret ^ nonce) || auth/auth-ack data).
/// </summary>
/// <exception cref="Exception">
/// Thrown if the session has not completed acknowledgement, or if required handshake data is missing.
/// </exception>
public void DeriveEncryptionParameters()
{
    // Verify the session state is correct.
    if (SessionState != RLPxSessionState.AcknowledgementCompleted)
    {
        // Fixed typo in the original message ("deriviation" -> "derivation").
        throw new Exception("RLPx encryption parameter derivation should only occur after authentication and acknowledgement was processed.");
    }

    // Verify we have all required information.
    // NOTE(review): the property name `RemoteEphermalPublicKey` is misspelled ("Ephermal") — declared elsewhere, left as-is.
    if (AuthData == null || AuthAckData == null || RemoteEphermalPublicKey == null || InitiatorNonce == null || ResponderNonce == null)
    {
        throw new Exception("RLPx deriving encryption information failed: Insufficient data collected from handshake.");
    }

    // Generate the ecdh key with both ephemeral keys.
    byte[] ecdhEphemeralKey = LocalEphemeralPrivateKey.ComputeECDHKey(RemoteEphermalPublicKey);

    // Generate a shared secret: Keccak256(ecdhEphemeralKey || Keccak256(ResponderNonce || InitiatorNonce))
    byte[] combinedNonceHash = KeccakHash.ComputeHashBytes(ResponderNonce.Concat(InitiatorNonce));
    byte[] sharedSecret = KeccakHash.ComputeHashBytes(ecdhEphemeralKey.Concat(combinedNonceHash));

    // Derive the token as a hash of the shared secret.
    Token = KeccakHash.ComputeHashBytes(sharedSecret);

    // Derive AES secret: Keccak256(ecdhEphemeralKey || sharedSecret)
    AesSecret = KeccakHash.ComputeHashBytes(ecdhEphemeralKey.Concat(sharedSecret));

    // Derive Mac secret: Keccak256(ecdhEphemeralKey || AesSecret)
    MacSecret = KeccakHash.ComputeHashBytes(ecdhEphemeralKey.Concat(AesSecret));

    // Create our AES providers for incoming and outgoing traffic/frames.
    // Counter is 0, so it doesn't need to be provided, default value will handle this.
    IngressAes = new AesCtr(AesSecret);
    EgressAes = new AesCtr(AesSecret);

    // Next we'll want to derive our incoming (ingress) and outgoing (egress) traffic message
    // authentication code ("MAC"). The initial state is based off of
    // keccak((MacSecret ^ nonce) || auth/auth-ack). Later states update data from packet frames.
    // We begin by calculating the xor'd nonces (renamed to fix the "Tranformed" typo).
    byte[] initiatorTransformedNonce = (byte[])InitiatorNonce.Clone();
    byte[] responderTransformedNonce = (byte[])ResponderNonce.Clone();
    int loopSize = Math.Min(initiatorTransformedNonce.Length, MacSecret.Length);
    for (int i = 0; i < loopSize; i++)
    {
        initiatorTransformedNonce[i] ^= MacSecret[i];
        responderTransformedNonce[i] ^= MacSecret[i];
    }

    // Next we'll want to hash the data with our hash providers.
    KeccakHash initiatorOutgoing = KeccakHash.Create();
    initiatorOutgoing.Update(responderTransformedNonce, 0, responderTransformedNonce.Length);
    initiatorOutgoing.Update(AuthData, 0, AuthData.Length);
    KeccakHash responderOutgoing = KeccakHash.Create();
    responderOutgoing.Update(initiatorTransformedNonce, 0, initiatorTransformedNonce.Length);
    responderOutgoing.Update(AuthAckData, 0, AuthAckData.Length);

    // Assign the correct hash providers based off of role.
    if (Role == RLPxSessionRole.Initiator)
    {
        EgressMac = initiatorOutgoing;
        IngressMac = responderOutgoing;
    }
    else
    {
        EgressMac = responderOutgoing;
        IngressMac = initiatorOutgoing;
    }
}
/// <summary>
/// Initializes the stream around an existing Keccak hash instance, which the stream
/// will write into.
/// </summary>
/// <param name="keccakHash">The Keccak hash state to use.</param>
public KeccakRlpStream(KeccakHash keccakHash) { _keccakHash = keccakHash; }
/// <summary>
/// Verifies that incremental hashing via KeccakHash.Update/Hash matches single-step
/// KeccakHash.ComputeHashBytes: after one full update, after Reset + recompute (reuse),
/// with interleaved empty updates, on empty input, and when the buffer is fed in
/// random-sized chunks.
/// </summary>
public void TestKeccakUpdate()
{
    // Create our random provider.
    Random random = new Random();

    // Loop for a few test rounds.
    for (int i = 0; i < 5; i++)
    {
        // Generate random data (3 bytes up to ~20 KiB).
        byte[] bufferArray = new byte[random.Next(3, 1024 * 20)];
        Span<byte> buffer = bufferArray;
        random.NextBytes(buffer);

        // Split threshold used later when feeding the buffer in chunks.
        int splitThreshold = random.Next(33, 1024);

        // Compute the overall hash on the data in a single step (the reference result).
        byte[] singleStepHash = KeccakHash.ComputeHashBytes(buffer);

        // Create our keccak hash provider for multi step hash calculation.
        KeccakHash keccak = KeccakHash.Create();
        keccak.Update(bufferArray, 0, buffer.Length);

        // Assert the hashes are equal
        Assert.Equal(singleStepHash.ToHexString(), keccak.Hash.ToHexString());

        // Recompute on the same keccak instance to check if it's reusable.
        keccak.Reset();
        keccak.Update(bufferArray, 0, buffer.Length);
        Assert.Equal(singleStepHash.ToHexString(), keccak.Hash.ToHexString());

        // Recompute the hash but add empty array hashing (empty updates must not change the digest).
        keccak.Reset();
        keccak.Update(bufferArray, 0, bufferArray.Length);
        keccak.Update(Array.Empty<byte>(), 0, 0);
        keccak.Update(Array.Empty<byte>(), 0, 0);

        // Assert the hashes are equal
        Assert.Equal(singleStepHash.ToHexString(), keccak.Hash.ToHexString());

        // Assert blank hashes (only empty updates == hash of empty input).
        keccak.Reset();
        keccak.Update(Array.Empty<byte>(), 0, 0);
        keccak.Update(Array.Empty<byte>(), 0, 0);
        byte[] blankHash = KeccakHash.ComputeHashBytes(Array.Empty<byte>());
        Assert.Equal(blankHash.ToHexString(), keccak.Hash.ToHexString());

        // Refresh our new keccak instance, then feed the buffer in splitThreshold-sized chunks.
        keccak.Reset();
        while (buffer.Length > 0)
        {
            // Check if this is the last round (whatever is left fits in one chunk).
            bool lastRound = buffer.Length <= splitThreshold;

            // Obtain the data for this round.
            byte[] roundData = buffer.Slice(0, lastRound ? buffer.Length : splitThreshold).ToArray();
            if (lastRound)
            {
                // Obtain the final multistep hash.
                keccak.Update(roundData, 0, roundData.Length);

                // Assert the hashes are equal
                Assert.Equal(singleStepHash.ToHexString(), keccak.Hash.ToHexString());

                // Reset
                keccak.Reset();
                break;
            }
            else
            {
                keccak.Update(roundData, 0, roundData.Length);
            }

            // Advance our pointer past the chunk we just fed in.
            buffer = buffer.Slice(roundData.Length);
        }
    }
}
/// <summary>
/// Hashes <paramref name="message"/> with Keccak using a 64-byte (512-bit) digest,
/// writing the result into <paramref name="output"/>.
/// </summary>
private static void Keccak512(Span<byte> message, Span<byte> output)
{
    // 0x40 == 64 bytes, the Keccak-512 digest size.
    const int digestSize = 0x40;
    KeccakHash.ComputeHash(message, output, digestSize);
}
/// <summary>
/// Builds a ResxWriter containing the solc compilation artifacts: the compiler version,
/// per-source-file info (AST json, source code), per-contract bytecode info (including
/// Keccak hashes of the bytecode), and contract ABI json. All file paths are rewritten
/// relative to the sol source directory.
/// </summary>
/// <returns>Returns the populated ResxWriter.</returns>
public ResxWriter GenerateResx()
{
    var resxWriter = new ResxWriter();
    resxWriter.AddEntry("SolidityCompilerVersion", _solidityCompilerVersion);

    // Make paths relative
    var solSourceContent = _solSourceContent.ToDictionary(d => Util.GetRelativeFilePath(_solSourceDir, d.Key), d => d.Value);

    // Scan ast json for absolute paths and make them relative (mutates the JObject in place).
    foreach (var absPathToken in _solcOutput.JObject["sources"].SelectTokens("$..absolutePath").OfType<JValue>())
    {
        var absPath = Util.GetRelativeFilePath(_solSourceDir, absPathToken.Value<string>());
        absPathToken.Value = absPath;
    }

    // Collect per-source-file info: relative name, solc id, ast, and original source text.
    var solcSourceInfos = new List<SolcSourceInfo>();
    foreach (JProperty item in _solcOutput.JObject["sources"])
    {
        var fileName = Util.GetRelativeFilePath(_solSourceDir, item.Name);
        var id = item.Value.Value<int>("id");
        var astObj = (JObject)item.Value["ast"];
        var sourceContent = solSourceContent[fileName];
        var sourceInfo = new SolcSourceInfo
        {
            AstJson = astObj,
            FileName = fileName,
            ID = id,
            SourceCode = sourceContent
        };
        solcSourceInfos.Add(sourceInfo);
    }

    var solcSourceInfosJson = JsonConvert.SerializeObject(solcSourceInfos, Formatting.Indented);
    resxWriter.AddEntry("SourcesList", solcSourceInfosJson);

    // Collect per-contract bytecode info: creation and deployed bytecode, their source maps,
    // opcodes, and Keccak hashes of the (hex-decoded) bytecode.
    var solcBytecodeInfos = new List<SolcBytecodeInfo>();
    foreach (JProperty solFile in _solcOutput.JObject["contracts"])
    {
        foreach (JProperty solContract in solFile.Value)
        {
            var fileName = Util.GetRelativeFilePath(_solSourceDir, solFile.Name);
            var contractName = solContract.Name;
            var bytecodeObj = solContract.Value["evm"]["bytecode"];
            var deployedBytecodeObj = solContract.Value["evm"]["deployedBytecode"];
            var sourceMap = bytecodeObj.Value<string>("sourceMap");
            var sourceMapDeployed = deployedBytecodeObj.Value<string>("sourceMap");
            var opcodes = bytecodeObj.Value<string>("opcodes");
            var opcodesDeployed = deployedBytecodeObj.Value<string>("opcodes");
            var bytecode = bytecodeObj.Value<string>("object");
            var bytecodeDeployed = deployedBytecodeObj.Value<string>("object");

            // Hash the raw bytecode bytes so consumers can match contracts against compiled output.
            var bytecodeHash = KeccakHash.ComputeHash(HexUtil.HexToBytes(bytecode)).ToHexString();
            var bytecodeDeployedHash = KeccakHash.ComputeHash(HexUtil.HexToBytes(bytecodeDeployed)).ToHexString();

            solcBytecodeInfos.Add(new SolcBytecodeInfo
            {
                FilePath = fileName,
                ContractName = contractName,
                SourceMap = sourceMap,
                Opcodes = opcodes,
                SourceMapDeployed = sourceMapDeployed,
                OpcodesDeployed = opcodesDeployed,
                Bytecode = bytecode,
                BytecodeDeployed = bytecodeDeployed,
                BytecodeHash = bytecodeHash,
                BytecodeDeployedHash = bytecodeDeployedHash
            });
        }
    }

    var solcBytecodeInfosJson = JsonConvert.SerializeObject(solcBytecodeInfos, Formatting.Indented);
    resxWriter.AddEntry("ByteCodeData", solcBytecodeInfosJson);

    // Map "solFile/ContractName" -> ABI for every flattened contract.
    var contractAbis = _solcOutput.ContractsFlattened.ToDictionary(c => c.SolFile + "/" + c.ContractName, c => c.Contract.Abi);
    var contractAbisJson = JsonConvert.SerializeObject(contractAbis, Formatting.Indented);
    resxWriter.AddEntry("ContractAbiJson", contractAbisJson);

    return (resxWriter);
}
/// <summary>
/// Hashes <paramref name="message"/> with Keccak using a 64-byte (512-bit) digest.
/// </summary>
/// <returns>A span over a newly allocated 64-byte digest array.</returns>
private static Span<byte> Keccak512(Span<byte> message)
{
    // Heap-allocate the digest so the returned span stays valid after this method returns.
    var digest = new byte[0x40];
    KeccakHash.ComputeHash(message, digest, digest.Length);
    return digest;
}
/// <summary>
/// Computes the Keccak hash of the RLP-encoded serialization produced with a null first
/// argument and the MixHash/Nonce values — the form used for signing.
/// </summary>
/// <returns>Returns the hash bytes.</returns>
public byte[] GetSigningHash()
{
    byte[] encoded = RLP.Encode(Serialize(null, MixHash, Nonce));
    return KeccakHash.ComputeHashBytes(encoded);
}
/// <summary>
/// Computes the Keccak hash of the RLP-encoded serialization produced with ExtraData and
/// null MixHash/Nonce arguments — the form used for mining.
/// </summary>
/// <returns>Returns the hash bytes.</returns>
public byte[] GetMiningHash()
{
    byte[] encoded = RLP.Encode(Serialize(ExtraData, null, null));
    return KeccakHash.ComputeHashBytes(encoded);
}
/// <summary>
/// Validates the EIP-55 mixed-case checksum of an address hex string.
/// https://github.com/ethereum/EIPs/blob/master/EIPS/eip-55.md
/// </summary>
/// <param name="addressHexStr">The address as 40 hex chars, optionally with a 0x prefix.</param>
/// <returns>
/// True if the checksum is valid, or if the string contains no mixed casing (an
/// all-lower/all-upper address carries no checksum); false on a checksum mismatch.
/// </returns>
/// <exception cref="ArgumentException">Thrown when a mixed-case input is not 40 hex chars (42 with prefix).</exception>
public static bool ValidChecksum(string addressHexStr)
{
    // Scan for at least one uppercase (A-F) and one lowercase (a-f) hex letter.
    bool foundUpper = false, foundLower = false;
    foreach (var c in addressHexStr)
    {
        foundUpper |= c > 64 && c < 71;   // 'A'..'F'
        foundLower |= c > 96 && c < 103;  // 'a'..'f'
        if (foundUpper && foundLower)
        {
            break;
        }
    }

    // No mixed casing means the address is not checksummed — accept it.
    if (!(foundUpper && foundLower))
    {
        return (true);
    }

    // get lowercase utf16 buffer (80 bytes = 40 UTF-16 chars on the stack)
    Span<byte> addr = stackalloc byte[80];
    var addrSpan = addressHexStr.AsSpan();
    if (addrSpan[0] == '0' && addrSpan[1] == 'x')
    {
        addrSpan = addrSpan.Slice(2);
    }

    if (addrSpan.Length != 40)
    {
        throw new ArgumentException("Address hex string should be 40 chars long, or 42 with a 0x prefix, was given " + addressHexStr.Length, nameof(addressHexStr));
    }

    // Lowercase the address into the buffer, viewed as chars.
    addrSpan.ToLowerInvariant(MemoryMarshal.Cast<byte, char>(addr));

    // inline buffer conversion from utf16 to ascii:
    // keep only the low byte of each little-endian UTF-16 char, packing into the first 40 bytes.
    for (var i = 0; i < 40; i++)
    {
        addr[i] = addr[i * 2];
    }

    // get hash of ascii hex — the 32-byte digest is written over the start of the same buffer.
    // NOTE(review): input and output spans overlap; assumes ComputeHash consumes the input
    // before writing the output — confirm against the KeccakHash implementation.
    KeccakHash.ComputeHash(addr.Slice(0, 40), addr.Slice(0, 32));

    for (var i = 0; i < 40; i++)
    {
        char inspectChar = addrSpan[i];

        // skip check if character is a number (digits are below 64 in ASCII)
        if (inspectChar > 64)
        {
            // get character casing flag: the i-th nibble of the hash (high nibble for even i)
            var c = i % 2 == 0 ? addr[i / 2] >> 4 : addr[i / 2] & 0x0F;
            bool upperFlag = c >= 8;

            // verify character is uppercase, otherwise bad checksum
            if (upperFlag && inspectChar > 96)
            {
                return (false);
            }
            // verify character is lowercase
            else if (!upperFlag && inspectChar < 97)
            {
                return (false);
            }
        }
    }

    return (true);
}
/// <summary>
/// Writes the 4-byte method selector — the first four bytes of the Keccak hash of the
/// UTF-8 encoded function signature — into <paramref name="buffer"/>.
/// </summary>
/// <param name="buffer">Destination buffer; must hold at least 4 bytes.</param>
/// <param name="functionSignature">The canonical function signature string.</param>
public static void GetMethodID(Span<byte> buffer, string functionSignature)
{
    byte[] signatureBytes = UTF8.GetBytes(functionSignature);
    Span<byte> hash = KeccakHash.ComputeHash(signatureBytes);
    hash.Slice(0, 4).CopyTo(buffer);
}
/// <summary>
/// Computes the Keccak-256 hash of the given message.
/// </summary>
private static Span<byte> Keccak256(Span<byte> message) => KeccakHash.ComputeHash(message);
/// <summary>
/// Generates the C# source for the contract wrapper class: the SolidityContract
/// attribute, bytecode/bytecode-hash constants, a private constructor that registers
/// deployed event types, an At(...) factory method, and the generated class members.
/// </summary>
/// <returns>Returns the generated class definition source text.</returns>
protected override string GenerateClassDef()
{
    // Collect typeof(...) expressions for each event declared in the contract ABI.
    List<string> eventTypes = new List<string>();
    foreach (var item in _contract.Abi)
    {
        if (item.Type == AbiType.Event)
        {
            eventTypes.Add($"typeof({_namespace}.{_contractName}.{item.Name})");
        }
    }

    // When events exist, prefix with ", " so the list can be appended to the constructor's argument list.
    string eventTypesString = string.Empty;
    if (eventTypes.Any())
    {
        eventTypesString = ", " + string.Join(", ", eventTypes);
    }

    // Hash the creation and deployed bytecode so the generated class can be matched to solc output.
    string bytecodeHash = KeccakHash.ComputeHash(_contract.Evm.Bytecode.ObjectBytes).ToHexString();
    string bytecodeDeployedHash = KeccakHash.ComputeHash(_contract.Evm.DeployedBytecode.ObjectBytes).ToHexString();

    // Doubled quotes escape the json for embedding in a verbatim string literal.
    // NOTE(review): devDocJson/userDocJson are not referenced in the template below — possibly
    // leftover from an earlier version; confirm before removing.
    string devDocJson = JsonConvert.SerializeObject(_contract.Devdoc).Replace("\"", "\"\"", StringComparison.Ordinal);
    string userDocJson = JsonConvert.SerializeObject(_contract.Userdoc).Replace("\"", "\"\"", StringComparison.Ordinal);
    string extraSummaryDoc = GetContractSummaryXmlDoc();

    return ($@"
/// <summary>{extraSummaryDoc}</summary>
[{typeof(SolidityContractAttribute).FullName}(typeof({_contractName}), CONTRACT_SOL_FILE, CONTRACT_NAME, CONTRACT_BYTECODE_HASH, CONTRACT_BYTECODE_DEPLOYED_HASH)]
public class {_contractName} : {typeof(BaseContract).FullName}
{{
    public static Lazy<byte[]> BYTECODE_BYTES = new Lazy<byte[]>(() => {typeof(HexUtil).FullName}.HexToBytes(GeneratedSolcData<{_contractName}>.Default.GetSolcBytecodeInfo(CONTRACT_SOL_FILE, CONTRACT_NAME).Bytecode));
    public const string CONTRACT_SOL_FILE = ""{_contractSolFileName}"";
    public const string CONTRACT_NAME = ""{_contractName}"";
    public const string CONTRACT_BYTECODE_HASH = ""{bytecodeHash}"";
    public const string CONTRACT_BYTECODE_DEPLOYED_HASH = ""{bytecodeDeployedHash}"";
    protected override string ContractSolFilePath => CONTRACT_SOL_FILE;
    protected override string ContractName => CONTRACT_NAME;
    protected override string ContractBytecodeHash => CONTRACT_BYTECODE_HASH;
    protected override string ContractBytecodeDeployedHash => CONTRACT_BYTECODE_DEPLOYED_HASH;

    private {_contractName}({JsonRpcClientType} rpcClient, {typeof(Address).FullName} address, {typeof(Address).FullName} defaultFromAccount)
        : base(rpcClient, address, defaultFromAccount)
    {{
        {typeof(EventLogUtil).FullName}.{nameof(EventLogUtil.RegisterDeployedContractEventTypes)}(
            address.GetHexString(hexPrefix: true)
            {eventTypesString}
        );
    }}

    public static async Task<{_contractName}> At({JsonRpcClientType} rpcClient, {typeof(Address).FullName} address, {typeof(Address).FullName}? defaultFromAccount = null)
    {{
        defaultFromAccount = defaultFromAccount ?? (await rpcClient.Accounts())[0];
        return new {_contractName}(rpcClient, address, defaultFromAccount.Value);
    }}

    {GenerateClassMembers()}
}}
");
}