private static void VerifyIncrementalResult(HashAlgorithm referenceAlgorithm, IncrementalHash incrementalHash)
{
    // One-shot hash of the full input is the expected answer for both passes.
    byte[] referenceHash = referenceAlgorithm.ComputeHash(s_inputBytes);
    const int StepA = 13;
    const int StepB = 7;
    int position = 0;

    // First pass: feed the input in StepA-sized chunks, then the remainder.
    while (position < s_inputBytes.Length - StepA)
    {
        incrementalHash.AppendData(s_inputBytes, position, StepA);
        position += StepA;
    }

    incrementalHash.AppendData(s_inputBytes, position, s_inputBytes.Length - position);
    byte[] incrementalA = incrementalHash.GetHashAndReset();
    Assert.Equal(referenceHash, incrementalA);

    // Now try again, verifying both immune to step size behaviors, and that GetHashAndReset resets.
    // FIX: the second pass appended StepA-sized chunks while the loop condition
    // used StepB. That made the pass identical to the first (defeating the
    // different-step-size check) and, for some input lengths, asked AppendData
    // for more bytes than remain in the buffer. Use StepB throughout.
    position = 0;

    while (position < s_inputBytes.Length - StepB)
    {
        incrementalHash.AppendData(s_inputBytes, position, StepB);
        position += StepB;
    }

    incrementalHash.AppendData(s_inputBytes, position, s_inputBytes.Length - position);
    byte[] incrementalB = incrementalHash.GetHashAndReset();
    Assert.Equal(referenceHash, incrementalB);
}
public static void ModifyAfterHMACDispose()
{
    // After Dispose, every data/hash operation must throw
    // ObjectDisposedException (the 'using' double-Dispose is benign).
    using (IncrementalHash hmac = IncrementalHash.CreateHMAC(HashAlgorithmName.SHA256, s_hmacKey))
    {
        hmac.Dispose();

        Assert.Throws<ObjectDisposedException>(() => hmac.AppendData(Array.Empty<byte>()));
        Assert.Throws<ObjectDisposedException>(() => hmac.GetHashAndReset());
    }
}
protected override byte[] HashData(byte[] data, int offset, int count, HashAlgorithmName hashAlgorithm)
{
    // Reject unsupported key-size/digest pairings up front, then hash the
    // requested slice of the buffer.
    ValidateKeyDigestCombination(KeySize, hashAlgorithm);

    using (IncrementalHash hasher = IncrementalHash.CreateHash(hashAlgorithm))
    {
        hasher.AppendData(data, offset, count);
        return hasher.GetHashAndReset();
    }
}
// Cross-checks GetCurrentHash against GetHashAndReset: after each appended
// slice, 'accumulated' (peeked via GetCurrentHash, which does not reset) must
// yield the same digest as 'single', which re-hashes the whole prefix from
// scratch each iteration.
private static void VerifyGetCurrentHash(IncrementalHash single, IncrementalHash accumulated)
{
    Span<byte> buf = stackalloc byte[2048];
    // 512 / 8 = 64 bytes: big enough for the largest digest used here (SHA-512).
    Span<byte> fullDigest = stackalloc byte[512 / 8];
    Span<byte> curDigest = stackalloc byte[fullDigest.Length];
    SequenceFill(buf);
    int count = 0;
    const int Step = 13; // not a divisor of 2048, so a tail remains after the loop
    int writtenA;
    int writtenB;

    while (count + Step < buf.Length)
    {
        // Accumulate only the current slice
        accumulated.AppendData(buf.Slice(count, Step));

        // The comparison needs the whole thing, since we're
        // comparing GetHashAndReset vs GetCurrentHash.
        count += Step;
        single.AppendData(buf.Slice(0, count));
        writtenA = single.GetHashAndReset(fullDigest);
        writtenB = accumulated.GetCurrentHash(curDigest);

        // Compare as hex so a failure message shows the differing digests.
        Assert.Equal(
            fullDigest.Slice(0, writtenA).ByteArrayToHex(),
            curDigest.Slice(0, writtenB).ByteArrayToHex());
    }

    // Feed the remaining tail and compare one final time.
    accumulated.AppendData(buf.Slice(count));
    single.AppendData(buf);
    writtenA = single.GetHashAndReset(fullDigest);

    // Drain/reset accumulated with this last call
    writtenB = accumulated.GetHashAndReset(curDigest);

    Assert.Equal(
        fullDigest.Slice(0, writtenA).ByteArrayToHex(),
        curDigest.Slice(0, writtenB).ByteArrayToHex());
}
public byte[] GeneratePkgListKey(int key, string packageIndexName)
{
    // FIX: IncrementalHash is IDisposable; the original leaked the underlying
    // hash state. Wrap it in 'using'.
    using (IncrementalHash md5 = IncrementalHash.CreateHash(HashAlgorithmName.MD5))
    {
        // Fixed 4-byte prefix (value 2, little-endian) included in every key.
        md5.AppendData(new byte[] { 2, 0, 0, 0 });

        // Odd keys hash salt-then-name; even keys hash name-then-salt.
        // (key % 2 != 0 replaces the roundabout Convert.ToBoolean(key % 2).)
        if (key % 2 != 0)
        {
            md5.AppendData(s_PackageListKey[key / 2]);
            md5.AppendData(Encoding.Default.GetBytes(packageIndexName));
        }
        else
        {
            md5.AppendData(Encoding.Default.GetBytes(packageIndexName));
            md5.AppendData(s_PackageListKey[key / 2]);
        }

        return md5.GetHashAndReset();
    }
}
public virtual byte[] GetCertHash(HashAlgorithmName hashAlgorithm)
{
    // Certificate thumbprint: digest of the raw DER bytes with the
    // caller-selected algorithm.
    ThrowIfInvalid();

    using (IncrementalHash hash = IncrementalHash.CreateHash(hashAlgorithm))
    {
        hash.AppendData(Pal.RawData);
        return hash.GetHashAndReset();
    }
}
// Issues an opaque token for a DHT node: SHA-1 over the node's compact
// address/port bytes followed by the supplied secret. Rotates the secret
// when it is older than Timeout, keeping the previous one so recently
// issued tokens can still be validated.
private BEncodedString GetToken(Node node, byte[] secret)
{
    //refresh secret if needed
    if (_lastSecretGeneration.Add(Timeout) < DateTime.UtcNow)
    {
        _lastSecretGeneration = DateTime.UtcNow;
        // Preserve the outgoing secret before overwriting it.
        _secret.CopyTo(_previousSecret, 0);
        //PORT NOTE: Used GetNonZeroBytes() here before
        _random.GetBytes(_secret);
    }

    byte[] compactNode = node.CompactAddressPort().TextBytes;
    // NOTE(review): _sha1 is shared mutable state; this method is not safe for
    // concurrent callers — confirm callers serialize access.
    _sha1.AppendData(compactNode);
    _sha1.AppendData(secret);
    return(_sha1.GetHashAndReset());
}
/// <summary>
/// Creates a name-based UUID using the algorithm from RFC 4122 §4.3.
/// </summary>
/// <param name="namespaceId">The ID of the namespace.</param>
/// <param name="name">The name (within that namespace).</param>
/// <param name="version">The version number of the UUID to create; this value must be either
/// 3 (for MD5 hashing) or 5 (for SHA-1 hashing).</param>
/// <returns>A UUID derived from the namespace and name.</returns>
public static Guid Create(Guid namespaceId, string name, int version)
{
    if (name == null)
    {
        throw new ArgumentNullException(nameof(name));
    }

    if (version != 3 && version != 5)
    {
        throw new ArgumentOutOfRangeException(nameof(version), "version must be either 3 or 5.");
    }

    // Octet sequence for the name (step 3); UTF-8 is assumed appropriate.
    byte[] nameBytes = Encoding.UTF8.GetBytes(name);

    // Namespace UUID in network byte order (step 3).
    byte[] namespaceBytes = namespaceId.ToByteArray();
    SwapByteOrder(namespaceBytes);

    // Hash the namespace ID concatenated with the name (step 4).
    HashAlgorithmName algorithm = version == 3 ? HashAlgorithmName.MD5 : HashAlgorithmName.SHA1;
    byte[] hash;
    using (IncrementalHash hasher = IncrementalHash.CreateHash(algorithm))
    {
        hasher.AppendData(namespaceBytes);
        hasher.AppendData(nameBytes);
        hash = hasher.GetHashAndReset();
    }

    // The first 16 hash bytes become the GUID (steps 5-7, 9, 11-12).
    byte[] result = new byte[16];
    Array.Copy(hash, 0, result, 0, 16);

    // Stamp the 4-bit version into time_hi_and_version (step 8).
    result[6] = (byte)((result[6] & 0x0F) | (version << 4));

    // Set the variant bits of clock_seq_hi_and_reserved to binary 10 (step 10).
    result[8] = (byte)((result[8] & 0x3F) | 0x80);

    // Convert back to local byte order (step 13).
    SwapByteOrder(result);
    return new Guid(result);
}
// Sets up an AES-CBC + HMAC decryptor: the combined key is split into the AES
// and HMAC halves, the tag is recorded for later verification, and the HMAC is
// primed with the associated data and IV (the AEAD input prefix).
internal AesCbcHmacSha2Decryptor(string name, byte[] key, byte[] iv, byte[] associatedData, byte[] authenticationTag)
{
    // Split the key to get the AES key, the HMAC key and the HMAC object
    byte[] aesKey;
    GetAlgorithmParameters(name, key, out aesKey, out _hmac_key, out _hmac);

    // Record the tag
    _tag = authenticationTag;

    // Create the AES provider
    _aes = AesCbcHmacSha2.Create(aesKey, iv);
    _inner = _aes.CreateDecryptor();

    // Associated-data length is authenticated in bits, big-endian.
    _associated_data_length = ConvertToBigEndian(associatedData.Length * 8);

    // Prime the hash.
    _hmac.AppendData(associatedData);
    _hmac.AppendData(iv);
}
public override int Read(byte[] buffer, int offset, int count)
{
    // Delegate to the wrapped stream, then fold whatever was actually read
    // into the running hash (a zero-length read would be a no-op anyway).
    int bytesRead = inner.Read(buffer, offset, count);

    if (bytesRead > 0)
    {
        hasher.AppendData(buffer, offset, bytesRead);
    }

    return bytesRead;
}
private byte[] MakeNtlm2Hash()
{
    // NTLMv2 hash: HMAC-MD5 keyed with MD4(UTF-16LE password), computed over
    // the upper-cased user name concatenated with the domain.
    byte[] pwHash = new byte[16];
    byte[] pwBytes = Encoding.Unicode.GetBytes(_expectedCredential.Password);
    MD4.HashData(pwBytes, pwHash);

    using (IncrementalHash hmac = IncrementalHash.CreateHMAC(HashAlgorithmName.MD5, pwHash))
    {
        // FIX: use the invariant culture for the protocol-defined upper-casing.
        // Plain ToUpper() follows the current thread culture (e.g. Turkish
        // 'i' -> 'İ'), which would produce a different hash on such systems.
        string target = _expectedCredential.UserName.ToUpperInvariant() + _expectedCredential.Domain;
        hmac.AppendData(Encoding.Unicode.GetBytes(target));
        return hmac.GetHashAndReset();
    }
}
public IHttpActionResult eBayAccountDeletionGet(string challenge_code)
{
    // NOTE(review): the verification token is derived from a constant embedded
    // in source; it should come from configuration/secret storage instead.
    string verificationToken = Convert.ToBase64String(Encoding.ASCII.GetBytes("eBayAccountDeletionTokenjim0519"));
    verificationToken = new string(verificationToken.Where(c => char.IsLetter(c)).ToArray());

    // The endpoint URL must be hashed without its query string.
    var endpoint = this.Request.RequestUri.AbsoluteUri.Replace(this.Request.RequestUri.Query, "");

    // FIX: dispose the IncrementalHash (it owns native hash state) instead of
    // leaking it.
    byte[] bytes;
    using (IncrementalHash sha256 = IncrementalHash.CreateHash(HashAlgorithmName.SHA256))
    {
        // challengeResponse = SHA256(challengeCode || verificationToken || endpoint).
        sha256.AppendData(Encoding.UTF8.GetBytes(challenge_code));
        sha256.AppendData(Encoding.UTF8.GetBytes(verificationToken));
        sha256.AppendData(Encoding.UTF8.GetBytes(endpoint));
        bytes = sha256.GetHashAndReset();
    }

    // Lower-case hex encoding of the digest.
    var retChallengeResponse = BitConverter.ToString(bytes).Replace("-", string.Empty).ToLower();
    return Json(new { challengeResponse = retChallengeResponse });
}
public void AddDependency(Guid key, long version)
{
    if (key != default)
    {
        // FIX: take the lock BEFORE entering the try block. With
        // EnterWriteLock inside the try, a failed acquisition would still run
        // the finally and call ExitWriteLock on a lock this thread never held.
        slimLock.EnterWriteLock();
        try
        {
            // Record the dependency key and fold key+version into the running hash.
            keys.Add(key.ToString());
            hasher.AppendData(key.ToByteArray());
            hasher.AppendData(BitConverter.GetBytes(version));
            hasDependency = true;
        }
        finally
        {
            slimLock.ExitWriteLock();
        }
    }
}
public static void AppendDataAfterHMACClose()
{
    // Appending an empty buffer after a reset must not change anything: both
    // resets should produce the MAC of the empty message.
    using (IncrementalHash hmac = IncrementalHash.CreateHMAC(HashAlgorithmName.SHA256, s_hmacKey))
    {
        byte[] emptyMessageMac = hmac.GetHashAndReset();
        hmac.AppendData(Array.Empty<byte>());

        Assert.Equal(emptyMessageMac, hmac.GetHashAndReset());
    }
}
public void AddDependency(string key, long version)
{
    if (key != default)
    {
        // FIX: take the lock BEFORE entering the try block so the finally can
        // never call ExitWriteLock for a lock this thread failed to acquire.
        slimLock.EnterWriteLock();
        try
        {
            // Record the dependency key and fold key+version into the running hash.
            keys.Add(key);
            hasher.AppendData(Encoding.Default.GetBytes(key));
            hasher.AppendData(BitConverter.GetBytes(version));
            hasDependency = true;
        }
        finally
        {
            slimLock.ExitWriteLock();
        }
    }
}
/// <summary>
/// Check the given arguments and throw a <see cref="NetMQSecurityException"/> if something is amiss.
/// </summary>
/// <param name="contentType">This identifies the type of content: ChangeCipherSpec, Handshake, or ApplicationData.</param>
/// <param name="seqNum">The record sequence number; included in the MAC input.</param>
/// <param name="frameIndex">The index of the frame within the message; included in the MAC input.</param>
/// <param name="plainBytes">The plaintext bytes whose MAC is being validated.</param>
/// <param name="mac">The MAC received with the record.</param>
/// <param name="padding">The padding bytes; each must equal padding.Length - 1.</param>
/// <exception cref="NetMQSecurityException"><see cref="NetMQSecurityErrorCode.MACNotMatched"/>: MAC does not match message.</exception>
public void ValidateBytes(ContentType contentType, ulong seqNum, int frameIndex, byte[] plainBytes, byte[] mac, byte[] padding)
{
    if (SecurityParameters.MACAlgorithm != MACAlgorithm.Null)
    {
        byte[] seqNumBytes = BitConverter.GetBytes(seqNum);
        byte[] versionAndType = new[] { (byte)contentType, m_protocolVersion[0], m_protocolVersion[1] };
        byte[] messageSize = BitConverter.GetBytes(plainBytes.Length);
        byte[] frameIndexBytes = BitConverter.GetBytes(frameIndex);

        // MAC input: seqNum || version+type || length || frameIndex || plaintext.
        // (The commented-out legacy HMAC TransformBlock code was removed.)
        m_decryptionHMAC.AppendData(seqNumBytes);
        m_decryptionHMAC.AppendData(versionAndType);
        m_decryptionHMAC.AppendData(messageSize);
        m_decryptionHMAC.AppendData(frameIndexBytes);
        m_decryptionHMAC.AppendData(plainBytes);

        var hash = m_decryptionHMAC.GetHashAndReset();

        // SECURITY NOTE(review): SequenceEqual is not constant-time; a
        // constant-time comparison (e.g. CryptographicOperations.FixedTimeEquals)
        // would avoid leaking the mismatch position via timing.
        if (!hash.SequenceEqual(mac))
        {
            throw new NetMQSecurityException(NetMQSecurityErrorCode.MACNotMatched, "MAC does not match message");
        }

        // TLS-style padding check: every padding byte must equal padding.Length - 1.
        for (int i = 0; i < padding.Length; i++)
        {
            if (padding[i] != padding.Length - 1)
            {
                throw new NetMQSecurityException(NetMQSecurityErrorCode.MACNotMatched, "MAC not matched message");
            }
        }
    }
}
// Re-reads the target file, hashes it chunk-by-chunk, and compares its hex
// digest against the already-accumulated source hash. Sets IsError and logs
// on mismatch; returns silently whenever an abort has been requested.
private void VerifyFiles(String targetFile, UInt32 bufferLength, IncrementalHash sourceHash, IncrementalHash targetHash)
{
    if (this.IsAbort) { return; }

    using (AccessHandler reader = new AccessHandler(this.logger))
    {
        reader.OpenRead(targetFile);

        Byte[] buffer = new Byte[bufferLength];
        Int32 total = 0;

        // Pump the target file through its hash until EOF or abort.
        while (!this.IsAbort && reader.ReadChunk(buffer, out Int32 length))
        {
            targetHash.AppendData(buffer, 0, length);
            total += length;
        }

        if (this.IsAbort) { return; }

        // Both hashes are finalized (and reset) here; callers must not rely on
        // their accumulated state afterwards.
        String sourceResult = sourceHash.GetHashAndReset().ToSafeHexString();
        String targetResult = targetHash.GetHashAndReset().ToSafeHexString();

        // NOTE(review): String.Compare without a StringComparison is
        // culture-sensitive; Ordinal would be the safer choice for hex digests.
        if (String.Compare(sourceResult, targetResult) != 0)
        {
            this.IsError = true;
            this.logger.Error(
                MethodBase.GetCurrentMethod(),
                "File verification mismatch.",
                this.GetSourceHashDetail(sourceResult),
                this.GetTargetHashDetail(targetResult));
            return;
        }

        if (this.IsAbort) { return; }

        this.logger.Trace(
            MethodBase.GetCurrentMethod(),
            $"Verified file length: {total.ToSafeString(nameof(Byte))}.",
            this.GetTargetHashDetail(targetResult),
            this.GetTargetFileDetail(targetFile));
    }
}
public AMetric Generate(FileInfo file)
{
    byte[] hash = null;

    using (FileStream fs = file.OpenRead())
    {
        if (fs.Length < 4 * hashedBytesCount)
        {
            //if file is small enough just compute hash over the whole file, that way we save us the pain of checking for out of bounds etc
            using (MD5 md5 = MD5.Create())
            {
                hash = md5.ComputeHash(fs);
            }
        }
        else
        {
            using (IncrementalHash incMd5 = IncrementalHash.CreateHash(HashAlgorithmName.MD5))
            {
                byte[] block = new byte[hashedBytesCount];

                // FIX: Stream.Read may return fewer bytes than requested; the
                // original ignored its return value and could hash stale buffer
                // contents. Use a fill loop for each sampled region.

                //start
                fs.Seek(0, SeekOrigin.Begin);
                AppendFilledBlock(fs, incMd5, block);

                //middle
                fs.Seek(fs.Length / 2 - hashedBytesCount / 2, SeekOrigin.Begin);
                AppendFilledBlock(fs, incMd5, block);

                //end
                fs.Seek(-hashedBytesCount, SeekOrigin.End);
                AppendFilledBlock(fs, incMd5, block);

                hash = incMd5.GetHashAndReset();
            }
        }
    }

    return new HashMetric(hash);
}

// Reads from 'source' until 'block' is full (or EOF) and appends exactly the
// bytes that were read to the hash.
private static void AppendFilledBlock(Stream source, IncrementalHash hash, byte[] block)
{
    int filled = 0;
    while (filled < block.Length)
    {
        int read = source.Read(block, filled, block.Length - filled);
        if (read == 0)
        {
            break; // unexpected EOF; hash whatever was available
        }
        filled += read;
    }

    hash.AppendData(block, 0, filled);
}
// Wraps 'outputStream' with AES-CBC encryption (PKCS7 padding) plus an
// HMAC-SHA256 over the stream. The combined key material is split 32/32 into
// the AES key and the MAC key; the IV is MAC'd and written out first.
internal AttachmentCipherOutputStream(byte[] combinedKeyMaterial, Stream outputStream) : base(outputStream)
{
    byte[][] keyParts = Util.Split(combinedKeyMaterial, 32, 32);

    Aes = Aes.Create();
    Aes.Key = keyParts[0];
    Aes.Mode = CipherMode.CBC;
    Aes.Padding = PaddingMode.PKCS7;

    Encryptor = Aes.CreateEncryptor();
    Cipher = new CryptoStream(TmpStream, Encryptor, CryptoStreamMode.Write);

    // Prime the MAC with the IV so the authentication tag covers it.
    Mac = IncrementalHash.CreateHMAC(HashAlgorithmName.SHA256, keyParts[1]);
    Mac.AppendData(Aes.IV);

    // The IV travels in the clear at the front of the output stream.
    base.Write(Aes.IV, 0, Aes.IV.Length);
}
/// <summary>
/// Implement the ICryptoTransform method.
/// </summary>
public int TransformBlock(byte[] inputBuffer, int inputOffset, int inputCount, byte[] outputBuffer, int outputOffset)
{
    // Pass the data stream to the hash algorithm for generating the Auth Code.
    // This does not change the inputBuffer. Do this before decryption for read mode.
    if (!_writeMode)
    {
#if NET45
        _hmacsha1.AppendData(inputBuffer, inputOffset, inputCount);
#endif
    }

    // Encrypt with AES in CTR mode. Regards to Dr Brian Gladman for this.
    int ix = 0;
    while (ix < inputCount)
    {
        // Refill the keystream buffer once the previous block is consumed.
        if (_encrPos == ENCRYPT_BLOCK)
        {
            /* increment encryption nonce */
            int j = 0;
            while (++_counterNonce[j] == 0)
            {
                ++j;
            }

            /* encrypt the nonce to form next xor buffer */
            _encryptor.TransformBlock(_counterNonce, 0, _blockSize, _encryptBuffer, 0);
            _encrPos = 0;
        }

        // XOR one keystream byte into the output; in CTR mode encryption and
        // decryption are the same operation.
        outputBuffer[ix + outputOffset] = (byte)(inputBuffer[ix + inputOffset] ^ _encryptBuffer[_encrPos++]);
        ix++;
    }

    if (_writeMode)
    {
        // This does not change the buffer.
#if NET45
        _hmacsha1.AppendData(outputBuffer, outputOffset, inputCount);
#endif
    }

    return(inputCount);
}
public virtual bool TryGetCertHash(
    HashAlgorithmName hashAlgorithm,
    Span<byte> destination,
    out int bytesWritten)
{
    // Non-allocating thumbprint: hash the raw DER bytes straight into the
    // caller-supplied buffer.
    ThrowIfInvalid();

    using (IncrementalHash hash = IncrementalHash.CreateHash(hashAlgorithm))
    {
        hash.AppendData(Pal.RawData);
        return hash.TryGetHashAndReset(destination, out bytesWritten);
    }
}
public bool TryCreateHash(ExecuteScriptCommandOptions options, out string hash)
{
    if (options.NoCache)
    {
        _logger.Debug($"The script {options.File.Path} was executed with the '--no-cache' flag. Skipping cache.");
        hash = null;
        return false;
    }

    var scriptFilesProvider = new ScriptFilesResolver();
    var allScriptFiles = scriptFilesProvider.GetScriptFiles(options.File.Path);
    var projectFile = new ScriptProjectProvider(_logFactory).CreateProjectFileFromScriptFiles(ScriptEnvironment.Default.TargetFramework, allScriptFiles.ToArray());

    if (!projectFile.IsCacheable)
    {
        _logger.Warning($"The script {options.File.Path} is not cacheable. For caching and optimal performance, ensure that the script only contains NuGet references with pinned/exact versions.");
        hash = null;
        return false;
    }

    // FIX: dispose the IncrementalHash (it owns native hash state) instead of
    // leaking it.
    using (IncrementalHash incrementalHash = IncrementalHash.CreateHash(HashAlgorithmName.SHA256))
    {
        // The cache key covers the content of every script file...
        foreach (var scriptFile in allScriptFiles)
        {
            incrementalHash.AppendData(File.ReadAllBytes(scriptFile));
        }

        // ...plus the compilation configuration.
        var configuration = options.OptimizationLevel.ToString();
        incrementalHash.AppendData(Encoding.UTF8.GetBytes(configuration));

        // Ensure that we don't run with the deps of an old target framework or SDK version.
        incrementalHash.AppendData(Encoding.UTF8.GetBytes(ScriptEnvironment.Default.NetCoreVersion.Tfm));
        incrementalHash.AppendData(Encoding.UTF8.GetBytes(ScriptEnvironment.Default.NetCoreVersion.Version));

        hash = Convert.ToBase64String(incrementalHash.GetHashAndReset());
        return true;
    }
}
private byte Verify(byte[] challenge, String password)
{
    // Proof = SHA256(challenge || SHA256(password)).
    byte[] passwordHash = sha256.ComputeHash(Encoding.UTF8.GetBytes(password));
    logger.Debug($"Password Hash: !{BitConverter.ToString(passwordHash).Replace("-", string.Empty)}!");

    // FIX: dispose the IncrementalHash instead of leaking it.
    byte[] proof;
    using (IncrementalHash sha = IncrementalHash.CreateHash(HashAlgorithmName.SHA256))
    {
        sha.AppendData(challenge);
        sha.AppendData(passwordHash);
        proof = sha.GetHashAndReset();
    }

    logger.Debug($"Proof: !{BitConverter.ToString(proof).Replace("-", string.Empty)}!");

    // Length prefix goes over the wire big-endian.
    byte[] proofLength = BitConverter.GetBytes((short)proof.Length);
    if (BitConverter.IsLittleEndian)
    {
        Array.Reverse(proofLength);
    }

    PacketWriter proofPacket = new PacketWriter(TyrannyOpcode.AuthProof);
    proofPacket.Write(proofLength);
    proofPacket.Write(proof);
    tcpClient.Send(proofPacket);

    PacketReader proofAckPacket;
    if (tcpClient.Read(out proofAckPacket))
    {
        byte ack = proofAckPacket.ReadByte();
        logger.Debug($"Got Proof Ack: {ack}");
        return ack;
    }
    else
    {
        throw new IOException("Failed to receive proof ack");
    }
}
// Streams writes into content-defined chunks: bytes go to the current chunk's
// stream and SHA-256 hash until the rolling checksum ('sum') reports a split
// offset, at which point the chunk is completed and a fresh one is started
// for the remainder of the buffer.
public async override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
    do // allow 0 writes (e.g. empty files)
    {
        // Lazily open a chunk on the first write (or right after a split).
        if (stream == null)
        {
            stream = await streamProvider.GetStream(position, cancellationToken).ConfigureAwait(false);
            hash = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);
            chunkLength = 0;
            chunkOffset = position;
        }

        int splitPoint = sum.FindNextSplitOffset(buffer, offset, count);

        // No split inside this buffer: the whole remainder belongs to the
        // current chunk and we are done.
        if (splitPoint < 0)
        {
            hash.AppendData(buffer, offset, count);
            await stream.WriteAsync(buffer, offset, count).ConfigureAwait(false);
            chunkLength += count;
            position += count;
            return;
        }

        // Split found: flush the segment up to the split point, finish the
        // current chunk, then loop to place the rest into a new chunk.
        int segmentLength = (splitPoint - offset);
        hash.AppendData(buffer, offset, segmentLength);
        await stream.WriteAsync(buffer, offset, segmentLength).ConfigureAwait(false);
        chunkLength += segmentLength;
        await CompleteCurrent(cancellationToken).ConfigureAwait(false);
        offset = splitPoint;
        count -= segmentLength;
        position += segmentLength;
    }
    while (count > 0);
}
// Used in cases where we can bail out of getting a normalized hash early.
// The position of the filestream must match the state of the incremental
// hash, which makes this usable when we stop normalizing part way through
// the file and continue hashing the remaining raw bytes here.
private byte[] GetRawChecksum(FileStream fs, IncrementalHash hash)
{
    // Drain the rest of the stream in 4 KiB chunks, folding each into the hash.
    byte[] chunk = new byte[4096];

    for (int read = fs.Read(chunk, 0, chunk.Length); read != 0; read = fs.Read(chunk, 0, chunk.Length))
    {
        hash.AppendData(chunk, 0, read);
    }

    return hash.GetHashAndReset();
}
public static void InvalidArguments_Throw()
{
    // The factory methods reject null/empty algorithm names and a null HMAC key.
    AssertExtensions.Throws<ArgumentException>("hashAlgorithm", () => IncrementalHash.CreateHash(new HashAlgorithmName(null)));
    AssertExtensions.Throws<ArgumentException>("hashAlgorithm", () => IncrementalHash.CreateHash(new HashAlgorithmName("")));
    AssertExtensions.Throws<ArgumentException>("hashAlgorithm", () => IncrementalHash.CreateHMAC(new HashAlgorithmName(null), new byte[1]));
    AssertExtensions.Throws<ArgumentException>("hashAlgorithm", () => IncrementalHash.CreateHMAC(new HashAlgorithmName(""), new byte[1]));
    AssertExtensions.Throws<ArgumentNullException>("key", () => IncrementalHash.CreateHMAC(HashAlgorithmName.SHA512, null));

    using (IncrementalHash hasher = IncrementalHash.CreateHash(HashAlgorithmName.SHA512))
    {
        // AppendData validates buffer, offset, and count individually, then
        // the (offset, count) pair against the buffer length.
        AssertExtensions.Throws<ArgumentNullException>("data", () => hasher.AppendData(null));
        AssertExtensions.Throws<ArgumentNullException>("data", () => hasher.AppendData(null, 0, 0));
        AssertExtensions.Throws<ArgumentOutOfRangeException>("offset", () => hasher.AppendData(new byte[1], -1, 1));
        AssertExtensions.Throws<ArgumentOutOfRangeException>("count", () => hasher.AppendData(new byte[1], 0, -1));
        AssertExtensions.Throws<ArgumentOutOfRangeException>("count", () => hasher.AppendData(new byte[1], 0, 2));
        Assert.Throws<ArgumentException>(() => hasher.AppendData(new byte[2], 1, 2));
    }
}
public override int Read(byte[] buffer, int offset, int count)
{
    // Hashing is only valid if every byte passes through exactly once, in
    // order — reject any read that does not start where the last one ended.
    if (Position != _hashPos)
    {
        throw new InvalidOperationException("Reads must be contiguous");
    }

    int read = _wrapped.Read(buffer, offset, count);
    _hashAlg.AppendData(buffer, offset, read);
    _hashPos += read;
    return read;
}
public void SignDataVerifyHash_SHA1(DSASignatureFormat signatureFormat)
{
    // Sign through SignData, then verify through VerifyHash using an
    // independently computed SHA-1 digest — both paths must agree.
    HashAlgorithmName hashAlgorithm = HashAlgorithmName.SHA1;
    KeyDescription key = GetKey();

    byte[] signature = SignData(key, _typeNameBytes, hashAlgorithm, signatureFormat);
    CheckLength(key, signature, signatureFormat);

    using (IncrementalHash hasher = IncrementalHash.CreateHash(hashAlgorithm))
    {
        hasher.AppendData(_typeNameBytes);
        byte[] digest = hasher.GetHashAndReset();
        Assert.True(VerifyHash(key, digest, signature, signatureFormat));
    }
}
/// <summary>
/// Compute a hash of the bytes of the buffer within the frames of the given NetMQMessage.
/// </summary>
/// <param name="hash">the hashing-algorithm to employ</param>
/// <param name="message">the NetMQMessage whose frames are to be hashed</param>
/// <returns>the digest over all frame buffers, in frame order</returns>
private static byte[] Hash(IncrementalHash hash, NetMQMessage message)
{
    // Feed each frame's buffer into the incremental hash, so the digest
    // covers the concatenation of all frames in order.
    // (Removed long-dead commented-out HashAlgorithm.TransformBlock code.)
    foreach (var frame in message)
    {
        // Access the byte-array that is the frame's buffer.
        byte[] bytes = frame.ToByteArray(true);
        hash.AppendData(bytes);
    }

    return hash.GetHashAndReset();
}
public static string Digest(IncrementalHash hasher, string input) { Span <byte> buffer = stackalloc byte[1024]; var src = input.AsSpan(); while (src.Length > 0) { if (Utf8.FromUtf16(src, buffer, out int read, out int written) != OperationStatus.Done) { throw new Exception(); } hasher.AppendData(buffer.Slice(0, written)); src = src[read..];
// Iteratively reduces a layer of 32-byte node hashes to its parent layer by
// hashing adjacent 64-byte pairs, until a single 32-byte result remains (or
// the requested end layer is reached), then copies it into computedHash.
// Returns false if the hasher ever fails to emit exactly 32 bytes.
public static bool TryHash(IncrementalHash hasher, ReadOnlyMemory<byte> src, long startLayerLength, long endLayerLength, Span<byte> computedHash, out int written)
{
    // The parent layer needs 32 bytes for every (rounded-up) 64-byte pair of children.
    using var _ = MemoryPool.Default.Rent(((src.Length + 63) / 64) * 32, out Memory<byte> dest);

    // endLayerLength == -1 means "reduce all the way down to 32 bytes";
    // otherwise stop once startLayerLength reaches endLayerLength.
    while ((endLayerLength == -1 && src.Length != 32) || (endLayerLength != -1 && startLayerLength < endLayerLength))
    {
        // Hash each complete 64-byte pair of children into one 32-byte parent.
        for (int i = 0; i < src.Length / 64; i++)
        {
            hasher.AppendData(src.Slice(i * 64, 64));

            if (!hasher.TryGetHashAndReset(dest.Slice(i * 32, 32).Span, out written) || written != 32)
            {
                return false;
            }
        }

        // Odd child left over: pair it with FinalLayerHash[startLayerLength]
        // (presumably a precomputed per-layer padding hash — confirm).
        if (src.Length % 64 == 32)
        {
            hasher.AppendData(src.Slice(src.Length - 32, 32));
            hasher.AppendData(FinalLayerHash[startLayerLength]);

            if (!hasher.TryGetHashAndReset(dest.Slice(dest.Length - 32, 32).Span, out written) || written != 32)
            {
                return false;
            }
        }

        // The parent layer becomes the next iteration's input; shrink dest
        // to the size the following layer will need.
        src = dest;
        dest = dest.Slice(0, ((dest.Length + 63) / 64) * 32);
        startLayerLength *= 2;
    }

    if (src.Length != 32)
    {
        throw new InvalidOperationException("Derpo");
    }

    written = 32;
    src.Span.Slice(0, written).CopyTo(computedHash);
    return true;
}